From 43a448e25889fc47095750b3fc7817ae56f78d29 Mon Sep 17 00:00:00 2001 From: Nilesh Gajwani Date: Mon, 16 Oct 2023 17:32:00 -0400 Subject: [PATCH 01/28] Refactor notebooks to include SedonaKepler and Sedona 1.5.0 Refactor examples folder to include correct updated Sedona version --- binder/ApacheSedonaSQL.ipynb | 432 ++---------------- ...naSQL_SpatialJoin_AirportsPerCountry.ipynb | 60 +-- binder/Pipfile | 3 +- examples/flink-sql/pom.xml | 2 +- .../spark-rdd-colocation-mining/build.sbt | 2 +- examples/spark-sql/build.sbt | 2 +- examples/spark-viz/build.sbt | 2 +- 7 files changed, 79 insertions(+), 424 deletions(-) diff --git a/binder/ApacheSedonaSQL.ipynb b/binder/ApacheSedonaSQL.ipynb index a58c264eed..cfba2d8035 100644 --- a/binder/ApacheSedonaSQL.ipynb +++ b/binder/ApacheSedonaSQL.ipynb @@ -24,8 +24,10 @@ }, { "cell_type": "code", - "execution_count": 1, - "metadata": {}, + "execution_count": null, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "import os\n", @@ -38,54 +40,15 @@ }, { "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - ":: loading settings :: url = jar:file:/Users/nileshgajwani/Desktop/spark/spark-3.4.0-bin-hadoop3/jars/ivy-2.5.1.jar!/org/apache/ivy/core/settings/ivysettings.xml\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Ivy Default Cache set to: /Users/nileshgajwani/.ivy2/cache\n", - "The jars for the packages stored in: /Users/nileshgajwani/.ivy2/jars\n", - "org.apache.sedona#sedona-spark-shaded-3.0_2.12 added as a dependency\n", - "org.datasyslab#geotools-wrapper added as a dependency\n", - ":: resolving dependencies :: org.apache.spark#spark-submit-parent-ade932f0-a9e8-47af-b559-0d52a6a087e9;1.0\n", - "\tconfs: [default]\n", - "\tfound org.apache.sedona#sedona-spark-shaded-3.0_2.12;1.4.1 in central\n", - "\tfound org.datasyslab#geotools-wrapper;1.4.0-28.2 in central\n", - ":: resolution report :: resolve 81ms :: artifacts dl 2ms\n", - "\t:: modules in use:\n", - "\torg.apache.sedona#sedona-spark-shaded-3.0_2.12;1.4.1 from central in [default]\n", - "\torg.datasyslab#geotools-wrapper;1.4.0-28.2 from central in [default]\n", - "\t---------------------------------------------------------------------\n", - "\t| | modules || artifacts |\n", - "\t| conf | number| search|dwnlded|evicted|| number|dwnlded|\n", - "\t---------------------------------------------------------------------\n", - "\t| default | 2 | 0 | 0 | 0 || 2 | 0 |\n", - "\t---------------------------------------------------------------------\n", - ":: retrieving :: org.apache.spark#spark-submit-parent-ade932f0-a9e8-47af-b559-0d52a6a087e9\n", - "\tconfs: [default]\n", - "\t0 artifacts copied, 2 already retrieved (0kB/2ms)\n", - "23/07/03 21:13:44 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable\n", - "Setting default log level to \"WARN\".\n", - "To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).\n", - "23/07/03 21:13:44 WARN Utils: Service 'SparkUI' could not bind on port 4040. Attempting port 4041.\n", - "23/07/03 21:13:44 WARN Utils: Service 'SparkUI' could not bind on port 4041. 
Attempting port 4042.\n", - " \r" - ] - } - ], + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], "source": [ "config = SedonaContext.builder() .\\\n", " config('spark.jars.packages',\n", - " 'org.apache.sedona:sedona-spark-shaded-3.0_2.12:1.4.1,'\n", + " 'org.apache.sedona:sedona-spark-shaded-3.0_2.12:1.5.0,'\n", " 'org.datasyslab:geotools-wrapper:1.4.0-28.2'). \\\n", " getOrCreate()\n", "\n", @@ -108,27 +71,9 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+-----------------+\n", - "| arealandmark|\n", - "+-----------------+\n", - "|POINT (1.1 101.1)|\n", - "|POINT (2.1 102.1)|\n", - "|POINT (3.1 103.1)|\n", - "|POINT (4.1 104.1)|\n", - "|POINT (5.1 105.1)|\n", - "+-----------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "point_csv_df = sedona.read.format(\"csv\").\\\n", " option(\"delimiter\", \",\").\\\n", @@ -150,27 +95,9 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+----------------+--------------------+\n", - "| name| countyshape|\n", - "+----------------+--------------------+\n", - "| Cuming County|POLYGON ((-97.019...|\n", - "|Wahkiakum County|POLYGON ((-123.43...|\n", - "| De Baca County|POLYGON ((-104.56...|\n", - "|Lancaster County|POLYGON ((-96.910...|\n", - "| Nuckolls County|POLYGON ((-98.273...|\n", - "+----------------+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "polygon_wkt_df = sedona.read.format(\"csv\").\\\n", " option(\"delimiter\", \"\\t\").\\\n", @@ -191,27 +118,9 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+----------------+--------------------+\n", - "| name| countyshape|\n", - "+----------------+--------------------+\n", - "| Cuming County|POLYGON ((-97.019...|\n", - "|Wahkiakum County|POLYGON ((-123.43...|\n", - "| De Baca County|POLYGON ((-104.56...|\n", - "|Lancaster County|POLYGON ((-96.910...|\n", - "| Nuckolls County|POLYGON ((-98.273...|\n", - "+----------------+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "polygon_wkb_df = sedona.read.format(\"csv\").\\\n", " option(\"delimiter\", \"\\t\").\\\n", @@ -232,27 +141,9 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| countyshape|\n", - "+--------------------+\n", - "|POLYGON ((-87.621...|\n", - "|POLYGON ((-85.719...|\n", - "|POLYGON ((-86.000...|\n", - "|POLYGON ((-86.574...|\n", - "|POLYGON ((-85.382...|\n", - "+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "polygon_json_df = sedona.read.format(\"csv\").\\\n", " option(\"delimiter\", \"\\t\").\\\n", @@ -280,36 +171,9 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "== Physical Plan ==\n", - "BroadcastIndexJoin pointshape2#253: geometry, LeftSide, LeftSide, Inner, INTERSECTS, ( **org.apache.spark.sql.sedona_sql.expressions.ST_Distance** < 
2.0) ST_INTERSECTS(pointshape1#228, pointshape2#253)\n", - ":- SpatialIndex pointshape1#228: geometry, QUADTREE, false, 2.0\n", - ": +- Project [ **org.apache.spark.sql.sedona_sql.expressions.ST_Point** AS pointshape1#228, abc AS name1#229]\n", - ": +- FileScan csv [_c0#224,_c1#225] Batched: false, DataFilters: [], Format: CSV, Location: InMemoryFileIndex(1 paths)[file:/Users/nileshgajwani/Desktop/sedona/sedona/binder/data/testpoint...., PartitionFilters: [], PushedFilters: [], ReadSchema: struct<_c0:string,_c1:string>\n", - "+- Project [ **org.apache.spark.sql.sedona_sql.expressions.ST_Point** AS pointshape2#253, def AS name2#254]\n", - " +- FileScan csv [_c0#249,_c1#250] Batched: false, DataFilters: [], Format: CSV, Location: InMemoryFileIndex(1 paths)[file:/Users/nileshgajwani/Desktop/sedona/sedona/binder/data/testpoint...., PartitionFilters: [], PushedFilters: [], ReadSchema: struct<_c0:string,_c1:string>\n", - "\n", - "\n", - "+-----------------+-----+-----------------+-----+\n", - "| pointshape1|name1| pointshape2|name2|\n", - "+-----------------+-----+-----------------+-----+\n", - "|POINT (1.1 101.1)| abc|POINT (1.1 101.1)| def|\n", - "|POINT (2.1 102.1)| abc|POINT (1.1 101.1)| def|\n", - "|POINT (1.1 101.1)| abc|POINT (2.1 102.1)| def|\n", - "|POINT (2.1 102.1)| abc|POINT (2.1 102.1)| def|\n", - "|POINT (3.1 103.1)| abc|POINT (2.1 102.1)| def|\n", - "+-----------------+-----+-----------------+-----+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "point_csv_df_1 = sedona.read.format(\"csv\").\\\n", " option(\"delimiter\", \",\").\\\n", @@ -351,7 +215,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -365,57 +229,25 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- osm_id: string (nullable = true)\n", - " |-- code: long (nullable = true)\n", - " |-- fclass: string (nullable = true)\n", - " |-- name: string (nullable = true)\n", - " |-- geometry: geometry (nullable = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "osm_points.printSchema()" ] }, { "cell_type": "code", - "execution_count": 10, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------+----+---------+--------------+--------------------+\n", - "| osm_id|code| fclass| name| geometry|\n", - "+--------+----+---------+--------------+--------------------+\n", - "|26860257|2422|camp_site| de Kroon|POINT (15.3393145...|\n", - "|26860294|2406| chalet|Leśne Ustronie|POINT (14.8709625...|\n", - "|29947493|2402| motel| |POINT (15.0946636...|\n", - "|29947498|2602| atm| |POINT (15.0732014...|\n", - "|29947499|2401| hotel| |POINT (15.0696777...|\n", - "+--------+----+---------+--------------+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "osm_points.show(5)" ] }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -424,7 +256,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -441,34 +273,16 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - 
"+--------+----+---------+--------------+--------------------+\n", - "| osm_id|code| fclass| name| geom|\n", - "+--------+----+---------+--------------+--------------------+\n", - "|26860257|2422|camp_site| de Kroon|POINT (-3288183.3...|\n", - "|26860294|2406| chalet|Leśne Ustronie|POINT (-3341183.9...|\n", - "|29947493|2402| motel| |POINT (-3320466.5...|\n", - "|29947498|2602| atm| |POINT (-3323205.7...|\n", - "|29947499|2401| hotel| |POINT (-3323655.1...|\n", - "+--------+----+---------+--------------+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "transformed_df.show(5)" ] }, { "cell_type": "code", - "execution_count": 14, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -477,7 +291,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -492,49 +306,9 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "23/07/03 21:13:53 WARN JoinQuery: UseIndex is true, but no index exists. Will build index on the fly.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+----------+---------+--------------------+\n", - "| id_1| id_2| geom|\n", - "+----------+---------+--------------------+\n", - "| 197624402|197624402|POINT (-3383818.5...|\n", - "| 197663196|197663196|POINT (-3383367.1...|\n", - "| 197953474|197953474|POINT (-3383763.3...|\n", - "| 262310516|262310516|POINT (-3384257.6...|\n", - "|1074233123|262310516|POINT (-3384262.1...|\n", - "| 270281140|270281140|POINT (-3385421.2...|\n", - "|1074232906|270281140|POINT (-3385408.6...|\n", - "| 270306609|270306609|POINT (-3383982.8...|\n", - "| 270306746|270306746|POINT (-3383898.4...|\n", - "| 280402616|280402616|POINT (-3378817.6...|\n", - "| 839725400|280402616|POINT (-3378841.1...|\n", - "| 293896571|293896571|POINT (-3385029.0...|\n", - "|3256728465|293896571|POINT (-3385002.4...|\n", - "| 310838954|310838954|POINT (-3390510.5...|\n", - "| 311395303|311395303|POINT (-3389444.4...|\n", - "| 311395425|311395425|POINT (-3389867.6...|\n", - "|6339786017|311395425|POINT (-3389850.1...|\n", - "| 825853330|311395425|POINT (-3389877.4...|\n", - "| 945009922|311395425|POINT (-3389878.6...|\n", - "| 320100848|320100848|POINT (-3389610.6...|\n", - "+----------+---------+--------------------+\n", - "only showing top 20 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "neighbours_within_1000m.show()" ] @@ -548,24 +322,16 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "23/07/03 21:13:54 WARN JoinQuery: UseIndex is true, but no index exists. Will build index on the fly.\n" - ] - } - ], + "outputs": [], "source": [ "df = neighbours_within_1000m.toPandas()" ] }, { "cell_type": "code", - "execution_count": 18, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -574,129 +340,9 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
HTML table omitted; identical to the text/plain rendering below (45319 rows × 3 columns: id_1, id_2, geom)
" - ], - "text/plain": [ - " id_1 id_2 geom\n", - "0 197624402 197624402 POINT (-3383818.580 4179182.169)\n", - "1 197663196 197663196 POINT (-3383367.151 4179427.096)\n", - "2 197953474 197953474 POINT (-3383763.332 4179408.785)\n", - "3 262310516 262310516 POINT (-3384257.682 4178033.053)\n", - "4 1074233123 262310516 POINT (-3384262.187 4178036.442)\n", - "... ... ... ...\n", - "45314 6785548354 6785548354 POINT (-3271487.870 4337964.529)\n", - "45315 6785548356 6785548356 POINT (-3273379.389 4338379.126)\n", - "45316 6785548357 6785548357 POINT (-3273745.222 4338528.241)\n", - "45317 6785548358 6785548358 POINT (-3273027.996 4338093.401)\n", - "45318 6817416704 6817416704 POINT (-3214549.268 4314872.904)\n", - "\n", - "[45319 rows x 3 columns]" - ] - }, - "execution_count": 19, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "gdf" ] @@ -729,5 +375,5 @@ } }, "nbformat": 4, - "nbformat_minor": 2 + "nbformat_minor": 4 } diff --git a/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb b/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb index 225e6ad8e2..2f0b49480a 100644 --- a/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb +++ b/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb @@ -37,7 +37,6 @@ "\n", "\n", "from sedona.spark import *\n", - "from keplergl import KeplerGl\n", "from utilities import getConfig\n", "\n" ] @@ -54,28 +53,36 @@ "execution_count": 2, "metadata": {}, "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "23/10/16 17:17:34 WARN Utils: Your hostname, Nileshs-MacBook-Pro.local resolves to a loopback address: 127.0.0.1; using 172.24.19.124 instead (on interface en0)\n", + "23/10/16 17:17:34 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address\n", + "Ivy Default Cache set to: /Users/nileshgajwani/.ivy2/cache\n", + "The jars for the packages stored in: /Users/nileshgajwani/.ivy2/jars\n", + "org.apache.sedona#sedona-spark-shaded-3.0_2.12 added as a dependency\n", + "org.datasyslab#geotools-wrapper added as a dependency\n", + ":: resolving dependencies :: org.apache.spark#spark-submit-parent-1d9bf0a6-87f2-4251-a8da-102cb544a8f9;1.0\n", + "\tconfs: [default]\n", + "\tfound org.apache.sedona#sedona-spark-shaded-3.0_2.12;1.5.0 in central\n", + "\tfound org.datasyslab#geotools-wrapper;1.4.0-28.2 in central\n" + ] + }, { "name": "stdout", "output_type": "stream", "text": [ - ":: loading settings :: url = jar:file:/Users/nileshgajwani/Desktop/spark/spark-3.4.0-bin-hadoop3/jars/ivy-2.5.1.jar!/org/apache/ivy/core/settings/ivysettings.xml\n" + ":: loading settings :: url = jar:file:/Users/nileshgajwani/Downloads/spark-3.4.1-bin-hadoop3/jars/ivy-2.5.1.jar!/org/apache/ivy/core/settings/ivysettings.xml\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "Ivy Default Cache set to: /Users/nileshgajwani/.ivy2/cache\n", - "The jars for the packages stored in: /Users/nileshgajwani/.ivy2/jars\n", - "org.apache.sedona#sedona-spark-shaded-3.0_2.12 added as a dependency\n", - "org.datasyslab#geotools-wrapper added as a dependency\n", - ":: resolving dependencies :: org.apache.spark#spark-submit-parent-2ebc22b4-bd08-4a3f-a2dc-bd50e2f0f728;1.0\n", - "\tconfs: [default]\n", - "\tfound org.apache.sedona#sedona-spark-shaded-3.0_2.12;1.4.1 in central\n", - "\tfound org.datasyslab#geotools-wrapper;1.4.0-28.2 in central\n", - ":: resolution report :: resolve 85ms :: artifacts dl 3ms\n", + ":: resolution report :: resolve 67ms :: artifacts dl 2ms\n", "\t:: modules in 
use:\n", - "\torg.apache.sedona#sedona-spark-shaded-3.0_2.12;1.4.1 from central in [default]\n", + "\torg.apache.sedona#sedona-spark-shaded-3.0_2.12;1.5.0 from central in [default]\n", "\torg.datasyslab#geotools-wrapper;1.4.0-28.2 from central in [default]\n", "\t---------------------------------------------------------------------\n", "\t| | modules || artifacts |\n", @@ -83,20 +90,24 @@ "\t---------------------------------------------------------------------\n", "\t| default | 2 | 0 | 0 | 0 || 2 | 0 |\n", "\t---------------------------------------------------------------------\n", - ":: retrieving :: org.apache.spark#spark-submit-parent-2ebc22b4-bd08-4a3f-a2dc-bd50e2f0f728\n", + ":: retrieving :: org.apache.spark#spark-submit-parent-1d9bf0a6-87f2-4251-a8da-102cb544a8f9\n", "\tconfs: [default]\n", - "\t0 artifacts copied, 2 already retrieved (0kB/2ms)\n", - "23/07/12 14:17:39 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable\n", + "\t0 artifacts copied, 2 already retrieved (0kB/3ms)\n", + "23/10/16 17:17:35 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable\n", "Setting default log level to \"WARN\".\n", "To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).\n", - "23/07/12 14:17:43 WARN SimpleFunctionRegistry: The function st_affine replaced a previously registered function.\n" + "Error while parsing JAI registry file \"file:/Users/nileshgajwani/Downloads/spark-3.4.1-bin-hadoop3/jars/sedona-spark-shaded-3.4_2.12-1.5.1-SNAPSHOT.jar!/META-INF/registryFile.jai\" :\n", + "Error in registry file at line number #31\n", + "A descriptor is already registered against the name \"org.geotools.ColorReduction\" under registry mode \"rendered\"\n", + "Error in registry file at line number #32\n", + "A descriptor is already registered against the name \"org.geotools.ColorInversion\" under registry mode \"rendered\"\n" ] } ], "source": [ "config = SedonaContext.builder() .\\\n", " config('spark.jars.packages',\n", - " 'org.apache.sedona:sedona-spark-shaded-3.0_2.12:1.4.1,'\n", + " 'org.apache.sedona:sedona-spark-shaded-3.0_2.12:1.5.0,'\n", " 'org.datasyslab:geotools-wrapper:1.4.0-28.2'). \\\n", " getOrCreate()\n", "\n", @@ -225,7 +236,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "23/07/12 14:17:43 WARN package: Truncated the string representation of a plan since it was too large. This behavior can be adjusted by setting 'spark.sql.debug.maxToStringFields'.\n" + "23/10/16 17:17:38 WARN package: Truncated the string representation of a plan since it was too large. This behavior can be adjusted by setting 'spark.sql.debug.maxToStringFields'.\n" ] } ], @@ -308,7 +319,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "[('3.0', '2.12', '1.4.1')]\n" + "[('3.0', '2.12', '1.5.0')]\n" ] } ], @@ -355,7 +366,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "23/07/12 14:17:44 WARN JoinQuery: UseIndex is true, but no index exists. Will build index on the fly.\n" + "23/10/16 17:17:39 WARN JoinQuery: UseIndex is true, but no index exists. 
Will build index on the fly.\n" ] }, { @@ -500,7 +511,7 @@ { "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "0646646608754887811eee12e5516d16", + "model_id": "3886ecefe3884d5f97a469b82a66cea5", "version_major": 2, "version_minor": 0 }, @@ -513,11 +524,8 @@ } ], "source": [ - "df = groupedresult.toPandas()\n", - "gdf = gpd.GeoDataFrame(df, geometry=\"country_geom\").rename(columns={'country_geom':'geometry'})\n", - "\n", - "map = KeplerGl(data={\"AirportCount\": gdf}, config=getConfig())\n", - "map" + "sedona_kepler_map = SedonaKepler.create_map(df=groupedresult, name=\"AirportCount\", config=getConfig())\n", + "sedona_kepler_map" ] }, { diff --git a/binder/Pipfile b/binder/Pipfile index 080c3087d6..25968ae90e 100644 --- a/binder/Pipfile +++ b/binder/Pipfile @@ -16,10 +16,11 @@ shapely="==1.8.4" geopandas="==0.11.1" pyspark="==3.3.2" attrs="*" -apache-sedona="==1.4.1" +apache-sedona="==1.5.0" matplotlib = "*" descartes = "*" keplergl = "==0.3.2" +pydeck = "==0.8.0" ipywidgets = "*" jupyterlab-widgets = "*" ipykernel = "*" diff --git a/examples/flink-sql/pom.xml b/examples/flink-sql/pom.xml index 13e62ab3cb..a08a147096 100644 --- a/examples/flink-sql/pom.xml +++ b/examples/flink-sql/pom.xml @@ -12,7 +12,7 @@ 1.4.0-28.2 compile 2.12 - 1.4.0 + 1.5.0 1.14.3 compile diff --git a/examples/spark-rdd-colocation-mining/build.sbt b/examples/spark-rdd-colocation-mining/build.sbt index e8df972ff3..d380f82d01 100644 --- a/examples/spark-rdd-colocation-mining/build.sbt +++ b/examples/spark-rdd-colocation-mining/build.sbt @@ -39,7 +39,7 @@ val SparkCompatibleVersion = "3.0" val HadoopVersion = "2.7.2" -val SedonaVersion = "1.4.0" +val SedonaVersion = "1.5.0" val ScalaCompatibleVersion = "2.12" diff --git a/examples/spark-sql/build.sbt b/examples/spark-sql/build.sbt index aeb45a6c4b..8ea074656b 100644 --- a/examples/spark-sql/build.sbt +++ b/examples/spark-sql/build.sbt @@ -39,7 +39,7 @@ val SparkCompatibleVersion = "3.0" val HadoopVersion = "2.7.2" -val SedonaVersion = "1.4.0" +val SedonaVersion = "1.5.0" val ScalaCompatibleVersion = "2.12" diff --git a/examples/spark-viz/build.sbt b/examples/spark-viz/build.sbt index bc2260a89a..afc084f514 100644 --- a/examples/spark-viz/build.sbt +++ b/examples/spark-viz/build.sbt @@ -39,7 +39,7 @@ val SparkCompatibleVersion = "3.0" val HadoopVersion = "2.7.2" -val SedonaVersion = "1.4.0" +val SedonaVersion = "1.5.0" val ScalaCompatibleVersion = "2.12" From 2f11a389362e0f80f9a83df9a2cb5d6221f4eadc Mon Sep 17 00:00:00 2001 From: Nilesh Gajwani Date: Wed, 18 Oct 2023 22:53:18 -0400 Subject: [PATCH 02/28] temp commit changing notebook and binder Pipfile --- ...naSQL_SpatialJoin_AirportsPerCountry.ipynb | 343 ++++++++++++++++-- python/Pipfile | 2 +- 2 files changed, 305 insertions(+), 40 deletions(-) diff --git a/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb b/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb index 2f0b49480a..2f9860a010 100644 --- a/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb +++ b/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb @@ -32,13 +32,12 @@ "\n", "import geopandas as gpd\n", "from pyspark.sql import SparkSession\n", - "from pyspark.sql.functions import col, expr, when\n", + "from pyspark.sql.functions import col, expr, when, explode\n", "\n", "\n", "\n", "from sedona.spark import *\n", - "from utilities import getConfig\n", - "\n" + "from utilities import getConfig" ] }, { @@ -57,16 +56,14 @@ "name": "stderr", "output_type": "stream", "text": [ - "23/10/16 17:17:34 WARN 
Utils: Your hostname, Nileshs-MacBook-Pro.local resolves to a loopback address: 127.0.0.1; using 172.24.19.124 instead (on interface en0)\n", - "23/10/16 17:17:34 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address\n", + "23/10/18 22:38:41 WARN Utils: Your hostname, Nileshs-MacBook-Pro.local resolves to a loopback address: 127.0.0.1; using 172.24.19.124 instead (on interface en0)\n", + "23/10/18 22:38:41 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address\n", "Ivy Default Cache set to: /Users/nileshgajwani/.ivy2/cache\n", "The jars for the packages stored in: /Users/nileshgajwani/.ivy2/jars\n", "org.apache.sedona#sedona-spark-shaded-3.0_2.12 added as a dependency\n", "org.datasyslab#geotools-wrapper added as a dependency\n", - ":: resolving dependencies :: org.apache.spark#spark-submit-parent-1d9bf0a6-87f2-4251-a8da-102cb544a8f9;1.0\n", - "\tconfs: [default]\n", - "\tfound org.apache.sedona#sedona-spark-shaded-3.0_2.12;1.5.0 in central\n", - "\tfound org.datasyslab#geotools-wrapper;1.4.0-28.2 in central\n" + ":: resolving dependencies :: org.apache.spark#spark-submit-parent-5d1ede8b-02f5-421d-a31c-93ed390d8872;1.0\n", + "\tconfs: [default]\n" ] }, { @@ -80,7 +77,9 @@ "name": "stderr", "output_type": "stream", "text": [ - ":: resolution report :: resolve 67ms :: artifacts dl 2ms\n", + "\tfound org.apache.sedona#sedona-spark-shaded-3.0_2.12;1.5.0 in central\n", + "\tfound org.datasyslab#geotools-wrapper;1.4.0-28.2 in central\n", + ":: resolution report :: resolve 75ms :: artifacts dl 2ms\n", "\t:: modules in use:\n", "\torg.apache.sedona#sedona-spark-shaded-3.0_2.12;1.5.0 from central in [default]\n", "\torg.datasyslab#geotools-wrapper;1.4.0-28.2 from central in [default]\n", @@ -90,17 +89,12 @@ "\t---------------------------------------------------------------------\n", "\t| default | 2 | 0 | 0 | 0 || 2 | 0 |\n", "\t---------------------------------------------------------------------\n", - ":: retrieving :: org.apache.spark#spark-submit-parent-1d9bf0a6-87f2-4251-a8da-102cb544a8f9\n", + ":: retrieving :: org.apache.spark#spark-submit-parent-5d1ede8b-02f5-421d-a31c-93ed390d8872\n", "\tconfs: [default]\n", - "\t0 artifacts copied, 2 already retrieved (0kB/3ms)\n", - "23/10/16 17:17:35 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable\n", + "\t0 artifacts copied, 2 already retrieved (0kB/2ms)\n", + "23/10/18 22:38:41 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable\n", "Setting default log level to \"WARN\".\n", - "To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).\n", - "Error while parsing JAI registry file \"file:/Users/nileshgajwani/Downloads/spark-3.4.1-bin-hadoop3/jars/sedona-spark-shaded-3.4_2.12-1.5.1-SNAPSHOT.jar!/META-INF/registryFile.jai\" :\n", - "Error in registry file at line number #31\n", - "A descriptor is already registered against the name \"org.geotools.ColorReduction\" under registry mode \"rendered\"\n", - "Error in registry file at line number #32\n", - "A descriptor is already registered against the name \"org.geotools.ColorInversion\" under registry mode \"rendered\"\n" + "To adjust logging level use sc.setLogLevel(newLevel). 
For SparkR, use setLogLevel(newLevel).\n" ] } ], @@ -236,7 +230,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "23/10/16 17:17:38 WARN package: Truncated the string representation of a plan since it was too large. This behavior can be adjusted by setting 'spark.sql.debug.maxToStringFields'.\n" + "23/10/18 22:38:45 WARN package: Truncated the string representation of a plan since it was too large. This behavior can be adjusted by setting 'spark.sql.debug.maxToStringFields'.\n" ] } ], @@ -366,7 +360,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "23/10/16 17:17:39 WARN JoinQuery: UseIndex is true, but no index exists. Will build index on the fly.\n" + "23/10/18 22:38:46 WARN JoinQuery: UseIndex is true, but no index exists. Will build index on the fly.\n" ] }, { @@ -486,7 +480,8 @@ "# result.createOrReplaceTempView(\"result\")\n", "result2.createOrReplaceTempView(\"result\")\n", "groupedresult = sedona.sql(\"SELECT c.NAME_EN, c.country_geom, count(*) as AirportCount FROM result c GROUP BY c.NAME_EN, c.country_geom\")\n", - "groupedresult.show()" + "groupedresult.show()\n", + "groupedresult.createOrReplaceTempView(\"grouped_result\")" ] }, { @@ -500,40 +495,310 @@ "cell_type": "code", "execution_count": 9, "metadata": {}, + "outputs": [], + "source": [ + "# sedona_kepler_map = SedonaKepler.create_map(df=groupedresult, name=\"AirportCount\", config=getConfig())\n", + "# sedona_kepler_map" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "tags": [] + }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "User Guide: https://docs.kepler.gl/docs/keplergl-jupyter\n" + "+--------------------+--------------------+--------------------+\n", + "| NAME_EN| country_geom| s2_cellID|\n", + "+--------------------+--------------------+--------------------+\n", + "|Cuba ...|MULTIPOLYGON (((-...|[-859286808902290...|\n", + "+--------------------+--------------------+--------------------+\n", + "only showing top 1 row\n", + "\n", + "root\n", + " |-- NAME_EN: string (nullable = true)\n", + " |-- country_geom: geometry (nullable = true)\n", + " |-- s2_cellID: array (nullable = true)\n", + " | |-- element: long (containsNull = true)\n", + "\n" ] - }, + } + ], + "source": [ + "h3_df = sedona.sql(\"SELECT g.NAME_EN, g.country_geom, ST_S2CellIDs(g.country_geom, 3) as s2_cellID from grouped_result g\")#groupedresult.selectExpr(\"ST_H3CellIDs(country_geom, 3, true) as h3_cellId\")\n", + "h3_df.show(1)\n", + "h3_df.printSchema()\n", + "h3_df.createOrReplaceTempView(\"grouped_s2\")" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "tags": [] + }, + "outputs": [ { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "3886ecefe3884d5f97a469b82a66cea5", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "KeplerGl(config={'version': 'v1', 'config': {'visState': {'filters': [], 'layers': [{'id': 'ikzru0t', 'type': …" - ] - }, - "metadata": {}, - "output_type": "display_data" + "name": "stdout", + "output_type": "stream", + "text": [ + "root\n", + " |-- NAME_EN: string (nullable = true)\n", + " |-- country_geom: geometry (nullable = true)\n", + " |-- exploded_cellIds: long (nullable = true)\n", + "\n", + "+--------------------+--------------------+--------------------+\n", + "| NAME_EN| country_geom| exploded_cellIds|\n", + "+--------------------+--------------------+--------------------+\n", + "|Cuba ...|MULTIPOLYGON (((-...|-8592868089022906368|\n", + "|Cuba ...|MULTIPOLYGON 
(((-...|-8556839292003942400|\n", + "+--------------------+--------------------+--------------------+\n", + "only showing top 2 rows\n", + "\n" + ] } ], "source": [ - "sedona_kepler_map = SedonaKepler.create_map(df=groupedresult, name=\"AirportCount\", config=getConfig())\n", - "sedona_kepler_map" + "exploded_df = h3_df.select(h3_df.NAME_EN, h3_df.country_geom, explode(h3_df.s2_cellID).alias(\"exploded_cellIds\"))\n", + "exploded_df.printSchema()\n", + "exploded_df.show(2)\n", + "exploded_df.createOrReplaceTempView(\"exploded_cells\")" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 12, "metadata": { "tags": [] }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------------+--------------------+--------------------+\n", + "| NAME_EN| country_geom| s2|\n", + "+--------------------+--------------------+--------------------+\n", + "|Cuba ...|MULTIPOLYGON (((-...|-8592868089022906368|\n", + "|Mexico ...|MULTIPOLYGON (((-...|-8592868089022906368|\n", + "+--------------------+--------------------+--------------------+\n", + "only showing top 2 rows\n", + "\n", + "root\n", + " |-- NAME_EN: string (nullable = true)\n", + " |-- country_geom: geometry (nullable = true)\n", + " |-- s2: long (nullable = true)\n", + "\n" + ] + } + ], + "source": [ + "df_s2 = sedona.sql(\"SELECT s.NAME_EN, s.country_geom, exploded_cellIds as s2 FROM (grouped_s2 s CROSS JOIN exploded_cells e)\")\n", + "df_s2.show(2)\n", + "df_s2.printSchema()" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "User Guide: https://docs.kepler.gl/docs/keplergl-jupyter\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "23/10/18 22:38:53 ERROR Executor: Exception in task 0.0 in stage 58.0 (TID 72)1]\n", + "java.lang.OutOfMemoryError: Java heap space\n", + "\tat java.base/java.nio.HeapByteBuffer.(HeapByteBuffer.java:61)\n", + "\tat java.base/java.nio.ByteBuffer.allocate(ByteBuffer.java:348)\n", + "\tat org.apache.spark.serializer.SerializerHelper$.$anonfun$serializeToChunkedBuffer$1(SerializerHelper.scala:40)\n", + "\tat org.apache.spark.serializer.SerializerHelper$.$anonfun$serializeToChunkedBuffer$1$adapted(SerializerHelper.scala:40)\n", + "\tat org.apache.spark.serializer.SerializerHelper$$$Lambda$2719/0x000000080131a840.apply(Unknown Source)\n", + "\tat org.apache.spark.util.io.ChunkedByteBufferOutputStream.allocateNewChunkIfNeeded(ChunkedByteBufferOutputStream.scala:87)\n", + "\tat org.apache.spark.util.io.ChunkedByteBufferOutputStream.write(ChunkedByteBufferOutputStream.scala:75)\n", + "\tat com.esotericsoftware.kryo.io.Output.flush(Output.java:185)\n", + "\tat com.esotericsoftware.kryo.io.Output.require(Output.java:164)\n", + "\tat com.esotericsoftware.kryo.io.Output.writeBytes(Output.java:251)\n", + "\tat com.esotericsoftware.kryo.io.Output.writeBytes(Output.java:237)\n", + "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ByteArraySerializer.write(DefaultArraySerializers.java:49)\n", + "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ByteArraySerializer.write(DefaultArraySerializers.java:38)\n", + "\tat com.esotericsoftware.kryo.Kryo.writeObjectOrNull(Kryo.java:629)\n", + "\tat com.esotericsoftware.kryo.serializers.ObjectField.write(ObjectField.java:86)\n", + "\tat com.esotericsoftware.kryo.serializers.FieldSerializer.write(FieldSerializer.java:508)\n", + "\tat 
com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", + "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:361)\n", + "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:302)\n", + "\tat com.esotericsoftware.kryo.Kryo.writeObject(Kryo.java:575)\n", + "\tat com.esotericsoftware.kryo.serializers.ObjectField.write(ObjectField.java:79)\n", + "\tat com.esotericsoftware.kryo.serializers.FieldSerializer.write(FieldSerializer.java:508)\n", + "\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", + "\tat com.twitter.chill.Tuple2Serializer.write(TupleSerializers.scala:37)\n", + "\tat com.twitter.chill.Tuple2Serializer.write(TupleSerializers.scala:33)\n", + "\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", + "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:361)\n", + "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:302)\n", + "\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", + "\tat org.apache.spark.serializer.KryoSerializationStream.writeObject(KryoSerializer.scala:274)\n", + "\tat org.apache.spark.serializer.SerializerHelper$.serializeToChunkedBuffer(SerializerHelper.scala:42)\n", + "\tat org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:599)\n", + "23/10/18 22:38:53 ERROR SparkUncaughtExceptionHandler: Uncaught exception in thread Thread[Executor task launch worker for task 0.0 in stage 58.0 (TID 72),5,main]\n", + "java.lang.OutOfMemoryError: Java heap space\n", + "\tat java.base/java.nio.HeapByteBuffer.(HeapByteBuffer.java:61)\n", + "\tat java.base/java.nio.ByteBuffer.allocate(ByteBuffer.java:348)\n", + "\tat org.apache.spark.serializer.SerializerHelper$.$anonfun$serializeToChunkedBuffer$1(SerializerHelper.scala:40)\n", + "\tat org.apache.spark.serializer.SerializerHelper$.$anonfun$serializeToChunkedBuffer$1$adapted(SerializerHelper.scala:40)\n", + "\tat org.apache.spark.serializer.SerializerHelper$$$Lambda$2719/0x000000080131a840.apply(Unknown Source)\n", + "\tat org.apache.spark.util.io.ChunkedByteBufferOutputStream.allocateNewChunkIfNeeded(ChunkedByteBufferOutputStream.scala:87)\n", + "\tat org.apache.spark.util.io.ChunkedByteBufferOutputStream.write(ChunkedByteBufferOutputStream.scala:75)\n", + "\tat com.esotericsoftware.kryo.io.Output.flush(Output.java:185)\n", + "\tat com.esotericsoftware.kryo.io.Output.require(Output.java:164)\n", + "\tat com.esotericsoftware.kryo.io.Output.writeBytes(Output.java:251)\n", + "\tat com.esotericsoftware.kryo.io.Output.writeBytes(Output.java:237)\n", + "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ByteArraySerializer.write(DefaultArraySerializers.java:49)\n", + "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ByteArraySerializer.write(DefaultArraySerializers.java:38)\n", + "\tat com.esotericsoftware.kryo.Kryo.writeObjectOrNull(Kryo.java:629)\n", + "\tat com.esotericsoftware.kryo.serializers.ObjectField.write(ObjectField.java:86)\n", + "\tat com.esotericsoftware.kryo.serializers.FieldSerializer.write(FieldSerializer.java:508)\n", + "\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", + "\tat 
com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:361)\n", + "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:302)\n", + "\tat com.esotericsoftware.kryo.Kryo.writeObject(Kryo.java:575)\n", + "\tat com.esotericsoftware.kryo.serializers.ObjectField.write(ObjectField.java:79)\n", + "\tat com.esotericsoftware.kryo.serializers.FieldSerializer.write(FieldSerializer.java:508)\n", + "\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", + "\tat com.twitter.chill.Tuple2Serializer.write(TupleSerializers.scala:37)\n", + "\tat com.twitter.chill.Tuple2Serializer.write(TupleSerializers.scala:33)\n", + "\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", + "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:361)\n", + "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:302)\n", + "\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", + "\tat org.apache.spark.serializer.KryoSerializationStream.writeObject(KryoSerializer.scala:274)\n", + "\tat org.apache.spark.serializer.SerializerHelper$.serializeToChunkedBuffer(SerializerHelper.scala:42)\n", + "\tat org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:599)\n", + "23/10/18 22:38:53 WARN TaskSetManager: Lost task 0.0 in stage 58.0 (TID 72) (172.24.19.124 executor driver): java.lang.OutOfMemoryError: Java heap space\n", + "\tat java.base/java.nio.HeapByteBuffer.(HeapByteBuffer.java:61)\n", + "\tat java.base/java.nio.ByteBuffer.allocate(ByteBuffer.java:348)\n", + "\tat org.apache.spark.serializer.SerializerHelper$.$anonfun$serializeToChunkedBuffer$1(SerializerHelper.scala:40)\n", + "\tat org.apache.spark.serializer.SerializerHelper$.$anonfun$serializeToChunkedBuffer$1$adapted(SerializerHelper.scala:40)\n", + "\tat org.apache.spark.serializer.SerializerHelper$$$Lambda$2719/0x000000080131a840.apply(Unknown Source)\n", + "\tat org.apache.spark.util.io.ChunkedByteBufferOutputStream.allocateNewChunkIfNeeded(ChunkedByteBufferOutputStream.scala:87)\n", + "\tat org.apache.spark.util.io.ChunkedByteBufferOutputStream.write(ChunkedByteBufferOutputStream.scala:75)\n", + "\tat com.esotericsoftware.kryo.io.Output.flush(Output.java:185)\n", + "\tat com.esotericsoftware.kryo.io.Output.require(Output.java:164)\n", + "\tat com.esotericsoftware.kryo.io.Output.writeBytes(Output.java:251)\n", + "\tat com.esotericsoftware.kryo.io.Output.writeBytes(Output.java:237)\n", + "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ByteArraySerializer.write(DefaultArraySerializers.java:49)\n", + "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ByteArraySerializer.write(DefaultArraySerializers.java:38)\n", + "\tat com.esotericsoftware.kryo.Kryo.writeObjectOrNull(Kryo.java:629)\n", + "\tat com.esotericsoftware.kryo.serializers.ObjectField.write(ObjectField.java:86)\n", + "\tat com.esotericsoftware.kryo.serializers.FieldSerializer.write(FieldSerializer.java:508)\n", + "\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", + "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:361)\n", + "\tat 
com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:302)\n", + "\tat com.esotericsoftware.kryo.Kryo.writeObject(Kryo.java:575)\n", + "\tat com.esotericsoftware.kryo.serializers.ObjectField.write(ObjectField.java:79)\n", + "\tat com.esotericsoftware.kryo.serializers.FieldSerializer.write(FieldSerializer.java:508)\n", + "\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", + "\tat com.twitter.chill.Tuple2Serializer.write(TupleSerializers.scala:37)\n", + "\tat com.twitter.chill.Tuple2Serializer.write(TupleSerializers.scala:33)\n", + "\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", + "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:361)\n", + "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:302)\n", + "\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", + "\tat org.apache.spark.serializer.KryoSerializationStream.writeObject(KryoSerializer.scala:274)\n", + "\tat org.apache.spark.serializer.SerializerHelper$.serializeToChunkedBuffer(SerializerHelper.scala:42)\n", + "\tat org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:599)\n", + "\n", + "23/10/18 22:38:53 ERROR TaskSetManager: Task 0 in stage 58.0 failed 1 times; aborting job\n", + "ERROR:root:Exception while sending command.\n", + "Traceback (most recent call last):\n", + " File \"/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/py4j/clientserver.py\", line 516, in send_command\n", + " raise Py4JNetworkError(\"Answer from Java side is empty\")\n", + "py4j.protocol.Py4JNetworkError: Answer from Java side is empty\n", + "\n", + "During handling of the above exception, another exception occurred:\n", + "\n", + "Traceback (most recent call last):\n", + " File \"/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/py4j/java_gateway.py\", line 1038, in send_command\n", + " response = connection.send_command(command)\n", + " File \"/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/py4j/clientserver.py\", line 539, in send_command\n", + " raise Py4JNetworkError(\n", + "py4j.protocol.Py4JNetworkError: Error while sending or receiving\n", + "ERROR:root:Exception while sending command.\n", + "Traceback (most recent call last):\n", + " File \"/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/py4j/clientserver.py\", line 511, in send_command\n", + " answer = smart_decode(self.stream.readline()[:-1])\n", + " File \"/Library/Developer/CommandLineTools/Library/Frameworks/Python3.framework/Versions/3.9/lib/python3.9/socket.py\", line 704, in readinto\n", + " return self._sock.recv_into(b)\n", + "ConnectionResetError: [Errno 54] Connection reset by peer\n", + "\n", + "During handling of the above exception, another exception occurred:\n", + "\n", + "Traceback (most recent call last):\n", + " File \"/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/py4j/java_gateway.py\", line 1038, in send_command\n", + " response = connection.send_command(command)\n", + " File \"/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/py4j/clientserver.py\", line 539, in send_command\n", + " raise Py4JNetworkError(\n", + 
"py4j.protocol.Py4JNetworkError: Error while sending or receiving\n" + ] + }, + { + "ename": "ConnectionRefusedError", + "evalue": "[Errno 61] Connection refused", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mPy4JError\u001b[0m Traceback (most recent call last)", + "File \u001b[0;32m~/Downloads/spark-3.4.1-bin-hadoop3/python/pyspark/sql/dataframe.py:1216\u001b[0m, in \u001b[0;36mDataFrame.collect\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 1215\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m SCCallSiteSync(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_sc):\n\u001b[0;32m-> 1216\u001b[0m sock_info \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_jdf\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcollectToPython\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1217\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mlist\u001b[39m(_load_from_socket(sock_info, BatchedSerializer(CPickleSerializer())))\n", + "File \u001b[0;32m~/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/py4j/java_gateway.py:1321\u001b[0m, in \u001b[0;36mJavaMember.__call__\u001b[0;34m(self, *args)\u001b[0m\n\u001b[1;32m 1320\u001b[0m answer \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mgateway_client\u001b[38;5;241m.\u001b[39msend_command(command)\n\u001b[0;32m-> 1321\u001b[0m return_value \u001b[38;5;241m=\u001b[39m \u001b[43mget_return_value\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1322\u001b[0m \u001b[43m \u001b[49m\u001b[43manswer\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mgateway_client\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtarget_id\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mname\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1324\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m temp_arg \u001b[38;5;129;01min\u001b[39;00m temp_args:\n", + "File \u001b[0;32m~/Downloads/spark-3.4.1-bin-hadoop3/python/pyspark/errors/exceptions/captured.py:169\u001b[0m, in \u001b[0;36mcapture_sql_exception..deco\u001b[0;34m(*a, **kw)\u001b[0m\n\u001b[1;32m 168\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 169\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mf\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43ma\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkw\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 170\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m Py4JJavaError \u001b[38;5;28;01mas\u001b[39;00m e:\n", + "File \u001b[0;32m~/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/py4j/protocol.py:334\u001b[0m, in \u001b[0;36mget_return_value\u001b[0;34m(answer, gateway_client, target_id, name)\u001b[0m\n\u001b[1;32m 333\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m--> 334\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m Py4JError(\n\u001b[1;32m 335\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mAn error occurred while calling 
\u001b[39m\u001b[38;5;132;01m{0}\u001b[39;00m\u001b[38;5;132;01m{1}\u001b[39;00m\u001b[38;5;132;01m{2}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;241m.\u001b[39m\n\u001b[1;32m 336\u001b[0m \u001b[38;5;28mformat\u001b[39m(target_id, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m, name))\n\u001b[1;32m 337\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n", + "\u001b[0;31mPy4JError\u001b[0m: An error occurred while calling o87.collectToPython", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[0;31mConnectionRefusedError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[13], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m sedona_kepler_map_s2 \u001b[38;5;241m=\u001b[39m \u001b[43mSedonaKepler\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcreate_map\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdf_s2\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mname\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mAirportCount\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mconfig\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgetConfig\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 2\u001b[0m sedona_kepler_map_s2\n", + "File \u001b[0;32m~/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/sedona/maps/SedonaKepler.py:35\u001b[0m, in \u001b[0;36mSedonaKepler.create_map\u001b[0;34m(cls, df, name, config)\u001b[0m\n\u001b[1;32m 33\u001b[0m kepler_map \u001b[38;5;241m=\u001b[39m KeplerGl()\n\u001b[1;32m 34\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m df \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m---> 35\u001b[0m \u001b[43mSedonaKepler\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43madd_df\u001b[49m\u001b[43m(\u001b[49m\u001b[43mkepler_map\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdf\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mname\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 37\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m config \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 38\u001b[0m kepler_map\u001b[38;5;241m.\u001b[39mconfig \u001b[38;5;241m=\u001b[39m config\n", + "File \u001b[0;32m~/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/sedona/maps/SedonaKepler.py:51\u001b[0m, in \u001b[0;36mSedonaKepler.add_df\u001b[0;34m(cls, kepler_map, df, name)\u001b[0m\n\u001b[1;32m 42\u001b[0m \u001b[38;5;129m@classmethod\u001b[39m\n\u001b[1;32m 43\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21madd_df\u001b[39m(\u001b[38;5;28mcls\u001b[39m, kepler_map, df, name\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124munnamed\u001b[39m\u001b[38;5;124m\"\u001b[39m):\n\u001b[1;32m 44\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 45\u001b[0m \u001b[38;5;124;03m Adds a SedonaDataFrame to a given map object.\u001b[39;00m\n\u001b[1;32m 46\u001b[0m \u001b[38;5;124;03m :param kepler_map: Map object to add SedonaDataFrame to\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 49\u001b[0m \u001b[38;5;124;03m :return: Does not return anything, adds df directly to the given map object\u001b[39;00m\n\u001b[1;32m 50\u001b[0m 
\u001b[38;5;124;03m    \"\"\"\u001b[39;00m\n\u001b[0;32m---> 51\u001b[0m geo_df \u001b[38;5;241m=\u001b[39m \u001b[43mSedonaMapUtils\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m__convert_to_gdf__\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdf\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m     52\u001b[0m kepler_map\u001b[38;5;241m.\u001b[39madd_data(geo_df, name\u001b[38;5;241m=\u001b[39mname)\n",
+      "\u001b[0;31mConnectionRefusedError\u001b[0m: [Errno 61] Connection refused"
+     ]
+    }
+   ],
+   "source": [
+    "sedona_kepler_map_s2 = SedonaKepler.create_map(df=df_s2, name=\"AirportCount\", config=getConfig())\n",
+    "sedona_kepler_map_s2"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
diff --git a/python/Pipfile b/python/Pipfile
index 47339508a3..6b26e2e036 100644
--- a/python/Pipfile
+++ b/python/Pipfile
@@ -14,7 +14,7 @@ pytest-cov = "*"
 shapely="<=1.8.5"
 pandas="<=1.3.5"
 geopandas="<=0.10.2"
-pyspark=">=2.3.0"
+pyspark="*"
 attrs="*"
 pyarrow="*"
 keplergl = "==0.3.2"

From 14a8dc7c2355cbd440e30de6349f408c5b3eb215 Mon Sep 17 00:00:00 2001
From: Nilesh Gajwani
Date: Mon, 23 Oct 2023 17:41:52 -0400
Subject: [PATCH 03/28] Changes to ST notebooks to update sedona and geotools
 version

Add code for Uber H3 cell visualization

---
 binder/ApacheSedonaCore.ipynb                 | 1269 ++---------------
 binder/ApacheSedonaRaster.ipynb               |  709 ++-------
 binder/ApacheSedonaSQL.ipynb                  |    9 +-
 ...naSQL_SpatialJoin_AirportsPerCountry.ipynb |  639 +--------
 4 files changed, 312 insertions(+), 2314 deletions(-)

diff --git a/binder/ApacheSedonaCore.ipynb b/binder/ApacheSedonaCore.ipynb
index e82d5b225e..28b62c99ca 100644
--- a/binder/ApacheSedonaCore.ipynb
+++ b/binder/ApacheSedonaCore.ipynb
@@ -24,7 +24,7 @@
  },
  {
   "cell_type": "code",
-   "execution_count": 1,
+   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
@@ -45,53 +45,14 @@
  },
  {
   "cell_type": "code",
-   "execution_count": 2,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      ":: loading settings :: url = jar:file:/Users/nileshgajwani/Desktop/spark/spark-3.4.0-bin-hadoop3/jars/ivy-2.5.1.jar!/org/apache/ivy/core/settings/ivysettings.xml\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Ivy Default Cache set to: /Users/nileshgajwani/.ivy2/cache\n",
-      "The jars for the packages stored in: /Users/nileshgajwani/.ivy2/jars\n",
-      "org.apache.sedona#sedona-spark-shaded-3.0_2.12 added as a dependency\n",
-      "org.datasyslab#geotools-wrapper added as a dependency\n",
-
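Editor's note: the ConnectionRefusedError captured in the AirportsPerCountry cell above is an environment artifact, not an API failure — toPandas() tried to reach the py4j gateway after the notebook's Spark session had shut down. For orientation, a minimal sketch of the intended call, assuming a live SedonaContext, the df_s2 DataFrame built earlier in that notebook, and its getConfig() helper; the add_df layer shown in the comment is an illustrative extra, not part of the notebook:

    from sedona.maps.SedonaKepler import SedonaKepler

    # One geometry column plus attribute columns is all create_map needs.
    kepler_map = SedonaKepler.create_map(df=df_s2, name="AirportCount", config=getConfig())

    # Further layers can be attached to the same map object if desired:
    # SedonaKepler.add_df(kepler_map, df=some_other_df, name="Airports")

    kepler_map  # renders only while the backing Spark session is alive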
":: resolving dependencies :: org.apache.spark#spark-submit-parent-7c4309f6-195c-4473-b3ac-629607126e04;1.0\n", - "\tconfs: [default]\n", - "\tfound org.apache.sedona#sedona-spark-shaded-3.0_2.12;1.4.1 in central\n", - "\tfound org.datasyslab#geotools-wrapper;1.4.0-28.2 in central\n", - ":: resolution report :: resolve 121ms :: artifacts dl 2ms\n", - "\t:: modules in use:\n", - "\torg.apache.sedona#sedona-spark-shaded-3.0_2.12;1.4.1 from central in [default]\n", - "\torg.datasyslab#geotools-wrapper;1.4.0-28.2 from central in [default]\n", - "\t---------------------------------------------------------------------\n", - "\t| | modules || artifacts |\n", - "\t| conf | number| search|dwnlded|evicted|| number|dwnlded|\n", - "\t---------------------------------------------------------------------\n", - "\t| default | 2 | 0 | 0 | 0 || 2 | 0 |\n", - "\t---------------------------------------------------------------------\n", - ":: retrieving :: org.apache.spark#spark-submit-parent-7c4309f6-195c-4473-b3ac-629607126e04\n", - "\tconfs: [default]\n", - "\t0 artifacts copied, 2 already retrieved (0kB/3ms)\n", - "23/07/03 20:34:33 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable\n", - "Setting default log level to \"WARN\".\n", - "To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).\n", - "23/07/03 20:34:33 WARN Utils: Service 'SparkUI' could not bind on port 4040. Attempting port 4041.\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "config = SedonaContext.builder() .\\\n", " config('spark.jars.packages',\n", - " 'org.apache.sedona:sedona-spark-shaded-3.0_2.12:1.4.1,'\n", - " 'org.datasyslab:geotools-wrapper:1.4.0-28.2'). \\\n", + " 'org.apache.sedona:sedona-spark-shaded-3.0_2.12:1.5.0,'\n", + " 'org.datasyslab:geotools-wrapper:1.5.0-28.2'). 
\\\n",
    "    getOrCreate()\n",
    "\n",
    "sedona = SedonaContext.create(config)"
   ]
  },
  {
   "cell_type": "code",
-   "execution_count": 5,
+   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
@@ -137,29 +98,18 @@
  },
  {
   "cell_type": "code",
-   "execution_count": 6,
+   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
-    "point_rdd = PointRDD(sc, \"data/arealm-small.csv\", 1, FileDataSplitter.CSV, True, 10, StorageLevel.MEMORY_ONLY, \"epsg:4326\", \"epsg:4326\")"
+    "point_rdd = PointRDD(sc, \"data/arealm-small.csv\", 1, FileDataSplitter.CSV, True, 10)"
   ]
  },
  {
   "cell_type": "code",
-   "execution_count": 7,
+   "execution_count": null,
   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "3000"
-      ]
-     },
-     "execution_count": 7,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
+   "outputs": [],
   "source": [
    "## Getting approximate total count\n",
    "point_rdd.approximateTotalCount"
   ]
  },
  {
   "cell_type": "code",
-   "execution_count": 8,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "/Users/nileshgajwani/Desktop/sedona/sedona/venv/lib/python3.9/site-packages/sedona/core/geom/envelope.py:27: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n",
-      "  self.minx = minx\n",
-      "/Users/nileshgajwani/Desktop/sedona/sedona/venv/lib/python3.9/site-packages/sedona/core/geom/envelope.py:28: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n",
-      "  self.maxx = maxx\n",
-      "/Users/nileshgajwani/Desktop/sedona/sedona/venv/lib/python3.9/site-packages/sedona/core/geom/envelope.py:29: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n",
-      "  self.miny = miny\n",
-      "/Users/nileshgajwani/Desktop/sedona/sedona/venv/lib/python3.9/site-packages/sedona/core/geom/envelope.py:30: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n",
-      "  self.maxy = maxy\n"
-     ]
-    },
-    {
-     "data": {
-      "image/svg+xml": [
-       ""
-      ],
-      "text/plain": [
-       "Envelope(-173.120769, -84.965961, 30.244859, 71.355134)"
-      ]
-     },
-     "execution_count": 8,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
   "source": [
    "# getting boundary for PointRDD or any other SpatialRDD, it returns an Envelope object which inherits from\n",
    "# shapely.geometry.Polygon\n",
    "point_rdd.boundary()"
   ]
@@ -206,20 +128,9 @@
  },
  {
   "cell_type": "code",
-   "execution_count": 9,
+   "execution_count": null,
   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "True"
-      ]
-     },
-     "execution_count": 9,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
+   "outputs": [],
   "source": [
    "# To run the analysis, use the analyze function\n",
    "point_rdd.analyze()"
   ]
  },
  {
   "cell_type": "code",
-   "execution_count": 10,
+   "execution_count": null,
   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "image/svg+xml": [
-       ""
-      ],
-      "text/plain": [
-       "Envelope(-173.120769, -84.965961, 30.244859, 71.355134)"
-      ]
-     },
-     "execution_count": 10,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
+   "outputs": [],
   "source": [
    "# Finding boundary envelope for PointRDD or any other SpatialRDD, it returns an Envelope object which inherits from\n",
    "# shapely.geometry.Polygon\n",
@@ 
-252,20 +149,9 @@
  },
  {
   "cell_type": "code",
-   "execution_count": 11,
+   "execution_count": null,
   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "2996"
-      ]
-     },
-     "execution_count": 11,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
+   "outputs": [],
   "source": [
    "# Calculate number of records without duplicates\n",
    "point_rdd.countWithoutDuplicates()"
   ]
@@ -273,20 +159,9 @@
  },
  {
   "cell_type": "code",
-   "execution_count": 12,
+   "execution_count": null,
   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "'epsg:4326'"
-      ]
-     },
-     "execution_count": 12,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
+   "outputs": [],
   "source": [
    "# Getting source epsg code\n",
    "point_rdd.getSourceEpsgCode()"
   ]
@@ -294,20 +169,9 @@
  },
  {
   "cell_type": "code",
-   "execution_count": 13,
+   "execution_count": null,
   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "'epsg:4326'"
-      ]
-     },
-     "execution_count": 13,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
+   "outputs": [],
   "source": [
    "# Getting target epsg code\n",
    "point_rdd.getTargetEpsgCode()"
   ]
@@ -315,20 +179,9 @@
  },
  {
   "cell_type": "code",
-   "execution_count": 14,
+   "execution_count": null,
   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "True"
-      ]
-     },
-     "execution_count": 14,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
+   "outputs": [],
   "source": [
    "# Spatial partitioning data\n",
    "point_rdd.spatialPartitioning(GridType.KDBTREE)"
   ]
@@ -354,30 +207,9 @@
  },
  {
   "cell_type": "code",
-   "execution_count": 15,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "\r",
-      "[Stage 8:>                  (0 + 1) / 1]\r",
-      "\r",
-      "                                                  \r"
-     ]
-    },
-    {
-     "data": {
-      "text/plain": [
-       "[Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2]"
-      ]
-     },
-     "execution_count": 15,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
   "source": [
    "# take first element\n",
    "point_rdd.rawSpatialRDD.take(1)"
   ]
@@ -385,24 +217,9 @@
  },
  {
   "cell_type": "code",
-   "execution_count": 16,
+   "execution_count": null,
   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "[Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2,\n",
-       " Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2,\n",
-       " Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2,\n",
-       " Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2,\n",
-       " Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2]"
-      ]
-     },
-     "execution_count": 16,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
+   "outputs": [],
   "source": [
    "# collect to Python list\n",
    "point_rdd.rawSpatialRDD.collect()[:5]"
   ]
@@ -410,24 +227,9 @@
  },
  {
   "cell_type": "code",
-   "execution_count": 17,
+   "execution_count": null,
   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "[111.08786851399313,\n",
-       " 110.92828303170774,\n",
-       " 111.1385974283527,\n",
-       " 110.97450594034112,\n",
-       " 110.97122518072091]"
-      ]
-     },
-     "execution_count": 17,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
+   "outputs": [],
   "source": [
    "# apply map functions, for example distance to Point(21 52)\n",
    "point_rdd.rawSpatialRDD.map(lambda x: x.geom.distance(Point(21, 52))).take(5)"
   ]
@@ -456,7 +258,7 @@
  },
  {
   "cell_type": "code",
-   "execution_count": 18,
+   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
@@ 
-465,7 +267,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -476,90 +278,9 @@ }, { "cell_type": "code", - "execution_count": 20, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
geomattr1attr2attr3
0POINT (-88.33149 32.32414)testattribute0testattribute1testattribute2
1POINT (-88.17593 32.36076)testattribute0testattribute1testattribute2
2POINT (-88.38895 32.35707)testattribute0testattribute1testattribute2
3POINT (-88.22110 32.35078)testattribute0testattribute1testattribute2
4POINT (-88.32399 32.95067)testattribute0testattribute1testattribute2
\n", - "
" - ], - "text/plain": [ - " geom attr1 attr2 attr3\n", - "0 POINT (-88.33149 32.32414) testattribute0 testattribute1 testattribute2\n", - "1 POINT (-88.17593 32.36076) testattribute0 testattribute1 testattribute2\n", - "2 POINT (-88.38895 32.35707) testattribute0 testattribute1 testattribute2\n", - "3 POINT (-88.22110 32.35078) testattribute0 testattribute1 testattribute2\n", - "4 POINT (-88.32399 32.95067) testattribute0 testattribute1 testattribute2" - ] - }, - "execution_count": 20, - "metadata": {}, - "output_type": "execute_result" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "point_gdf[:5]" ] @@ -573,7 +294,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -582,7 +303,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -595,117 +316,18 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------+--------------+--------------+----------------------------+\n", - "|attr1 |attr2 |attr3 |geom |\n", - "+--------------+--------------+--------------+----------------------------+\n", - "|testattribute0|testattribute1|testattribute2|POINT (-88.331492 32.324142)|\n", - "|testattribute0|testattribute1|testattribute2|POINT (-88.175933 32.360763)|\n", - "|testattribute0|testattribute1|testattribute2|POINT (-88.388954 32.357073)|\n", - "|testattribute0|testattribute1|testattribute2|POINT (-88.221102 32.35078) |\n", - "|testattribute0|testattribute1|testattribute2|POINT (-88.323995 32.950671)|\n", - "+--------------+--------------+--------------+----------------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "spatial_gdf.show(5, False)" ] }, { "cell_type": "code", - "execution_count": 24, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
attr1attr2attr3geom
0testattribute0testattribute1testattribute2POINT (-88.33149 32.32414)
1testattribute0testattribute1testattribute2POINT (-88.17593 32.36076)
2testattribute0testattribute1testattribute2POINT (-88.38895 32.35707)
3testattribute0testattribute1testattribute2POINT (-88.22110 32.35078)
4testattribute0testattribute1testattribute2POINT (-88.32399 32.95067)
\n", - "
" - ], - "text/plain": [ - " attr1 attr2 attr3 geom\n", - "0 testattribute0 testattribute1 testattribute2 POINT (-88.33149 32.32414)\n", - "1 testattribute0 testattribute1 testattribute2 POINT (-88.17593 32.36076)\n", - "2 testattribute0 testattribute1 testattribute2 POINT (-88.38895 32.35707)\n", - "3 testattribute0 testattribute1 testattribute2 POINT (-88.22110 32.35078)\n", - "4 testattribute0 testattribute1 testattribute2 POINT (-88.32399 32.95067)" - ] - }, - "execution_count": 24, - "metadata": {}, - "output_type": "execute_result" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "gpd.GeoDataFrame(spatial_gdf.toPandas(), geometry=\"geom\")[:5]" ] @@ -719,7 +341,7 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -735,7 +357,7 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -744,90 +366,9 @@ }, { "cell_type": "code", - "execution_count": 27, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
geometryattr1attr2attr3
0POINT (-88.33149 32.32414)testattribute0testattribute1testattribute2
1POINT (-88.17593 32.36076)testattribute0testattribute1testattribute2
2POINT (-88.38895 32.35707)testattribute0testattribute1testattribute2
3POINT (-88.22110 32.35078)testattribute0testattribute1testattribute2
4POINT (-88.32399 32.95067)testattribute0testattribute1testattribute2
\n", - "
" - ], - "text/plain": [ - " geometry attr1 attr2 attr3\n", - "0 POINT (-88.33149 32.32414) testattribute0 testattribute1 testattribute2\n", - "1 POINT (-88.17593 32.36076) testattribute0 testattribute1 testattribute2\n", - "2 POINT (-88.38895 32.35707) testattribute0 testattribute1 testattribute2\n", - "3 POINT (-88.22110 32.35078) testattribute0 testattribute1 testattribute2\n", - "4 POINT (-88.32399 32.95067) testattribute0 testattribute1 testattribute2" - ] - }, - "execution_count": 27, - "metadata": {}, - "output_type": "execute_result" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "gpd.GeoDataFrame(geo_df.toPandas(), geometry=\"geometry\")[:5]" ] @@ -853,32 +394,21 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ "rectangle_rdd = RectangleRDD(sc, \"data/zcta510-small.csv\", FileDataSplitter.CSV, True, 11)\n", "point_rdd = PointRDD(sc, \"data/arealm-small.csv\", 1, FileDataSplitter.CSV, False, 11)\n", "polygon_rdd = PolygonRDD(sc, \"data/primaryroads-polygon.csv\", FileDataSplitter.CSV, True, 11)\n", - "linestring_rdd = LineStringRDD(sc, \"data/primaryroads-linestring.csv\", FileDataSplitter.CSV, True, StorageLevel.MEMORY_ONLY)" + "linestring_rdd = LineStringRDD(sc, \"data/primaryroads-linestring.csv\", FileDataSplitter.CSV, True)" ] }, { "cell_type": "code", - "execution_count": 29, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 29, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "rectangle_rdd.analyze()\n", "point_rdd.analyze()\n", @@ -902,20 +432,9 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 30, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "point_rdd.spatialPartitioning(GridType.KDBTREE)" ] @@ -936,7 +455,7 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -984,7 +503,7 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1007,83 +526,36 @@ }, { "cell_type": "code", - "execution_count": 33, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "MapPartitionsRDD[64] at map at FlatPairRddConverter.scala:30" - ] - }, - "execution_count": 33, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "result" ] }, { "cell_type": "code", - "execution_count": 34, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[[Geometry: Polygon userData: , Geometry: Point userData: ],\n", - " [Geometry: Polygon userData: , Geometry: Point userData: ]]" - ] - }, - "execution_count": 34, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "result.take(2)" ] }, { "cell_type": "code", - "execution_count": 35, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[[Geometry: Polygon userData: , Geometry: Point userData: ],\n", - " [Geometry: Polygon userData: , Geometry: Point userData: ],\n", - " [Geometry: Polygon userData: , Geometry: Point userData: ]]" - ] - }, - "execution_count": 35, - "metadata": {}, - "output_type": "execute_result" - } - 
], + "outputs": [], "source": [ "result.collect()[:3]" ] }, { "cell_type": "code", - "execution_count": 36, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[0.0, 0.0, 0.0, 0.0, 0.0]" - ] - }, - "execution_count": 36, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# getting distance using SpatialObjects\n", "result.map(lambda x: x[0].geom.distance(x[1].geom)).take(5)" @@ -1091,24 +563,9 @@ }, { "cell_type": "code", - "execution_count": 37, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[0.026651558685001447,\n", - " 0.051572544132000575,\n", - " 0.051572544132000575,\n", - " 0.051572544132000575,\n", - " 0.05189354027999942]" - ] - }, - "execution_count": 37, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# getting area of polygon data\n", "result.map(lambda x: x[0].geom.area).take(5)" @@ -1116,7 +573,7 @@ }, { "cell_type": "code", - "execution_count": 38, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1125,7 +582,7 @@ }, { "cell_type": "code", - "execution_count": 39, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1139,27 +596,9 @@ }, { "cell_type": "code", - "execution_count": 40, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+\n", - "| geom_left| geom_right|\n", - "+--------------------+--------------------+\n", - "|POLYGON ((-87.229...|POINT (-87.204299...|\n", - "|POLYGON ((-87.082...|POINT (-87.059583...|\n", - "|POLYGON ((-87.082...|POINT (-87.075409...|\n", - "|POLYGON ((-87.082...|POINT (-87.08084 ...|\n", - "|POLYGON ((-87.092...|POINT (-87.08084 ...|\n", - "+--------------------+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "# Set verifySchema to False\n", "spatial_join_result = result.map(lambda x: [x[0].geom, x[1].geom])\n", @@ -1168,7 +607,7 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1177,20 +616,9 @@ }, { "cell_type": "code", - "execution_count": 42, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- geom_left: geometry (nullable = false)\n", - " |-- geom_right: geometry (nullable = false)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "sedona.createDataFrame(spatial_join_result, schema, verifySchema=False).printSchema()" ] @@ -1204,27 +632,9 @@ }, { "cell_type": "code", - "execution_count": 43, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+-----+--------------------+-----+\n", - "| geom_1|attr1| geom_2|attr2|\n", - "+--------------------+-----+--------------------+-----+\n", - "|POLYGON ((-87.229...| |POINT (-87.204299...| |\n", - "|POLYGON ((-87.082...| |POINT (-87.059583...| |\n", - "|POLYGON ((-87.082...| |POINT (-87.075409...| |\n", - "|POLYGON ((-87.082...| |POINT (-87.08084 ...| |\n", - "|POLYGON ((-87.092...| |POINT (-87.08084 ...| |\n", - "+--------------------+-----+--------------------+-----+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "Adapter.toDf(result, [\"attr1\"], [\"attr2\"], sedona).show(5, True)" ] @@ -1238,22 +648,9 @@ }, { 
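Editor's note: the flat pair RDD consumed in these cells (`result`) is produced by Sedona's join operator. A minimal sketch of the producing side, consistent with the documented JoinQuery API; the two boolean flags are using_index and consider_boundary_intersection:

    from sedona.core.enums import GridType, IndexType
    from sedona.core.spatialOperator import JoinQuery

    # Both sides must share one partitioning scheme before a partitioned join.
    point_rdd.spatialPartitioning(GridType.KDBTREE)
    rectangle_rdd.spatialPartitioning(point_rdd.getPartitioner())

    # Optionally index the partitioned data to speed up the probe side.
    point_rdd.buildIndex(IndexType.RTREE, True)

    result = JoinQuery.SpatialJoinQueryFlat(point_rdd, rectangle_rdd, True, False)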
"cell_type": "code", - "execution_count": 44, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- geom_1: geometry (nullable = true)\n", - " |-- attr1: string (nullable = true)\n", - " |-- geom_2: geometry (nullable = true)\n", - " |-- attr2: string (nullable = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "Adapter.toDf(result, [\"attr1\"], [\"attr2\"], sedona).printSchema()" ] @@ -1275,7 +672,7 @@ }, { "cell_type": "code", - "execution_count": 45, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1285,7 +682,7 @@ }, { "cell_type": "code", - "execution_count": 46, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1294,7 +691,7 @@ }, { "cell_type": "code", - "execution_count": 47, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1304,7 +701,7 @@ }, { "cell_type": "code", - "execution_count": 48, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1316,31 +713,9 @@ }, { "cell_type": "code", - "execution_count": 49, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+----------------+\n", - "| geometry|number_of_points|\n", - "+--------------------+----------------+\n", - "|POLYGON ((-86.749...| 4|\n", - "|POLYGON ((-87.229...| 7|\n", - "|POLYGON ((-87.114...| 15|\n", - "|POLYGON ((-87.082...| 12|\n", - "|POLYGON ((-86.697...| 1|\n", - "|POLYGON ((-86.816...| 6|\n", - "|POLYGON ((-87.285...| 26|\n", - "|POLYGON ((-87.105...| 15|\n", - "|POLYGON ((-86.860...| 12|\n", - "|POLYGON ((-87.092...| 5|\n", - "+--------------------+----------------+\n", - "\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "sedona.createDataFrame(number_of_points, schema, verifySchema=False).show()" ] @@ -1368,7 +743,7 @@ }, { "cell_type": "code", - "execution_count": 50, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1377,7 +752,7 @@ }, { "cell_type": "code", - "execution_count": 51, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1386,24 +761,9 @@ }, { "cell_type": "code", - "execution_count": 52, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[Geometry: Point userData: ,\n", - " Geometry: Point userData: ,\n", - " Geometry: Point userData: ,\n", - " Geometry: Point userData: ,\n", - " Geometry: Point userData: ]" - ] - }, - "execution_count": 52, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "result" ] @@ -1417,7 +777,7 @@ }, { "cell_type": "code", - "execution_count": 53, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1431,44 +791,18 @@ }, { "cell_type": "code", - "execution_count": 54, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[Geometry: Polygon userData: ,\n", - " Geometry: Polygon userData: ,\n", - " Geometry: Polygon userData: ,\n", - " Geometry: Polygon userData: ,\n", - " Geometry: Polygon userData: ]" - ] - }, - "execution_count": 54, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "polygons_nearby" ] }, { "cell_type": "code", - "execution_count": 55, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'POLYGON ((-83.993559 34.087259, -83.993559 34.131247, -83.959903 34.131247, -83.959903 34.087259, 
-83.993559 34.087259))'" - ] - }, - "execution_count": 55, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "polygons_nearby[0].geom.wkt" ] @@ -1494,7 +828,7 @@ }, { "cell_type": "code", - "execution_count": 56, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1503,24 +837,9 @@ }, { "cell_type": "code", - "execution_count": 57, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/Users/nileshgajwani/Desktop/sedona/sedona/venv/lib/python3.9/site-packages/sedona/core/geom/envelope.py:27: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n", - " self.minx = minx\n", - "/Users/nileshgajwani/Desktop/sedona/sedona/venv/lib/python3.9/site-packages/sedona/core/geom/envelope.py:28: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n", - " self.maxx = maxx\n", - "/Users/nileshgajwani/Desktop/sedona/sedona/venv/lib/python3.9/site-packages/sedona/core/geom/envelope.py:29: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n", - " self.miny = miny\n", - "/Users/nileshgajwani/Desktop/sedona/sedona/venv/lib/python3.9/site-packages/sedona/core/geom/envelope.py:30: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n", - " self.maxy = maxy\n" - ] - } - ], + "outputs": [], "source": [ "query_envelope = Envelope(-85.01, -60.01, 34.01, 50.01)\n", "\n", @@ -1529,59 +848,25 @@ }, { "cell_type": "code", - "execution_count": 58, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "MapPartitionsRDD[128] at map at GeometryRddConverter.scala:30" - ] - }, - "execution_count": 58, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "result_range_query" ] }, { "cell_type": "code", - "execution_count": 59, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "23/07/03 20:34:42 WARN BlockManager: Task 405 already completed, not releasing lock for rdd_45_0\n" - ] - }, - { - "data": { - "text/plain": [ - "[Geometry: LineString userData: ,\n", - " Geometry: LineString userData: ,\n", - " Geometry: LineString userData: ,\n", - " Geometry: LineString userData: ,\n", - " Geometry: LineString userData: ,\n", - " Geometry: LineString userData: ]" - ] - }, - "execution_count": 59, - "metadata": {}, - "output_type": "execute_result" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "result_range_query.take(6)" ] }, { "cell_type": "code", - "execution_count": 60, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1590,7 +875,7 @@ }, { "cell_type": "code", - "execution_count": 61, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1599,27 +884,9 @@ }, { "cell_type": "code", - "execution_count": 62, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| geometry|\n", - "+--------------------+\n", - "|LINESTRING (-72.1...|\n", - "|LINESTRING (-72.4...|\n", - "|LINESTRING (-72.4...|\n", - "|LINESTRING (-73.4...|\n", - "|LINESTRING (-73.6...|\n", - 
"+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "sedona.createDataFrame(\n", " result_range_query.map(lambda x: [x.geom]),\n", @@ -1648,7 +915,7 @@ }, { "cell_type": "code", - "execution_count": 63, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1657,7 +924,7 @@ }, { "cell_type": "code", - "execution_count": 64, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1666,54 +933,25 @@ }, { "cell_type": "code", - "execution_count": 65, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 65, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "shape_rdd" ] }, { "cell_type": "code", - "execution_count": 66, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| geometry|\n", - "+--------------------+\n", - "|MULTIPOLYGON (((1...|\n", - "|MULTIPOLYGON (((-...|\n", - "|MULTIPOLYGON (((1...|\n", - "|POLYGON ((118.362...|\n", - "|MULTIPOLYGON (((-...|\n", - "+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "Adapter.toDf(shape_rdd, sedona).show(5, True)" ] }, { "cell_type": "code", - "execution_count": 67, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1731,7 +969,7 @@ }, { "cell_type": "code", - "execution_count": 68, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1740,54 +978,25 @@ }, { "cell_type": "code", - "execution_count": 69, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 69, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "geo_json_rdd" ] }, { "cell_type": "code", - "execution_count": 70, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+-------+--------+-------+--------+--------------------+------------+----+----+--------+\n", - "| geometry|STATEFP|COUNTYFP|TRACTCE|BLKGRPCE| AFFGEOID| GEOID|NAME|LSAD| ALAND|\n", - "+--------------------+-------+--------+-------+--------+--------------------+------------+----+----+--------+\n", - "|POLYGON ((-87.621...| 01| 077| 011501| 5|1500000US01077011...|010770115015| 5| BG| 6844991|\n", - "|POLYGON ((-85.719...| 01| 045| 021102| 4|1500000US01045021...|010450211024| 4| BG|11360854|\n", - "|POLYGON ((-86.000...| 01| 055| 001300| 3|1500000US01055001...|010550013003| 3| BG| 1378742|\n", - "|POLYGON ((-86.574...| 01| 089| 001700| 2|1500000US01089001...|010890017002| 2| BG| 1040641|\n", - "|POLYGON ((-85.382...| 01| 069| 041400| 1|1500000US01069041...|010690414001| 1| BG| 8243574|\n", - "+--------------------+-------+--------+-------+--------+--------------------+------------+----+----+--------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "Adapter.toDf(geo_json_rdd, sedona).drop(\"AWATER\").show(5, True)" ] }, { "cell_type": "code", - "execution_count": 71, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1796,7 +1005,7 @@ }, { "cell_type": "code", - "execution_count": 72, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1805,73 +1014,34 @@ }, { "cell_type": "code", - "execution_count": 73, + "execution_count": null, "metadata": 
{}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 73, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "wkt_rdd" ] }, { "cell_type": "code", - "execution_count": 74, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- geometry: geometry (nullable = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "Adapter.toDf(wkt_rdd, sedona).printSchema()" ] }, { "cell_type": "code", - "execution_count": 75, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| geometry|\n", - "+--------------------+\n", - "|POLYGON ((-97.019...|\n", - "|POLYGON ((-123.43...|\n", - "|POLYGON ((-104.56...|\n", - "|POLYGON ((-96.910...|\n", - "|POLYGON ((-98.273...|\n", - "+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "Adapter.toDf(wkt_rdd, sedona).show(5, True)" ] }, { "cell_type": "code", - "execution_count": 76, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1880,7 +1050,7 @@ }, { "cell_type": "code", - "execution_count": 77, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1889,27 +1059,9 @@ }, { "cell_type": "code", - "execution_count": 78, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| geometry|\n", - "+--------------------+\n", - "|POLYGON ((-97.019...|\n", - "|POLYGON ((-123.43...|\n", - "|POLYGON ((-104.56...|\n", - "|POLYGON ((-96.910...|\n", - "|POLYGON ((-98.273...|\n", - "+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "Adapter.toDf(wkb_rdd, sedona).show(5, True)" ] @@ -1923,7 +1075,7 @@ }, { "cell_type": "code", - "execution_count": 79, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1937,7 +1089,7 @@ }, { "cell_type": "code", - "execution_count": 80, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1947,67 +1099,27 @@ }, { "cell_type": "code", - "execution_count": 81, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- leftgeometry: geometry (nullable = true)\n", - " |-- rightgeometry: geometry (nullable = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "geometry_df.printSchema()" ] }, { "cell_type": "code", - "execution_count": 82, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+\n", - "| leftgeometry| rightgeometry|\n", - "+--------------------+--------------------+\n", - "|POLYGON ((-87.285...|POINT (-87.28468 ...|\n", - "|POLYGON ((-87.285...|POINT (-87.215491...|\n", - "|POLYGON ((-87.285...|POINT (-87.210001...|\n", - "|POLYGON ((-87.285...|POINT (-87.278485...|\n", - "|POLYGON ((-87.285...|POINT (-87.280556...|\n", - "+--------------------+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "geometry_df.show(5)" ] }, { "cell_type": "code", - "execution_count": 83, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Row(leftgeometry=, rightgeometry=)" - ] - }, - "execution_count": 83, - 
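Editor's note: the Adapter.toDf calls here operate on RDDs produced by Sedona's format mappers. A short sketch of the reader side, assuming the binder repository's sample data paths (the file names are assumptions taken from its data folder and may differ elsewhere):

    from sedona.core.formatMapper import WktReader, WkbReader

    # Column 0 holds the geometry; the booleans allow and skip invalid geometries.
    wkt_rdd = WktReader.readToGeometryRDD(sc, "data/county_small.tsv", 0, True, False)
    wkb_rdd = WkbReader.readToGeometryRDD(sc, "data/county_small_wkb.tsv", 0, True, False)

    Adapter.toDf(wkt_rdd, sedona).show(5, True)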
"metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "geometry_df.collect()[0]" ] @@ -2021,7 +1133,7 @@ }, { "cell_type": "code", - "execution_count": 84, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -2030,27 +1142,9 @@ }, { "cell_type": "code", - "execution_count": 85, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------+--------------------+---------------+\n", - "| leftgeometry|left_user_data| rightgeometry|right_user_data|\n", - "+--------------------+--------------+--------------------+---------------+\n", - "|POLYGON ((-87.285...| |POINT (-87.28468 ...| null|\n", - "|POLYGON ((-87.285...| |POINT (-87.215491...| null|\n", - "|POLYGON ((-87.285...| |POINT (-87.210001...| null|\n", - "|POLYGON ((-87.285...| |POINT (-87.278485...| null|\n", - "|POLYGON ((-87.285...| |POINT (-87.280556...| null|\n", - "+--------------------+--------------+--------------------+---------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "geometry_df.show(5)" ] @@ -2064,7 +1158,7 @@ }, { "cell_type": "code", - "execution_count": 86, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -2075,7 +1169,7 @@ }, { "cell_type": "code", - "execution_count": 87, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -2085,53 +1179,25 @@ }, { "cell_type": "code", - "execution_count": 88, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| geometry|\n", - "+--------------------+\n", - "|LINESTRING (-72.1...|\n", - "|LINESTRING (-72.4...|\n", - "|LINESTRING (-72.4...|\n", - "|LINESTRING (-73.4...|\n", - "|LINESTRING (-73.6...|\n", - "+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "gdf.show(5)" ] }, { "cell_type": "code", - "execution_count": 89, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- geometry: geometry (nullable = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "gdf.printSchema()" ] }, { "cell_type": "code", - "execution_count": 90, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -2142,47 +1208,18 @@ }, { "cell_type": "code", - "execution_count": 91, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+---+\n", - "| geometry|_c1|\n", - "+--------------------+---+\n", - "|LINESTRING (-72.1...| |\n", - "|LINESTRING (-72.4...| |\n", - "|LINESTRING (-72.4...| |\n", - "|LINESTRING (-73.4...| |\n", - "|LINESTRING (-73.6...| |\n", - "+--------------------+---+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "gdf_with_columns.show(5)" ] }, { "cell_type": "code", - "execution_count": 92, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- geometry: geometry (nullable = true)\n", - " |-- _c1: string (nullable = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "gdf_with_columns.printSchema()" ] diff --git a/binder/ApacheSedonaRaster.ipynb b/binder/ApacheSedonaRaster.ipynb index d3c3b8ae0b..23f3a1cae5 100644 --- a/binder/ApacheSedonaRaster.ipynb +++ 
b/binder/ApacheSedonaRaster.ipynb @@ -24,8 +24,10 @@ }, { "cell_type": "code", - "execution_count": 1, - "metadata": {}, + "execution_count": null, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "from IPython.display import display, HTML\n", @@ -51,52 +53,16 @@ }, { "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - ":: loading settings :: url = jar:file:/Users/nileshgajwani/Desktop/spark/spark-3.4.0-bin-hadoop3/jars/ivy-2.5.1.jar!/org/apache/ivy/core/settings/ivysettings.xml\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Ivy Default Cache set to: /Users/nileshgajwani/.ivy2/cache\n", - "The jars for the packages stored in: /Users/nileshgajwani/.ivy2/jars\n", - "org.apache.sedona#sedona-spark-shaded-3.0_2.12 added as a dependency\n", - "org.datasyslab#geotools-wrapper added as a dependency\n", - ":: resolving dependencies :: org.apache.spark#spark-submit-parent-f6cc1c05-35e7-48b0-8060-745906834ca0;1.0\n", - "\tconfs: [default]\n", - "\tfound org.apache.sedona#sedona-spark-shaded-3.0_2.12;1.4.1 in central\n", - "\tfound org.datasyslab#geotools-wrapper;1.4.0-28.2 in central\n", - ":: resolution report :: resolve 79ms :: artifacts dl 2ms\n", - "\t:: modules in use:\n", - "\torg.apache.sedona#sedona-spark-shaded-3.0_2.12;1.4.1 from central in [default]\n", - "\torg.datasyslab#geotools-wrapper;1.4.0-28.2 from central in [default]\n", - "\t---------------------------------------------------------------------\n", - "\t| | modules || artifacts |\n", - "\t| conf | number| search|dwnlded|evicted|| number|dwnlded|\n", - "\t---------------------------------------------------------------------\n", - "\t| default | 2 | 0 | 0 | 0 || 2 | 0 |\n", - "\t---------------------------------------------------------------------\n", - ":: retrieving :: org.apache.spark#spark-submit-parent-f6cc1c05-35e7-48b0-8060-745906834ca0\n", - "\tconfs: [default]\n", - "\t0 artifacts copied, 2 already retrieved (0kB/3ms)\n", - "23/06/30 14:06:36 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable\n", - "Setting default log level to \"WARN\".\n", - "To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).\n" - ] - } - ], + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], "source": [ "config = SedonaContext.builder() .\\\n", " config('spark.jars.packages',\n", - " 'org.apache.sedona:sedona-spark-shaded-3.0_2.12:1.4.1,'\n", - " 'org.datasyslab:geotools-wrapper:1.4.0-28.2'). \\\n", + " 'org.apache.sedona:sedona-spark-shaded-3.0_2.12:1.5.0,'\n", + " 'org.datasyslab:geotools-wrapper:1.5.0-28.2'). 
\\\n", " getOrCreate()\n", "\n", "sedona = SedonaContext.create(config)\n", @@ -116,8 +82,10 @@ }, { "cell_type": "code", - "execution_count": 3, - "metadata": {}, + "execution_count": null, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "# Path to directory of geotiff images \n", @@ -126,28 +94,12 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "metadata": { - "scrolled": true + "scrolled": true, + "tags": [] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- image: struct (nullable = true)\n", - " | |-- origin: string (nullable = true)\n", - " | |-- geometry: string (nullable = true)\n", - " | |-- height: integer (nullable = true)\n", - " | |-- width: integer (nullable = true)\n", - " | |-- nBands: integer (nullable = true)\n", - " | |-- data: array (nullable = true)\n", - " | | |-- element: double (containsNull = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "df = sedona.read.format(\"geotiff\").option(\"dropInvalid\",True).option(\"readToCRS\", \"EPSG:4326\").option(\"disableErrorInCRS\", False).load(DATA_DIR)\n", "df.printSchema()" @@ -155,39 +107,9 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\r", - "[Stage 3:> (0 + 1) / 1]\r" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+------+-----+--------------------+-----+\n", - "| origin| Geom|height|width| data|bands|\n", - "+--------------------+--------------------+------+-----+--------------------+-----+\n", - "|file:/Users/niles...|POLYGON ((-58.702...| 32| 32|[1081.0, 1068.0, ...| 4|\n", - "|file:/Users/niles...|POLYGON ((-58.286...| 32| 32|[1151.0, 1141.0, ...| 4|\n", - "+--------------------+--------------------+------+-----+--------------------+-----+\n", - "\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\r", - " \r" - ] - } - ], + "outputs": [], "source": [ "df = df.selectExpr(\"image.origin as origin\",\"ST_GeomFromWkt(image.geometry) as Geom\", \"image.height as height\", \"image.width as width\", \"image.data as data\", \"image.nBands as bands\")\n", "df.show(5)" @@ -202,23 +124,9 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+--------------------+--------------------+--------------------+\n", - "| Geom| Band1| Band2| Band3| Band4|\n", - "+--------------------+--------------------+--------------------+--------------------+--------------------+\n", - "|POLYGON ((-58.702...|[1081.0, 1068.0, ...|[909.0, 909.0, 82...|[677.0, 660.0, 66...|[654.0, 652.0, 66...|\n", - "|POLYGON ((-58.286...|[1151.0, 1141.0, ...|[894.0, 956.0, 10...|[751.0, 802.0, 87...|[0.0, 0.0, 0.0, 0...|\n", - "+--------------------+--------------------+--------------------+--------------------+--------------------+\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "'''RS_GetBand() will fetch a particular band from given data array which is the concatenation of all the bands'''\n", "\n", @@ -236,23 +144,9 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| normDiff|\n", - "+--------------------+\n", - 
"|[-0.09, -0.08, -0...|\n", - "|[-0.13, -0.09, -0...|\n", - "+--------------------+\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "'''RS_NormalizedDifference can be used to calculate NDVI for a particular geotiff image since it uses same computational formula as ndvi'''\n", "\n", @@ -262,23 +156,9 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+-------+\n", - "| mean|\n", - "+-------+\n", - "|1153.85|\n", - "|1293.77|\n", - "+-------+\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "'''RS_Mean() can used to calculate mean of piel values in a particular spatial band'''\n", "meanDF = df.selectExpr(\"RS_Mean(Band1) as mean\")\n", @@ -287,23 +167,9 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+----------------+\n", - "| mode|\n", - "+----------------+\n", - "| [1011.0, 927.0]|\n", - "|[1176.0, 1230.0]|\n", - "+----------------+\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "\"\"\" RS_Mode() is used to calculate mode in an array of pixels and returns a array of double with size 1 in case of unique mode\"\"\"\n", "modeDF = df.selectExpr(\"RS_Mode(Band1) as mode\")\n", @@ -312,23 +178,9 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| greaterthan|\n", - "+--------------------+\n", - "|[1.0, 1.0, 1.0, 0...|\n", - "|[1.0, 1.0, 1.0, 1...|\n", - "+--------------------+\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "'''RS_GreaterThan() is used to mask all the values with 1 which are greater than a particular threshold'''\n", "greaterthanDF = sedona.sql(\"Select RS_GreaterThan(Band1,1000.0) as greaterthan from allbands\")\n", @@ -337,23 +189,9 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| greaterthanEqual|\n", - "+--------------------+\n", - "|[1.0, 1.0, 1.0, 1...|\n", - "|[1.0, 1.0, 1.0, 1...|\n", - "+--------------------+\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "'''RS_GreaterThanEqual() is used to mask all the values with 1 which are greater than a particular threshold'''\n", "\n", @@ -363,23 +201,9 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| lessthan|\n", - "+--------------------+\n", - "|[0.0, 0.0, 0.0, 1...|\n", - "|[0.0, 0.0, 0.0, 0...|\n", - "+--------------------+\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "'''RS_LessThan() is used to mask all the values with 1 which are less than a particular threshold'''\n", "lessthanDF = sedona.sql(\"Select RS_LessThan(Band1,1000.0) as lessthan from allbands\")\n", @@ -388,23 +212,9 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| lessthanequal|\n", - "+--------------------+\n", - "|[1.0, 1.0, 1.0, 1...|\n", - "|[1.0, 1.0, 1.0, 1...|\n", - "+--------------------+\n", - "\n" - ] - } - ], + "outputs": [], "source": 
[
    "'''RS_LessThanEqual() is used to mask all the values with 1 which are less than or equal to a particular threshold'''\n",
    "lessthanEqualDF = sedona.sql(\"Select RS_LessThanEqual(Band1,2890.0) as lessthanequal from allbands\")\n",
@@ -413,23 +223,9 @@
  },
  {
   "cell_type": "code",
-   "execution_count": 16,
+   "execution_count": null,
   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "+--------------------+\n",
-      "|           sumOfBand|\n",
-      "+--------------------+\n",
-      "|[1990.0, 1977.0, ...|\n",
-      "|[2045.0, 2097.0, ...|\n",
-      "+--------------------+\n",
-      "\n"
-     ]
-    }
-   ],
+   "outputs": [],
   "source": [
    "'''RS_Add() can add two spatial bands together'''\n",
    "sumDF = df.selectExpr(\"RS_Add(Band1, Band2) as sumOfBand\")\n",
@@ -438,23 +234,9 @@
  },
  {
   "cell_type": "code",
-   "execution_count": 17,
+   "execution_count": null,
   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "+--------------------+\n",
-      "|          diffOfBand|\n",
-      "+--------------------+\n",
-      "|[-172.0, -159.0, ...|\n",
-      "|[-257.0, -185.0, ...|\n",
-      "+--------------------+\n",
-      "\n"
-     ]
-    }
-   ],
+   "outputs": [],
   "source": [
    "'''RS_Subtract() can subtract one spatial band from another'''\n",
    "subtractDF = df.selectExpr(\"RS_Subtract(Band1, Band2) as diffOfBand\")\n",
@@ -463,23 +245,9 @@
  },
  {
   "cell_type": "code",
-   "execution_count": 18,
+   "execution_count": null,
   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "+--------------------+\n",
-      "|       productOfBand|\n",
-      "+--------------------+\n",
-      "|[982629.0, 970812...|\n",
-      "|[1028994.0, 10907...|\n",
-      "+--------------------+\n",
-      "\n"
-     ]
-    }
-   ],
+   "outputs": [],
   "source": [
    "'''RS_Multiply() can multiply two bands together'''\n",
    "multiplyDF = df.selectExpr(\"RS_Multiply(Band1, Band2) as productOfBand\")\n",
@@ -488,23 +256,9 @@
  },
  {
   "cell_type": "code",
-   "execution_count": 19,
+   "execution_count": null,
   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "+--------------------+\n",
-      "|      divisionOfBand|\n",
-      "+--------------------+\n",
-      "|[1.19, 1.17, 1.25...|\n",
-      "|[1.29, 1.19, 1.19...|\n",
-      "+--------------------+\n",
-      "\n"
-     ]
-    }
-   ],
+   "outputs": [],
   "source": [
    "'''RS_Divide() can divide one band by another'''\n",
    "divideDF = df.selectExpr(\"RS_Divide(Band1, Band2) as divisionOfBand\")\n",
@@ -513,23 +267,9 @@
  },
  {
   "cell_type": "code",
-   "execution_count": 20,
+   "execution_count": null,
   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "+--------------------+\n",
-      "|              target|\n",
-      "+--------------------+\n",
-      "|[1818.0, 1818.0, ...|\n",
-      "|[1788.0, 1912.0, ...|\n",
-      "+--------------------+\n",
-      "\n"
-     ]
-    }
-   ],
+   "outputs": [],
   "source": [
    "'''RS_MultiplyFactor() will multiply a spatial band by a given factor'''\n",
    "mulfacDF = df.selectExpr(\"RS_MultiplyFactor(Band2, 2) as target\")\n",
@@ -538,23 +278,9 @@
  },
  {
   "cell_type": "code",
-   "execution_count": 21,
+   "execution_count": null,
   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "+--------------------+\n",
-      "|                 AND|\n",
-      "+--------------------+\n",
-      "|[9.0, 12.0, 2.0, ...|\n",
-      "|[126.0, 52.0, 102...|\n",
-      "+--------------------+\n",
-      "\n"
-     ]
-    }
-   ],
+   "outputs": [],
   "source": [
    "'''RS_BitwiseAND() will return the bitwise AND between two band values'''\n",
    "bitwiseAND = df.selectExpr(\"RS_BitwiseAND(Band1, Band2) as AND\")\n",
@@ -563,23 +289,9 @@
  },
  {
"cell_type": "code", - "execution_count": 22, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| OR|\n", - "+--------------------+\n", - "|[1981.0, 1965.0, ...|\n", - "|[1919.0, 2045.0, ...|\n", - "+--------------------+\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "'''RS_BitwiseOR() will return OR between two values of Bands'''\n", "bitwiseOR = df.selectExpr(\"RS_BitwiseOR(Band1, Band2) as OR\")\n", @@ -588,23 +300,9 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+-----+\n", - "|count|\n", - "+-----+\n", - "| 753|\n", - "| 1017|\n", - "+-----+\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "'''RS_Count() will calculate the total number of occurrence of a target value'''\n", "countDF = df.selectExpr(\"RS_Count(RS_GreaterThan(Band1,1000.0), 1.0) as count\")\n", @@ -613,23 +311,9 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| modulo|\n", - "+--------------------+\n", - "|[10.0, 18.0, 18.0...|\n", - "|[17.0, 7.0, 2.0, ...|\n", - "+--------------------+\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "'''RS_Modulo() will calculate the modulus of band value with respect to a given number'''\n", "moduloDF = df.selectExpr(\"RS_Modulo(Band1, 21.0) as modulo \")\n", @@ -638,23 +322,9 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| root|\n", - "+--------------------+\n", - "|[32.88, 32.68, 32...|\n", - "|[33.93, 33.78, 35...|\n", - "+--------------------+\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "'''RS_SquareRoot() will calculate calculate square root of all the band values up to two decimal places'''\n", "rootDF = df.selectExpr(\"RS_SquareRoot(Band1) as root\")\n", @@ -663,23 +333,9 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| loggDifference|\n", - "+--------------------+\n", - "|[1081.0, 1068.0, ...|\n", - "|[1151.0, 1141.0, ...|\n", - "+--------------------+\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "'''RS_LogicalDifference() will return value from band1 if value at that particular location is not equal tp band1 else it will return 0'''\n", "logDiff = df.selectExpr(\"RS_LogicalDifference(Band1, Band2) as loggDifference\")\n", @@ -688,23 +344,9 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| logicalOver|\n", - "+--------------------+\n", - "|[677.0, 660.0, 66...|\n", - "|[751.0, 802.0, 87...|\n", - "+--------------------+\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "'''RS_LogicalOver() will iterate over two bands and return value of first band if it is not equal to 0 else it will return value from later band'''\n", "logOver = df.selectExpr(\"RS_LogicalOver(Band3, Band2) as logicalOver\")\n", @@ -725,23 +367,9 @@ }, { "cell_type": "code", - "execution_count": 29, + 
"execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+--------------------+--------------------+--------------------+\n", - "| Geom| RedBand| BlueBand| GreenBand| CombinedBand|\n", - "+--------------------+--------------------+--------------------+--------------------+--------------------+\n", - "|POLYGON ((-58.702...|\n", - " \n", - " \n", - " \n", - " Geom\n", - " RedBand\n", - " BlueBand\n", - " GreenBand\n", - " CombinedBand\n", - " \n", - " \n", - " \n", - " \n", - " 0\n", - " POLYGON ((-58.70271939504447 -34.418775445554786, -58.702776058228636 -34.421569880680615, -58.6994039180242 -34.42161679331493, -58.69934736692278 -34.4188223533111, -58.70271939504447 -34.418775445554786))\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " 1\n", - " POLYGON ((-58.286636576261145 -34.758580906202866, -58.286679941749476 -34.76137571668496, -58.28329340123003 -34.76141146033393, -58.28325014980317 -34.75861664615162, -58.286636576261145 -34.758580906202866))\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "display(HTML(df_HTML.limit(2).toPandas().to_html(escape=False)))" ] @@ -814,7 +400,7 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -829,7 +415,7 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -839,7 +425,7 @@ }, { "cell_type": "code", - "execution_count": 33, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -855,51 +441,9 @@ }, { "cell_type": "code", - "execution_count": 35, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
GeomRedBandBlueBandGreenBandCombinedBand
0POLYGON ((-58.702667236328125 -34.418819427490234, -58.702667236328125 -34.421573638916016, -58.69945526123047 -34.421573638916016, -58.69945526123047 -34.418819427490234, -58.702667236328125 -34.418819427490234))
1POLYGON ((-58.286582946777344 -34.75862503051758, -58.286582946777344 -34.76136779785156, -58.28334426879883 -34.76136779785156, -58.28334426879883 -34.75862503051758, -58.286582946777344 -34.75862503051758))
" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "'''Load and Visualize Written GeoTiff Image.'''\n", "\n", @@ -921,23 +465,9 @@ }, { "cell_type": "code", - "execution_count": 37, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+------+-----+--------------------+-----+\n", - "| origin| geom|height|width| data|bands|\n", - "+--------------------+--------------------+------+-----+--------------------+-----+\n", - "|file:/Users/niles...|POLYGON ((-58.702...| 32| 32|[1081.0, 1068.0, ...| 4|\n", - "|file:/Users/niles...|POLYGON ((-58.286...| 32| 32|[1151.0, 1141.0, ...| 4|\n", - "+--------------------+--------------------+------+-----+--------------------+-----+\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "'''First load GeoTiff Images'''\n", "df = sedona.read.format(\"geotiff\").option(\"dropInvalid\",True).option(\"readToCRS\", \"EPSG:4326\").option(\"disableErrorInCRS\", False).load(DATA_DIR)\n", @@ -947,23 +477,9 @@ }, { "cell_type": "code", - "execution_count": 38, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+-----+------+--------------------+-----+--------------------+\n", - "| origin| geom|width|height| data|bands|normalizedDifference|\n", - "+--------------------+--------------------+-----+------+--------------------+-----+--------------------+\n", - "|file:/Users/niles...|POLYGON ((-58.702...| 32| 32|[1081.0, 1068.0, ...| 4|[0.09, 0.08, 0.11...|\n", - "|file:/Users/niles...|POLYGON ((-58.286...| 32| 32|[1151.0, 1141.0, ...| 4|[0.13, 0.09, 0.09...|\n", - "+--------------------+--------------------+-----+------+--------------------+-----+--------------------+\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "# First extract the bands for which normalized difference index needs to be calculated\n", "df = df.selectExpr(\"origin\", \"geom\", \"width\", \"height\", \"data\", \"bands\", \"RS_GetBand(data, 1, bands) as band1\", \"RS_GetBand(data, 2, bands) as band2\")\n", @@ -974,23 +490,9 @@ }, { "cell_type": "code", - "execution_count": 39, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+--------------------+------+-----+------------+\n", - "| origin| geom| data_edited|height|width|nBand_edited|\n", - "+--------------------+--------------------+--------------------+------+-----+------------+\n", - "|file:/Users/niles...|POLYGON ((-58.702...|[1081.0, 1068.0, ...| 32| 32| 5|\n", - "|file:/Users/niles...|POLYGON ((-58.286...|[1151.0, 1141.0, ...| 32| 32| 5|\n", - "+--------------------+--------------------+--------------------+------+-----+------------+\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "'''RS_Append() takes the data array containing bands, a new band to be appended, and number of total bands in the data array.\n", " It appends the new band to the end of the data array and returns the appended data'''\n", @@ -1002,7 +504,7 @@ }, { "cell_type": "code", - "execution_count": 40, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1020,23 +522,9 @@ }, { "cell_type": "code", - "execution_count": 42, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ 
- "+------+\n", - "| sum|\n", - "+------+\n", - "| 753.0|\n", - "|1017.0|\n", - "+------+\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "'''Sample UDF calculates sum of all the values in a band which are greater than 1000.0'''\n", "\n", @@ -1060,42 +548,9 @@ }, { "cell_type": "code", - "execution_count": 43, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
Geomselectedregion
0POLYGON ((-58.70271939504447 -34.418775445554786, -58.702776058228636 -34.421569880680615, -58.6994039180242 -34.42161679331493, -58.69934736692278 -34.4188223533111, -58.70271939504447 -34.418775445554786))
1POLYGON ((-58.286636576261145 -34.758580906202866, -58.286679941749476 -34.76137571668496, -58.28329340123003 -34.76141146033393, -58.28325014980317 -34.75861664615162, -58.286636576261145 -34.758580906202866))
" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "'''Sample UDF to visualize a particular region of a GeoTiff image'''\n", "\n", @@ -1144,5 +599,5 @@ } }, "nbformat": 4, - "nbformat_minor": 2 + "nbformat_minor": 4 } diff --git a/binder/ApacheSedonaSQL.ipynb b/binder/ApacheSedonaSQL.ipynb index cfba2d8035..32b0c7fae7 100644 --- a/binder/ApacheSedonaSQL.ipynb +++ b/binder/ApacheSedonaSQL.ipynb @@ -49,7 +49,7 @@ "config = SedonaContext.builder() .\\\n", " config('spark.jars.packages',\n", " 'org.apache.sedona:sedona-spark-shaded-3.0_2.12:1.5.0,'\n", - " 'org.datasyslab:geotools-wrapper:1.4.0-28.2'). \\\n", + " 'org.datasyslab:geotools-wrapper:1.5.0-28.2'). \\\n", " getOrCreate()\n", "\n", "sedona = SedonaContext.create(config)\n" @@ -346,13 +346,6 @@ "source": [ "gdf" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb b/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb index 2f9860a010..c07dca8efb 100644 --- a/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb +++ b/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb @@ -24,7 +24,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -32,7 +32,7 @@ "\n", "import geopandas as gpd\n", "from pyspark.sql import SparkSession\n", - "from pyspark.sql.functions import col, expr, when, explode\n", + "from pyspark.sql.functions import col, expr, when, explode, hex\n", "\n", "\n", "\n", @@ -49,60 +49,14 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "23/10/18 22:38:41 WARN Utils: Your hostname, Nileshs-MacBook-Pro.local resolves to a loopback address: 127.0.0.1; using 172.24.19.124 instead (on interface en0)\n", - "23/10/18 22:38:41 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address\n", - "Ivy Default Cache set to: /Users/nileshgajwani/.ivy2/cache\n", - "The jars for the packages stored in: /Users/nileshgajwani/.ivy2/jars\n", - "org.apache.sedona#sedona-spark-shaded-3.0_2.12 added as a dependency\n", - "org.datasyslab#geotools-wrapper added as a dependency\n", - ":: resolving dependencies :: org.apache.spark#spark-submit-parent-5d1ede8b-02f5-421d-a31c-93ed390d8872;1.0\n", - "\tconfs: [default]\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - ":: loading settings :: url = jar:file:/Users/nileshgajwani/Downloads/spark-3.4.1-bin-hadoop3/jars/ivy-2.5.1.jar!/org/apache/ivy/core/settings/ivysettings.xml\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\tfound org.apache.sedona#sedona-spark-shaded-3.0_2.12;1.5.0 in central\n", - "\tfound org.datasyslab#geotools-wrapper;1.4.0-28.2 in central\n", - ":: resolution report :: resolve 75ms :: artifacts dl 2ms\n", - "\t:: modules in use:\n", - "\torg.apache.sedona#sedona-spark-shaded-3.0_2.12;1.5.0 from central in [default]\n", - "\torg.datasyslab#geotools-wrapper;1.4.0-28.2 from central in [default]\n", - "\t---------------------------------------------------------------------\n", - "\t| | modules || artifacts |\n", - "\t| conf | number| search|dwnlded|evicted|| number|dwnlded|\n", - "\t---------------------------------------------------------------------\n", - "\t| default | 2 | 0 | 0 | 0 
|| 2 | 0 |\n", - "\t---------------------------------------------------------------------\n", - ":: retrieving :: org.apache.spark#spark-submit-parent-5d1ede8b-02f5-421d-a31c-93ed390d8872\n", - "\tconfs: [default]\n", - "\t0 artifacts copied, 2 already retrieved (0kB/2ms)\n", - "23/10/18 22:38:41 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable\n", - "Setting default log level to \"WARN\".\n", - "To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).\n" - ] - } - ], + "outputs": [], "source": [ "config = SedonaContext.builder() .\\\n", " config('spark.jars.packages',\n", " 'org.apache.sedona:sedona-spark-shaded-3.0_2.12:1.5.0,'\n", - " 'org.datasyslab:geotools-wrapper:1.4.0-28.2'). \\\n", + " 'org.datasyslab:geotools-wrapper:1.5.0-28.2'). \\\n", " getOrCreate()\n", "\n", "sedona = SedonaContext.create(config)\n", @@ -120,120 +74,9 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- geometry: geometry (nullable = true)\n", - " |-- featurecla: string (nullable = true)\n", - " |-- scalerank: string (nullable = true)\n", - " |-- LABELRANK: string (nullable = true)\n", - " |-- SOVEREIGNT: string (nullable = true)\n", - " |-- SOV_A3: string (nullable = true)\n", - " |-- ADM0_DIF: string (nullable = true)\n", - " |-- LEVEL: string (nullable = true)\n", - " |-- TYPE: string (nullable = true)\n", - " |-- ADMIN: string (nullable = true)\n", - " |-- ADM0_A3: string (nullable = true)\n", - " |-- GEOU_DIF: string (nullable = true)\n", - " |-- GEOUNIT: string (nullable = true)\n", - " |-- GU_A3: string (nullable = true)\n", - " |-- SU_DIF: string (nullable = true)\n", - " |-- SUBUNIT: string (nullable = true)\n", - " |-- SU_A3: string (nullable = true)\n", - " |-- BRK_DIFF: string (nullable = true)\n", - " |-- NAME: string (nullable = true)\n", - " |-- NAME_LONG: string (nullable = true)\n", - " |-- BRK_A3: string (nullable = true)\n", - " |-- BRK_NAME: string (nullable = true)\n", - " |-- BRK_GROUP: string (nullable = true)\n", - " |-- ABBREV: string (nullable = true)\n", - " |-- POSTAL: string (nullable = true)\n", - " |-- FORMAL_EN: string (nullable = true)\n", - " |-- FORMAL_FR: string (nullable = true)\n", - " |-- NAME_CIAWF: string (nullable = true)\n", - " |-- NOTE_ADM0: string (nullable = true)\n", - " |-- NOTE_BRK: string (nullable = true)\n", - " |-- NAME_SORT: string (nullable = true)\n", - " |-- NAME_ALT: string (nullable = true)\n", - " |-- MAPCOLOR7: string (nullable = true)\n", - " |-- MAPCOLOR8: string (nullable = true)\n", - " |-- MAPCOLOR9: string (nullable = true)\n", - " |-- MAPCOLOR13: string (nullable = true)\n", - " |-- POP_EST: string (nullable = true)\n", - " |-- POP_RANK: string (nullable = true)\n", - " |-- GDP_MD_EST: string (nullable = true)\n", - " |-- POP_YEAR: string (nullable = true)\n", - " |-- LASTCENSUS: string (nullable = true)\n", - " |-- GDP_YEAR: string (nullable = true)\n", - " |-- ECONOMY: string (nullable = true)\n", - " |-- INCOME_GRP: string (nullable = true)\n", - " |-- WIKIPEDIA: string (nullable = true)\n", - " |-- FIPS_10_: string (nullable = true)\n", - " |-- ISO_A2: string (nullable = true)\n", - " |-- ISO_A3: string (nullable = true)\n", - " |-- ISO_A3_EH: string (nullable = true)\n", - " |-- ISO_N3: string (nullable = true)\n", - " |-- UN_A3: string (nullable = true)\n", - " |-- WB_A2: string 
(nullable = true)\n", - " |-- WB_A3: string (nullable = true)\n", - " |-- WOE_ID: string (nullable = true)\n", - " |-- WOE_ID_EH: string (nullable = true)\n", - " |-- WOE_NOTE: string (nullable = true)\n", - " |-- ADM0_A3_IS: string (nullable = true)\n", - " |-- ADM0_A3_US: string (nullable = true)\n", - " |-- ADM0_A3_UN: string (nullable = true)\n", - " |-- ADM0_A3_WB: string (nullable = true)\n", - " |-- CONTINENT: string (nullable = true)\n", - " |-- REGION_UN: string (nullable = true)\n", - " |-- SUBREGION: string (nullable = true)\n", - " |-- REGION_WB: string (nullable = true)\n", - " |-- NAME_LEN: string (nullable = true)\n", - " |-- LONG_LEN: string (nullable = true)\n", - " |-- ABBREV_LEN: string (nullable = true)\n", - " |-- TINY: string (nullable = true)\n", - " |-- HOMEPART: string (nullable = true)\n", - " |-- MIN_ZOOM: string (nullable = true)\n", - " |-- MIN_LABEL: string (nullable = true)\n", - " |-- MAX_LABEL: string (nullable = true)\n", - " |-- NE_ID: string (nullable = true)\n", - " |-- WIKIDATAID: string (nullable = true)\n", - " |-- NAME_AR: string (nullable = true)\n", - " |-- NAME_BN: string (nullable = true)\n", - " |-- NAME_DE: string (nullable = true)\n", - " |-- NAME_EN: string (nullable = true)\n", - " |-- NAME_ES: string (nullable = true)\n", - " |-- NAME_FR: string (nullable = true)\n", - " |-- NAME_EL: string (nullable = true)\n", - " |-- NAME_HI: string (nullable = true)\n", - " |-- NAME_HU: string (nullable = true)\n", - " |-- NAME_ID: string (nullable = true)\n", - " |-- NAME_IT: string (nullable = true)\n", - " |-- NAME_JA: string (nullable = true)\n", - " |-- NAME_KO: string (nullable = true)\n", - " |-- NAME_NL: string (nullable = true)\n", - " |-- NAME_PL: string (nullable = true)\n", - " |-- NAME_PT: string (nullable = true)\n", - " |-- NAME_RU: string (nullable = true)\n", - " |-- NAME_SV: string (nullable = true)\n", - " |-- NAME_TR: string (nullable = true)\n", - " |-- NAME_VI: string (nullable = true)\n", - " |-- NAME_ZH: string (nullable = true)\n", - "\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "23/10/18 22:38:45 WARN package: Truncated the string representation of a plan since it was too large. 
This behavior can be adjusted by setting 'spark.sql.debug.maxToStringFields'.\n" - ] - } - ], + "outputs": [], "source": [ "countries = ShapefileReader.readToGeometryRDD(sc, \"data/ne_50m_admin_0_countries_lakes/\")\n", "countries_df = Adapter.toDf(countries, sedona)\n", @@ -251,29 +94,9 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- geometry: geometry (nullable = true)\n", - " |-- scalerank: string (nullable = true)\n", - " |-- featurecla: string (nullable = true)\n", - " |-- type: string (nullable = true)\n", - " |-- name: string (nullable = true)\n", - " |-- abbrev: string (nullable = true)\n", - " |-- location: string (nullable = true)\n", - " |-- gps_code: string (nullable = true)\n", - " |-- iata_code: string (nullable = true)\n", - " |-- wikipedia: string (nullable = true)\n", - " |-- natlscale: string (nullable = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "airports = ShapefileReader.readToGeometryRDD(sc, \"data/ne_50m_airports/\")\n", "airports_df = Adapter.toDf(airports, sedona)\n", @@ -285,12 +108,14 @@ "cell_type": "markdown", "metadata": {}, "source": [ + "\n", + "\n", "## Run Spatial Join using SQL API" ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -306,17 +131,9 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[('3.0', '2.12', '1.5.0')]\n" - ] - } - ], + "outputs": [], "source": [ "airports_rdd = Adapter.toSpatialRdd(airports_df, \"geometry\")\n", "# Drop the duplicate name column in countries_df\n", @@ -353,75 +170,9 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "23/10/18 22:38:46 WARN JoinQuery: UseIndex is true, but no index exists. 
Will build index on the fly.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+--------------------+--------------------+\n", - "| country_geom| NAME_EN| airport_geom| name|\n", - "+--------------------+--------------------+--------------------+--------------------+\n", - "|MULTIPOLYGON (((1...|Taiwan ...|POINT (121.231370...|Taoyuan ...|\n", - "|MULTIPOLYGON (((5...|Netherlands ...|POINT (4.76437693...|Schiphol ...|\n", - "|POLYGON ((103.969...|Singapore ...|POINT (103.986413...|Singapore Changi ...|\n", - "|MULTIPOLYGON (((-...|United Kingdom ...|POINT (-0.4531566...|London Heathrow ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-149.98172...|Anchorage Int'l ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-84.425397...|Hartsfield-Jackso...|\n", - "|MULTIPOLYGON (((1...|People's Republic...|POINT (116.588174...|Beijing Capital ...|\n", - "|MULTIPOLYGON (((-...|Colombia ...|POINT (-74.143371...|Eldorado Int'l ...|\n", - "|MULTIPOLYGON (((6...|India ...|POINT (72.8745639...|Chhatrapati Shiva...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-71.016406...|Gen E L Logan Int...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-76.668642...|Baltimore-Washing...|\n", - "|POLYGON ((36.8713...|Egypt ...|POINT (31.3997430...|Cairo Int'l ...|\n", - "|POLYGON ((-2.2196...|Morocco ...|POINT (-7.6632188...|Casablanca-Anfa ...|\n", - "|MULTIPOLYGON (((-...|Venezuela ...|POINT (-67.005748...|Simon Bolivar Int...|\n", - "|MULTIPOLYGON (((2...|South Africa ...|POINT (18.5976565...|Cape Town Int'l ...|\n", - "|MULTIPOLYGON (((1...|People's Republic...|POINT (103.956136...|Chengdushuang Liu...|\n", - "|MULTIPOLYGON (((6...|India ...|POINT (77.0878362...|Indira Gandhi Int...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-104.67379...|Denver Int'l ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-97.040371...|Dallas-Ft. Worth ...|\n", - "|MULTIPOLYGON (((1...|Thailand ...|POINT (100.602578...|Don Muang Int'l ...|\n", - "+--------------------+--------------------+--------------------+--------------------+\n", - "only showing top 20 rows\n", - "\n", - "+--------------------+--------------------+--------------------+--------------------+\n", - "| country_geom| NAME_EN| airport_geom| name|\n", - "+--------------------+--------------------+--------------------+--------------------+\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-80.145258...|Fort Lauderdale H...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-80.278971...|Miami Int'l ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-95.333704...|George Bush Inter...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-90.256693...|New Orleans Int'l...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-81.307371...|Orlando Int'l ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-82.534824...|Tampa Int'l ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-112.01363...|Sky Harbor Int'l ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-118.40246...|Los Angeles Int'l...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-116.97547...|General Abelardo ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-97.040371...|Dallas-Ft. 
Worth ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-84.425397...|Hartsfield-Jackso...|\n", - "|POLYGON ((-69.965...|Peru ...|POINT (-77.107565...|Jorge Chavez ...|\n", - "|MULTIPOLYGON (((-...|Panama ...|POINT (-79.387134...|Tocumen Int'l ...|\n", - "|POLYGON ((-83.157...|Nicaragua ...|POINT (-86.171284...|Augusto Cesar San...|\n", - "|MULTIPOLYGON (((-...|Mexico ...|POINT (-96.183570...|Gen. Heriberto Ja...|\n", - "|MULTIPOLYGON (((-...|Mexico ...|POINT (-106.27001...|General Rafael Bu...|\n", - "|MULTIPOLYGON (((-...|Mexico ...|POINT (-99.754508...|General Juan N Al...|\n", - "|MULTIPOLYGON (((-...|Mexico ...|POINT (-99.570649...|Jose Maria Morelo...|\n", - "|MULTIPOLYGON (((-...|Mexico ...|POINT (-98.375759...|Puebla ...|\n", - "|MULTIPOLYGON (((-...|Mexico ...|POINT (-99.082607...|Lic Benito Juarez...|\n", - "+--------------------+--------------------+--------------------+--------------------+\n", - "only showing top 20 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "# The result of SQL API\n", "result.show()\n", @@ -438,44 +189,11 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": { "scrolled": true }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+------------+\n", - "| NAME_EN| country_geom|AirportCount|\n", - "+--------------------+--------------------+------------+\n", - "|Cuba ...|MULTIPOLYGON (((-...| 1|\n", - "|Mexico ...|MULTIPOLYGON (((-...| 12|\n", - "|Panama ...|MULTIPOLYGON (((-...| 1|\n", - "|Nicaragua ...|POLYGON ((-83.157...| 1|\n", - "|Honduras ...|MULTIPOLYGON (((-...| 1|\n", - "|Colombia ...|MULTIPOLYGON (((-...| 4|\n", - "|United States of ...|MULTIPOLYGON (((-...| 35|\n", - "|Ecuador ...|MULTIPOLYGON (((-...| 1|\n", - "|The Bahamas ...|MULTIPOLYGON (((-...| 1|\n", - "|Peru ...|POLYGON ((-69.965...| 1|\n", - "|Guatemala ...|POLYGON ((-92.235...| 1|\n", - "|Canada ...|MULTIPOLYGON (((-...| 15|\n", - "|Venezuela ...|MULTIPOLYGON (((-...| 3|\n", - "|Argentina ...|MULTIPOLYGON (((-...| 3|\n", - "|Bolivia ...|MULTIPOLYGON (((-...| 2|\n", - "|Paraguay ...|POLYGON ((-58.159...| 1|\n", - "|Benin ...|POLYGON ((1.62265...| 1|\n", - "|Guinea ...|POLYGON ((-10.283...| 1|\n", - "|Chile ...|MULTIPOLYGON (((-...| 5|\n", - "|Nigeria ...|MULTIPOLYGON (((7...| 3|\n", - "+--------------------+--------------------+------------+\n", - "only showing top 20 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "# result.createOrReplaceTempView(\"result\")\n", "result2.createOrReplaceTempView(\"result\")\n", @@ -493,307 +211,102 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ - "# sedona_kepler_map = SedonaKepler.create_map(df=groupedresult, name=\"AirportCount\", config=getConfig())\n", - "# sedona_kepler_map" + "sedona_kepler_map = SedonaKepler.create_map(df=groupedresult, name=\"AirportCount\", config=getConfig())\n", + "sedona_kepler_map" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Visualize Uber H3 cells using SedonaKepler\n", + "The following tutorial depicts how Uber H3 cells can be generated using Sedona and visualized using SedonaKepler." 
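+ ,
+ "\n",
+ "(Note: the cells below call `ST_H3CellIDs(geometry, level, fullCover)` with `level = 3`; per the function reference linked in the next section, the `fullCover` flag controls whether the returned cells fully cover the geometry.)"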
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Generate H3 cell IDs\n",
+ "[ST_H3CellIDs](https://sedona.apache.org/1.5.0/api/flink/Function/#st_h3cellids) can be used to generate cell IDs for given geometries"
 ]
 },
 {
 "cell_type": "code",
- "execution_count": 10,
+ "execution_count": null,
 "metadata": {
 "tags": []
 },
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "+--------------------+--------------------+--------------------+\n",
- "| NAME_EN| country_geom| s2_cellID|\n",
- "+--------------------+--------------------+--------------------+\n",
- "|Cuba ...|MULTIPOLYGON (((-...|[-859286808902290...|\n",
- "+--------------------+--------------------+--------------------+\n",
- "only showing top 1 row\n",
- "\n",
- "root\n",
- " |-- NAME_EN: string (nullable = true)\n",
- " |-- country_geom: geometry (nullable = true)\n",
- " |-- s2_cellID: array (nullable = true)\n",
- " | |-- element: long (containsNull = true)\n",
- "\n"
- ]
- }
- ],
+ "outputs": [],
 "source": [
- "h3_df = sedona.sql(\"SELECT g.NAME_EN, g.country_geom, ST_S2CellIDs(g.country_geom, 3) as s2_cellID from grouped_result g\")#groupedresult.selectExpr(\"ST_H3CellIDs(country_geom, 3, true) as h3_cellId\")\n",
- "h3_df.show(1)\n",
- "h3_df.printSchema()\n",
- "h3_df.createOrReplaceTempView(\"grouped_s2\")"
+ "h3_df = sedona.sql(\"SELECT g.NAME_EN, g.country_geom, ST_H3CellIDs(g.country_geom, 3, false) as h3_cellID from grouped_result g\")\n",
+ "h3_df.show(2)"
 ]
 },
 {
 "cell_type": "markdown",
 "metadata": {},
 "source": [
 "### Since each geometry can have multiple H3 cell IDs, let's explode the generated H3 cell ID array to get individual cells"
 ]
 },
 {
 "cell_type": "code",
- "execution_count": 11,
+ "execution_count": null,
 "metadata": {
 "tags": []
 },
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "root\n",
- " |-- NAME_EN: string (nullable = true)\n",
- " |-- country_geom: geometry (nullable = true)\n",
- " |-- exploded_cellIds: long (nullable = true)\n",
- "\n",
- "+--------------------+--------------------+--------------------+\n",
- "| NAME_EN| country_geom| exploded_cellIds|\n",
- "+--------------------+--------------------+--------------------+\n",
- "|Cuba ...|MULTIPOLYGON (((-...|-8592868089022906368|\n",
- "|Cuba ...|MULTIPOLYGON (((-...|-8556839292003942400|\n",
- "+--------------------+--------------------+--------------------+\n",
- "only showing top 2 rows\n",
- "\n"
- ]
- }
- ],
+ "outputs": [],
 "source": [
- "exploded_df = h3_df.select(h3_df.NAME_EN, h3_df.country_geom, explode(h3_df.s2_cellID).alias(\"exploded_cellIds\"))\n",
- "exploded_df.printSchema()\n",
- "exploded_df.show(2)\n",
- "exploded_df.createOrReplaceTempView(\"exploded_cells\")"
+ "exploded_h3 = h3_df.select(h3_df.NAME_EN, h3_df.country_geom, explode(h3_df.h3_cellID).alias(\"h3\"))\n",
+ "exploded_h3.show(2)"
 ]
 },
 {
 "cell_type": "markdown",
 "metadata": {},
 "source": [
 "### Convert the generated long H3 cell IDs to hex cell IDs\n",
 "SedonaKepler accepts each H3 cell ID as a hexadecimal, so we convert the IDs before visualizing them. We also sample the data so that the sparse cells are easier to see on the map."
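,
 "\n",
 "As an illustrative aside (added here, not part of the original flow): the same conversion can be written directly in SQL with Spark's built-in `hex()` function, e.g. `SELECT NAME_EN, hex(h3) AS ex_h3 FROM exploded_h3`, assuming the `exploded_h3` view registered in the next cell. The DataFrame version below uses `pyspark.sql.functions.hex`, which is why `hex` appears in this notebook's imports."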
] }, { "cell_type": "code", - "execution_count": 12, + "execution_count": null, "metadata": { "tags": [] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+--------------------+\n", - "| NAME_EN| country_geom| s2|\n", - "+--------------------+--------------------+--------------------+\n", - "|Cuba ...|MULTIPOLYGON (((-...|-8592868089022906368|\n", - "|Mexico ...|MULTIPOLYGON (((-...|-8592868089022906368|\n", - "+--------------------+--------------------+--------------------+\n", - "only showing top 2 rows\n", - "\n", - "root\n", - " |-- NAME_EN: string (nullable = true)\n", - " |-- country_geom: geometry (nullable = true)\n", - " |-- s2: long (nullable = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ - "df_s2 = sedona.sql(\"SELECT s.NAME_EN, s.country_geom, exploded_cellIds as s2 FROM (grouped_s2 s CROSS JOIN exploded_cells e)\")\n", - "df_s2.show(2)\n", - "df_s2.printSchema()" + "exploded_h3 = exploded_h3.sample(0.3)\n", + "exploded_h3.createOrReplaceTempView(\"exploded_h3\")\n", + "hex_exploded_h3 = exploded_h3.select(exploded_h3.NAME_EN, hex(exploded_h3.h3).alias(\"ex_h3\"))\n", + "hex_exploded_h3.show(2)" ] }, { - "cell_type": "code", - "execution_count": 13, + "cell_type": "markdown", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "User Guide: https://docs.kepler.gl/docs/keplergl-jupyter\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "23/10/18 22:38:53 ERROR Executor: Exception in task 0.0 in stage 58.0 (TID 72)1]\n", - "java.lang.OutOfMemoryError: Java heap space\n", - "\tat java.base/java.nio.HeapByteBuffer.(HeapByteBuffer.java:61)\n", - "\tat java.base/java.nio.ByteBuffer.allocate(ByteBuffer.java:348)\n", - "\tat org.apache.spark.serializer.SerializerHelper$.$anonfun$serializeToChunkedBuffer$1(SerializerHelper.scala:40)\n", - "\tat org.apache.spark.serializer.SerializerHelper$.$anonfun$serializeToChunkedBuffer$1$adapted(SerializerHelper.scala:40)\n", - "\tat org.apache.spark.serializer.SerializerHelper$$$Lambda$2719/0x000000080131a840.apply(Unknown Source)\n", - "\tat org.apache.spark.util.io.ChunkedByteBufferOutputStream.allocateNewChunkIfNeeded(ChunkedByteBufferOutputStream.scala:87)\n", - "\tat org.apache.spark.util.io.ChunkedByteBufferOutputStream.write(ChunkedByteBufferOutputStream.scala:75)\n", - "\tat com.esotericsoftware.kryo.io.Output.flush(Output.java:185)\n", - "\tat com.esotericsoftware.kryo.io.Output.require(Output.java:164)\n", - "\tat com.esotericsoftware.kryo.io.Output.writeBytes(Output.java:251)\n", - "\tat com.esotericsoftware.kryo.io.Output.writeBytes(Output.java:237)\n", - "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ByteArraySerializer.write(DefaultArraySerializers.java:49)\n", - "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ByteArraySerializer.write(DefaultArraySerializers.java:38)\n", - "\tat com.esotericsoftware.kryo.Kryo.writeObjectOrNull(Kryo.java:629)\n", - "\tat com.esotericsoftware.kryo.serializers.ObjectField.write(ObjectField.java:86)\n", - "\tat com.esotericsoftware.kryo.serializers.FieldSerializer.write(FieldSerializer.java:508)\n", - "\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", - "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:361)\n", - "\tat 
com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:302)\n", - "\tat com.esotericsoftware.kryo.Kryo.writeObject(Kryo.java:575)\n", - "\tat com.esotericsoftware.kryo.serializers.ObjectField.write(ObjectField.java:79)\n", - "\tat com.esotericsoftware.kryo.serializers.FieldSerializer.write(FieldSerializer.java:508)\n", - "\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", - "\tat com.twitter.chill.Tuple2Serializer.write(TupleSerializers.scala:37)\n", - "\tat com.twitter.chill.Tuple2Serializer.write(TupleSerializers.scala:33)\n", - "\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", - "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:361)\n", - "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:302)\n", - "\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", - "\tat org.apache.spark.serializer.KryoSerializationStream.writeObject(KryoSerializer.scala:274)\n", - "\tat org.apache.spark.serializer.SerializerHelper$.serializeToChunkedBuffer(SerializerHelper.scala:42)\n", - "\tat org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:599)\n", - "23/10/18 22:38:53 ERROR SparkUncaughtExceptionHandler: Uncaught exception in thread Thread[Executor task launch worker for task 0.0 in stage 58.0 (TID 72),5,main]\n", - "java.lang.OutOfMemoryError: Java heap space\n", - "\tat java.base/java.nio.HeapByteBuffer.(HeapByteBuffer.java:61)\n", - "\tat java.base/java.nio.ByteBuffer.allocate(ByteBuffer.java:348)\n", - "\tat org.apache.spark.serializer.SerializerHelper$.$anonfun$serializeToChunkedBuffer$1(SerializerHelper.scala:40)\n", - "\tat org.apache.spark.serializer.SerializerHelper$.$anonfun$serializeToChunkedBuffer$1$adapted(SerializerHelper.scala:40)\n", - "\tat org.apache.spark.serializer.SerializerHelper$$$Lambda$2719/0x000000080131a840.apply(Unknown Source)\n", - "\tat org.apache.spark.util.io.ChunkedByteBufferOutputStream.allocateNewChunkIfNeeded(ChunkedByteBufferOutputStream.scala:87)\n", - "\tat org.apache.spark.util.io.ChunkedByteBufferOutputStream.write(ChunkedByteBufferOutputStream.scala:75)\n", - "\tat com.esotericsoftware.kryo.io.Output.flush(Output.java:185)\n", - "\tat com.esotericsoftware.kryo.io.Output.require(Output.java:164)\n", - "\tat com.esotericsoftware.kryo.io.Output.writeBytes(Output.java:251)\n", - "\tat com.esotericsoftware.kryo.io.Output.writeBytes(Output.java:237)\n", - "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ByteArraySerializer.write(DefaultArraySerializers.java:49)\n", - "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ByteArraySerializer.write(DefaultArraySerializers.java:38)\n", - "\tat com.esotericsoftware.kryo.Kryo.writeObjectOrNull(Kryo.java:629)\n", - "\tat com.esotericsoftware.kryo.serializers.ObjectField.write(ObjectField.java:86)\n", - "\tat com.esotericsoftware.kryo.serializers.FieldSerializer.write(FieldSerializer.java:508)\n", - "\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", - "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:361)\n", - "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:302)\n", - "\tat 
com.esotericsoftware.kryo.Kryo.writeObject(Kryo.java:575)\n", - "\tat com.esotericsoftware.kryo.serializers.ObjectField.write(ObjectField.java:79)\n", - "\tat com.esotericsoftware.kryo.serializers.FieldSerializer.write(FieldSerializer.java:508)\n", - "\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", - "\tat com.twitter.chill.Tuple2Serializer.write(TupleSerializers.scala:37)\n", - "\tat com.twitter.chill.Tuple2Serializer.write(TupleSerializers.scala:33)\n", - "\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", - "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:361)\n", - "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:302)\n", - "\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", - "\tat org.apache.spark.serializer.KryoSerializationStream.writeObject(KryoSerializer.scala:274)\n", - "\tat org.apache.spark.serializer.SerializerHelper$.serializeToChunkedBuffer(SerializerHelper.scala:42)\n", - "\tat org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:599)\n", - "23/10/18 22:38:53 WARN TaskSetManager: Lost task 0.0 in stage 58.0 (TID 72) (172.24.19.124 executor driver): java.lang.OutOfMemoryError: Java heap space\n", - "\tat java.base/java.nio.HeapByteBuffer.(HeapByteBuffer.java:61)\n", - "\tat java.base/java.nio.ByteBuffer.allocate(ByteBuffer.java:348)\n", - "\tat org.apache.spark.serializer.SerializerHelper$.$anonfun$serializeToChunkedBuffer$1(SerializerHelper.scala:40)\n", - "\tat org.apache.spark.serializer.SerializerHelper$.$anonfun$serializeToChunkedBuffer$1$adapted(SerializerHelper.scala:40)\n", - "\tat org.apache.spark.serializer.SerializerHelper$$$Lambda$2719/0x000000080131a840.apply(Unknown Source)\n", - "\tat org.apache.spark.util.io.ChunkedByteBufferOutputStream.allocateNewChunkIfNeeded(ChunkedByteBufferOutputStream.scala:87)\n", - "\tat org.apache.spark.util.io.ChunkedByteBufferOutputStream.write(ChunkedByteBufferOutputStream.scala:75)\n", - "\tat com.esotericsoftware.kryo.io.Output.flush(Output.java:185)\n", - "\tat com.esotericsoftware.kryo.io.Output.require(Output.java:164)\n", - "\tat com.esotericsoftware.kryo.io.Output.writeBytes(Output.java:251)\n", - "\tat com.esotericsoftware.kryo.io.Output.writeBytes(Output.java:237)\n", - "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ByteArraySerializer.write(DefaultArraySerializers.java:49)\n", - "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ByteArraySerializer.write(DefaultArraySerializers.java:38)\n", - "\tat com.esotericsoftware.kryo.Kryo.writeObjectOrNull(Kryo.java:629)\n", - "\tat com.esotericsoftware.kryo.serializers.ObjectField.write(ObjectField.java:86)\n", - "\tat com.esotericsoftware.kryo.serializers.FieldSerializer.write(FieldSerializer.java:508)\n", - "\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", - "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:361)\n", - "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:302)\n", - "\tat com.esotericsoftware.kryo.Kryo.writeObject(Kryo.java:575)\n", - "\tat com.esotericsoftware.kryo.serializers.ObjectField.write(ObjectField.java:79)\n", - "\tat com.esotericsoftware.kryo.serializers.FieldSerializer.write(FieldSerializer.java:508)\n", - 
"\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", - "\tat com.twitter.chill.Tuple2Serializer.write(TupleSerializers.scala:37)\n", - "\tat com.twitter.chill.Tuple2Serializer.write(TupleSerializers.scala:33)\n", - "\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", - "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:361)\n", - "\tat com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.write(DefaultArraySerializers.java:302)\n", - "\tat com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:651)\n", - "\tat org.apache.spark.serializer.KryoSerializationStream.writeObject(KryoSerializer.scala:274)\n", - "\tat org.apache.spark.serializer.SerializerHelper$.serializeToChunkedBuffer(SerializerHelper.scala:42)\n", - "\tat org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:599)\n", - "\n", - "23/10/18 22:38:53 ERROR TaskSetManager: Task 0 in stage 58.0 failed 1 times; aborting job\n", - "ERROR:root:Exception while sending command.\n", - "Traceback (most recent call last):\n", - " File \"/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/py4j/clientserver.py\", line 516, in send_command\n", - " raise Py4JNetworkError(\"Answer from Java side is empty\")\n", - "py4j.protocol.Py4JNetworkError: Answer from Java side is empty\n", - "\n", - "During handling of the above exception, another exception occurred:\n", - "\n", - "Traceback (most recent call last):\n", - " File \"/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/py4j/java_gateway.py\", line 1038, in send_command\n", - " response = connection.send_command(command)\n", - " File \"/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/py4j/clientserver.py\", line 539, in send_command\n", - " raise Py4JNetworkError(\n", - "py4j.protocol.Py4JNetworkError: Error while sending or receiving\n", - "ERROR:root:Exception while sending command.\n", - "Traceback (most recent call last):\n", - " File \"/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/py4j/clientserver.py\", line 511, in send_command\n", - " answer = smart_decode(self.stream.readline()[:-1])\n", - " File \"/Library/Developer/CommandLineTools/Library/Frameworks/Python3.framework/Versions/3.9/lib/python3.9/socket.py\", line 704, in readinto\n", - " return self._sock.recv_into(b)\n", - "ConnectionResetError: [Errno 54] Connection reset by peer\n", - "\n", - "During handling of the above exception, another exception occurred:\n", - "\n", - "Traceback (most recent call last):\n", - " File \"/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/py4j/java_gateway.py\", line 1038, in send_command\n", - " response = connection.send_command(command)\n", - " File \"/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/py4j/clientserver.py\", line 539, in send_command\n", - " raise Py4JNetworkError(\n", - "py4j.protocol.Py4JNetworkError: Error while sending or receiving\n" - ] - }, - { - "ename": "ConnectionRefusedError", - "evalue": "[Errno 61] Connection refused", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mPy4JError\u001b[0m Traceback (most recent call last)", - "File 
\u001b[0;32m~/Downloads/spark-3.4.1-bin-hadoop3/python/pyspark/sql/dataframe.py:1216\u001b[0m, in \u001b[0;36mDataFrame.collect\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 1215\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m SCCallSiteSync(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_sc):\n\u001b[0;32m-> 1216\u001b[0m sock_info \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_jdf\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcollectToPython\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1217\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mlist\u001b[39m(_load_from_socket(sock_info, BatchedSerializer(CPickleSerializer())))\n", - "File \u001b[0;32m~/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/py4j/java_gateway.py:1321\u001b[0m, in \u001b[0;36mJavaMember.__call__\u001b[0;34m(self, *args)\u001b[0m\n\u001b[1;32m 1320\u001b[0m answer \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mgateway_client\u001b[38;5;241m.\u001b[39msend_command(command)\n\u001b[0;32m-> 1321\u001b[0m return_value \u001b[38;5;241m=\u001b[39m \u001b[43mget_return_value\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1322\u001b[0m \u001b[43m \u001b[49m\u001b[43manswer\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mgateway_client\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtarget_id\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mname\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1324\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m temp_arg \u001b[38;5;129;01min\u001b[39;00m temp_args:\n", - "File \u001b[0;32m~/Downloads/spark-3.4.1-bin-hadoop3/python/pyspark/errors/exceptions/captured.py:169\u001b[0m, in \u001b[0;36mcapture_sql_exception..deco\u001b[0;34m(*a, **kw)\u001b[0m\n\u001b[1;32m 168\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 169\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mf\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43ma\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkw\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 170\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m Py4JJavaError \u001b[38;5;28;01mas\u001b[39;00m e:\n", - "File \u001b[0;32m~/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/py4j/protocol.py:334\u001b[0m, in \u001b[0;36mget_return_value\u001b[0;34m(answer, gateway_client, target_id, name)\u001b[0m\n\u001b[1;32m 333\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m--> 334\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m Py4JError(\n\u001b[1;32m 335\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mAn error occurred while calling \u001b[39m\u001b[38;5;132;01m{0}\u001b[39;00m\u001b[38;5;132;01m{1}\u001b[39;00m\u001b[38;5;132;01m{2}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;241m.\u001b[39m\n\u001b[1;32m 336\u001b[0m \u001b[38;5;28mformat\u001b[39m(target_id, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m, name))\n\u001b[1;32m 337\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n", - "\u001b[0;31mPy4JError\u001b[0m: An error occurred while calling o87.collectToPython", - "\nDuring handling of the above 
exception, another exception occurred:\n", - "\u001b[0;31mConnectionRefusedError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[13], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m sedona_kepler_map_s2 \u001b[38;5;241m=\u001b[39m \u001b[43mSedonaKepler\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcreate_map\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdf\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdf_s2\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mname\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mAirportCount\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mconfig\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgetConfig\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 2\u001b[0m sedona_kepler_map_s2\n", - "File \u001b[0;32m~/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/sedona/maps/SedonaKepler.py:35\u001b[0m, in \u001b[0;36mSedonaKepler.create_map\u001b[0;34m(cls, df, name, config)\u001b[0m\n\u001b[1;32m 33\u001b[0m kepler_map \u001b[38;5;241m=\u001b[39m KeplerGl()\n\u001b[1;32m 34\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m df \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m---> 35\u001b[0m \u001b[43mSedonaKepler\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43madd_df\u001b[49m\u001b[43m(\u001b[49m\u001b[43mkepler_map\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdf\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mname\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 37\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m config \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 38\u001b[0m kepler_map\u001b[38;5;241m.\u001b[39mconfig \u001b[38;5;241m=\u001b[39m config\n", - "File \u001b[0;32m~/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/sedona/maps/SedonaKepler.py:51\u001b[0m, in \u001b[0;36mSedonaKepler.add_df\u001b[0;34m(cls, kepler_map, df, name)\u001b[0m\n\u001b[1;32m 42\u001b[0m \u001b[38;5;129m@classmethod\u001b[39m\n\u001b[1;32m 43\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21madd_df\u001b[39m(\u001b[38;5;28mcls\u001b[39m, kepler_map, df, name\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124munnamed\u001b[39m\u001b[38;5;124m\"\u001b[39m):\n\u001b[1;32m 44\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 45\u001b[0m \u001b[38;5;124;03m Adds a SedonaDataFrame to a given map object.\u001b[39;00m\n\u001b[1;32m 46\u001b[0m \u001b[38;5;124;03m :param kepler_map: Map object to add SedonaDataFrame to\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 49\u001b[0m \u001b[38;5;124;03m :return: Does not return anything, adds df directly to the given map object\u001b[39;00m\n\u001b[1;32m 50\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[0;32m---> 51\u001b[0m geo_df \u001b[38;5;241m=\u001b[39m \u001b[43mSedonaMapUtils\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m__convert_to_gdf__\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdf\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 52\u001b[0m kepler_map\u001b[38;5;241m.\u001b[39madd_data(geo_df, name\u001b[38;5;241m=\u001b[39mname)\n", - "File \u001b[0;32m~/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/sedona/maps/SedonaMapUtils.py:34\u001b[0m, in 
    [ANSI-colored Python traceback elided: SedonaMapUtils.__convert_to_gdf__ -> DataFrame.toPandas -> DataFrame.collect -> Py4J JavaMember.__call__ -> GatewayClient.send_command -> ClientServerConnection.connect_to_java_server -> socket.connect, failing with:]
    "ConnectionRefusedError: [Errno 61] Connection refused"
     ]
    }
   ],
   "source": [
    "sedona_kepler_map_s2 = SedonaKepler.create_map(df=df_s2, name=\"AirportCount\", config=getConfig())\n",
    "sedona_kepler_map_s2"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Visualize using SedonaKepler\n",
    "Now, simply provide the final df to SedonaKepler.create_map and you can automagically visualize the H3 cells on the map!"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "sedona_kepler_h3 = SedonaKepler.create_map(df=hex_exploded_h3, name=\"h3\")\n",
    "sedona_kepler_h3"
   ]
  },
  {
   "cell_type": "code",

From 5bf00edd60cdb590adb206f967b805463175a643 Mon Sep 17 00:00:00 2001
From: Nilesh Gajwani
Date: Tue, 24 Oct 2023 14:43:13 -0400
Subject: [PATCH 04/28] change pyspark version in binder Pipfile

---
 python/Pipfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/Pipfile b/python/Pipfile
index 6b26e2e036..ba044c7aaa 100644
--- a/python/Pipfile
+++ b/python/Pipfile
@@ -14,7 +14,7 @@ pytest-cov = "*"
 shapely="<=1.8.5"
 pandas="<=1.3.5"
 geopandas="<=0.10.2"
-pyspark="*"
+pyspark=">=3.0.0"
 attrs="*"
 pyarrow="*"
 keplergl = "==0.3.2"

From fcc1ebf6c3c8c86b6b66a012c62f721fd59b4fcb Mon Sep 17 00:00:00 2001
From: Nilesh Gajwani
Date: Wed, 25 Oct 2023 16:41:37 -0400
Subject: [PATCH 05/28] pin matplotlib to 3.5.3

---
 binder/Pipfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/binder/Pipfile b/binder/Pipfile
index 25968ae90e..57cbb3e2a4 100644
--- a/binder/Pipfile
+++ b/binder/Pipfile
@@ -17,7 +17,7 @@ geopandas="==0.11.1"
 pyspark="==3.3.2"
 attrs="*"
 apache-sedona="==1.5.0"
-matplotlib = "*"
+matplotlib = "==3.5.3"
 descartes = "*"
 keplergl = "==0.3.2"
 pydeck = "==0.8.0"

From b80e268011079663fcd24b54e15f6caef181dc41 Mon Sep 17 00:00:00 2001
From: Nilesh Gajwani
Date: Mon, 30 Oct 2023 20:42:16 -0400
Subject: [PATCH 06/28] revert all changes to examples

---
 examples/flink-sql/pom.xml                     | 2 +-
 examples/spark-rdd-colocation-mining/build.sbt | 2 +-
 examples/spark-sql/build.sbt                   | 2 +-
 examples/spark-viz/build.sbt                   | 4 ++--
 4 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/examples/flink-sql/pom.xml b/examples/flink-sql/pom.xml
index a08a147096..13e62ab3cb 100644
--- a/examples/flink-sql/pom.xml
+++ b/examples/flink-sql/pom.xml
@@ -12,7 +12,7 @@
         1.4.0-28.2
         compile
         2.12
-        1.5.0
+        1.4.0
         1.14.3
         compile
diff --git a/examples/spark-rdd-colocation-mining/build.sbt b/examples/spark-rdd-colocation-mining/build.sbt
index d380f82d01..e8df972ff3 100644
--- a/examples/spark-rdd-colocation-mining/build.sbt
+++ b/examples/spark-rdd-colocation-mining/build.sbt
@@ -39,7 +39,7 @@ val SparkCompatibleVersion = "3.0"
 
 val HadoopVersion = "2.7.2"
 
-val SedonaVersion = "1.5.0"
+val SedonaVersion = "1.4.0"
 
 val ScalaCompatibleVersion = "2.12"
 
diff --git a/examples/spark-sql/build.sbt b/examples/spark-sql/build.sbt
index 8ea074656b..aeb45a6c4b 100644
--- a/examples/spark-sql/build.sbt
+++ b/examples/spark-sql/build.sbt
@@ -39,7 +39,7 @@ val SparkCompatibleVersion = "3.0"
 
 val HadoopVersion = "2.7.2"
 
-val SedonaVersion = "1.5.0"
+val SedonaVersion = "1.4.0"
 
 val ScalaCompatibleVersion = "2.12"
 
diff --git a/examples/spark-viz/build.sbt b/examples/spark-viz/build.sbt
index afc084f514..17c22e8566 100644
--- a/examples/spark-viz/build.sbt
+++ b/examples/spark-viz/build.sbt
@@ -39,14 +39,14 @@ val SparkCompatibleVersion = "3.0"
 
 val HadoopVersion = "2.7.2"
 
-val SedonaVersion = "1.5.0"
+val SedonaVersion = "1.4.0"
 
 val ScalaCompatibleVersion = "2.12"
 
 // Change the dependency scope to "provided" when you run "sbt assembly"
 val dependencyScope = "compile"
 
-val geotoolsVersion = "1.4.0-28.2"
+val geotoolsVersion = "1.5.0-28.2"
 
 //val jacksonVersion = "2.10.0"

From 43a7c36ed9acef197f208bc7211f917eba506854 Mon Sep 17 00:00:00 2001
From: Nilesh Gajwani
Date: Mon, 30 Oct 2023 20:44:01 -0400
Subject: [PATCH 07/28] change spark and pyspark version to 3.4.0

---
 binder/Pipfile   | 2 +-
 binder/postBuild | 6 +++---
 binder/start     | 2 +-
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/binder/Pipfile b/binder/Pipfile
index 57cbb3e2a4..4709b49f4f 100644
--- a/binder/Pipfile
+++ b/binder/Pipfile
@@ -14,7 +14,7 @@ pytest-cov = "*"
 pandas="1.3.5"
 shapely="==1.8.4"
 geopandas="==0.11.1"
-pyspark="==3.3.2"
+pyspark="==3.4.0"
 attrs="*"
 apache-sedona="==1.5.0"
 matplotlib = "==3.5.3"
diff --git a/binder/postBuild b/binder/postBuild
index 88f44ad81a..2ed713ded2 100644
--- a/binder/postBuild
+++ b/binder/postBuild
@@ -1,6 +1,6 @@
 #Download Apache Spark
-wget https://archive.apache.org/dist/spark/spark-3.3.2/spark-3.3.2-bin-hadoop3.tgz
-tar -xzf spark-3.3.2-bin-hadoop3.tgz
+wget https://archive.apache.org/dist/spark/spark-3.4.0/spark-3.4.0-bin-hadoop3.tgz
+tar -xzf spark-3.4.0-bin-hadoop3.tgz
 #Tidy up
-rm spark-3.3.2-bin-hadoop3.tgz
\ No newline at end of file
+rm spark-3.4.0-bin-hadoop3.tgz
\ No newline at end of file
diff --git a/binder/start b/binder/start
index fe679bda2c..923b372b7c 100644
--- a/binder/start
+++ b/binder/start
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-SPARK_HOME=$HOME/spark-3.3.2-bin-hadoop3
+SPARK_HOME=$HOME/spark-3.4.0-bin-hadoop3
 export PATH=$SPARK_HOME/bin:$PATH
 export PYTHONPATH=$SPARK_HOME/python:$PYTHONPATH
 export PYSPARK_SUBMIT_ARGS="--master local[*] pyspark-shell"

From 9767392437883a362957df18745f8ec5ea8e6386 Mon Sep 17 00:00:00 2001
From: Nilesh Gajwani
Date: Mon, 30 Oct 2023 21:33:39 -0400
Subject: [PATCH 08/28] Change outputs to keep useful ones intact

---
 binder/ApacheSedonaCore.ipynb                 | 1180 +++++++++++++++--
 binder/ApacheSedonaSQL.ipynb                  |  392 +++++-
 ...naSQL_SpatialJoin_AirportsPerCountry.ipynb |  291 +++-
 3 files changed, 1665 insertions(+), 198 deletions(-)

diff --git a/binder/ApacheSedonaCore.ipynb b/binder/ApacheSedonaCore.ipynb
index
28b62c99ca..2dd0d61939 100644 --- a/binder/ApacheSedonaCore.ipynb +++ b/binder/ApacheSedonaCore.ipynb @@ -24,7 +24,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ @@ -60,7 +60,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": {}, "outputs": [], "source": [ @@ -98,7 +98,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ @@ -107,9 +107,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "3000" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "## Getting approximate total count\n", "point_rdd.approximateTotalCount" @@ -128,9 +139,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# To run analyze please use function analyze\n", "point_rdd.analyze()" @@ -138,9 +160,23 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 8, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "image/svg+xml": [ + "" + ], + "text/plain": [ + "Envelope(-173.120769, -84.965961, 30.244859, 71.355134)" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# Finding boundary envelope for PointRDD or any other SpatialRDD, it returns Enelope object which inherits from\n", "# shapely.geometry.Polygon\n", @@ -149,9 +185,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 9, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "2996" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# Calculate number of records without duplicates\n", "point_rdd.countWithoutDuplicates()" @@ -159,9 +206,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 10, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "''" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# Getting source epsg code\n", "point_rdd.getSourceEpsgCode()" @@ -169,9 +227,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 11, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "''" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# Getting target epsg code\n", "point_rdd.getTargetEpsgCode()" @@ -179,9 +248,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 12, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# Spatial partitioning data\n", "point_rdd.spatialPartitioning(GridType.KDBTREE)" @@ -207,9 +287,27 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 13, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + " \r" + ] + }, + { + "data": { + "text/plain": [ + "[Geometry: Point userData: 
testattribute0\ttestattribute1\ttestattribute2]" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# take firs element\n", "point_rdd.rawSpatialRDD.take(1)" @@ -217,9 +315,24 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 14, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "[Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2,\n", + " Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2,\n", + " Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2,\n", + " Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2,\n", + " Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2]" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# collect to Python list\n", "point_rdd.rawSpatialRDD.collect()[:5]" @@ -227,9 +340,24 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 15, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "[111.08786851399313,\n", + " 110.92828303170774,\n", + " 111.1385974283527,\n", + " 110.97450594034112,\n", + " 110.97122518072091]" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# apply map functions, for example distance to Point(52 21)\n", "point_rdd.rawSpatialRDD.map(lambda x: x.geom.distance(Point(21, 52))).take(5)" @@ -246,7 +374,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Loaded data can be transfomred to GeoPandas DataFrame few ways" + "## Loaded data can be transformed to GeoPandas DataFrame in a few ways" ] }, { @@ -258,7 +386,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 16, "metadata": {}, "outputs": [], "source": [ @@ -267,7 +395,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 17, "metadata": {}, "outputs": [], "source": [ @@ -278,9 +406,90 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
geomattr1attr2attr3
0POINT (-88.33149 32.32414)testattribute0testattribute1testattribute2
1POINT (-88.17593 32.36076)testattribute0testattribute1testattribute2
2POINT (-88.38895 32.35707)testattribute0testattribute1testattribute2
3POINT (-88.22110 32.35078)testattribute0testattribute1testattribute2
4POINT (-88.32399 32.95067)testattribute0testattribute1testattribute2
\n", + "
" + ], + "text/plain": [ + " geom attr1 attr2 attr3\n", + "0 POINT (-88.33149 32.32414) testattribute0 testattribute1 testattribute2\n", + "1 POINT (-88.17593 32.36076) testattribute0 testattribute1 testattribute2\n", + "2 POINT (-88.38895 32.35707) testattribute0 testattribute1 testattribute2\n", + "3 POINT (-88.22110 32.35078) testattribute0 testattribute1 testattribute2\n", + "4 POINT (-88.32399 32.95067) testattribute0 testattribute1 testattribute2" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "point_gdf[:5]" ] @@ -294,16 +503,18 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, + "execution_count": 91, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "# Adapter allow you to convert geospatial data types introduced with sedona to other ones" + "# Adapter allows you to convert geospatial data types introduced with sedona to other ones" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 20, "metadata": {}, "outputs": [], "source": [ @@ -316,18 +527,117 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 21, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------+--------------+--------------+----------------------------+\n", + "|attr1 |attr2 |attr3 |geom |\n", + "+--------------+--------------+--------------+----------------------------+\n", + "|testattribute0|testattribute1|testattribute2|POINT (-88.331492 32.324142)|\n", + "|testattribute0|testattribute1|testattribute2|POINT (-88.175933 32.360763)|\n", + "|testattribute0|testattribute1|testattribute2|POINT (-88.388954 32.357073)|\n", + "|testattribute0|testattribute1|testattribute2|POINT (-88.221102 32.35078) |\n", + "|testattribute0|testattribute1|testattribute2|POINT (-88.323995 32.950671)|\n", + "+--------------+--------------+--------------+----------------------------+\n", + "only showing top 5 rows\n", + "\n" + ] + } + ], "source": [ "spatial_gdf.show(5, False)" ] }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
attr1attr2attr3geom
0testattribute0testattribute1testattribute2POINT (-88.33149 32.32414)
1testattribute0testattribute1testattribute2POINT (-88.17593 32.36076)
2testattribute0testattribute1testattribute2POINT (-88.38895 32.35707)
3testattribute0testattribute1testattribute2POINT (-88.22110 32.35078)
4testattribute0testattribute1testattribute2POINT (-88.32399 32.95067)
\n", + "
" + ], + "text/plain": [ + " attr1 attr2 attr3 geom\n", + "0 testattribute0 testattribute1 testattribute2 POINT (-88.33149 32.32414)\n", + "1 testattribute0 testattribute1 testattribute2 POINT (-88.17593 32.36076)\n", + "2 testattribute0 testattribute1 testattribute2 POINT (-88.38895 32.35707)\n", + "3 testattribute0 testattribute1 testattribute2 POINT (-88.22110 32.35078)\n", + "4 testattribute0 testattribute1 testattribute2 POINT (-88.32399 32.95067)" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "gpd.GeoDataFrame(spatial_gdf.toPandas(), geometry=\"geom\")[:5]" ] @@ -341,7 +651,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 23, "metadata": {}, "outputs": [], "source": [ @@ -357,7 +667,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 24, "metadata": {}, "outputs": [], "source": [ @@ -366,9 +676,90 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 25, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
geometryattr1attr2attr3
0POINT (-88.33149 32.32414)testattribute0testattribute1testattribute2
1POINT (-88.17593 32.36076)testattribute0testattribute1testattribute2
2POINT (-88.38895 32.35707)testattribute0testattribute1testattribute2
3POINT (-88.22110 32.35078)testattribute0testattribute1testattribute2
4POINT (-88.32399 32.95067)testattribute0testattribute1testattribute2
\n", + "
" + ], + "text/plain": [ + " geometry attr1 attr2 attr3\n", + "0 POINT (-88.33149 32.32414) testattribute0 testattribute1 testattribute2\n", + "1 POINT (-88.17593 32.36076) testattribute0 testattribute1 testattribute2\n", + "2 POINT (-88.38895 32.35707) testattribute0 testattribute1 testattribute2\n", + "3 POINT (-88.22110 32.35078) testattribute0 testattribute1 testattribute2\n", + "4 POINT (-88.32399 32.95067) testattribute0 testattribute1 testattribute2" + ] + }, + "execution_count": 25, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "gpd.GeoDataFrame(geo_df.toPandas(), geometry=\"geometry\")[:5]" ] @@ -394,7 +785,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 26, "metadata": {}, "outputs": [], "source": [ @@ -406,9 +797,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 27, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 27, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "rectangle_rdd.analyze()\n", "point_rdd.analyze()\n", @@ -432,9 +834,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 28, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 28, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "point_rdd.spatialPartitioning(GridType.KDBTREE)" ] @@ -455,7 +868,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 29, "metadata": {}, "outputs": [], "source": [ @@ -503,7 +916,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 30, "metadata": {}, "outputs": [], "source": [ @@ -526,36 +939,83 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 31, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "MapPartitionsRDD[63] at map at FlatPairRddConverter.scala:30" + ] + }, + "execution_count": 31, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "result" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 32, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "[[Geometry: Polygon userData: , Geometry: Point userData: ],\n", + " [Geometry: Polygon userData: , Geometry: Point userData: ]]" + ] + }, + "execution_count": 32, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "result.take(2)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 33, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "[[Geometry: Polygon userData: , Geometry: Point userData: ],\n", + " [Geometry: Polygon userData: , Geometry: Point userData: ],\n", + " [Geometry: Polygon userData: , Geometry: Point userData: ]]" + ] + }, + "execution_count": 33, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "result.collect()[:3]" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 34, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "[0.0, 0.0, 0.0, 0.0, 0.0]" + ] + }, + "execution_count": 34, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# getting distance using SpatialObjects\n", "result.map(lambda x: x[0].geom.distance(x[1].geom)).take(5)" @@ -563,9 +1023,24 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 35, "metadata": 
{}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "[0.054270513955000516,\n", + " 0.054270513955000516,\n", + " 0.051572544132000575,\n", + " 0.051572544132000575,\n", + " 0.051572544132000575]" + ] + }, + "execution_count": 35, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# getting area of polygon data\n", "result.map(lambda x: x[0].geom.area).take(5)" @@ -573,7 +1048,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 36, "metadata": {}, "outputs": [], "source": [ @@ -582,7 +1057,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 37, "metadata": {}, "outputs": [], "source": [ @@ -596,9 +1071,27 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 38, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------------+--------------------+\n", + "| geom_left| geom_right|\n", + "+--------------------+--------------------+\n", + "|POLYGON ((-86.860...|POINT (-86.818935...|\n", + "|POLYGON ((-86.860...|POINT (-86.794378...|\n", + "|POLYGON ((-87.082...|POINT (-87.059583...|\n", + "|POLYGON ((-87.082...|POINT (-86.818935...|\n", + "|POLYGON ((-87.082...|POINT (-86.932213...|\n", + "+--------------------+--------------------+\n", + "only showing top 5 rows\n", + "\n" + ] + } + ], "source": [ "# Set verifySchema to False\n", "spatial_join_result = result.map(lambda x: [x[0].geom, x[1].geom])\n", @@ -607,7 +1100,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 39, "metadata": {}, "outputs": [], "source": [ @@ -616,9 +1109,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 40, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "root\n", + " |-- geom_left: geometry (nullable = false)\n", + " |-- geom_right: geometry (nullable = false)\n", + "\n" + ] + } + ], "source": [ "sedona.createDataFrame(spatial_join_result, schema, verifySchema=False).printSchema()" ] @@ -632,9 +1136,27 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 41, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------------+-----+--------------------+-----+\n", + "| geom_1|attr1| geom_2|attr2|\n", + "+--------------------+-----+--------------------+-----+\n", + "|POLYGON ((-86.860...| |POINT (-86.818935...| |\n", + "|POLYGON ((-86.860...| |POINT (-86.794378...| |\n", + "|POLYGON ((-87.082...| |POINT (-87.059583...| |\n", + "|POLYGON ((-87.082...| |POINT (-86.818935...| |\n", + "|POLYGON ((-87.082...| |POINT (-86.932213...| |\n", + "+--------------------+-----+--------------------+-----+\n", + "only showing top 5 rows\n", + "\n" + ] + } + ], "source": [ "Adapter.toDf(result, [\"attr1\"], [\"attr2\"], sedona).show(5, True)" ] @@ -648,9 +1170,22 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 42, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "root\n", + " |-- geom_1: geometry (nullable = true)\n", + " |-- attr1: string (nullable = true)\n", + " |-- geom_2: geometry (nullable = true)\n", + " |-- attr2: string (nullable = true)\n", + "\n" + ] + } + ], "source": [ "Adapter.toDf(result, [\"attr1\"], [\"attr2\"], sedona).printSchema()" ] @@ -672,7 +1207,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 
43, "metadata": {}, "outputs": [], "source": [ @@ -682,7 +1217,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 44, "metadata": {}, "outputs": [], "source": [ @@ -691,7 +1226,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 45, "metadata": {}, "outputs": [], "source": [ @@ -701,7 +1236,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 46, "metadata": {}, "outputs": [], "source": [ @@ -713,9 +1248,31 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 47, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------------+----------------+\n", + "| geometry|number_of_points|\n", + "+--------------------+----------------+\n", + "|POLYGON ((-87.082...| 12|\n", + "|POLYGON ((-87.092...| 5|\n", + "|POLYGON ((-87.114...| 15|\n", + "|POLYGON ((-86.697...| 1|\n", + "|POLYGON ((-87.285...| 26|\n", + "|POLYGON ((-87.229...| 7|\n", + "|POLYGON ((-86.816...| 6|\n", + "|POLYGON ((-86.749...| 4|\n", + "|POLYGON ((-87.105...| 15|\n", + "|POLYGON ((-86.860...| 12|\n", + "+--------------------+----------------+\n", + "\n" + ] + } + ], "source": [ "sedona.createDataFrame(number_of_points, schema, verifySchema=False).show()" ] @@ -742,17 +1299,15 @@ ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "# Finds 5 closest points from PointRDD to given Point" + "### Finds 5 closest points from PointRDD to given Point" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 49, "metadata": {}, "outputs": [], "source": [ @@ -761,9 +1316,24 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 50, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "[Geometry: Point userData: ,\n", + " Geometry: Point userData: ,\n", + " Geometry: Point userData: ,\n", + " Geometry: Point userData: ,\n", + " Geometry: Point userData: ]" + ] + }, + "execution_count": 50, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "result" ] @@ -777,7 +1347,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 51, "metadata": {}, "outputs": [], "source": [ @@ -791,18 +1361,44 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 52, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "[Geometry: Polygon userData: ,\n", + " Geometry: Polygon userData: ,\n", + " Geometry: Polygon userData: ,\n", + " Geometry: Polygon userData: ,\n", + " Geometry: Polygon userData: ]" + ] + }, + "execution_count": 52, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "polygons_nearby" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 53, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "'POLYGON ((-83.993559 34.087259, -83.993559 34.131247, -83.959903 34.131247, -83.959903 34.087259, -83.993559 34.087259))'" + ] + }, + "execution_count": 53, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "polygons_nearby[0].geom.wkt" ] @@ -828,7 +1424,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 54, "metadata": {}, "outputs": [], "source": [ @@ -837,9 +1433,24 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 55, "metadata": {}, - "outputs": [], + "outputs": [ + 
{ + "name": "stderr", + "output_type": "stream", + "text": [ + "/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/sedona/core/geom/envelope.py:27: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n", + " self.minx = minx\n", + "/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/sedona/core/geom/envelope.py:28: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n", + " self.maxx = maxx\n", + "/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/sedona/core/geom/envelope.py:29: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n", + " self.miny = miny\n", + "/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/sedona/core/geom/envelope.py:30: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n", + " self.maxy = maxy\n" + ] + } + ], "source": [ "query_envelope = Envelope(-85.01, -60.01, 34.01, 50.01)\n", "\n", @@ -848,25 +1459,52 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 56, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "MapPartitionsRDD[127] at map at GeometryRddConverter.scala:30" + ] + }, + "execution_count": 56, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "result_range_query" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 57, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "[Geometry: LineString userData: ,\n", + " Geometry: LineString userData: ,\n", + " Geometry: LineString userData: ,\n", + " Geometry: LineString userData: ,\n", + " Geometry: LineString userData: ,\n", + " Geometry: LineString userData: ]" + ] + }, + "execution_count": 57, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "result_range_query.take(6)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 58, "metadata": {}, "outputs": [], "source": [ @@ -875,7 +1513,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 59, "metadata": {}, "outputs": [], "source": [ @@ -884,9 +1522,27 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 60, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------------+\n", + "| geometry|\n", + "+--------------------+\n", + "|LINESTRING (-72.1...|\n", + "|LINESTRING (-72.4...|\n", + "|LINESTRING (-72.4...|\n", + "|LINESTRING (-73.4...|\n", + "|LINESTRING (-73.6...|\n", + "+--------------------+\n", + "only showing top 5 rows\n", + "\n" + ] + } + ], "source": [ "sedona.createDataFrame(\n", " result_range_query.map(lambda x: [x.geom]),\n", @@ -915,7 +1571,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 61, "metadata": {}, "outputs": [], "source": [ @@ -924,7 +1580,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 62, "metadata": {}, "outputs": [], "source": [ @@ -933,25 +1589,54 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 63, "metadata": {}, - "outputs": [], + "outputs": [ + { + 
"data": { + "text/plain": [ + "" + ] + }, + "execution_count": 63, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "shape_rdd" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 64, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------------+\n", + "| geometry|\n", + "+--------------------+\n", + "|MULTIPOLYGON (((1...|\n", + "|MULTIPOLYGON (((-...|\n", + "|MULTIPOLYGON (((1...|\n", + "|POLYGON ((118.362...|\n", + "|MULTIPOLYGON (((-...|\n", + "+--------------------+\n", + "only showing top 5 rows\n", + "\n" + ] + } + ], "source": [ "Adapter.toDf(shape_rdd, sedona).show(5, True)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 65, "metadata": {}, "outputs": [], "source": [ @@ -969,7 +1654,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 66, "metadata": {}, "outputs": [], "source": [ @@ -978,25 +1663,54 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 67, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 67, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "geo_json_rdd" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 68, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------------+-------+--------+-------+--------+--------------------+------------+----+----+--------+\n", + "| geometry|STATEFP|COUNTYFP|TRACTCE|BLKGRPCE| AFFGEOID| GEOID|NAME|LSAD| ALAND|\n", + "+--------------------+-------+--------+-------+--------+--------------------+------------+----+----+--------+\n", + "|POLYGON ((-87.621...| 01| 077| 011501| 5|1500000US01077011...|010770115015| 5| BG| 6844991|\n", + "|POLYGON ((-85.719...| 01| 045| 021102| 4|1500000US01045021...|010450211024| 4| BG|11360854|\n", + "|POLYGON ((-86.000...| 01| 055| 001300| 3|1500000US01055001...|010550013003| 3| BG| 1378742|\n", + "|POLYGON ((-86.574...| 01| 089| 001700| 2|1500000US01089001...|010890017002| 2| BG| 1040641|\n", + "|POLYGON ((-85.382...| 01| 069| 041400| 1|1500000US01069041...|010690414001| 1| BG| 8243574|\n", + "+--------------------+-------+--------+-------+--------+--------------------+------------+----+----+--------+\n", + "only showing top 5 rows\n", + "\n" + ] + } + ], "source": [ "Adapter.toDf(geo_json_rdd, sedona).drop(\"AWATER\").show(5, True)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 69, "metadata": {}, "outputs": [], "source": [ @@ -1005,7 +1719,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 70, "metadata": {}, "outputs": [], "source": [ @@ -1014,34 +1728,73 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 71, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 71, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "wkt_rdd" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 72, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "root\n", + " |-- geometry: geometry (nullable = true)\n", + "\n" + ] + } + ], "source": [ "Adapter.toDf(wkt_rdd, sedona).printSchema()" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 73, "metadata": {}, - 
"outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------------+\n", + "| geometry|\n", + "+--------------------+\n", + "|POLYGON ((-97.019...|\n", + "|POLYGON ((-123.43...|\n", + "|POLYGON ((-104.56...|\n", + "|POLYGON ((-96.910...|\n", + "|POLYGON ((-98.273...|\n", + "+--------------------+\n", + "only showing top 5 rows\n", + "\n" + ] + } + ], "source": [ "Adapter.toDf(wkt_rdd, sedona).show(5, True)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 74, "metadata": {}, "outputs": [], "source": [ @@ -1050,7 +1803,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 75, "metadata": {}, "outputs": [], "source": [ @@ -1059,9 +1812,27 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 76, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------------+\n", + "| geometry|\n", + "+--------------------+\n", + "|POLYGON ((-97.019...|\n", + "|POLYGON ((-123.43...|\n", + "|POLYGON ((-104.56...|\n", + "|POLYGON ((-96.910...|\n", + "|POLYGON ((-98.273...|\n", + "+--------------------+\n", + "only showing top 5 rows\n", + "\n" + ] + } + ], "source": [ "Adapter.toDf(wkb_rdd, sedona).show(5, True)" ] @@ -1075,7 +1846,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 77, "metadata": {}, "outputs": [], "source": [ @@ -1089,7 +1860,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 78, "metadata": {}, "outputs": [], "source": [ @@ -1099,27 +1870,67 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 79, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "root\n", + " |-- leftgeometry: geometry (nullable = true)\n", + " |-- rightgeometry: geometry (nullable = true)\n", + "\n" + ] + } + ], "source": [ "geometry_df.printSchema()" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 80, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------------+--------------------+\n", + "| leftgeometry| rightgeometry|\n", + "+--------------------+--------------------+\n", + "|POLYGON ((-87.229...|POINT (-87.204033...|\n", + "|POLYGON ((-87.229...|POINT (-87.204299...|\n", + "|POLYGON ((-87.229...|POINT (-87.19351 ...|\n", + "|POLYGON ((-87.229...|POINT (-87.18222 ...|\n", + "|POLYGON ((-87.285...|POINT (-87.28468 ...|\n", + "+--------------------+--------------------+\n", + "only showing top 5 rows\n", + "\n" + ] + } + ], "source": [ "geometry_df.show(5)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 81, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "Row(leftgeometry=, rightgeometry=)" + ] + }, + "execution_count": 81, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "geometry_df.collect()[0]" ] @@ -1133,7 +1944,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 82, "metadata": {}, "outputs": [], "source": [ @@ -1142,9 +1953,27 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 83, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------------+--------------+--------------------+---------------+\n", + "| leftgeometry|left_user_data| rightgeometry|right_user_data|\n", + 
"+--------------------+--------------+--------------------+---------------+\n", + "|POLYGON ((-87.229...| |POINT (-87.204033...| null|\n", + "|POLYGON ((-87.229...| |POINT (-87.204299...| null|\n", + "|POLYGON ((-87.229...| |POINT (-87.19351 ...| null|\n", + "|POLYGON ((-87.229...| |POINT (-87.18222 ...| null|\n", + "|POLYGON ((-87.285...| |POINT (-87.28468 ...| null|\n", + "+--------------------+--------------+--------------------+---------------+\n", + "only showing top 5 rows\n", + "\n" + ] + } + ], "source": [ "geometry_df.show(5)" ] @@ -1158,7 +1987,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 84, "metadata": {}, "outputs": [], "source": [ @@ -1169,7 +1998,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 85, "metadata": {}, "outputs": [], "source": [ @@ -1179,25 +2008,53 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 86, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------------+\n", + "| geometry|\n", + "+--------------------+\n", + "|LINESTRING (-72.1...|\n", + "|LINESTRING (-72.4...|\n", + "|LINESTRING (-72.4...|\n", + "|LINESTRING (-73.4...|\n", + "|LINESTRING (-73.6...|\n", + "+--------------------+\n", + "only showing top 5 rows\n", + "\n" + ] + } + ], "source": [ "gdf.show(5)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 87, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "root\n", + " |-- geometry: geometry (nullable = true)\n", + "\n" + ] + } + ], "source": [ "gdf.printSchema()" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 88, "metadata": {}, "outputs": [], "source": [ @@ -1208,18 +2065,47 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 89, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------------+---+\n", + "| geometry|_c1|\n", + "+--------------------+---+\n", + "|LINESTRING (-72.1...| |\n", + "|LINESTRING (-72.4...| |\n", + "|LINESTRING (-72.4...| |\n", + "|LINESTRING (-73.4...| |\n", + "|LINESTRING (-73.6...| |\n", + "+--------------------+---+\n", + "only showing top 5 rows\n", + "\n" + ] + } + ], "source": [ "gdf_with_columns.show(5)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 90, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "root\n", + " |-- geometry: geometry (nullable = true)\n", + " |-- _c1: string (nullable = true)\n", + "\n" + ] + } + ], "source": [ "gdf_with_columns.printSchema()" ] diff --git a/binder/ApacheSedonaSQL.ipynb b/binder/ApacheSedonaSQL.ipynb index 32b0c7fae7..f2352992bd 100644 --- a/binder/ApacheSedonaSQL.ipynb +++ b/binder/ApacheSedonaSQL.ipynb @@ -24,7 +24,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 22, "metadata": { "tags": [] }, @@ -40,11 +40,24 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 23, "metadata": { "tags": [] }, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "23/10/30 20:48:53 WARN UDTRegistration: Cannot register UDT for org.locationtech.jts.geom.Geometry, which is already registered.\n", + "23/10/30 20:48:53 WARN UDTRegistration: Cannot register UDT for org.locationtech.jts.index.SpatialIndex, which is already 
registered.\n", + "23/10/30 20:48:53 WARN UDTRegistration: Cannot register UDT for org.geotools.coverage.grid.GridCoverage2D, which is already registered.\n", + "23/10/30 20:48:53 WARN SimpleFunctionRegistry: The function st_union_aggr replaced a previously registered function.\n", + "23/10/30 20:48:53 WARN SimpleFunctionRegistry: The function st_envelope_aggr replaced a previously registered function.\n", + "23/10/30 20:48:53 WARN SimpleFunctionRegistry: The function st_intersection_aggr replaced a previously registered function.\n" + ] + } + ], "source": [ "config = SedonaContext.builder() .\\\n", " config('spark.jars.packages',\n", @@ -71,9 +84,27 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 24, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+-----------------+\n", + "| arealandmark|\n", + "+-----------------+\n", + "|POINT (1.1 101.1)|\n", + "|POINT (2.1 102.1)|\n", + "|POINT (3.1 103.1)|\n", + "|POINT (4.1 104.1)|\n", + "|POINT (5.1 105.1)|\n", + "+-----------------+\n", + "only showing top 5 rows\n", + "\n" + ] + } + ], "source": [ "point_csv_df = sedona.read.format(\"csv\").\\\n", " option(\"delimiter\", \",\").\\\n", @@ -95,9 +126,27 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 25, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+----------------+--------------------+\n", + "| name| countyshape|\n", + "+----------------+--------------------+\n", + "| Cuming County|POLYGON ((-97.019...|\n", + "|Wahkiakum County|POLYGON ((-123.43...|\n", + "| De Baca County|POLYGON ((-104.56...|\n", + "|Lancaster County|POLYGON ((-96.910...|\n", + "| Nuckolls County|POLYGON ((-98.273...|\n", + "+----------------+--------------------+\n", + "only showing top 5 rows\n", + "\n" + ] + } + ], "source": [ "polygon_wkt_df = sedona.read.format(\"csv\").\\\n", " option(\"delimiter\", \"\\t\").\\\n", @@ -118,9 +167,27 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 26, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+----------------+--------------------+\n", + "| name| countyshape|\n", + "+----------------+--------------------+\n", + "| Cuming County|POLYGON ((-97.019...|\n", + "|Wahkiakum County|POLYGON ((-123.43...|\n", + "| De Baca County|POLYGON ((-104.56...|\n", + "|Lancaster County|POLYGON ((-96.910...|\n", + "| Nuckolls County|POLYGON ((-98.273...|\n", + "+----------------+--------------------+\n", + "only showing top 5 rows\n", + "\n" + ] + } + ], "source": [ "polygon_wkb_df = sedona.read.format(\"csv\").\\\n", " option(\"delimiter\", \"\\t\").\\\n", @@ -141,9 +208,27 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 27, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------------+\n", + "| countyshape|\n", + "+--------------------+\n", + "|POLYGON ((-87.621...|\n", + "|POLYGON ((-85.719...|\n", + "|POLYGON ((-86.000...|\n", + "|POLYGON ((-86.574...|\n", + "|POLYGON ((-85.382...|\n", + "+--------------------+\n", + "only showing top 5 rows\n", + "\n" + ] + } + ], "source": [ "polygon_json_df = sedona.read.format(\"csv\").\\\n", " option(\"delimiter\", \"\\t\").\\\n", @@ -171,9 +256,36 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 28, "metadata": {}, - "outputs": [], + "outputs": [ + { 
+ "name": "stdout", + "output_type": "stream", + "text": [ + "== Physical Plan ==\n", + "BroadcastIndexJoin pointshape2#614: geometry, LeftSide, LeftSide, Inner, INTERSECTS, ( **org.apache.spark.sql.sedona_sql.expressions.ST_Distance** < 2.0) ST_INTERSECTS(pointshape1#589, pointshape2#614)\n", + ":- SpatialIndex pointshape1#589: geometry, QUADTREE, false, false, 2.0\n", + ": +- Project [ **org.apache.spark.sql.sedona_sql.expressions.ST_Point** AS pointshape1#589, abc AS name1#590]\n", + ": +- FileScan csv [_c0#585,_c1#586] Batched: false, DataFilters: [], Format: CSV, Location: InMemoryFileIndex(1 paths)[file:/Users/nileshgajwani/sedona/binder/data/testpoint.csv], PartitionFilters: [], PushedFilters: [], ReadSchema: struct<_c0:string,_c1:string>\n", + "+- Project [ **org.apache.spark.sql.sedona_sql.expressions.ST_Point** AS pointshape2#614, def AS name2#615]\n", + " +- FileScan csv [_c0#610,_c1#611] Batched: false, DataFilters: [], Format: CSV, Location: InMemoryFileIndex(1 paths)[file:/Users/nileshgajwani/sedona/binder/data/testpoint.csv], PartitionFilters: [], PushedFilters: [], ReadSchema: struct<_c0:string,_c1:string>\n", + "\n", + "\n", + "+-----------------+-----+-----------------+-----+\n", + "| pointshape1|name1| pointshape2|name2|\n", + "+-----------------+-----+-----------------+-----+\n", + "|POINT (1.1 101.1)| abc|POINT (1.1 101.1)| def|\n", + "|POINT (2.1 102.1)| abc|POINT (1.1 101.1)| def|\n", + "|POINT (1.1 101.1)| abc|POINT (2.1 102.1)| def|\n", + "|POINT (2.1 102.1)| abc|POINT (2.1 102.1)| def|\n", + "|POINT (3.1 103.1)| abc|POINT (2.1 102.1)| def|\n", + "+-----------------+-----+-----------------+-----+\n", + "only showing top 5 rows\n", + "\n" + ] + } + ], "source": [ "point_csv_df_1 = sedona.read.format(\"csv\").\\\n", " option(\"delimiter\", \",\").\\\n", @@ -215,7 +327,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 29, "metadata": {}, "outputs": [], "source": [ @@ -229,25 +341,57 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 30, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "root\n", + " |-- osm_id: string (nullable = true)\n", + " |-- code: long (nullable = true)\n", + " |-- fclass: string (nullable = true)\n", + " |-- name: string (nullable = true)\n", + " |-- geometry: geometry (nullable = true)\n", + "\n" + ] + } + ], "source": [ "osm_points.printSchema()" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 31, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------+----+---------+--------------+--------------------+\n", + "| osm_id|code| fclass| name| geometry|\n", + "+--------+----+---------+--------------+--------------------+\n", + "|26860257|2422|camp_site| de Kroon|POINT (15.3393145...|\n", + "|26860294|2406| chalet|Leśne Ustronie|POINT (14.8709625...|\n", + "|29947493|2402| motel| |POINT (15.0946636...|\n", + "|29947498|2602| atm| |POINT (15.0732014...|\n", + "|29947499|2401| hotel| |POINT (15.0696777...|\n", + "+--------+----+---------+--------------+--------------------+\n", + "only showing top 5 rows\n", + "\n" + ] + } + ], "source": [ "osm_points.show(5)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 32, "metadata": {}, "outputs": [], "source": [ @@ -256,7 +400,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 33, "metadata": {}, "outputs": [], "source": [ @@ -273,16 +417,34 @@ 
}, { "cell_type": "code", - "execution_count": null, + "execution_count": 34, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------+----+---------+--------------+--------------------+\n", + "| osm_id|code| fclass| name| geom|\n", + "+--------+----+---------+--------------+--------------------+\n", + "|26860257|2422|camp_site| de Kroon|POINT (250776.778...|\n", + "|26860294|2406| chalet|Leśne Ustronie|POINT (221076.709...|\n", + "|29947493|2402| motel| |POINT (233902.541...|\n", + "|29947498|2602| atm| |POINT (232447.203...|\n", + "|29947499|2401| hotel| |POINT (232208.377...|\n", + "+--------+----+---------+--------------+--------------------+\n", + "only showing top 5 rows\n", + "\n" + ] + } + ], "source": [ "transformed_df.show(5)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 35, "metadata": {}, "outputs": [], "source": [ @@ -291,7 +453,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 36, "metadata": {}, "outputs": [], "source": [ @@ -306,9 +468,49 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 37, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+----------+---------+--------------------+\n", + "| id_1| id_2| geom|\n", + "+----------+---------+--------------------+\n", + "| 197624402|197624402|POINT (203703.035...|\n", + "| 197663196|197663196|POINT (203936.327...|\n", + "| 197953474|197953474|POINT (203724.746...|\n", + "|1074233127|262310516|POINT (203524.110...|\n", + "| 262310516|262310516|POINT (203507.730...|\n", + "|1074233123|262310516|POINT (203505.198...|\n", + "| 270281140|270281140|POINT (202809.394...|\n", + "|1074232906|270281140|POINT (202816.420...|\n", + "| 270306609|270306609|POINT (203639.141...|\n", + "|1257728000|270306746|POINT (203730.740...|\n", + "| 270306746|270306746|POINT (203694.827...|\n", + "|1401424769|270306746|POINT (203717.829...|\n", + "| 293896571|293896571|POINT (203064.162...|\n", + "|3256728465|293896571|POINT (203078.302...|\n", + "| 371203685|371203685|POINT (204114.915...|\n", + "| 387403536|387403536|POINT (205969.794...|\n", + "| 387403537|387403537|POINT (204667.758...|\n", + "|2857654988|387403537|POINT (204659.690...|\n", + "| 413542774|413542774|POINT (200735.109...|\n", + "| 448151936|448151936|POINT (203784.389...|\n", + "+----------+---------+--------------------+\n", + "only showing top 20 rows\n", + "\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "23/10/30 20:48:55 WARN JoinQuery: UseIndex is true, but no index exists. Will build index on the fly.\n" + ] + } + ], "source": [ "neighbours_within_1000m.show()" ] @@ -322,16 +524,24 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 38, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "23/10/30 20:48:55 WARN JoinQuery: UseIndex is true, but no index exists. Will build index on the fly.\n" + ] + } + ], "source": [ "df = neighbours_within_1000m.toPandas()" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 39, "metadata": {}, "outputs": [], "source": [ @@ -340,9 +550,129 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 40, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "
+       [HTML table rendering trimmed: pandas DataFrame preview (columns id_1, id_2, geom; 65675 rows × 3 columns), duplicated by the text/plain output below]
" + ], + "text/plain": [ + " id_1 id_2 geom\n", + "0 197624402 197624402 POINT (203703.036 418398.613)\n", + "1 197663196 197663196 POINT (203936.327 418662.604)\n", + "2 197953474 197953474 POINT (203724.747 418602.854)\n", + "3 1074233127 262310516 POINT (203524.111 417303.619)\n", + "4 262310516 262310516 POINT (203507.731 417345.373)\n", + "... ... ... ...\n", + "65670 5043766684 6635874242 POINT (281113.731 517914.616)\n", + "65671 6635874242 6635874242 POINT (281121.096 517899.875)\n", + "65672 6635874248 6635874248 POINT (281238.276 518490.396)\n", + "65673 6736772185 6736772185 POINT (291347.707 557611.803)\n", + "65674 6817416704 6817416704 POINT (286325.570 557253.517)\n", + "\n", + "[65675 rows x 3 columns]" + ] + }, + "execution_count": 40, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "gdf" ] diff --git a/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb b/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb index c07dca8efb..e9ad784db2 100644 --- a/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb +++ b/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb @@ -24,7 +24,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ @@ -94,9 +94,29 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "root\n", + " |-- geometry: geometry (nullable = true)\n", + " |-- scalerank: string (nullable = true)\n", + " |-- featurecla: string (nullable = true)\n", + " |-- type: string (nullable = true)\n", + " |-- name: string (nullable = true)\n", + " |-- abbrev: string (nullable = true)\n", + " |-- location: string (nullable = true)\n", + " |-- gps_code: string (nullable = true)\n", + " |-- iata_code: string (nullable = true)\n", + " |-- wikipedia: string (nullable = true)\n", + " |-- natlscale: string (nullable = true)\n", + "\n" + ] + } + ], "source": [ "airports = ShapefileReader.readToGeometryRDD(sc, \"data/ne_50m_airports/\")\n", "airports_df = Adapter.toDf(airports, sedona)\n", @@ -115,7 +135,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, "metadata": {}, "outputs": [], "source": [ @@ -131,9 +151,17 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 6, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[('3.0', '2.12', '1.5.0')]\n" + ] + } + ], "source": [ "airports_rdd = Adapter.toSpatialRdd(airports_df, \"geometry\")\n", "# Drop the duplicate name column in countries_df\n", @@ -170,9 +198,75 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "23/10/30 20:48:30 WARN JoinQuery: UseIndex is true, but no index exists. 
Will build index on the fly.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------------+--------------------+--------------------+--------------------+\n", + "| country_geom| NAME_EN| airport_geom| name|\n", + "+--------------------+--------------------+--------------------+--------------------+\n", + "|MULTIPOLYGON (((1...|Taiwan ...|POINT (121.231370...|Taoyuan ...|\n", + "|MULTIPOLYGON (((5...|Netherlands ...|POINT (4.76437693...|Schiphol ...|\n", + "|POLYGON ((103.969...|Singapore ...|POINT (103.986413...|Singapore Changi ...|\n", + "|MULTIPOLYGON (((-...|United Kingdom ...|POINT (-0.4531566...|London Heathrow ...|\n", + "|MULTIPOLYGON (((-...|United States of ...|POINT (-149.98172...|Anchorage Int'l ...|\n", + "|MULTIPOLYGON (((-...|United States of ...|POINT (-84.425397...|Hartsfield-Jackso...|\n", + "|MULTIPOLYGON (((1...|People's Republic...|POINT (116.588174...|Beijing Capital ...|\n", + "|MULTIPOLYGON (((-...|Colombia ...|POINT (-74.143371...|Eldorado Int'l ...|\n", + "|MULTIPOLYGON (((6...|India ...|POINT (72.8745639...|Chhatrapati Shiva...|\n", + "|MULTIPOLYGON (((-...|United States of ...|POINT (-71.016406...|Gen E L Logan Int...|\n", + "|MULTIPOLYGON (((-...|United States of ...|POINT (-76.668642...|Baltimore-Washing...|\n", + "|POLYGON ((36.8713...|Egypt ...|POINT (31.3997430...|Cairo Int'l ...|\n", + "|POLYGON ((-2.2196...|Morocco ...|POINT (-7.6632188...|Casablanca-Anfa ...|\n", + "|MULTIPOLYGON (((-...|Venezuela ...|POINT (-67.005748...|Simon Bolivar Int...|\n", + "|MULTIPOLYGON (((2...|South Africa ...|POINT (18.5976565...|Cape Town Int'l ...|\n", + "|MULTIPOLYGON (((1...|People's Republic...|POINT (103.956136...|Chengdushuang Liu...|\n", + "|MULTIPOLYGON (((6...|India ...|POINT (77.0878362...|Indira Gandhi Int...|\n", + "|MULTIPOLYGON (((-...|United States of ...|POINT (-104.67379...|Denver Int'l ...|\n", + "|MULTIPOLYGON (((-...|United States of ...|POINT (-97.040371...|Dallas-Ft. Worth ...|\n", + "|MULTIPOLYGON (((1...|Thailand ...|POINT (100.602578...|Don Muang Int'l ...|\n", + "+--------------------+--------------------+--------------------+--------------------+\n", + "only showing top 20 rows\n", + "\n", + "+--------------------+--------------------+--------------------+--------------------+\n", + "| country_geom| NAME_EN| airport_geom| name|\n", + "+--------------------+--------------------+--------------------+--------------------+\n", + "|MULTIPOLYGON (((-...|United States of ...|POINT (-80.145258...|Fort Lauderdale H...|\n", + "|MULTIPOLYGON (((-...|United States of ...|POINT (-80.278971...|Miami Int'l ...|\n", + "|MULTIPOLYGON (((-...|United States of ...|POINT (-95.333704...|George Bush Inter...|\n", + "|MULTIPOLYGON (((-...|United States of ...|POINT (-90.256693...|New Orleans Int'l...|\n", + "|MULTIPOLYGON (((-...|United States of ...|POINT (-81.307371...|Orlando Int'l ...|\n", + "|MULTIPOLYGON (((-...|United States of ...|POINT (-82.534824...|Tampa Int'l ...|\n", + "|MULTIPOLYGON (((-...|United States of ...|POINT (-112.01363...|Sky Harbor Int'l ...|\n", + "|MULTIPOLYGON (((-...|United States of ...|POINT (-118.40246...|Los Angeles Int'l...|\n", + "|MULTIPOLYGON (((-...|United States of ...|POINT (-116.97547...|General Abelardo ...|\n", + "|MULTIPOLYGON (((-...|United States of ...|POINT (-97.040371...|Dallas-Ft. 
Worth ...|\n", + "|MULTIPOLYGON (((-...|United States of ...|POINT (-84.425397...|Hartsfield-Jackso...|\n", + "|POLYGON ((-69.965...|Peru ...|POINT (-77.107565...|Jorge Chavez ...|\n", + "|MULTIPOLYGON (((-...|Panama ...|POINT (-79.387134...|Tocumen Int'l ...|\n", + "|POLYGON ((-83.157...|Nicaragua ...|POINT (-86.171284...|Augusto Cesar San...|\n", + "|MULTIPOLYGON (((-...|Mexico ...|POINT (-96.183570...|Gen. Heriberto Ja...|\n", + "|MULTIPOLYGON (((-...|Mexico ...|POINT (-106.27001...|General Rafael Bu...|\n", + "|MULTIPOLYGON (((-...|Mexico ...|POINT (-99.754508...|General Juan N Al...|\n", + "|MULTIPOLYGON (((-...|Mexico ...|POINT (-99.570649...|Jose Maria Morelo...|\n", + "|MULTIPOLYGON (((-...|Mexico ...|POINT (-98.375759...|Puebla ...|\n", + "|MULTIPOLYGON (((-...|Mexico ...|POINT (-99.082607...|Lic Benito Juarez...|\n", + "+--------------------+--------------------+--------------------+--------------------+\n", + "only showing top 20 rows\n", + "\n" + ] + } + ], "source": [ "# The result of SQL API\n", "result.show()\n", @@ -189,11 +283,44 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 8, "metadata": { "scrolled": true }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------------+--------------------+------------+\n", + "| NAME_EN| country_geom|AirportCount|\n", + "+--------------------+--------------------+------------+\n", + "|Cuba ...|MULTIPOLYGON (((-...| 1|\n", + "|Mexico ...|MULTIPOLYGON (((-...| 12|\n", + "|Panama ...|MULTIPOLYGON (((-...| 1|\n", + "|Nicaragua ...|POLYGON ((-83.157...| 1|\n", + "|Honduras ...|MULTIPOLYGON (((-...| 1|\n", + "|Colombia ...|MULTIPOLYGON (((-...| 4|\n", + "|United States of ...|MULTIPOLYGON (((-...| 35|\n", + "|Ecuador ...|MULTIPOLYGON (((-...| 1|\n", + "|The Bahamas ...|MULTIPOLYGON (((-...| 1|\n", + "|Peru ...|POLYGON ((-69.965...| 1|\n", + "|Guatemala ...|POLYGON ((-92.235...| 1|\n", + "|Canada ...|MULTIPOLYGON (((-...| 15|\n", + "|Venezuela ...|MULTIPOLYGON (((-...| 3|\n", + "|Argentina ...|MULTIPOLYGON (((-...| 3|\n", + "|Bolivia ...|MULTIPOLYGON (((-...| 2|\n", + "|Paraguay ...|POLYGON ((-58.159...| 1|\n", + "|Benin ...|POLYGON ((1.62265...| 1|\n", + "|Guinea ...|POLYGON ((-10.283...| 1|\n", + "|Chile ...|MULTIPOLYGON (((-...| 5|\n", + "|Nigeria ...|MULTIPOLYGON (((7...| 3|\n", + "+--------------------+--------------------+------------+\n", + "only showing top 20 rows\n", + "\n" + ] + } + ], "source": [ "# result.createOrReplaceTempView(\"result\")\n", "result2.createOrReplaceTempView(\"result\")\n", @@ -211,9 +338,31 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 9, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "User Guide: https://docs.kepler.gl/docs/keplergl-jupyter\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "a4225c4a1d104a63a852c60c24dde5ee", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "KeplerGl(config={'version': 'v1', 'config': {'visState': {'filters': [], 'layers': [{'id': 'ikzru0t', 'type': …" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "sedona_kepler_map = SedonaKepler.create_map(df=groupedresult, name=\"AirportCount\", config=getConfig())\n", "sedona_kepler_map" @@ -237,11 +386,26 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 10, "metadata": { "tags": [] }, - "outputs": [], + "outputs": [ + { + "name": 
"stdout", + "output_type": "stream", + "text": [ + "+--------------------+--------------------+--------------------+\n", + "| NAME_EN| country_geom| h3_cellID|\n", + "+--------------------+--------------------+--------------------+\n", + "|Cuba ...|MULTIPOLYGON (((-...|[5911955825051566...|\n", + "|Mexico ...|MULTIPOLYGON (((-...|[5918915733655388...|\n", + "+--------------------+--------------------+--------------------+\n", + "only showing top 2 rows\n", + "\n" + ] + } + ], "source": [ "h3_df = sedona.sql(\"SELECT g.NAME_EN, g.country_geom, ST_H3CellIDs(g.country_geom, 3, false) as h3_cellID from grouped_result g\")\n", "h3_df.show(2)" @@ -256,11 +420,40 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 11, "metadata": { "tags": [] }, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Stage 41:=================================================> (6 + 1) / 7]\r" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------------+--------------------+------------------+\n", + "| NAME_EN| country_geom| h3|\n", + "+--------------------+--------------------+------------------+\n", + "|Cuba ...|MULTIPOLYGON (((-...|591195582505156607|\n", + "|Cuba ...|MULTIPOLYGON (((-...|591195513785679871|\n", + "+--------------------+--------------------+------------------+\n", + "only showing top 2 rows\n", + "\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + " \r" + ] + } + ], "source": [ "exploded_h3 = h3_df.select(h3_df.NAME_EN, h3_df.country_geom, explode(h3_df.h3_cellID).alias(\"h3\"))\n", "exploded_h3.show(2)" @@ -276,11 +469,40 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 12, "metadata": { "tags": [] }, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Stage 48:=================================================> (6 + 1) / 7]\r" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------------+---------------+\n", + "| NAME_EN| ex_h3|\n", + "+--------------------+---------------+\n", + "|Cuba ...|83458EFFFFFFFFF|\n", + "|Cuba ...|834590FFFFFFFFF|\n", + "+--------------------+---------------+\n", + "only showing top 2 rows\n", + "\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + " \r" + ] + } + ], "source": [ "exploded_h3 = exploded_h3.sample(0.3)\n", "exploded_h3.createOrReplaceTempView(\"exploded_h3\")\n", @@ -298,11 +520,40 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 13, "metadata": { "tags": [] }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "User Guide: https://docs.kepler.gl/docs/keplergl-jupyter\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + " \r" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "92e221e5c4b24e7ea9072fe0d591b53b", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "KeplerGl(data={'h3': {'index': [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, …" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "sedona_kepler_h3 = SedonaKepler.create_map(df=hex_exploded_h3, name=\"h3\")\n", "sedona_kepler_h3" From 63b1d46be7cfe4093050cf61d3dd29562f11ec66 Mon Sep 17 00:00:00 2001 From: Nilesh Gajwani Date: Tue, 31 Oct 2023 01:05:09 -0400 Subject: [PATCH 09/28] Add SedonaPydeck example --- 
...naSQL_SpatialJoin_AirportsPerCountry.ipynb | 327 +++--------------- 1 file changed, 49 insertions(+), 278 deletions(-) diff --git a/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb b/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb index e9ad784db2..c799353571 100644 --- a/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb +++ b/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb @@ -24,7 +24,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -94,29 +94,9 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- geometry: geometry (nullable = true)\n", - " |-- scalerank: string (nullable = true)\n", - " |-- featurecla: string (nullable = true)\n", - " |-- type: string (nullable = true)\n", - " |-- name: string (nullable = true)\n", - " |-- abbrev: string (nullable = true)\n", - " |-- location: string (nullable = true)\n", - " |-- gps_code: string (nullable = true)\n", - " |-- iata_code: string (nullable = true)\n", - " |-- wikipedia: string (nullable = true)\n", - " |-- natlscale: string (nullable = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "airports = ShapefileReader.readToGeometryRDD(sc, \"data/ne_50m_airports/\")\n", "airports_df = Adapter.toDf(airports, sedona)\n", @@ -135,7 +115,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -151,17 +131,9 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[('3.0', '2.12', '1.5.0')]\n" - ] - } - ], + "outputs": [], "source": [ "airports_rdd = Adapter.toSpatialRdd(airports_df, \"geometry\")\n", "# Drop the duplicate name column in countries_df\n", @@ -198,75 +170,9 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "23/10/30 20:48:30 WARN JoinQuery: UseIndex is true, but no index exists. 
Will build index on the fly.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+--------------------+--------------------+\n", - "| country_geom| NAME_EN| airport_geom| name|\n", - "+--------------------+--------------------+--------------------+--------------------+\n", - "|MULTIPOLYGON (((1...|Taiwan ...|POINT (121.231370...|Taoyuan ...|\n", - "|MULTIPOLYGON (((5...|Netherlands ...|POINT (4.76437693...|Schiphol ...|\n", - "|POLYGON ((103.969...|Singapore ...|POINT (103.986413...|Singapore Changi ...|\n", - "|MULTIPOLYGON (((-...|United Kingdom ...|POINT (-0.4531566...|London Heathrow ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-149.98172...|Anchorage Int'l ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-84.425397...|Hartsfield-Jackso...|\n", - "|MULTIPOLYGON (((1...|People's Republic...|POINT (116.588174...|Beijing Capital ...|\n", - "|MULTIPOLYGON (((-...|Colombia ...|POINT (-74.143371...|Eldorado Int'l ...|\n", - "|MULTIPOLYGON (((6...|India ...|POINT (72.8745639...|Chhatrapati Shiva...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-71.016406...|Gen E L Logan Int...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-76.668642...|Baltimore-Washing...|\n", - "|POLYGON ((36.8713...|Egypt ...|POINT (31.3997430...|Cairo Int'l ...|\n", - "|POLYGON ((-2.2196...|Morocco ...|POINT (-7.6632188...|Casablanca-Anfa ...|\n", - "|MULTIPOLYGON (((-...|Venezuela ...|POINT (-67.005748...|Simon Bolivar Int...|\n", - "|MULTIPOLYGON (((2...|South Africa ...|POINT (18.5976565...|Cape Town Int'l ...|\n", - "|MULTIPOLYGON (((1...|People's Republic...|POINT (103.956136...|Chengdushuang Liu...|\n", - "|MULTIPOLYGON (((6...|India ...|POINT (77.0878362...|Indira Gandhi Int...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-104.67379...|Denver Int'l ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-97.040371...|Dallas-Ft. Worth ...|\n", - "|MULTIPOLYGON (((1...|Thailand ...|POINT (100.602578...|Don Muang Int'l ...|\n", - "+--------------------+--------------------+--------------------+--------------------+\n", - "only showing top 20 rows\n", - "\n", - "+--------------------+--------------------+--------------------+--------------------+\n", - "| country_geom| NAME_EN| airport_geom| name|\n", - "+--------------------+--------------------+--------------------+--------------------+\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-80.145258...|Fort Lauderdale H...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-80.278971...|Miami Int'l ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-95.333704...|George Bush Inter...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-90.256693...|New Orleans Int'l...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-81.307371...|Orlando Int'l ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-82.534824...|Tampa Int'l ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-112.01363...|Sky Harbor Int'l ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-118.40246...|Los Angeles Int'l...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-116.97547...|General Abelardo ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-97.040371...|Dallas-Ft. 
Worth ...|\n", - "|MULTIPOLYGON (((-...|United States of ...|POINT (-84.425397...|Hartsfield-Jackso...|\n", - "|POLYGON ((-69.965...|Peru ...|POINT (-77.107565...|Jorge Chavez ...|\n", - "|MULTIPOLYGON (((-...|Panama ...|POINT (-79.387134...|Tocumen Int'l ...|\n", - "|POLYGON ((-83.157...|Nicaragua ...|POINT (-86.171284...|Augusto Cesar San...|\n", - "|MULTIPOLYGON (((-...|Mexico ...|POINT (-96.183570...|Gen. Heriberto Ja...|\n", - "|MULTIPOLYGON (((-...|Mexico ...|POINT (-106.27001...|General Rafael Bu...|\n", - "|MULTIPOLYGON (((-...|Mexico ...|POINT (-99.754508...|General Juan N Al...|\n", - "|MULTIPOLYGON (((-...|Mexico ...|POINT (-99.570649...|Jose Maria Morelo...|\n", - "|MULTIPOLYGON (((-...|Mexico ...|POINT (-98.375759...|Puebla ...|\n", - "|MULTIPOLYGON (((-...|Mexico ...|POINT (-99.082607...|Lic Benito Juarez...|\n", - "+--------------------+--------------------+--------------------+--------------------+\n", - "only showing top 20 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "# The result of SQL API\n", "result.show()\n", @@ -283,44 +189,11 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": { "scrolled": true }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+------------+\n", - "| NAME_EN| country_geom|AirportCount|\n", - "+--------------------+--------------------+------------+\n", - "|Cuba ...|MULTIPOLYGON (((-...| 1|\n", - "|Mexico ...|MULTIPOLYGON (((-...| 12|\n", - "|Panama ...|MULTIPOLYGON (((-...| 1|\n", - "|Nicaragua ...|POLYGON ((-83.157...| 1|\n", - "|Honduras ...|MULTIPOLYGON (((-...| 1|\n", - "|Colombia ...|MULTIPOLYGON (((-...| 4|\n", - "|United States of ...|MULTIPOLYGON (((-...| 35|\n", - "|Ecuador ...|MULTIPOLYGON (((-...| 1|\n", - "|The Bahamas ...|MULTIPOLYGON (((-...| 1|\n", - "|Peru ...|POLYGON ((-69.965...| 1|\n", - "|Guatemala ...|POLYGON ((-92.235...| 1|\n", - "|Canada ...|MULTIPOLYGON (((-...| 15|\n", - "|Venezuela ...|MULTIPOLYGON (((-...| 3|\n", - "|Argentina ...|MULTIPOLYGON (((-...| 3|\n", - "|Bolivia ...|MULTIPOLYGON (((-...| 2|\n", - "|Paraguay ...|POLYGON ((-58.159...| 1|\n", - "|Benin ...|POLYGON ((1.62265...| 1|\n", - "|Guinea ...|POLYGON ((-10.283...| 1|\n", - "|Chile ...|MULTIPOLYGON (((-...| 5|\n", - "|Nigeria ...|MULTIPOLYGON (((7...| 3|\n", - "+--------------------+--------------------+------------+\n", - "only showing top 20 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "# result.createOrReplaceTempView(\"result\")\n", "result2.createOrReplaceTempView(\"result\")\n", @@ -336,38 +209,45 @@ "## Visualize the number of airports in each country" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Visualize using SedonaKepler" + ] + }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "User Guide: https://docs.kepler.gl/docs/keplergl-jupyter\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "a4225c4a1d104a63a852c60c24dde5ee", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "KeplerGl(config={'version': 'v1', 'config': {'visState': {'filters': [], 'layers': [{'id': 'ikzru0t', 'type': …" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "sedona_kepler_map = SedonaKepler.create_map(df=groupedresult, name=\"AirportCount\", config=getConfig())\n", "sedona_kepler_map" 
] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Visualize using SedonaPyDeck\n", + "The above visualization is generated by a pre-set config informing SedonaKepler that the map to be rendered has to be a choropleth map with choropleth of the `AirportCount` column value.\n", + "\n", + "This can be also be achieved using [SedonaPyDeck](https://sedona.apache.org/1.5.0/tutorial/sql/#sedonapydeck) and its `create_choropleth_map` API." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sedona_pydeck_map = SedonaPyDeck.create_choropleth_map(df=groupedresult, plot_col='AirportCount')\n", + "sedona_pydeck_map" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -386,26 +266,11 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": null, "metadata": { "tags": [] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+--------------------+\n", - "| NAME_EN| country_geom| h3_cellID|\n", - "+--------------------+--------------------+--------------------+\n", - "|Cuba ...|MULTIPOLYGON (((-...|[5911955825051566...|\n", - "|Mexico ...|MULTIPOLYGON (((-...|[5918915733655388...|\n", - "+--------------------+--------------------+--------------------+\n", - "only showing top 2 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "h3_df = sedona.sql(\"SELECT g.NAME_EN, g.country_geom, ST_H3CellIDs(g.country_geom, 3, false) as h3_cellID from grouped_result g\")\n", "h3_df.show(2)" @@ -420,40 +285,11 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "metadata": { "tags": [] }, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Stage 41:=================================================> (6 + 1) / 7]\r" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+------------------+\n", - "| NAME_EN| country_geom| h3|\n", - "+--------------------+--------------------+------------------+\n", - "|Cuba ...|MULTIPOLYGON (((-...|591195582505156607|\n", - "|Cuba ...|MULTIPOLYGON (((-...|591195513785679871|\n", - "+--------------------+--------------------+------------------+\n", - "only showing top 2 rows\n", - "\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - " \r" - ] - } - ], + "outputs": [], "source": [ "exploded_h3 = h3_df.select(h3_df.NAME_EN, h3_df.country_geom, explode(h3_df.h3_cellID).alias(\"h3\"))\n", "exploded_h3.show(2)" @@ -469,40 +305,11 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": null, "metadata": { "tags": [] }, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[Stage 48:=================================================> (6 + 1) / 7]\r" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+---------------+\n", - "| NAME_EN| ex_h3|\n", - "+--------------------+---------------+\n", - "|Cuba ...|83458EFFFFFFFFF|\n", - "|Cuba ...|834590FFFFFFFFF|\n", - "+--------------------+---------------+\n", - "only showing top 2 rows\n", - "\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - " \r" - ] - } - ], + "outputs": [], "source": [ "exploded_h3 = exploded_h3.sample(0.3)\n", "exploded_h3.createOrReplaceTempView(\"exploded_h3\")\n", @@ -520,51 +327,15 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": null, 
"metadata": { "tags": [] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "User Guide: https://docs.kepler.gl/docs/keplergl-jupyter\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - " \r" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "92e221e5c4b24e7ea9072fe0d591b53b", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "KeplerGl(data={'h3': {'index': [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, …" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "sedona_kepler_h3 = SedonaKepler.create_map(df=hex_exploded_h3, name=\"h3\")\n", "sedona_kepler_h3" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { From 8730cc24bea8562e1f4b19196fd6945d3e60da83 Mon Sep 17 00:00:00 2001 From: Nilesh Gajwani Date: Tue, 31 Oct 2023 01:09:56 -0400 Subject: [PATCH 10/28] Change sedona-spark-shaded version to 3.4 --- binder/ApacheSedonaCore.ipynb | 1168 ++--------------- binder/ApacheSedonaSQL.ipynb | 27 + ...naSQL_SpatialJoin_AirportsPerCountry.ipynb | 2 +- 3 files changed, 169 insertions(+), 1028 deletions(-) diff --git a/binder/ApacheSedonaCore.ipynb b/binder/ApacheSedonaCore.ipynb index 2dd0d61939..01a5f5847d 100644 --- a/binder/ApacheSedonaCore.ipynb +++ b/binder/ApacheSedonaCore.ipynb @@ -24,7 +24,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -51,7 +51,7 @@ "source": [ "config = SedonaContext.builder() .\\\n", " config('spark.jars.packages',\n", - " 'org.apache.sedona:sedona-spark-shaded-3.0_2.12:1.5.0,'\n", + " 'org.apache.sedona:sedona-spark-shaded-3.4_2.12:1.5.0,'\n", " 'org.datasyslab:geotools-wrapper:1.5.0-28.2'). 
\\\n", " getOrCreate()\n", "\n", @@ -60,7 +60,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -98,7 +98,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -107,20 +107,9 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "3000" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "## Getting approximate total count\n", "point_rdd.approximateTotalCount" @@ -139,20 +128,9 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# To run analyze please use function analyze\n", "point_rdd.analyze()" @@ -160,23 +138,9 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "image/svg+xml": [ - "" - ], - "text/plain": [ - "Envelope(-173.120769, -84.965961, 30.244859, 71.355134)" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# Finding boundary envelope for PointRDD or any other SpatialRDD, it returns Enelope object which inherits from\n", "# shapely.geometry.Polygon\n", @@ -185,20 +149,9 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "2996" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# Calculate number of records without duplicates\n", "point_rdd.countWithoutDuplicates()" @@ -206,20 +159,9 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "''" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# Getting source epsg code\n", "point_rdd.getSourceEpsgCode()" @@ -227,20 +169,9 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "''" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# Getting target epsg code\n", "point_rdd.getTargetEpsgCode()" @@ -248,20 +179,9 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# Spatial partitioning data\n", "point_rdd.spatialPartitioning(GridType.KDBTREE)" @@ -287,27 +207,9 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - " \r" - ] - }, - { - "data": { - "text/plain": [ - "[Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2]" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# take firs element\n", 
"point_rdd.rawSpatialRDD.take(1)" @@ -315,24 +217,9 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2,\n", - " Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2,\n", - " Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2,\n", - " Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2,\n", - " Geometry: Point userData: testattribute0\ttestattribute1\ttestattribute2]" - ] - }, - "execution_count": 14, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# collect to Python list\n", "point_rdd.rawSpatialRDD.collect()[:5]" @@ -340,24 +227,9 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[111.08786851399313,\n", - " 110.92828303170774,\n", - " 111.1385974283527,\n", - " 110.97450594034112,\n", - " 110.97122518072091]" - ] - }, - "execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# apply map functions, for example distance to Point(52 21)\n", "point_rdd.rawSpatialRDD.map(lambda x: x.geom.distance(Point(21, 52))).take(5)" @@ -386,7 +258,7 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -395,7 +267,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -406,90 +278,9 @@ }, { "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
-       [HTML table rendering trimmed: GeoDataFrame preview (columns geom, attr1, attr2, attr3; first 5 rows), duplicated by the text/plain output below]
" - ], - "text/plain": [ - " geom attr1 attr2 attr3\n", - "0 POINT (-88.33149 32.32414) testattribute0 testattribute1 testattribute2\n", - "1 POINT (-88.17593 32.36076) testattribute0 testattribute1 testattribute2\n", - "2 POINT (-88.38895 32.35707) testattribute0 testattribute1 testattribute2\n", - "3 POINT (-88.22110 32.35078) testattribute0 testattribute1 testattribute2\n", - "4 POINT (-88.32399 32.95067) testattribute0 testattribute1 testattribute2" - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "point_gdf[:5]" ] @@ -503,7 +294,7 @@ }, { "cell_type": "code", - "execution_count": 91, + "execution_count": null, "metadata": { "tags": [] }, @@ -514,7 +305,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -527,117 +318,18 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------+--------------+--------------+----------------------------+\n", - "|attr1 |attr2 |attr3 |geom |\n", - "+--------------+--------------+--------------+----------------------------+\n", - "|testattribute0|testattribute1|testattribute2|POINT (-88.331492 32.324142)|\n", - "|testattribute0|testattribute1|testattribute2|POINT (-88.175933 32.360763)|\n", - "|testattribute0|testattribute1|testattribute2|POINT (-88.388954 32.357073)|\n", - "|testattribute0|testattribute1|testattribute2|POINT (-88.221102 32.35078) |\n", - "|testattribute0|testattribute1|testattribute2|POINT (-88.323995 32.950671)|\n", - "+--------------+--------------+--------------+----------------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "spatial_gdf.show(5, False)" ] }, { "cell_type": "code", - "execution_count": 22, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
-       [HTML table rendering trimmed: DataFrame preview (columns attr1, attr2, attr3, geom; first 5 rows), duplicated by the text/plain output below]
" - ], - "text/plain": [ - " attr1 attr2 attr3 geom\n", - "0 testattribute0 testattribute1 testattribute2 POINT (-88.33149 32.32414)\n", - "1 testattribute0 testattribute1 testattribute2 POINT (-88.17593 32.36076)\n", - "2 testattribute0 testattribute1 testattribute2 POINT (-88.38895 32.35707)\n", - "3 testattribute0 testattribute1 testattribute2 POINT (-88.22110 32.35078)\n", - "4 testattribute0 testattribute1 testattribute2 POINT (-88.32399 32.95067)" - ] - }, - "execution_count": 22, - "metadata": {}, - "output_type": "execute_result" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "gpd.GeoDataFrame(spatial_gdf.toPandas(), geometry=\"geom\")[:5]" ] @@ -651,7 +343,7 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -667,7 +359,7 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -676,90 +368,9 @@ }, { "cell_type": "code", - "execution_count": 25, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
-       [HTML table rendering trimmed: DataFrame preview (columns geometry, attr1, attr2, attr3; first 5 rows), duplicated by the text/plain output below]
" - ], - "text/plain": [ - " geometry attr1 attr2 attr3\n", - "0 POINT (-88.33149 32.32414) testattribute0 testattribute1 testattribute2\n", - "1 POINT (-88.17593 32.36076) testattribute0 testattribute1 testattribute2\n", - "2 POINT (-88.38895 32.35707) testattribute0 testattribute1 testattribute2\n", - "3 POINT (-88.22110 32.35078) testattribute0 testattribute1 testattribute2\n", - "4 POINT (-88.32399 32.95067) testattribute0 testattribute1 testattribute2" - ] - }, - "execution_count": 25, - "metadata": {}, - "output_type": "execute_result" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "gpd.GeoDataFrame(geo_df.toPandas(), geometry=\"geometry\")[:5]" ] @@ -785,7 +396,7 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -797,20 +408,9 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 27, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "rectangle_rdd.analyze()\n", "point_rdd.analyze()\n", @@ -834,20 +434,9 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 28, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "point_rdd.spatialPartitioning(GridType.KDBTREE)" ] @@ -868,7 +457,7 @@ }, { "cell_type": "code", - "execution_count": 29, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -916,7 +505,7 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -939,83 +528,36 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "MapPartitionsRDD[63] at map at FlatPairRddConverter.scala:30" - ] - }, - "execution_count": 31, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "result" ] }, { "cell_type": "code", - "execution_count": 32, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[[Geometry: Polygon userData: , Geometry: Point userData: ],\n", - " [Geometry: Polygon userData: , Geometry: Point userData: ]]" - ] - }, - "execution_count": 32, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "result.take(2)" ] }, { "cell_type": "code", - "execution_count": 33, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[[Geometry: Polygon userData: , Geometry: Point userData: ],\n", - " [Geometry: Polygon userData: , Geometry: Point userData: ],\n", - " [Geometry: Polygon userData: , Geometry: Point userData: ]]" - ] - }, - "execution_count": 33, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "result.collect()[:3]" ] }, { "cell_type": "code", - "execution_count": 34, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[0.0, 0.0, 0.0, 0.0, 0.0]" - ] - }, - "execution_count": 34, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# getting distance using SpatialObjects\n", "result.map(lambda x: x[0].geom.distance(x[1].geom)).take(5)" @@ -1023,24 +565,9 @@ }, { "cell_type": "code", - 
"execution_count": 35, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[0.054270513955000516,\n", - " 0.054270513955000516,\n", - " 0.051572544132000575,\n", - " 0.051572544132000575,\n", - " 0.051572544132000575]" - ] - }, - "execution_count": 35, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# getting area of polygon data\n", "result.map(lambda x: x[0].geom.area).take(5)" @@ -1048,7 +575,7 @@ }, { "cell_type": "code", - "execution_count": 36, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1057,7 +584,7 @@ }, { "cell_type": "code", - "execution_count": 37, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1071,27 +598,9 @@ }, { "cell_type": "code", - "execution_count": 38, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+\n", - "| geom_left| geom_right|\n", - "+--------------------+--------------------+\n", - "|POLYGON ((-86.860...|POINT (-86.818935...|\n", - "|POLYGON ((-86.860...|POINT (-86.794378...|\n", - "|POLYGON ((-87.082...|POINT (-87.059583...|\n", - "|POLYGON ((-87.082...|POINT (-86.818935...|\n", - "|POLYGON ((-87.082...|POINT (-86.932213...|\n", - "+--------------------+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "# Set verifySchema to False\n", "spatial_join_result = result.map(lambda x: [x[0].geom, x[1].geom])\n", @@ -1100,7 +609,7 @@ }, { "cell_type": "code", - "execution_count": 39, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1109,20 +618,9 @@ }, { "cell_type": "code", - "execution_count": 40, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- geom_left: geometry (nullable = false)\n", - " |-- geom_right: geometry (nullable = false)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "sedona.createDataFrame(spatial_join_result, schema, verifySchema=False).printSchema()" ] @@ -1136,27 +634,9 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+-----+--------------------+-----+\n", - "| geom_1|attr1| geom_2|attr2|\n", - "+--------------------+-----+--------------------+-----+\n", - "|POLYGON ((-86.860...| |POINT (-86.818935...| |\n", - "|POLYGON ((-86.860...| |POINT (-86.794378...| |\n", - "|POLYGON ((-87.082...| |POINT (-87.059583...| |\n", - "|POLYGON ((-87.082...| |POINT (-86.818935...| |\n", - "|POLYGON ((-87.082...| |POINT (-86.932213...| |\n", - "+--------------------+-----+--------------------+-----+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "Adapter.toDf(result, [\"attr1\"], [\"attr2\"], sedona).show(5, True)" ] @@ -1170,22 +650,9 @@ }, { "cell_type": "code", - "execution_count": 42, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- geom_1: geometry (nullable = true)\n", - " |-- attr1: string (nullable = true)\n", - " |-- geom_2: geometry (nullable = true)\n", - " |-- attr2: string (nullable = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "Adapter.toDf(result, [\"attr1\"], [\"attr2\"], sedona).printSchema()" ] @@ -1207,7 +674,7 @@ }, { 
"cell_type": "code", - "execution_count": 43, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1217,7 +684,7 @@ }, { "cell_type": "code", - "execution_count": 44, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1226,7 +693,7 @@ }, { "cell_type": "code", - "execution_count": 45, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1236,7 +703,7 @@ }, { "cell_type": "code", - "execution_count": 46, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1248,31 +715,9 @@ }, { "cell_type": "code", - "execution_count": 47, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+----------------+\n", - "| geometry|number_of_points|\n", - "+--------------------+----------------+\n", - "|POLYGON ((-87.082...| 12|\n", - "|POLYGON ((-87.092...| 5|\n", - "|POLYGON ((-87.114...| 15|\n", - "|POLYGON ((-86.697...| 1|\n", - "|POLYGON ((-87.285...| 26|\n", - "|POLYGON ((-87.229...| 7|\n", - "|POLYGON ((-86.816...| 6|\n", - "|POLYGON ((-86.749...| 4|\n", - "|POLYGON ((-87.105...| 15|\n", - "|POLYGON ((-86.860...| 12|\n", - "+--------------------+----------------+\n", - "\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "sedona.createDataFrame(number_of_points, schema, verifySchema=False).show()" ] @@ -1307,7 +752,7 @@ }, { "cell_type": "code", - "execution_count": 49, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1316,24 +761,9 @@ }, { "cell_type": "code", - "execution_count": 50, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[Geometry: Point userData: ,\n", - " Geometry: Point userData: ,\n", - " Geometry: Point userData: ,\n", - " Geometry: Point userData: ,\n", - " Geometry: Point userData: ]" - ] - }, - "execution_count": 50, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "result" ] @@ -1347,7 +777,7 @@ }, { "cell_type": "code", - "execution_count": 51, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1361,44 +791,18 @@ }, { "cell_type": "code", - "execution_count": 52, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[Geometry: Polygon userData: ,\n", - " Geometry: Polygon userData: ,\n", - " Geometry: Polygon userData: ,\n", - " Geometry: Polygon userData: ,\n", - " Geometry: Polygon userData: ]" - ] - }, - "execution_count": 52, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "polygons_nearby" ] }, { "cell_type": "code", - "execution_count": 53, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'POLYGON ((-83.993559 34.087259, -83.993559 34.131247, -83.959903 34.131247, -83.959903 34.087259, -83.993559 34.087259))'" - ] - }, - "execution_count": 53, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "polygons_nearby[0].geom.wkt" ] @@ -1424,7 +828,7 @@ }, { "cell_type": "code", - "execution_count": 54, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1433,24 +837,9 @@ }, { "cell_type": "code", - "execution_count": 55, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/sedona/core/geom/envelope.py:27: 
ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n", - " self.minx = minx\n", - "/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/sedona/core/geom/envelope.py:28: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n", - " self.maxx = maxx\n", - "/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/sedona/core/geom/envelope.py:29: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n", - " self.miny = miny\n", - "/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/sedona/core/geom/envelope.py:30: ShapelyDeprecationWarning: Setting custom attributes on geometry objects is deprecated, and will raise an AttributeError in Shapely 2.0\n", - " self.maxy = maxy\n" - ] - } - ], + "outputs": [], "source": [ "query_envelope = Envelope(-85.01, -60.01, 34.01, 50.01)\n", "\n", @@ -1459,52 +848,25 @@ }, { "cell_type": "code", - "execution_count": 56, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "MapPartitionsRDD[127] at map at GeometryRddConverter.scala:30" - ] - }, - "execution_count": 56, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "result_range_query" ] }, { "cell_type": "code", - "execution_count": 57, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[Geometry: LineString userData: ,\n", - " Geometry: LineString userData: ,\n", - " Geometry: LineString userData: ,\n", - " Geometry: LineString userData: ,\n", - " Geometry: LineString userData: ,\n", - " Geometry: LineString userData: ]" - ] - }, - "execution_count": 57, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "result_range_query.take(6)" ] }, { "cell_type": "code", - "execution_count": 58, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1513,7 +875,7 @@ }, { "cell_type": "code", - "execution_count": 59, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1522,27 +884,9 @@ }, { "cell_type": "code", - "execution_count": 60, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| geometry|\n", - "+--------------------+\n", - "|LINESTRING (-72.1...|\n", - "|LINESTRING (-72.4...|\n", - "|LINESTRING (-72.4...|\n", - "|LINESTRING (-73.4...|\n", - "|LINESTRING (-73.6...|\n", - "+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "sedona.createDataFrame(\n", " result_range_query.map(lambda x: [x.geom]),\n", @@ -1571,7 +915,7 @@ }, { "cell_type": "code", - "execution_count": 61, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1580,7 +924,7 @@ }, { "cell_type": "code", - "execution_count": 62, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1589,54 +933,25 @@ }, { "cell_type": "code", - "execution_count": 63, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 63, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "shape_rdd" ] }, { "cell_type": 
"code", - "execution_count": 64, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| geometry|\n", - "+--------------------+\n", - "|MULTIPOLYGON (((1...|\n", - "|MULTIPOLYGON (((-...|\n", - "|MULTIPOLYGON (((1...|\n", - "|POLYGON ((118.362...|\n", - "|MULTIPOLYGON (((-...|\n", - "+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "Adapter.toDf(shape_rdd, sedona).show(5, True)" ] }, { "cell_type": "code", - "execution_count": 65, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1654,7 +969,7 @@ }, { "cell_type": "code", - "execution_count": 66, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1663,54 +978,25 @@ }, { "cell_type": "code", - "execution_count": 67, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 67, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "geo_json_rdd" ] }, { "cell_type": "code", - "execution_count": 68, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+-------+--------+-------+--------+--------------------+------------+----+----+--------+\n", - "| geometry|STATEFP|COUNTYFP|TRACTCE|BLKGRPCE| AFFGEOID| GEOID|NAME|LSAD| ALAND|\n", - "+--------------------+-------+--------+-------+--------+--------------------+------------+----+----+--------+\n", - "|POLYGON ((-87.621...| 01| 077| 011501| 5|1500000US01077011...|010770115015| 5| BG| 6844991|\n", - "|POLYGON ((-85.719...| 01| 045| 021102| 4|1500000US01045021...|010450211024| 4| BG|11360854|\n", - "|POLYGON ((-86.000...| 01| 055| 001300| 3|1500000US01055001...|010550013003| 3| BG| 1378742|\n", - "|POLYGON ((-86.574...| 01| 089| 001700| 2|1500000US01089001...|010890017002| 2| BG| 1040641|\n", - "|POLYGON ((-85.382...| 01| 069| 041400| 1|1500000US01069041...|010690414001| 1| BG| 8243574|\n", - "+--------------------+-------+--------+-------+--------+--------------------+------------+----+----+--------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "Adapter.toDf(geo_json_rdd, sedona).drop(\"AWATER\").show(5, True)" ] }, { "cell_type": "code", - "execution_count": 69, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1719,7 +1005,7 @@ }, { "cell_type": "code", - "execution_count": 70, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1728,73 +1014,34 @@ }, { "cell_type": "code", - "execution_count": 71, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 71, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "wkt_rdd" ] }, { "cell_type": "code", - "execution_count": 72, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- geometry: geometry (nullable = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "Adapter.toDf(wkt_rdd, sedona).printSchema()" ] }, { "cell_type": "code", - "execution_count": 73, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| geometry|\n", - "+--------------------+\n", - "|POLYGON 
((-97.019...|\n", - "|POLYGON ((-123.43...|\n", - "|POLYGON ((-104.56...|\n", - "|POLYGON ((-96.910...|\n", - "|POLYGON ((-98.273...|\n", - "+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "Adapter.toDf(wkt_rdd, sedona).show(5, True)" ] }, { "cell_type": "code", - "execution_count": 74, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1803,7 +1050,7 @@ }, { "cell_type": "code", - "execution_count": 75, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1812,27 +1059,9 @@ }, { "cell_type": "code", - "execution_count": 76, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| geometry|\n", - "+--------------------+\n", - "|POLYGON ((-97.019...|\n", - "|POLYGON ((-123.43...|\n", - "|POLYGON ((-104.56...|\n", - "|POLYGON ((-96.910...|\n", - "|POLYGON ((-98.273...|\n", - "+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "Adapter.toDf(wkb_rdd, sedona).show(5, True)" ] @@ -1846,7 +1075,7 @@ }, { "cell_type": "code", - "execution_count": 77, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1860,7 +1089,7 @@ }, { "cell_type": "code", - "execution_count": 78, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1870,67 +1099,27 @@ }, { "cell_type": "code", - "execution_count": 79, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- leftgeometry: geometry (nullable = true)\n", - " |-- rightgeometry: geometry (nullable = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "geometry_df.printSchema()" ] }, { "cell_type": "code", - "execution_count": 80, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------------+\n", - "| leftgeometry| rightgeometry|\n", - "+--------------------+--------------------+\n", - "|POLYGON ((-87.229...|POINT (-87.204033...|\n", - "|POLYGON ((-87.229...|POINT (-87.204299...|\n", - "|POLYGON ((-87.229...|POINT (-87.19351 ...|\n", - "|POLYGON ((-87.229...|POINT (-87.18222 ...|\n", - "|POLYGON ((-87.285...|POINT (-87.28468 ...|\n", - "+--------------------+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "geometry_df.show(5)" ] }, { "cell_type": "code", - "execution_count": 81, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Row(leftgeometry=, rightgeometry=)" - ] - }, - "execution_count": 81, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "geometry_df.collect()[0]" ] @@ -1944,7 +1133,7 @@ }, { "cell_type": "code", - "execution_count": 82, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1953,27 +1142,9 @@ }, { "cell_type": "code", - "execution_count": 83, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+--------------+--------------------+---------------+\n", - "| leftgeometry|left_user_data| rightgeometry|right_user_data|\n", - "+--------------------+--------------+--------------------+---------------+\n", - "|POLYGON ((-87.229...| |POINT (-87.204033...| null|\n", - "|POLYGON ((-87.229...| |POINT 
(-87.204299...| null|\n", - "|POLYGON ((-87.229...| |POINT (-87.19351 ...| null|\n", - "|POLYGON ((-87.229...| |POINT (-87.18222 ...| null|\n", - "|POLYGON ((-87.285...| |POINT (-87.28468 ...| null|\n", - "+--------------------+--------------+--------------------+---------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "geometry_df.show(5)" ] @@ -1987,7 +1158,7 @@ }, { "cell_type": "code", - "execution_count": 84, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1998,7 +1169,7 @@ }, { "cell_type": "code", - "execution_count": 85, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -2008,53 +1179,25 @@ }, { "cell_type": "code", - "execution_count": 86, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| geometry|\n", - "+--------------------+\n", - "|LINESTRING (-72.1...|\n", - "|LINESTRING (-72.4...|\n", - "|LINESTRING (-72.4...|\n", - "|LINESTRING (-73.4...|\n", - "|LINESTRING (-73.6...|\n", - "+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "gdf.show(5)" ] }, { "cell_type": "code", - "execution_count": 87, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- geometry: geometry (nullable = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "gdf.printSchema()" ] }, { "cell_type": "code", - "execution_count": 88, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -2065,47 +1208,18 @@ }, { "cell_type": "code", - "execution_count": 89, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+---+\n", - "| geometry|_c1|\n", - "+--------------------+---+\n", - "|LINESTRING (-72.1...| |\n", - "|LINESTRING (-72.4...| |\n", - "|LINESTRING (-72.4...| |\n", - "|LINESTRING (-73.4...| |\n", - "|LINESTRING (-73.6...| |\n", - "+--------------------+---+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "gdf_with_columns.show(5)" ] }, { "cell_type": "code", - "execution_count": 90, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- geometry: geometry (nullable = true)\n", - " |-- _c1: string (nullable = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "gdf_with_columns.printSchema()" ] diff --git a/binder/ApacheSedonaSQL.ipynb b/binder/ApacheSedonaSQL.ipynb index f2352992bd..c277a00a3f 100644 --- a/binder/ApacheSedonaSQL.ipynb +++ b/binder/ApacheSedonaSQL.ipynb @@ -671,6 +671,33 @@ "execution_count": 40, "metadata": {}, "output_type": "execute_result" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "----------------------------------------\n", + "Exception occurred during processing of request from ('127.0.0.1', 60351)\n", + "Traceback (most recent call last):\n", + " File \"/Library/Developer/CommandLineTools/Library/Frameworks/Python3.framework/Versions/3.9/lib/python3.9/socketserver.py\", line 316, in _handle_request_noblock\n", + " self.process_request(request, client_address)\n", + " File \"/Library/Developer/CommandLineTools/Library/Frameworks/Python3.framework/Versions/3.9/lib/python3.9/socketserver.py\", line 347, in process_request\n", + " self.finish_request(request, client_address)\n", + " 
File \"/Library/Developer/CommandLineTools/Library/Frameworks/Python3.framework/Versions/3.9/lib/python3.9/socketserver.py\", line 360, in finish_request\n", + " self.RequestHandlerClass(request, client_address, self)\n", + " File \"/Library/Developer/CommandLineTools/Library/Frameworks/Python3.framework/Versions/3.9/lib/python3.9/socketserver.py\", line 747, in __init__\n", + " self.handle()\n", + " File \"/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/pyspark/accumulators.py\", line 281, in handle\n", + " poll(accum_updates)\n", + " File \"/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/pyspark/accumulators.py\", line 253, in poll\n", + " if func():\n", + " File \"/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/pyspark/accumulators.py\", line 257, in accum_updates\n", + " num_updates = read_int(self.rfile)\n", + " File \"/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/pyspark/serializers.py\", line 596, in read_int\n", + " raise EOFError\n", + "EOFError\n", + "----------------------------------------\n" + ] } ], "source": [ diff --git a/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb b/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb index c799353571..e104ac791b 100644 --- a/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb +++ b/binder/ApacheSedonaSQL_SpatialJoin_AirportsPerCountry.ipynb @@ -55,7 +55,7 @@ "source": [ "config = SedonaContext.builder() .\\\n", " config('spark.jars.packages',\n", - " 'org.apache.sedona:sedona-spark-shaded-3.0_2.12:1.5.0,'\n", + " 'org.apache.sedona:sedona-spark-shaded-3.4_2.12:1.5.0,'\n", " 'org.datasyslab:geotools-wrapper:1.5.0-28.2'). 
\\\n", " getOrCreate()\n", "\n", From 302e39e8da233323de0c293731f0c7afd7a9a52d Mon Sep 17 00:00:00 2001 From: Nilesh Gajwani Date: Tue, 31 Oct 2023 15:28:06 -0400 Subject: [PATCH 11/28] Add new raster notebook --- binder/ApacheSedonaRaster.ipynb | 730 ++++++++++++++++++-------------- 1 file changed, 420 insertions(+), 310 deletions(-) diff --git a/binder/ApacheSedonaRaster.ipynb b/binder/ApacheSedonaRaster.ipynb index 23f3a1cae5..cde6598223 100644 --- a/binder/ApacheSedonaRaster.ipynb +++ b/binder/ApacheSedonaRaster.ipynb @@ -2,6 +2,7 @@ "cells": [ { "cell_type": "markdown", + "id": "fbefc0bd-731b-43e4-b271-6cb4cba5c256", "metadata": {}, "source": [ "```\n", @@ -22,38 +23,40 @@ "```" ] }, + { + "cell_type": "markdown", + "id": "b443d3d3-1667-4770-b57c-7f79a3ea5d42", + "metadata": {}, + "source": [ + "## Import Sedona" + ] + }, { "cell_type": "code", "execution_count": null, + "id": "328d0b74-1efd-468c-bc96-a469965df60b", "metadata": { "tags": [] }, "outputs": [], "source": [ - "from IPython.display import display, HTML\n", - "from pyspark.sql import SparkSession\n", - "from pyspark import StorageLevel\n", - "import pandas as pd\n", - "from pyspark.sql.types import StructType, StructField,StringType, LongType, IntegerType, DoubleType, ArrayType\n", - "from pyspark.sql.functions import regexp_replace\n", - "from pyspark.sql.functions import col, split, expr\n", - "from pyspark.sql.functions import udf, lit\n", "from sedona.spark import *\n", - "from pyspark.sql.functions import col, split, expr\n", - "from pyspark.sql.functions import udf, lit\n", - "import os\n" + "from IPython.display import display, HTML" ] }, { "cell_type": "markdown", + "id": "f28c8117-069c-431c-ac58-6ff258b1196d", "metadata": {}, "source": [ - "# Create Spark Session for application" + "## Create a Sedona Context object.\n", + "If you already have a spark instance available, simply use ```SedonaContext.create(spark)```." ] }, { "cell_type": "code", "execution_count": null, + "id": "e3495923-7fb4-4a6e-b62e-a4eeb9c2b306", "metadata": { "tags": [] }, @@ -61,522 +64,629 @@ "source": [ "config = SedonaContext.builder() .\\\n", " config('spark.jars.packages',\n", - " 'org.apache.sedona:sedona-spark-shaded-3.0_2.12:1.5.0,'\n", + " 'org.apache.sedona:sedona-spark-shaded-3.4_2.12:1.5.0,'\n", " 'org.datasyslab:geotools-wrapper:1.5.0-28.2'). \\\n", " getOrCreate()\n", "\n", "sedona = SedonaContext.create(config)\n", "\n", - "sc = sedona.sparkContext\n" + "sc = sedona.sparkContext" ] }, { "cell_type": "markdown", + "id": "91d4e6ae-eeb6-46ca-89fd-8f82e6056924", "metadata": {}, "source": [ - "# Geotiff Loader \n", - "\n", - "1. Loader takes as input a path to directory which contains geotiff files or a path to particular geotiff file\n", - "2. 
Loader will read geotiff image in a struct named image which contains multiple fields as shown in the schema below which can be extracted using spark SQL" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# Path to directory of geotiff images \n", - "DATA_DIR = \"./data/raster/\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": true, - "tags": [] - }, - "outputs": [], - "source": [ - "df = sedona.read.format(\"geotiff\").option(\"dropInvalid\",True).option(\"readToCRS\", \"EPSG:4326\").option(\"disableErrorInCRS\", False).load(DATA_DIR)\n", - "df.printSchema()" + "## Read GeoTiff files" ] }, { "cell_type": "code", "execution_count": null, + "id": "58c05200-27f7-46ce-b2c5-4c1dc058c96e", "metadata": {}, "outputs": [], "source": [ - "df = df.selectExpr(\"image.origin as origin\",\"ST_GeomFromWkt(image.geometry) as Geom\", \"image.height as height\", \"image.width as width\", \"image.data as data\", \"image.nBands as bands\")\n", - "df.show(5)" + "geotiff_df = sedona.read.format(\"binaryFile\").load(\"data/raster/test5.tiff\")\n", + "geotiff_df.show(2)\n", + "geotiff_df.createOrReplaceTempView(\"binary_raster\")" ] }, { "cell_type": "markdown", + "id": "db66242c-d0b3-4348-b2ef-4344d266cb4c", "metadata": {}, "source": [ - "# Extract a particular band from geotiff dataframe using RS_GetBand()\n" + "## Create raster columns from the read binary data" ] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "id": "36eb9e36-cbcb-472a-96c6-79d49305cf66", + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "'''RS_GetBand() will fetch a particular band from given data array which is the concatenation of all the bands'''\n", - "\n", - "df = df.selectExpr(\"Geom\",\"RS_GetBand(data, 1,bands) as Band1\",\"RS_GetBand(data, 2,bands) as Band2\",\"RS_GetBand(data, 3,bands) as Band3\", \"RS_GetBand(data, 4,bands) as Band4\")\n", - "df.createOrReplaceTempView(\"allbands\")\n", - "df.show(5)" + "raster_df = sedona.sql(\"SELECT RS_FromGeoTiff(content) as raster from binary_raster\")\n", + "raster_df.show(2)\n", + "raster_df.createOrReplaceTempView(\"raster_table\")" ] }, { "cell_type": "markdown", + "id": "3932eb9e-aeb6-4abe-a986-f26a11eb1fe3", "metadata": {}, "source": [ - "# Map Algebra operations on band values" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "'''RS_NormalizedDifference can be used to calculate NDVI for a particular geotiff image since it uses same computational formula as ndvi'''\n", - "\n", - "NomalizedDifference = df.selectExpr(\"RS_NormalizedDifference(Band1, Band2) as normDiff\")\n", - "NomalizedDifference.show(5)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "'''RS_Mean() can used to calculate mean of piel values in a particular spatial band'''\n", - "meanDF = df.selectExpr(\"RS_Mean(Band1) as mean\")\n", - "meanDF.show(5)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "\"\"\" RS_Mode() is used to calculate mode in an array of pixels and returns a array of double with size 1 in case of unique mode\"\"\"\n", - "modeDF = df.selectExpr(\"RS_Mode(Band1) as mode\")\n", - "modeDF.show(5)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "'''RS_GreaterThan() is used to mask all 
the values with 1 which are greater than a particular threshold'''\n", - "greaterthanDF = sedona.sql(\"Select RS_GreaterThan(Band1,1000.0) as greaterthan from allbands\")\n", - "greaterthanDF.show()" + "## Operate on rasters using Sedona\n", + "Once a raster column is created, you're now free to use the entire catalog of Sedona's [raster functions](https://sedona.apache.org/1.5.0/api/sql/Raster-operators/). The following part of notebook contains a few examples." ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", + "id": "1b47699e-8ce4-4859-ace9-d12ea1f4d0b9", "metadata": {}, - "outputs": [], "source": [ - "'''RS_GreaterThanEqual() is used to mask all the values with 1 which are greater than a particular threshold'''\n", - "\n", - "greaterthanEqualDF = sedona.sql(\"Select RS_GreaterThanEqual(Band1,360.0) as greaterthanEqual from allbands\")\n", - "greaterthanEqualDF.show()" + "### Access raster metadata\n", + "[RS_MetaData](https://sedona.apache.org/1.5.0/api/sql/Raster-operators/#rs_metadata) can be used to view the loaded raster's metadata (orientation and georeferencing attributes)." ] }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "'''RS_LessThan() is used to mask all the values with 1 which are less than a particular threshold'''\n", - "lessthanDF = sedona.sql(\"Select RS_LessThan(Band1,1000.0) as lessthan from allbands\")\n", - "lessthanDF.show()" + "execution_count": 42, + "id": "6d635263-9e8b-4f74-9b91-d360d196b966", + "metadata": { + "tags": [] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[-180.0, 90.0, 1440.0, 720.0, 0.25, -0.25, 0.0, 0.0, 4326.0, 1.0]" + ] + }, + "execution_count": 42, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "raster_metadata = sedona.sql(\"SELECT RS_MetaData(raster) as metadata from raster_table\")\n", + "metadata = raster_metadata.first()[0]\n", + "raster_srid = metadata[8]\n", + "metadata" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", + "id": "713bc8db-3143-4a79-abb5-08ad81f9393a", "metadata": {}, - "outputs": [], "source": [ - "'''RS_LessThanEqual() is used to mask all the values with 1 which are less than equal to a particular threshold'''\n", - "lessthanEqualDF = sedona.sql(\"Select RS_LessThanEqual(Band1,2890.0) as lessthanequal from allbands\")\n", - "lessthanEqualDF.show()" + "### Visualize rasters\n", + "Sedona 1.5.0 provides [multiple ways to be able to visualize rasters](https://sedona.apache.org/1.5.0/api/sql/Raster-visualizer/). Throughout this notebook, [RS_AsImage](https://sedona.apache.org/1.5.0/api/sql/Raster-visualizer/#rs_asimage) will be used to visualize any changes to the rasters." 
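For readers decoding the metadata array above, a hedged unpacking of its ten entries in the order the Sedona 1.5.0 RS_MetaData docs list them; the notebook itself already relies on index 8 being the SRID:

# Upper-left corner, grid size, pixel scale, skew, SRID, and band count.
(upper_left_x, upper_left_y, grid_width, grid_height,
 scale_x, scale_y, skew_x, skew_y, srid, num_bands) = metadata
print(f"{int(grid_width)}x{int(grid_height)} pixels at {scale_x} deg/pixel, "
      f"EPSG:{int(srid)}, {int(num_bands)} band(s)")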
] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "id": "d5f615f4-a3d6-407c-aea9-58891c1e55e3", + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "'''RS_Add() can add two spatial bands together'''\n", - "sumDF = df.selectExpr(\"RS_Add(Band1, Band2) as sumOfBand\")\n", - "sumDF.show(5)" + "# Define a simple wrapper to display HTML in jupyter notebook environment\n", + "class SedonaUtils:\n", + " @classmethod\n", + " def display_image(cls, df):\n", + " display(HTML(df.toPandas().to_html(escape=False)))" ] }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "'''RS_Subtract() can subtract two spatial bands together'''\n", - "subtractDF = df.selectExpr(\"RS_Subtract(Band1, Band2) as diffOfBand\")\n", - "subtractDF.show(5)" + "execution_count": 43, + "id": "7fad137f-331c-4c2f-905d-dbc42cff11b6", + "metadata": { + "tags": [] + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
rs_asimage(raster, 500)
0
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "SedonaUtils.display_image(raster_df.selectExpr(\"RS_AsImage(raster, 500)\"))" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", + "id": "cef34e54-ac3c-48f3-836c-5a5385b79481", "metadata": {}, - "outputs": [], "source": [ - "'''RS_Multiply() can multiple two bands together'''\n", - "multiplyDF = df.selectExpr(\"RS_Multiply(Band1, Band2) as productOfBand\")\n", - "multiplyDF.show(5)" + "### Join based on raster predicates\n", + "Sedona 1.5.0 now supports join predicates between raster and geometry columns.\n", + "\n", + "Below is a simple example that carves a small rectangle from the existing raster and attempts to join it with the original raster" ] }, { "cell_type": "code", "execution_count": null, + "id": "6442349c-be2e-4609-a16e-b856745ddf46", "metadata": {}, "outputs": [], "source": [ - "'''RS_Divide() can divide two bands together'''\n", - "divideDF = df.selectExpr(\"RS_Divide(Band1, Band2) as divisionOfBand\")\n", - "divideDF.show(5)" + "(width, height) = sedona.sql(\"SELECT RS_Width(raster) as width, RS_Height(raster) as height from raster_table\").first()\n", + "(p1X, p1Y) = sedona.sql(f\"SELECT RS_RasterToWorldCoordX(raster, {width / 2}, {height / 2}) \\\n", + " as pX, RS_RasterToWorldCoordY(raster, {width / 2}, {height / 2}) as pY from raster_table\").first()\n", + "(p2X, p2Y) = sedona.sql(f\"SELECT RS_RasterToWorldCoordX(raster, {(width / 2) + 2}, {height / 2}) \\\n", + " as pX, RS_RasterToWorldCoordY(raster, {(width / 2) + 2}, {height / 2}) as pY from raster_table\").first()\n", + "(p3X, p3Y) = sedona.sql(f\"SELECT RS_RasterToWorldCoordX(raster, {width / 2}, {(height / 2) + 2}) \\\n", + " as pX, RS_RasterToWorldCoordY(raster, {width / 2}, {(height / 2) + 2}) as pY from raster_table\").first()\n", + "(p4X, p4Y) = sedona.sql(f\"SELECT RS_RasterToWorldCoordX(raster, {(width / 2) + 2}, {(height / 2) + 2}) \\\n", + " as pX, RS_RasterToWorldCoordY(raster, {(width / 2) + 2}, {(height / 2) + 2}) as pY from raster_table\").first() " ] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "id": "ed399ee8-42b7-488b-8141-320c2bf6d9c3", + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "'''RS_MultiplyFactor() will multiply a factor to a spatial band'''\n", - "mulfacDF = df.selectExpr(\"RS_MultiplyFactor(Band2, 2) as target\")\n", - "mulfacDF.show(5)" + "geom_wkt = f\"SRID={int(raster_srid)};POLYGON (({p1X} {p1Y}, {p2X} {p2Y}, {p3X} {p3Y}, {p4X} {p4Y}, {p1X} {p1Y}))\"" ] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "id": "cb8aa25f-4706-4ee7-9994-3da474c3eb2c", + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "'''RS_BitwiseAND() will return AND between two values of Bands'''\n", - "bitwiseAND = df.selectExpr(\"RS_BitwiseAND(Band1, Band2) as AND\")\n", - "bitwiseAND.show(5)" + "geom_df = sedona.sql(f\"SELECT ST_GeomFromEWKT('{geom_wkt}') as geom\")\n", + "geom_df.createOrReplaceTempView(\"geom_table\")" ] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "id": "6461c14c-d479-4c64-8f8f-8c21903dedf5", + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "'''RS_BitwiseOR() will return OR between two values of Bands'''\n", - "bitwiseOR = df.selectExpr(\"RS_BitwiseOR(Band1, Band2) as OR\")\n", - "bitwiseOR.show(5)" + "joined_df = sedona.sql(\"SELECT g.geom from raster_table r, geom_table g where RS_Intersects(r.raster, g.geom)\")\n", + 
"joined_df.show()" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", + "id": "9be5e7db-17e5-4bab-b7a3-8ee278374355", "metadata": {}, - "outputs": [], "source": [ - "'''RS_Count() will calculate the total number of occurrence of a target value'''\n", - "countDF = df.selectExpr(\"RS_Count(RS_GreaterThan(Band1,1000.0), 1.0) as count\")\n", - "countDF.show(5)" + "### Interoperability between raster and vector data types\n", + "Sedona allows for conversions from raster to geometry and vice-versa. " ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", + "id": "cc0bfd1c-7117-444a-8189-881da19846c9", "metadata": {}, - "outputs": [], "source": [ - "'''RS_Modulo() will calculate the modulus of band value with respect to a given number'''\n", - "moduloDF = df.selectExpr(\"RS_Modulo(Band1, 21.0) as modulo \")\n", - "moduloDF.show(5)" + "### Convert a raster to vector using convex hull\n", + "A convex hull geometry can be created out of a raster using [RS_ConvexHull](https://sedona.apache.org/1.5.0/api/sql/Raster-operators/#rs_convexhull)\n", + "\n", + "Additionally, if a raster has noDataValue specified, and you wish to tighten the convexhull to exclude noDataValue boundaries, [RS_MinConvexHull](https://sedona.apache.org/1.5.0/api/sql/Raster-operators/#rs_minconvexhull) can be used." ] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "id": "22b9dd16-f720-4fa4-acb9-b80c34702a93", + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "'''RS_SquareRoot() will calculate calculate square root of all the band values up to two decimal places'''\n", - "rootDF = df.selectExpr(\"RS_SquareRoot(Band1) as root\")\n", - "rootDF.show(5)\n" + "raster_convex_hull = sedona.sql(\"SELECT RS_ConvexHull(raster) as convex_hull from raster_table\")\n", + "raster_min_convex_hull = sedona.sql(\"SELECT RS_MinConvexHull(raster) as min_convex_hull from raster_table\")\n", + "raster_convex_hull.show(truncate=False)\n", + "raster_min_convex_hull.show(truncate=False)" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", + "id": "8ca7e862-45c9-4559-a2e1-4e044d6b5c84", "metadata": {}, - "outputs": [], "source": [ - "'''RS_LogicalDifference() will return value from band1 if value at that particular location is not equal tp band1 else it will return 0'''\n", - "logDiff = df.selectExpr(\"RS_LogicalDifference(Band1, Band2) as loggDifference\")\n", - "logDiff.show(5)" + "### Convert a geometry to raster (Rasterize a geometry)\n", + "A geometry can be converted to a raster using [RS_AsRaster](https://sedona.apache.org/1.5.0/api/sql/Raster-writer/#rs_asraster)" ] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "id": "8bc32fc6-d418-4e7c-8631-57e2c623f14c", + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "'''RS_LogicalOver() will iterate over two bands and return value of first band if it is not equal to 0 else it will return value from later band'''\n", - "logOver = df.selectExpr(\"RS_LogicalOver(Band3, Band2) as logicalOver\")\n", - "logOver.show(5)" + "rasterized_geom_df = sedona.sql(\"SELECT RS_AsRaster(ST_GeomFromWKT('POLYGON((150 150, 220 260, 190 300, 300 220, 150 150))'), r.raster, 'b', 230) as rasterized_geom from raster_table r\")\n", + "rasterized_geom_df.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "id": "a7eecae9-3763-405f-a22e-c7d77ff703b0", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + 
"23/10/31 15:21:06 WARN VectorToRasterProcess: coercing double feature values to float raster values\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
rasterized_geom
0
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "SedonaUtils.display_image(rasterized_geom_df.selectExpr(\"RS_AsImage(rasterized_geom, 250) as rasterized_geom\"))" ] }, { "cell_type": "markdown", + "id": "df954a81-5004-40f7-b80e-795f8569757c", "metadata": {}, "source": [ - "# Visualising Geotiff Images\n", + "### Perform Map Algebra operations\n", + "Sedona provides two ways to perform [Map Algebra](https://sedona.apache.org/1.5.0/api/sql/Raster-map-algebra/) on rasters:\n", + "1. Using RS_MapAlgebra (preferred for simpler algebraic functions)\n", + "2. Using RS_BandAsArray and array based map algebra functions such as RS_Add, RS_Multiply (Useful for complex algebriac functions involving mutating each grid value differently.)\n", "\n", - "1. Normalize the bands in range [0-255] if values are greater than 255\n", - "2. Process image using RS_Base64() which converts in into a base64 string\n", - "3. Embed results of RS_Base64() in RS_HTML() to embed into IPython notebook\n", - "4. Process results of RS_HTML() as below:" + "The following example illustrates how RS_MapAlgebra can be used. \n", + "This example uses jiffle script to invert the colors of the above illustrated rasterized geometry." ] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "id": "53abef31-b1aa-42ef-8eb0-f1d9227e3893", + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "'''Plotting images as a dataframe using geotiff Dataframe.'''\n", - "\n", - "df = sedona.read.format(\"geotiff\").option(\"dropInvalid\",True).option(\"readToCRS\", \"EPSG:4326\").load(DATA_DIR)\n", - "df = df.selectExpr(\"image.origin as origin\",\"ST_GeomFromWkt(image.geometry) as Geom\", \"image.height as height\", \"image.width as width\", \"image.data as data\", \"image.nBands as bands\")\n", - "\n", - "df = df.selectExpr(\"RS_GetBand(data,1,bands) as targetband\", \"height\", \"width\", \"bands\", \"Geom\")\n", - "df_base64 = df.selectExpr(\"Geom\", \"RS_Base64(height,width,RS_Normalize(targetBand), RS_Array(height*width,0.0), RS_Array(height*width, 0.0)) as red\",\"RS_Base64(height,width,RS_Array(height*width, 0.0), RS_Normalize(targetBand), RS_Array(height*width, 0.0)) as green\", \"RS_Base64(height,width,RS_Array(height*width, 0.0), RS_Array(height*width, 0.0), RS_Normalize(targetBand)) as blue\",\"RS_Base64(height,width,RS_Normalize(targetBand), RS_Normalize(targetBand),RS_Normalize(targetBand)) as RGB\" )\n", - "df_HTML = df_base64.selectExpr(\"Geom\",\"RS_HTML(red) as RedBand\",\"RS_HTML(blue) as BlueBand\",\"RS_HTML(green) as GreenBand\", \"RS_HTML(RGB) as CombinedBand\")\n", - "df_HTML.show(5)" + "raster_white_bg = rasterized_geom_df.selectExpr(\"RS_MapAlgebra(rasterized_geom, NULL, 'out[0] = rast[0] == 0 ? 230 : 0;') as raster\")" ] }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "display(HTML(df_HTML.limit(2).toPandas().to_html(escape=False)))" + "execution_count": 45, + "id": "75f06a1b-1ab6-478b-a50e-b621a10d6d8b", + "metadata": { + "tags": [] + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "23/10/31 15:21:09 WARN VectorToRasterProcess: coercing double feature values to float raster values\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
resampled_raster
0
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "SedonaUtils.display_image(raster_white_bg.selectExpr(\"RS_AsImage(raster, 250) as resampled_raster\"))" ] }, { "cell_type": "markdown", + "id": "fde725ec-2941-4b6e-9b52-5fd35cea6c01", "metadata": {}, "source": [ - "# Writing GeoTiff Images" + "### Resample a raster.\n", + "Sedona 1.5.0 supports resampling a raster to different height/width or scale. It also supports changing the pivot of the raster.\n", + "\n", + "Refer to [RS_Resample](https://sedona.apache.org/1.5.0/api/sql/Raster-operators/#rs_resample) documentation for more details.\n", + "\n", + "This simple example changes the resolution of the loaded raster to 1000*1000" ] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "id": "c8fdb8c7-52d5-49fa-83f2-44a9438bd509", + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "'''Writing GeoTiff DataFrames as GeoTiff Images'''\n", - "\n", - "df = sedona.read.format(\"geotiff\").option(\"dropInvalid\",True).option(\"readToCRS\", \"EPSG:4326\").load(DATA_DIR)\n", - "df = df.selectExpr(\"image.origin as origin\",\"ST_GeomFromWkt(image.geometry) as Geom\", \"image.height as height\", \"image.width as width\", \"image.data as data\", \"image.nBands as bands\")\n", - "\n", - "SAVE_PATH = \"./data/raster-written/\"\n", - "df.write.mode(\"overwrite\").format(\"geotiff\").option(\"writeToCRS\", \"EPSG:4326\").option(\"fieldGeometry\", \"Geom\").option(\"fieldNBands\", \"bands\").save(SAVE_PATH)" + "resampled_raster_df = sedona.sql(\"SELECT RS_Resample(raster, 1000, 1000, false, 'NearestNeighbor') as resampled_raster from raster_table\")" ] }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 47, + "id": "b14820dc-ed04-41cd-9220-73a5179f52df", + "metadata": { + "tags": [] + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
resampled_raster
0
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "SedonaUtils.display_image(resampled_raster_df.selectExpr(\"RS_AsImage(resampled_raster, 500) as resampled_raster\"))" + ] + }, + { + "cell_type": "code", + "execution_count": 46, + "id": "bee36339-d0c1-469d-9354-980a23f24401", + "metadata": { + "tags": [] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+------------------------------------------------------------------+\n", + "|resampled_raster_metadata |\n", + "+------------------------------------------------------------------+\n", + "|[-180.0, 90.0, 1000.0, 1000.0, 0.36, -0.18, 0.0, 0.0, 4326.0, 1.0]|\n", + "+------------------------------------------------------------------+\n", + "\n" + ] + } + ], "source": [ - "'''Writing GeoTiff Images in a Single Partition'''\n", - "df.coalesce(1).write.mode(\"overwrite\").format(\"geotiff\").option(\"writeToCRS\", \"EPSG:4326\").option(\"fieldGeometry\", \"Geom\").option(\"fieldNBands\", \"bands\").save(SAVE_PATH)" + "resampled_raster_df.selectExpr(\"RS_MetaData(resampled_raster) as resampled_raster_metadata\").show(truncate=False)" ] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "id": "2b0aa64e-4a02-4c85-9ba5-6459d2002f8a", + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "'''Find the Partition of the Written GeoTiff Images.\n", - " If you did not write with coalesce(1), change the below code to adjust the writtenPath'''\n", - "writtenPath = SAVE_PATH\n", - "dirList = os.listdir(writtenPath)\n", - "for item in dirList:\n", - " if os.path.isdir(writtenPath + \"/\" + item):\n", - " writtenPath += \"/\" + item\n", - " break" + "# Load another raster for some more examples\n", + "elevation_raster_df = sedona.read.format('binaryFile').load('data/raster/test1.tiff')\n", + "elevation_raster_df.createOrReplaceTempView(\"elevation_raster_binary\")" ] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "id": "623123ac-98bc-4d51-828d-9d874cc6f471", + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "'''Load and Visualize Written GeoTiff Image.'''\n", - "\n", - "df = sedona.read.format(\"geotiff\").option(\"dropInvalid\",True).option(\"readToCRS\", \"EPSG:4326\").load(writtenPath)\n", - "df = df.selectExpr(\"image.origin as origin\",\"ST_GeomFromWkt(image.geometry) as Geom\", \"image.height as height\", \"image.width as width\", \"image.data as data\", \"image.nBands as bands\")\n", - "\n", - "df = df.selectExpr(\"RS_GetBand(data,1,bands) as targetband\", \"height\", \"width\", \"bands\", \"Geom\")\n", - "df_base64 = df.selectExpr(\"Geom\", \"RS_Base64(height,width,RS_Normalize(targetBand), RS_Array(height*width,0.0), RS_Array(height*width, 0.0)) as red\",\"RS_Base64(height,width,RS_Array(height*width, 0.0), RS_Normalize(targetBand), RS_Array(height*width, 0.0)) as green\", \"RS_Base64(height,width,RS_Array(height*width, 0.0), RS_Array(height*width, 0.0), RS_Normalize(targetBand)) as blue\",\"RS_Base64(height,width,RS_Normalize(targetBand), RS_Normalize(targetBand),RS_Normalize(targetBand)) as RGB\" )\n", - "df_HTML = df_base64.selectExpr(\"Geom\",\"RS_HTML(red) as RedBand\",\"RS_HTML(blue) as BlueBand\",\"RS_HTML(green) as GreenBand\", \"RS_HTML(RGB) as CombinedBand\")\n", - "display(HTML(df_HTML.limit(2).toPandas().to_html(escape=False)))" + "elevation_raster_df = sedona.sql(\"SELECT RS_FromGeoTiff(content) as raster from elevation_raster_binary\")\n", + 
"elevation_raster_df.createOrReplaceTempView(\"elevation_raster\")" ] }, { "cell_type": "markdown", + "id": "2a6afdf3-e774-432f-96a3-96a4ca8249c7", "metadata": {}, "source": [ - "# Transformation of GeoTiff Images" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "'''First load GeoTiff Images'''\n", - "df = sedona.read.format(\"geotiff\").option(\"dropInvalid\",True).option(\"readToCRS\", \"EPSG:4326\").option(\"disableErrorInCRS\", False).load(DATA_DIR)\n", - "df = df.selectExpr(\"image.origin as origin\",\"ST_GeomFromWkt(image.geometry) as geom\", \"image.height as height\", \"image.width as width\", \"image.data as data\", \"image.nBands as bands\")\n", - "df.show(5)" + "### Access individual values from rasters\n", + "Sedona provides [RS_Value](https://sedona.apache.org/1.5.0/api/sql/Raster-operators/#rs_value) and [RS_Values](https://sedona.apache.org/1.5.0/api/sql/Raster-operators/#rs_values) that allow accessing raster values at given geometrical point(s).\n", + "\n", + "The following example extracts raster values at specific geographical points." ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 48, + "id": "ffe589e1-50b7-431a-ba84-b2c297b77f65", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------+\n", + "| raster_values|\n", + "+--------------+\n", + "|[115.0, 148.0]|\n", + "+--------------+\n", + "\n" + ] + } + ], "source": [ - "# First extract the bands for which normalized difference index needs to be calculated\n", - "df = df.selectExpr(\"origin\", \"geom\", \"width\", \"height\", \"data\", \"bands\", \"RS_GetBand(data, 1, bands) as band1\", \"RS_GetBand(data, 2, bands) as band2\")\n", - "# Get the normalized difference index between the extracted bands\n", - "df = df.selectExpr(\"origin\", \"geom\", \"width\", \"height\", \"data\", \"bands\", \"RS_NormalizedDifference(band2, band1) as normalizedDifference\")\n", - "df.show(5)" + "point_wkt_1 = 'SRID=3857;POINT (-13095600.809482181 4021100.7487925636)'\n", + "point_wkt_2 = 'SRID=3857;POINT (-13095500.809482181 4021000.7487925636)'\n", + "point_df = sedona.sql(\"SELECT ST_GeomFromEWKT('{}') as point_1, ST_GeomFromEWKT('{}') as point_2\".format(point_wkt_1, point_wkt_2))\n", + "point_df.createOrReplaceTempView(\"point_table\")\n", + "test_df = sedona.sql(\"SELECT RS_Values(raster, Array(point_1, point_2)) as raster_values from elevation_raster, point_table\")\n", + "test_df.show()" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", + "id": "8643ed69-9128-49a9-80e7-f9115694695f", "metadata": {}, - "outputs": [], "source": [ - "'''RS_Append() takes the data array containing bands, a new band to be appended, and number of total bands in the data array.\n", - " It appends the new band to the end of the data array and returns the appended data'''\n", - "\n", - "df = df.selectExpr(\"origin\", \"geom\", \"RS_Append(data, normalizedDifference, bands) as data_edited\", \"height\", \"width\", \"bands\").drop(\"data\")\n", - "df = df.withColumn(\"nBand_edited\", col(\"bands\") + 1).drop(\"bands\")\n", - "df.show()" + "### Extract individual bands from rasters\n", + "[RS_BandAsArray](https://sedona.apache.org/1.5.0/api/sql/Raster-operators/#rs_bandasarray) can be used to extract entire band values from a given raster" ] }, { "cell_type": "code", "execution_count": null, + "id": "48f56157-ab07-456e-83fe-75d23f5bb28e", "metadata": {}, 
"outputs": [], "source": [ - "'''Writing GeoTiff DataFrames as GeoTiff Images'''\n", - "SAVE_PATH = \"./data/raster-written/\"\n", - "df.coalesce(1).write.mode(\"overwrite\").format(\"geotiff\").option(\"writeToCRS\", \"EPSG:4326\").option(\"fieldGeometry\", \"geom\").option(\"fieldNBands\", \"nBand_edited\").option(\"fieldData\", \"data_edited\").save(SAVE_PATH)" + "band = elevation_raster_df.selectExpr(\"RS_BandAsArray(raster, 1)\").first()[0]\n", + "print(band,) #Print entire band as an array horizontally" ] }, { "cell_type": "markdown", + "id": "e586b0e5-935a-47fa-8ebf-b63ddd9a48a8", "metadata": {}, "source": [ - "# User can also create some UDF manually to manipulate Geotiff dataframes" + "### Visualize Raster MBRs" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 50, + "id": "2a2c7086-9588-48a7-a710-c10b8c5e4875", "metadata": {}, "outputs": [], "source": [ - "'''Sample UDF calculates sum of all the values in a band which are greater than 1000.0'''\n", - "\n", - "def SumOfValues(band):\n", - " total = 0.0\n", - " for num in band:\n", - " if num>1000.0:\n", - " total+=1\n", - " return total\n", - "\n", - "df = sedona.read.format(\"geotiff\").option(\"dropInvalid\",True).option(\"readToCRS\", \"EPSG:4326\").load(DATA_DIR)\n", - "df = df.selectExpr(\"image.origin as origin\",\"ST_GeomFromWkt(image.geometry) as Geom\", \"image.height as height\", \"image.width as width\", \"image.data as data\", \"image.nBands as bands\")\n", - "df = df.selectExpr(\"RS_GetBand(data,1,bands) as targetband\", \"height\", \"width\", \"bands\", \"Geom\")\n", - " \n", - "calculateSum = udf(SumOfValues, DoubleType())\n", - "sedona.udf.register(\"RS_Sum\", calculateSum)\n", - "\n", - "sumDF = df.selectExpr(\"RS_Sum(targetband) as sum\")\n", - "sumDF.show()" + "# Convert raster to its convex hull and transform it to EPSG:4326 to be able to visualize\n", + "raster_mbr_df = elevation_raster_df.selectExpr(\"ST_Transform(RS_ConvexHull(raster), 'EPSG:3857', 'EPSG:4326') as raster_mbr\")" ] }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "'''Sample UDF to visualize a particular region of a GeoTiff image'''\n", - "\n", - "def generatemask(band, width,height):\n", - " for (i,val) in enumerate(band):\n", - " if (i%width>=12 and i%width<26) and (i%height>=12 and i%height<26):\n", - " band[i] = 255.0\n", - " else:\n", - " band[i] = 0.0\n", - " return band\n", - "\n", - "maskValues = udf(generatemask, ArrayType(DoubleType()))\n", - "sedona.udf.register(\"RS_MaskValues\", maskValues)\n", - "\n", - "\n", - "df_base64 = df.selectExpr(\"Geom\", \"RS_Base64(height,width,RS_Normalize(targetband), RS_Array(height*width,0.0), RS_Array(height*width, 0.0), RS_MaskValues(targetband,width,height)) as region\" )\n", - "df_HTML = df_base64.selectExpr(\"Geom\",\"RS_HTML(region) as selectedregion\")\n", - "display(HTML(df_HTML.limit(2).toPandas().to_html(escape=False)))\n" + "execution_count": 51, + "id": "6f39b3db-a0b1-4842-a5ca-b5a5850f3ea7", + "metadata": { + "tags": [] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "User Guide: https://docs.kepler.gl/docs/keplergl-jupyter\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "8ec0e5aca1954d36abe75cbe8703ba57", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "KeplerGl(data={'RasterMBR': {'index': [0], 'columns': ['geometry'], 'data': [['POLYGON ((-117.6417329630247508…" + ] + }, + "metadata": {}, + "output_type": 
"display_data" + } + ], + "source": [ + "sedona_kepler_map_elevation = SedonaKepler.create_map(df=raster_mbr_df, name='RasterMBR')\n", + "sedona_kepler_map_elevation" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { @@ -599,5 +709,5 @@ } }, "nbformat": 4, - "nbformat_minor": 4 + "nbformat_minor": 5 } From 0eecbdd1a7c894c75e38d13df4bcd85c76d06077 Mon Sep 17 00:00:00 2001 From: Nilesh Gajwani Date: Tue, 31 Oct 2023 16:56:29 -0400 Subject: [PATCH 12/28] add s3 jars to postBuild --- binder/postBuild | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/binder/postBuild b/binder/postBuild index 2ed713ded2..454ed01a42 100644 --- a/binder/postBuild +++ b/binder/postBuild @@ -1,6 +1,9 @@ #Download Apache Spark -wget https://archive.apache.org/dist/spark/spark-3.3.2/spark-3.4.0-bin-hadoop3.tgz +wget https://archive.apache.org/dist/spark/spark-3.4.0/spark-3.4.0-bin-hadoop3.tgz tar -xzf spark-3.4.0-bin-hadoop3.tgz +curl https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.3.4/hadoop-aws-3.3.4.jar -o ${SPARK_HOME}/jars/hadoop-aws-3.3.4.jar +curl https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/1.12.402/aws-java-sdk-bundle-1.12.402.jar -o ${SPARK_HOME}/jars/aws-java-sdk-bundle-1.12.402.jar + #Tidy up rm spark-3.4.0-bin-hadoop3.tgz \ No newline at end of file From 48be86d3148e18e61209d3bf4450b93619aaa4e5 Mon Sep 17 00:00:00 2001 From: Nilesh Gajwani Date: Tue, 31 Oct 2023 18:38:05 -0400 Subject: [PATCH 13/28] Revert "add s3 jars to postBuild" This reverts commit 0eecbdd1a7c894c75e38d13df4bcd85c76d06077. --- binder/postBuild | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/binder/postBuild b/binder/postBuild index 454ed01a42..2ed713ded2 100644 --- a/binder/postBuild +++ b/binder/postBuild @@ -1,9 +1,6 @@ #Download Apache Spark -wget https://archive.apache.org/dist/spark/spark-3.4.0/spark-3.4.0-bin-hadoop3.tgz +wget https://archive.apache.org/dist/spark/spark-3.3.2/spark-3.4.0-bin-hadoop3.tgz tar -xzf spark-3.4.0-bin-hadoop3.tgz -curl https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.3.4/hadoop-aws-3.3.4.jar -o ${SPARK_HOME}/jars/hadoop-aws-3.3.4.jar -curl https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/1.12.402/aws-java-sdk-bundle-1.12.402.jar -o ${SPARK_HOME}/jars/aws-java-sdk-bundle-1.12.402.jar - #Tidy up rm spark-3.4.0-bin-hadoop3.tgz \ No newline at end of file From 96504a0009830483e67a1f4ea2ccad49e5e71678 Mon Sep 17 00:00:00 2001 From: Nilesh Gajwani Date: Tue, 31 Oct 2023 18:40:01 -0400 Subject: [PATCH 14/28] change spark version in wget postbuild --- binder/postBuild | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/binder/postBuild b/binder/postBuild index 2ed713ded2..a026c4410a 100644 --- a/binder/postBuild +++ b/binder/postBuild @@ -1,5 +1,5 @@ #Download Apache Spark -wget https://archive.apache.org/dist/spark/spark-3.3.2/spark-3.4.0-bin-hadoop3.tgz +wget https://archive.apache.org/dist/spark/spark-3.40/spark-3.4.0-bin-hadoop3.tgz tar -xzf spark-3.4.0-bin-hadoop3.tgz #Tidy up From 2f08ffc5bee8a3b6b2dadaf0f3f34e17f7a88cf4 Mon Sep 17 00:00:00 2001 From: Nilesh Gajwani Date: Tue, 31 Oct 2023 18:47:54 -0400 Subject: [PATCH 15/28] correct spark version --- binder/postBuild | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/binder/postBuild b/binder/postBuild index a026c4410a..2562dded94 100644 --- a/binder/postBuild +++ b/binder/postBuild @@ -1,5 +1,5 @@ #Download Apache Spark -wget 
https://archive.apache.org/dist/spark/spark-3.40/spark-3.4.0-bin-hadoop3.tgz +wget https://archive.apache.org/dist/spark/spark-3.4.0/spark-3.4.0-bin-hadoop3.tgz tar -xzf spark-3.4.0-bin-hadoop3.tgz #Tidy up From 16cda91c4b8a26522b96b26aa5e7dc5527bd4ccc Mon Sep 17 00:00:00 2001 From: Nilesh Gajwani Date: Wed, 1 Nov 2023 15:26:17 -0400 Subject: [PATCH 16/28] repush code fetching AWS jars --- binder/postBuild | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/binder/postBuild b/binder/postBuild index 2562dded94..e7c4e95998 100644 --- a/binder/postBuild +++ b/binder/postBuild @@ -1,6 +1,12 @@ #Download Apache Spark wget https://archive.apache.org/dist/spark/spark-3.4.0/spark-3.4.0-bin-hadoop3.tgz tar -xzf spark-3.4.0-bin-hadoop3.tgz +# Get AWS jars +hadoop_s3_version=3.3.4 +aws_sdk_version=1.12.402 + +curl https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/${hadoop_s3_version}/hadoop-aws-${hadoop_s3_version}.jar -o $HOME/spark-3.4.0-bin-hadoop3/jars/hadoop-aws-${hadoop_s3_version}.jar +curl https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/${aws_sdk_version}/aws-java-sdk-bundle-${aws_sdk_version}.jar -o $HOME/spark-3.4.0-bin-hadoop3/jars/aws-java-sdk-bundle-${aws_sdk_version}.jar #Tidy up rm spark-3.4.0-bin-hadoop3.tgz \ No newline at end of file From 03f2b7c26e518c101a2ee083ed08010d93ec96cd Mon Sep 17 00:00:00 2001 From: Nilesh Gajwani Date: Wed, 1 Nov 2023 15:28:09 -0400 Subject: [PATCH 17/28] Revert changes to python/pipfie and spark-viz/build.sbt --- examples/spark-viz/build.sbt | 2 +- python/Pipfile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/spark-viz/build.sbt b/examples/spark-viz/build.sbt index 17c22e8566..bc2260a89a 100644 --- a/examples/spark-viz/build.sbt +++ b/examples/spark-viz/build.sbt @@ -46,7 +46,7 @@ val ScalaCompatibleVersion = "2.12" // Change the dependency scope to "provided" when you run "sbt assembly" val dependencyScope = "compile" -val geotoolsVersion = "1.5.0-28.2" +val geotoolsVersion = "1.4.0-28.2" //val jacksonVersion = "2.10.0" diff --git a/python/Pipfile b/python/Pipfile index ba044c7aaa..47339508a3 100644 --- a/python/Pipfile +++ b/python/Pipfile @@ -14,7 +14,7 @@ pytest-cov = "*" shapely="<=1.8.5" pandas="<=1.3.5" geopandas="<=0.10.2" -pyspark=">=3.0.0" +pyspark=">=2.3.0" attrs="*" pyarrow="*" keplergl = "==0.3.2" From 2e7d0ca379d3b96ea9a15d3815deb1d3080b39df Mon Sep 17 00:00:00 2001 From: Nilesh Gajwani Date: Wed, 1 Nov 2023 15:38:54 -0400 Subject: [PATCH 18/28] Remove all outputs from raster notebook --- binder/ApacheSedonaRaster.ipynb | 211 +++----------------------------- 1 file changed, 17 insertions(+), 194 deletions(-) diff --git a/binder/ApacheSedonaRaster.ipynb b/binder/ApacheSedonaRaster.ipynb index cde6598223..9822141cde 100644 --- a/binder/ApacheSedonaRaster.ipynb +++ b/binder/ApacheSedonaRaster.ipynb @@ -135,23 +135,12 @@ }, { "cell_type": "code", - "execution_count": 42, + "execution_count": null, "id": "6d635263-9e8b-4f74-9b91-d360d196b966", "metadata": { "tags": [] }, - "outputs": [ - { - "data": { - "text/plain": [ - "[-180.0, 90.0, 1440.0, 720.0, 0.25, -0.25, 0.0, 0.0, 4326.0, 1.0]" - ] - }, - "execution_count": 42, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "raster_metadata = sedona.sql(\"SELECT RS_MetaData(raster) as metadata from raster_table\")\n", "metadata = raster_metadata.first()[0]\n", @@ -186,38 +175,12 @@ }, { "cell_type": "code", - "execution_count": 43, + "execution_count": null, "id": 
"7fad137f-331c-4c2f-905d-dbc42cff11b6", "metadata": { "tags": [] }, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
rs_asimage(raster, 500)
0
" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "SedonaUtils.display_image(raster_df.selectExpr(\"RS_AsImage(raster, 500)\"))" ] @@ -348,43 +311,10 @@ }, { "cell_type": "code", - "execution_count": 44, + "execution_count": null, "id": "a7eecae9-3763-405f-a22e-c7d77ff703b0", "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "23/10/31 15:21:06 WARN VectorToRasterProcess: coercing double feature values to float raster values\n" - ] - }, - { - "data": { - "text/html": [ - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
rasterized_geom
0
" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "SedonaUtils.display_image(rasterized_geom_df.selectExpr(\"RS_AsImage(rasterized_geom, 250) as rasterized_geom\"))" ] @@ -417,45 +347,12 @@ }, { "cell_type": "code", - "execution_count": 45, + "execution_count": null, "id": "75f06a1b-1ab6-478b-a50e-b621a10d6d8b", "metadata": { "tags": [] }, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "23/10/31 15:21:09 WARN VectorToRasterProcess: coercing double feature values to float raster values\n" - ] - }, - { - "data": { - "text/html": [ - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
resampled_raster
0
" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "SedonaUtils.display_image(raster_white_bg.selectExpr(\"RS_AsImage(raster, 250) as resampled_raster\"))" ] @@ -487,63 +384,24 @@ }, { "cell_type": "code", - "execution_count": 47, + "execution_count": null, "id": "b14820dc-ed04-41cd-9220-73a5179f52df", "metadata": { "tags": [] }, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
resampled_raster
0
" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "SedonaUtils.display_image(resampled_raster_df.selectExpr(\"RS_AsImage(resampled_raster, 500) as resampled_raster\"))" ] }, { "cell_type": "code", - "execution_count": 46, + "execution_count": null, "id": "bee36339-d0c1-469d-9354-980a23f24401", "metadata": { "tags": [] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+------------------------------------------------------------------+\n", - "|resampled_raster_metadata |\n", - "+------------------------------------------------------------------+\n", - "|[-180.0, 90.0, 1000.0, 1000.0, 0.36, -0.18, 0.0, 0.0, 4326.0, 1.0]|\n", - "+------------------------------------------------------------------+\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "resampled_raster_df.selectExpr(\"RS_MetaData(resampled_raster) as resampled_raster_metadata\").show(truncate=False)" ] @@ -588,23 +446,10 @@ }, { "cell_type": "code", - "execution_count": 48, + "execution_count": null, "id": "ffe589e1-50b7-431a-ba84-b2c297b77f65", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------+\n", - "| raster_values|\n", - "+--------------+\n", - "|[115.0, 148.0]|\n", - "+--------------+\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "point_wkt_1 = 'SRID=3857;POINT (-13095600.809482181 4021100.7487925636)'\n", "point_wkt_2 = 'SRID=3857;POINT (-13095500.809482181 4021000.7487925636)'\n", @@ -644,7 +489,7 @@ }, { "cell_type": "code", - "execution_count": 50, + "execution_count": null, "id": "2a2c7086-9588-48a7-a710-c10b8c5e4875", "metadata": {}, "outputs": [], @@ -655,34 +500,12 @@ }, { "cell_type": "code", - "execution_count": 51, + "execution_count": null, "id": "6f39b3db-a0b1-4842-a5ca-b5a5850f3ea7", "metadata": { "tags": [] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "User Guide: https://docs.kepler.gl/docs/keplergl-jupyter\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "8ec0e5aca1954d36abe75cbe8703ba57", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "KeplerGl(data={'RasterMBR': {'index': [0], 'columns': ['geometry'], 'data': [['POLYGON ((-117.6417329630247508…" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "sedona_kepler_map_elevation = SedonaKepler.create_map(df=raster_mbr_df, name='RasterMBR')\n", "sedona_kepler_map_elevation" From 02ecb8789a453bba5635aef255b7bb94dc9ecac5 Mon Sep 17 00:00:00 2001 From: Nilesh Gajwani Date: Wed, 1 Nov 2023 21:00:39 -0400 Subject: [PATCH 19/28] Add new raster in binder/data --- binder/data/raster/test5.tiff | Bin 0 -> 209199 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 binder/data/raster/test5.tiff diff --git a/binder/data/raster/test5.tiff b/binder/data/raster/test5.tiff new file mode 100644 index 0000000000000000000000000000000000000000..6caabeadae31129afc2c4309348c5ff6a10d0d24 GIT binary patch literal 209199 zcmd42cT^K!+wVP@nLy|v6hjX^K!8w1nxO}zgbsobiu7&+MNI)j1&n|Sh#C-(VgW@( zK@3Qjs({!70wSOSqGCVsci+!h_j%U4-u}n=tTi*=>|A>?$-4GjlRe*Ya|5&i06+oa z2mpY13V~$vR)~LTaFnOPe`#I}fPnt3kvz@!Z_Vq4kovdg?F1nU@ZJS@3V4B#`=5Fi zf&Y&f(qH}eu8iSFN+O$blRzU%WQ&rW})X~w`H#9UgwXm?ZwzG3`+O~~C z@$m@^3<(JjkBW+qPfp&oD?L3klTOdhK6*4i|K!Qy;*yf`@(UL#D{E?MYa1G_Txo1< 
zVe@RP+}$e8D1-voP*Glss~RpZ2ZBXZy6|ua>kk$uwXSO7CAIz7lv3AiS2@}A;t7V9 ztR&}pfGE#EogfK^uno=-REfxUyTB6@8T-`HLDu!P7>0=~B1eo}&$CezNn{IIqrR#L zI4lcHaRcpr6n*!wHM69fVYUsE!~P#p=N*?+|NehCpt6^HGaR^a;|ynHYGhW9G8~na z6_s_{xC;)bxN&A>i8DtQrj?czj*QF-&AM$Fnwf2~?mpVOe|-P?{mbM0!JG5G-iLEN zUf1(U@W%>g5PWkTWZVHWoghVKwO#T6*EFbKyrt0dp%TKhyV zJw(&#iICX3n0lk^%m4Q2XD1)J?iwAmavMYRVW^Agsst|OYe6@>F@;I5^-_R;!Kose z>eLyux3q;;@xp=bi{B`nnS0&{Ilq~B?i=tW!tYa@)q*ruZlrx3_uBwtg+_mlji}7B zxdf<`z7%mV;&39XZeC1z?OU9Miv0pBLWY(d;c z7l#d#(XDqM*2oDWUB;?HktQ0MmWcVvdA2(7yCm%iWssu$-ljC>%?*Va);_vNEO7TW;9D?4lWs?qN@!Z)JYB(?7Nh~&AzaEW0|?B$;zf&*TRvdTK z$N?@!40(nPYx7&M_UbsiS-K-)Tw$ZF6=@78Q_unqB@rUkmBE}*&)9BSlKcvAH!!Xbd$}Xa$WGyU#CXba}_HRT|dCmZYr(bMV1-DAaR%(j-4Z`m=0>ZSI|@2wOjT- zq)FH7l`ZV>$W!Q^M4Uss5E#XtKVlqa%YGtAu&r$P{m(Vn7G&8{Q_@AVKH@OD?rgbi z*Lg*+N0Wv2O!wCfyAYqwHts4&nxo{88`4N8DD+(vd9q2!*|4hrgiqd@dTTG<4kGE>+tYXUGiBluhr&FrS1y}) zm6fz6(KRGQK5Wu4dlOSQ;dVgs;mWCfBVX>l-5i=Te&8C7^?5A*i5q%d+T@RQ+R=SI zzy(%GSI#c2k{?@~S414cf7MHUvGm)DFEuAOxgd^%q~sA^ocNq0(oTS_67G4JlYTl&Xp zOf`8zNZwwyVcGsSoWT>EK%_t&5C@s9nw1?9tk~wI15ua>Hy^$5_Q*dY;}vEU_3F6( zj4(KP!>a%YT?ZQL1FJx4jd0S@0)?Bl07H$09Kb@;l9!90ragLViQ-3=L@nJ(bdZYh zAAj`dxEI#wT`a@c(p}t(pF9K}`lC8e?($%j=76fS=&g=S?LFI_k`dk6Dd~=al^?r+ zf4qZ7Qw}CA#J8nYyg(3-#0V5XMU!2w|Jq$aYtP0$^H=(ZmvsK;K1SVtzhVNJyL2WI z`y$Th(n`;~yk&IN``x6E`q6oZOmw8rE#P|in#_tOoo1X9(9&47(r(Hh;;nm{#fgD- z_y_%E|4}G8@N1e7+@tC?>>#jooa|t<(TO5@3L7QpK7EWe-9c9EKDt7*g1&EC@AMpu z6U4HLay0i~NjWk3ZsdeGR?Evpbav3;?Rnyw^OeLuJH|5?b~iO7`0wjMM$X$Ox3*l^ zt?e6UEHU@6uY_1VO5A+e4OsTVzmqjF@fN#xd*-j%jv3hb7poQ4T4`?)<+(vO`lVhY zryoPRxmgq~ zkT}9?=zpqY_RH?p{i6Oj$c6bG!9!lMq#yQVt z#F+MtJLu0opBQ?4e|1o)Q2b$e^Oely=B$xdFfPB-YujyXa+|^(+4Jbi#6V%$POKoj zmg_Bg#kxA1z;z_|Gj0^3+S>JEGRsKdHu4L*M?8a7?Ow z#xbUEpeu>k5#ezMZwkGjSB1(bci3(C?1#*cQaaN-C~DPLY+Y# zzYdkzUj0w6s)t5(^CcUz?sQr`-0jalVJD_ozC}B@UP8Y8eRBL{Ub-n%B@MdTSugHE zJZXcB+d$NYP*sEFBn2STbQWnq{JGj|eU1#{F5~Y`1OowSBv5`Ai_kp}F|}+o4j^Ls zcQ~RDCX^PvYbP_j!GjoZ5miHGcBXj7wa8><4A92keo@kzY#K9oDK)NaTOSngDy}9< z-ey@D3T#t4&bG%uF155& z^n)0QyvO2}JaU8P)0eNDr`6S3Hb_V9t-h@^%~mVTF=?p#l{d`ik$? zcDPJU6fdn1F#eO(`9PJ%NDdjK(*QaZKrTfFPhHZzRApqIPo>$Dx&__Nd804v4!sT- zODlW7jz4an9G2kW~D))DWh;^Jtp`m6rE*2H$i4X+%(X%tQzapTkG1t8uTAs znRdX`{lr^G;_s7e;pAMIQyr^BYxY%9-rRqcaQSBQb0NKjh{QzjEeTvu#9bYSu=@Up^Xb#%6 zb&D_8;$}B``7f4)V{VNx@D6@;e&w#<^wui7UPjxrBz1}*y?~0?QFYkYFVfmCp(kHb zue0RY88Mp2=((MBV$8Usw#9-B<+{nwP=Z|WMimt4lV2q9! zYik76Y_2W9wVk+C7`tHrU|EAaI#_K7WZ8oI!vPM>{`7X4+6?qSgP1LnglLe{--P^e z;STH^gP2DgBz!tzWa-O8XFbYEV-1CR}Y+zgyxI3VeQkJjSMeO zn&!okBRB9{PsDfpc&R_#$uhgfpuG)$1wk%!1@A+kXvdj8+10_alVFm?bWy#eUfqpm*%i_`=s)wR8+ znUH3Im~?7U7qV!i;M;q^rC9yKvqeV{c)D7JxQdK}9)4B}f}RGd)QulC=M zzPS15omYj(UvC`}fPnW?H^sH>j9N1FwSRmEo+fxYiX?Fh$f<2&c ztRvXJ$g&6iE{*pL2Y_n%r4m4JVSNv^>(V=uhYo@f8+WBHlsn{8P2;ky1YoQFq75jf zKpipp{-2@qmhPv1pLUxQwDH`PCd{FK2&aAlM%0-J14(K@-eB>~E?dGjEyC$R#o_S0 zcYrO;;lioqvKuIyPGJ{9@zNL4vA|ia*%FbbIqJ&m#Gm$MF@bqGrm;6KN}F5nBnX}Y zYbKU%P3Kf2?_QM~KmVi$G(7}b?59(R$$Te3?gXfuz$v3bBWK`rAyBPJRnee5G+@$2 z2w{LK=gm#6D`ql4m0l)SQYC6t; z$=NSPHmPe1-3?Vxl9~N4cpAUEBPe$OWcE)84s)7))#C(yUkA<*fgYRC-(j`mQDBb? 
zIPU^8as@srH=Y}e^yp7zQLpPgmYmvminLb1B?^YMfcrav;#yE*>(4q7wCBQB4d>!u zdtSm){)#1kp3TPdS4JaQ{b|OQ{eLQ}4w?Kp6gR_BWgN3MGL>a7F||k%MXeMAJs;pA znc30ja8&=q(u;c&V7hhs^%Yy2tj41TUcLkTVvg57UAlN}=lQ2+Ze}a;-C)++Q=waU zAl>S%&eb)js6AYA z@T-SQqm)Ixc82EvlI=(NF>r3@X0NFi#fISHX;A41svJRy6%#@O*tTGcBeQftxm;m4hQXE7kh<6y_+PC7()bJ@X0hDVhix-K=rYS zj*3X1bF8x_U=(6>Ee9|dP`3W+=g|O`O(enU{qvXaMFy&ZJsulxKNFon?vDw|fVgyr zt?N(>!J}idtT)o>PqYdrcKa%i+JK&@j*nhf(0jV-HcAS1>-TS5f0N?&xH8PqOK?J8gBvVb{m`X!KyQJ1bZRG1yH$ykCY(K3fzhK z_QofYP3=KgOY93lyCZ%(>%zt(X3)Rspf@#=@Wz~R6?nSvw#vG8)VMv)22|L8zW!KKeE@+bcv1I~MF+C%GnqEu9Q(N(%7@qCPh9rC zUmgvvul-sZ3Ko^cb`O9D3ewMzl?Tt|ajKkpPPm-AQ&~BEa&R|0t6CyHxt>XWv-R&6 zt3vYlvB?ov?G(?a)5-&YK+zN8^Cc z|H2S1fTlF$Hm0Xj!zL|&9{P?RJMh8HLd^-Fb{0e9>Q~$Al;t7Ln)d_V`(xDT^AKvMlmXk5R~V=PHCk} zSJmJUX){4_ElJMf`Asngoin0s!cf_6HOdAL+luzag*2upgenn-l8}fje==SiGOhl_ zXHg`}Q^zJ34|toEID3PT4;i60x_M1NQ)XMtupaj@kh}~NVNF8J?xm34ExY+#r=e6j ze`MlupU$c5l=Bwy?Y&WvYu_ zUa+Fi9yQR@Y>N!$)jObpf^OTk8Gf@y(-fD}flBGqTSVQbOKp)#{+Zio>Qj^kK{mn3 zMgc4d4M}y>*eG|HjCAGh=*hCo?cg1@$Ss)6u+$OA3T^G@X_FtC#U2z~-Z_smw#XDt z87&-u9KEk|k+9uNXTa1$(08}bmF-Ul(#$(QGvqb=mX2$}tZ;MS{+tVF*`}#yrLu92{5wZu6x7ZYf zFrRu^4W>W!#HMPjT)y>_eUAAuql;hxRe5<49h`NteK3P3weJVh+ZgBwPK&63I-<2v zntthwU4H#XJw~CPUzeR*J?Eq~qIKMghA2-zWUfP*%%SB@KYeVevnD;mSnQ>2bVPHi zuR668QVi@UVoxvnnwYGLQ3QyQSt2NlDdytf$QE*Lf$u1l3T{aBobl%I#vBJSzE)fk z(v=>oQjI1Us2Se42bRWH{U{pbC|uAp5|E0Rp_f@^1N^@YB2v~aw;N%ot7?%{cD41L znUvfe`7@Fl`+`B{geR7^d1DQZCYf|A7?|vKr1+(`a?#zMfHMjd*J|gboL;l{+V(!n zP5EDpE@sD>N(M^e2Nc)zQ1$%smgzWoiJ>@IeW+U}iY3hIsJr5I(+P|#K^9b8YGpl5 zxnAF%fB3*B!NuqWD#fE3RGaBQ=for8g8KQ>YqqlrDWky%p6R_#MUM6Bc+QAh26isN zy>O8AOf%2NRJay$(0r$SDksTMOCUc)%Q*o#V}&?b+z^%1Z6`7hB`Kw>7tDrP<0tjXNL5iNhV{_ zg^P@{LrYo)3;mnw(k>WVH5c5u(})Pte<@s05LVIyb;y$@FhZdoJAsJKSGj0InUoF1E@M zleO*sSmLN>*X0fSDF_XCv$3R=S9KyN%Ug>qH2`EHv|xS>(n^o6#UGYynEtEZM!4#) zu`>f&HX!qAteRBk44<^@#6?M6iQj(0Rdp6tK^>vlm12@OxvaEbPngXT5sOBZta)B-zi+Jx?{$k~!)q#t^E*Jjqg4E@Z@HCb!X^IVslIJ$9& ztMR%K`?Ecab9!LCQR4c=j=BXC-N)31*pBDoBrR7(?j!7{;L&oqE=z z%FU&bS&eLJCnJCJn>;_H^IH2RR{gFgkZ8pALWlS(XEp})*uuGLEBZSdBESkk+~(2C zWfiM|YEQVeQXei6lu+c(ofIttElGx6uxbrP@SK$^o40^f-o({S2Ii`NSq>`y!Y8W( za-|By3)9RL&VWTmyS4T+n)Ph2+e6-Q0u2n^iPJX_IyU$##Cst?nrf2 zX|%UXpGS|QatJXG*7p!`j_3G)txj>u)#mlWx(nzJ-`x*0A@VN|q~%ln$!P3O>&k(a zN+8ezvALWmY9_22kVXgFsT|=SQVSb+9wq&KQvrmFcqos#SH5Tp<0hb?vNbyV8X};~ zN`fj}y;Ndd7%%hL{)fWE`gx?;sH{3PMmIWH{m|{-7u@_gcQ`AnyP+xp=7z zwREH#_I%Y&v6rQE-1K}w+B(6hu!!=)_Q$1hn7aM7S~Dl^FWt=z5OB|V4_=NUl?S5)_8Xg6rDLQ+ z6YGEKwwT_NoB>*VwB!;a7%x-@N}#~_=>(`W2HW4i0u|(8Eg}@+MwQFX_c=0i zU^?jMx%o=onbZ3XZ%dDyQ$f#p5zk(~ErdpH8uPW4^ga%%r)e?oux&aE13H3JKe<3t zta8l0*9%RKVfN5nw5^t4hQNf-P<@-gt@&^3As57T>4|ul>PTxs)Ba#y zr{l;U?yBBxQB4V8@0MBjS*p~Qs&_cm6fRo*`2wbgyMTXR_W9o2RN=tX$8R_d(XKwN z2GX34D7z}G1(=c&^JStF)fYHpvcMI!-FFV5QH#ChO zqXp_Tov&IGFFv8oUF zeh5zW{H{ug?)dSAtH!nG<1$7GkUvw>A9$GwzEF9Otj4L{LR51&*mptxCt7RF7TF3i zc^*gpgeU(?TbYjoqPKF#*RhnCxddy?=Mm{&oAkWpS9$4;WdJTHtY!W7Px%>clr+)> z1cN+df>6m=7&@BGK;OOs%h|A*uRwoTY*9Aagj|lxIk!EhIakLX!OutNWFW{gZ0B7J zIZK-r2WN$9k-apa&V$lM9QUdE3kxTGtw+SYkgzk(pR7#~XcJax6Y!8PPOqp%3%D@{tgpzGV;*^UKdPR$t=}H5H~-p;u7#^>YI*d=Vv^h_dQ<=EP7JCu7Hiv6 zJf7CKThi6`D?a$hKNt2!tWae&i|V;YP+U2d^!%rMgBCel3)iQe%0ri7O60Rx?lU-% zg(H9Og?#&!F>?27!e^G6;Iz;K1@K>e@#W!o*7vDoptl4dKz^uq>jY^Qh5Zu5|Mr!! 
z82E6-vUCD2;|k<`A+AG-J2(C^Hu|pmi)9PJs+or!;9yxpC{-D<{T=O7W~FU(pbi%` zz(mP4d9oVc?Pt)E;`kZ<>g7f>`x~ptiu#wv*3-Io11+fFHW|M- z7gR)yXrn`a20XZ`8JO0Rm$A}k;@OdC@}PD2tx&b4%ZGj_!-f#GFeLz{+!=(2`ayLt zPCtr3!4WBXL^oQH>Kk5wA?#xirf~!T1~32Y8Z|MsFBlK`XhZzZMKy>0noQ;YwwB;2 z{9kxC-qu~Db<**5yadcG3^uiO<-7$t`Oh+tiO!9;hsZGGQ9r8l@%lgv?K_%$vSd8{ z(ESYb?DuN^{S3Zx+B(HM-PFK3bJK$wN+ZI7Ta8GeBBWLF>{zV)P1NVBI;!odceWws zO)^OK_&xqxxifg$SL(mZIdxj@v9_Fk4B@iw8tlUm%pgZeS3B_W|9TOUF+_bbF>=C-mPuhJoTf1tm~SYXjeM|PhJmYzYK z`+@VE`FSo0tz_fL&$0UtVAZBUqa(OvXtDv5P=_EIyx*aZGN@0w%`;a_*^ z8g~!UIs*A^%meMusvnvYmr$oY^-wov+|GK3q*tzL-V4EgHR~z7s=oKu{h|Z(apY&# zD{t7R=`JYTp}~_igIzff=E%3SmL!IJXJ*VQU27v$lbhxDvBNy<`yuQqx3#xY7H%av zHGVOES4?|cGdHWAuF3TQo!PO2A(|oebq(_MM`@ao9&t`NCbVNZiAO zyPv*eBOg&!|G7hT_BzdsMn1HL%;DO0Sikzbi@^G|cQ3Z_d-aEpHthKaV>L|L_x8EC zRoMnA{gCnPEu;~aO*Gsl+WWY|4<({Pvw4p z0r^2#=XCTR2vM_R*FPm{(BDwfi~mjUzO1|JxK_bmPp1sf!Kx+URe{sjCb6ClL3;HI^SP$MFgKnyY=LQPK?I1&%I7p{MKbdCG2sxK)|%2O&l$@{el z$$G@UKK^~>J*PyEV5oB>HVe&C?$tVg;Jk)FK434#TOW6U;$f*>3_Rrdl;-4j`bv?` z;A0c1s(ICCr}cFDe>)m8G&;f(E6l1GETj*0mW1XHSV>>%9ZbXvzZ@R?v^jimTg9%; z%yrmZ0lJ)}QD7M3yK}dJPs>C1 zWM&<}vzhv9o^Z)g&9UYSfS3GXr0R%$AtIvTdK4s6n_sWPN{f^3LXsOXk)yx%t%ofA z>RpAo=cxSL@e!i>98w;$6|sPQ{HQTv$9mKb*wCNE2hNxo<~r$}%{~C`+p4rOua~8Z z*wha=#PfAKyfW4t!t6_I;tk;bXGTcI5!J*Ky{G@aL_6D0B2tWq{bb^gy~H}xlg$3Y z$M27>aV`Ac{VCONV6E3Ee+12cilsX9HWT)!s?gFZ%#&D*1|%+zMGF^^QeFocWb)g> zq-W4;WLSwBdqS)B$NY;JXZ1Jcm4)^BmHRxY5!1sv{oMZ-{^s)B?RqTv>bkI+z$N27 ztS_MpKCx|Ew+sHe+4B5XK6csm9Uh-6aL?R+%0CbVHl!d70sEZzoOYICd8y90JK+!s zYJRqcPOQEe6Mu=f?)TA5Q!>v6CvOaxYeC8f^*g`T@AYZ?@DweTVweHzrLvT#&PW9x znbHS_*~|Hl8b}!2>kU2FhpvuYPp(QbBMh(P6Z%a^K`3k;oOpQcV37I1Qy$4@u;5ez zG1+o}Vmdf^^W*{DBhvr5CnE^DUd;=e%KaMeHw?aMRB2OQ?d zNqLIS*+K=i%~Hh==C+*btVO8!$OpwZlJ{}>AG{$2oNyVE{!GhG`J64>VQ%}vnPPA5ckeA@&mc=ps&6C; zm6X)={FlR}J`a**CW%5OR#FLBSGs>}Cv?nU-{t6Vi;-JVXsKe-KEFiOh%}*Yb$n#& zaK;*QleSW{cJnU>Nd7kJLgW``Gb*W@QFCASoO>MSW>U0ea8Jv`Za=dJTfa^m`SG{^?FXe_JD;l$RGM2Z zJjuL~?0Wq4W?4g_n_IeDb#uAhw?6{cU`-{~PmeC-hc=YXHwJqe6}L1JTQdI+Gmx+w z%Mh~(PA2&w4aE6x5!NV?Up1<8O=xt;_py^0{yr0uT?p_Kf9)A`0F;=Kf0bFIKtPomY?SXEFZi{|8-;`tKv-Ym#&(jPlu$+ zzs^Scw0+-tX7TqK590;g0oRIqxwC*4zz=VDzb>^-kKHN~ScI2_9_qVTmbSO0n<#?( z@-KKlSOOjllgb3F~)#DyB2$r$!XiT}+o(tWlr3 zRQ^kI%&YX7WrUDdAC}=e8qQVV9o@XVtv@Ng2GU+lE(Ba}<*A3*sT@tO6ZTOn2%m=H zvhb;z)@RX7)_@mA*rsJzqT$nql3*8!oOaPcD|VTcKrg&(_q1LJAtY|xC7&{x?@-_= zz3Pk}m>)V;_+F-gkfhR|y5!Z18y}T?U3}pwqdh|#;)SZu6x{6}P9Er)*)R&ujyLXE z2%}$hKvP?qeT!geiU2!USH?$Nlwy8}-0hHkL=&|g@s1qiPWkx2#EZcM@mn~nWN;!a zpd@-?u|lKznyM-u99k@W@b#n7Fm0=q~C05BM%ZZH%$|o2YYm07PeRYqfaXVS|@I1BUniV>DvdgN7W`4ti zMpfPLOrec(OY!5?gM4(+HB^8!m3`HaZQVPWVkR`+#gb$;Ty}urnYItQNk|__VxXIqT`IYQ{q7o4j!7538zxCn$!8ehC(Q#d}fD++b<EBi7k_zrHiO!Zwx^8jlG4o~l&& z7p6RY7K+Z`Uv|n*kyMlOW_ntz@&@EB6eO6{XeSPi^FjypQgS6q?O)B=@{d3oj@ z68H}e+GN!(A2)^xoH0Sw9&)nE7@Vo}UQsinvln%nLS2kJzrw9LMemL=c?%9Y;iUtY z-hs8(6F|LXwfsMp| z4&<@{cz7H+O`rl2wl2VrMb;TnL`spKKoNpShEuaSm#ko%G;}!2J(ux~47)>#@1ONO zB^<<>@-mRqEc^Z|`L!X{v6y=m9=X~p^@NQcNyGnAb1%k5V1I08Vziu?+;^tYj6D&v zo<&CMEXYF$kPr;bA?a=z%rnl%gMdY~QLcWhDG6WJ5mn9laM#w=#jCE*Q_FpjIFw$x z4PfE38K7=3hqDVxFz@2EVFqH&U85^}>&ViRgPcvM@qnao)&D8GVo;`KW(b82WF03qy{K0HR#F~`kKeoAJI%Y%UoTy8;yie<;?Z-6+2R`~xB~^dK#AO}B zpZgDr(9OD4qu7T7w8&zLLv~P{CeFq&7Ft&kgC{p2>txPw_1|IhicM(BW=o;0$^e|- zgs$UX%~p0B4k!aI2{vyK$<7-LNS7l84BG(6Ari=AHwk~yZpvkbRgmr2eyJ^7UPjV% zxzJ(%IA}p8OoA(mG4x;7*^F_^*OW1AU2nS1wgVZ~Cw zyc=)54Wc}XjZ7KgyP1%c-X3l>m1}p}#niDqb}Qsr^Lh)*lDj*iBf~P7vLEDv zuz!i3rslmunkYOyBGvpIR4Lh^qUK_+!qm;27OpM23H-yF4SEc!Tagn3D7C{$Ikj zWO9EYGe+?RJ{Z%Z>}B|GQbJ$MLW4~jgbbLOVTKqmH) 
zbu}yb^3Wa@!rS;(TslbDdfC#vVyY%&a60~Z!T^$Dlo@Gof+Xtm%sg9+mCH>~tOpL? z8TD7eL*{62vV+L{Wl4xp%Esg^9y`gsxdW=y;h+unBGq|=YvKqu`J$~Bp;#Yy-Hr~* za%#9mGKPuWgdAr;6Dawa;AM0iRJ#N33P83yj?OG?YY<93#jAs1gMSOtUoZsE#R(nQ ztSQ~zF&o8XvEn1u4_?6RZnrllWc?Jmhj_T>;=9TNQg(V2Op0_@!KoQ}JE~BKpwrco zo;EYl3P5jhxl2n(o<=dt?|}DIT~{3UrrQYSX+oZjfcOh2?*b?3j71m2I42-=D^P8H zQtAkUI0NtdIvDmR6n2K3+c%YcmqV67O{@fA16*AOwk~xZyy+)*ucB@cx~&pM1bO=; zoic+8NpFEknn3AduXKgQd6JBL&OgDgfgIQ@)$C#1*ESiiLPO4LmgYi3i*A z0htrfcH&GHy*10#)C41SI3`Tq*J0cV4q%-;c3)n!Up~UR?2`a8?5|Y22inZeg#UoncW1L z6v4bZD02l|BVjF9h3RgZTnkLE7<>-zyofJhxq|b~Lb3}ms0Xy0(vtvO4@#PMw7z9GmVC?uwbGz7>SpWx@!r`{HNH5i>%b zJB;aBxLT*YY!9r?`}8(-u~NgSy`ay%AYz81nIR&*7ZJsK+X zrr^vVQF?XVXk+dotE?rof}jr=IWFhA!gyYQDITo$W=k1GEgmpLKKOjW6=Vq+j54Kn zB+IoccGne&yMk*AsvW?2dja3}CY`Q{Rsb@qyFB;MwylIkrjX+b%4~oHHEep6#bXfX zHLJ69-cSImH@@~{h0rLMwCH8XI4z802xBgEx4DRU9#u>xQt1xM^1C1{3HY`4&btiJ zX7hYbSb^@?eaNy1a!9nVZ{HWuegDJ&v*!C;DS>eU?wctbqlyU2k?CPBz2VRvL!}Yz zuET!87y%Zs7Ut6joGfM;#1@@u&h#y7y#`kZvoaQ>~1ggu!ctiARO`Xc*5`*usLvGxC_uLWP4c zzQ6TO61X^PJ;EwEl~pM0(OGl^876?(={VC}p`3xaZdzS&{XEMH#`3G-2oTSF0l`nC z(ql;GRakJFLiV=)f$K1~6+*mVPv|uV{d}gqh04ID_+i+zzIoz95xq;q4${v)zCs!* zVur!#q59nL)!*V)f3yrPTrbjFKT@_2?%0Atgo%8#hO=#u+x?>JuL24tbkP8qq*wI$ ziRhj%iH(r%E_^j!y(TCup;ox)J=6|}+daK&4!|Hjz+2Bqz8k3g1|wX@h+P2b)n)#_ z9ToYLrBqyScIT1Duv=Y*@`^J-D}_i4w#-M!v%C0#bHWM{XY|NLwO`(-n8- z?KK-=E(aIwL5`)M-=?nGO+@yt5c>msXIQ!uO!!hn@DY(iqseop(>Zbz@-@ezo+xAO@jvwwyB#oM4P$0pO*Z*?&`hv?~Mc8_WV)+o*8Gt zUXJ3npNyS?TTh^wZTXAA1voT1>34dT*I3c$0EQW~m{EoSaU4=}41Ev)yaRcN& z9>%K+-GhW{3}@1_F)TsR&qSEYT^M?)fbSd!@gggHg=!y5ME9xfJBr1U(kzyN?B`i5 zC2a~{GTFMK+0Z2Fg-Pg{Cs|>uKk04s{3m^#VRW&P&YxIw>-F<8j%YDh!vz5(dLTOh z#_$7Ld^PtS*so4{R8mOrEM`8i*_VBf`yPjo7x9FY#>z5+5Ox;<6IGV=3?G-}!}xv= zDx5*KjqR!1+mwDybDnkz7g(Ij8bJ|kk^%nY+YJ>}k^RJtpS^qDj#!;BH;dKNM%3k} zn3oz}m$ROvhrUL%q@B@A6Evco%;%%^B97+j!{?tUW$}*-g25eEwF|T$(eOz&96}eU z9Chg-=5foKvv2FcQLsB@LYBACDiU_lSlHrS=&w^pDX_QgJ`%wi^I_( zN9Ff;2&Il9vh&Vo61YLd`eYLl-A9;`AZBwv~inIZ-T#uWX~%B*LmgGO$VG>)zfE zlh&GY#J^)|JSnint{2L-9d(UpoZf16%dD&wnSLSn&q8#6W960sqXpPveo4AlW0)x2ZJMEOqXW*tBfc1 zS9a>A!Knu`i%=VdE( z7Nhqnzally%7>5bg>*MVY6sB5FdNu%+P>jKWtTtM!;9bxWBXvToQ1rb zmOHY9LsW@((&EsY+3vXAf+mY!Cq>L<{gcb$ha*w*9Rv@Xl1X;Sn)w-Z*90N`DYyvLPHT!b(wfm`fuA26BXD);f#+b=6EE*G0&-9pf>C z|5n0%ZVRiOMcSVnwlAC|RMdS@<)yok<0++57be*m5IYOh76O$C$YsF#9>Jj9Fja_} z7Yf_$wMyv{@yQ0o>;d}nrITUseJF|fKO5Q0_bA!7vRXt;zSZiM%m~P@V?h+o)>+>G zt5S9Yv1RB`lugm{hSJaf`>}#z6xKjOy!qioD5{`Yv%B{?=@qT8YAFBz@X@D_(mUfE z4eZwj?_^f4w29d=u==?)#ikZ9x0lF$d^oU1c{`=GTD{HP3{STrKKXNKSxwYjNIraq z7hr+6UDG>Mwm-;{V;d4*F}%*rU}ebIG`pMYSd5$vhKio{m29Z4W$Fc27WFQI`9|=Be+j z9V$eU7DNcMo;_%EtgLv)y^t}zpuHQ9Yjti|tG_uuAx3v-EF!8L{={+RH5jWWfP~^n zyd(v8v_l9&-Vc9tYC~W^(W{>CEXm;fw?HPGLhvdQ(%M2K^8y2JycJ~B1sft$x}XOM zM*=XQ>Z+4YD!I^=5K4Z$tb`_i>WV2dx)FdK%$i?@OOefc*Oe8{iK*hdEn$~FMZtp! 
z3JNAvA~G8aZCp_xVc!>R_wyn>Oz}F_+j6%)JYs0gdC=-D(j@Huwo2WiS9$$enjGcb ziM5rh;)7RJh^ARlx}np7ktD0tMp2UgN85Y0HMNERy6J=t=>f!qX6T_80TX(cPC$f& z-UUP#B1;npA@p8t2?7EFf+7Nni9dn9Yz}!vMYfS!MJ@z&pH(d$jomRV%~|j=E569RZ&RAXn>CjK0d_5ARIh(o zQqjy2lwY(@m7}^th%yfQ$Gi!)@}@xPA`Q~NAzSjjT1tx*ksOo128S-+tPl76=;hGg z<+%UJ7qRc@hik|0h5J{ZyGIMGhI-q0RBpVs_eO5C>AA?OrON0RgZo!4q&8gCY6sNZ zVlCvx7)oyt@l=u<5;CV@k5F?l`=`<+j$k1j!CW*g2AfY>6|Mb}--E zR-?JdA=rt|)L{+P$ul_Oh`+67e?X$R_j3!j{TAX&tNY<1s<*4oVTlii9lK|Y#FiJ`C)Y5TVBcE6CXUz3GxUx^cy;RXe$Ey|C3Ph49AYif z8r}ogGb&)pQCkg%W`8MwosUWm(^VmaF-*PSn3Qm~7f#qv8Uq}Yg=KWZn9HLRns z{B?l;C0ee*L(%{iv=UNCjnp1b=|*qDgCq?Sb;fBT=X%yb@nQSFA*~s*j8!|a4{*## zRjItKI0rtbqsk`b%Mp8>xP&jS9s4EN(6gQ!cv%SF-qV0QOx!OhpqAFb7K4=ap z8c;2NDJCoTiwQW<*@~Rczi3tkzXq|IKA~>Hze-9>nl2-7e-OHiuMuWds zKtWy0zH!|7bHeZ9>^Y3GYk!v171{wgAs%htUn@`TXXhK}5!h`|*>Ld|6TqJ-Hi<69 z$TAQchNnU-F>;@II%2D--2+n@>L*LFIT#VlJt@POwdS_jo)reC z%TKwQTaL{pIL}?lO;(>%2cNfl#H_ReDLHa0v^4AoX7|4CvB)-Vq%J1+Ry@_j zSC3_ZQw4+;4yR{IKo<9nOa^-}yiQh?H6hp7?)iLW^)X5zB3#Nv%o~RtbG9!B0&L(D zXZP5b6(QB=Zu#>b7zO&#s^cf_i3$4g!714?An$*;d_&^sydsZLkq75?CEjyX&{!5HbO7kVdO%Zlhu%-s8j?Jmw<&w==4pHS<284?>;)yGo8<>2T=^2bH~x=D_jp2d~r9)+D z0l>K)DxnbtPC0g-Jmiwkx7C53^G7$v9p#JqO~Kq_vc&dTu3(oa(|TH71+|}Ben8Pa zIMe;#@dIVH>Wb61$R+Y8VUfSiJ6-=)>kA4!fB%EgLH2At669}^E^kyk0EVcl5Z2(| zL-&0t^~SBk4;RU2>5WegVs54!Dl8t-%^fMm{49wo?1|J)-oB2O`9v#-DIn#-YfM#? z2I0&<1dw^WjPR4Q*mq-Pa63-^+|K?y${Q8w?F`dwnpeSx|G9X2XFUSw=dVW@JBRmhEaLel!_Ar}Pm^`5~{@4q9%sx<} zJg^?>&p5M|;Z$I-@7x%fE2l=Fi1B6iS{}Pa)rJGA|Q^W zlkN(t-)A6*dN!QkS9fQM^*Fg(;*c(G4Ny%aga1C=h+||8CboOC(%QxA-j?>gmFay@ zBEaknVLOKES;^MJerTX0itmRI8M2A4zlApPLLGU+r$Qh~PHqOlGL_etlZ(=t1iE8% z`&Ss@BCk1ex*$UcLK4l9WYX@&FTd5r{V^@KRKn#LgoW&i^nJ#C>s6llVF=j-=%o@1 zN#+pW*cWJ85*_@6PcCtq$rv$$PMTY&*vPkMTIilerEr&lCAeWWj**QOW?{E%-pY*p z(jThNi^jCIW6E0IHfCWicMZO-Ygk`GU!Fh{UD1iR(bPzUkb(x%ri9L@lGz<#lHv#M z-Z!lnq7RDUNM{-QT+u_c1jp;jA@~Zie4OgIxy81*{4nH}t?`r1wQu{1mf%xQE*E_T z2bYN1mn0P+S>{N25VABAIT`$0Xlm1T5S>FOzM7zyE8P2t#^^TUfdx1|6PKEJzb7S9 zg0Cn{#QM;7bo($#1L|#6KSlkT)MRYw(2fQLD?2KFWawF27pASsyK7W}6_0+~hvvHv zc`wQHZBb=|2&y5HVHr*N)xGmc-aP=>Lqi#KSg5sfw&y)#N=&v8zLATbh4en%G>?O9 zqz}%RCcpee&Rge@3v$-SwW=NS*f|bi<#O4q?3Sqxf`m45!3-{yoeK z#V|yP4+b#5nY50BhwZzZxCn+7QfQ$tfm8*m%&b)`$=lr1wC&&5QKh0&0wSGEYxUt< zn=>q3``F><2}91ZR!P0g)cTLZnAPz7gpW5PY9FpLT1J9)aca#@~G0wlnbDV{L!S@#DAD zJD1{2eyZ2_hWC2vEZ&SnJUUW)C^;Y@cd`EHP+;D{W2--o{a=OOu|?91%me;yXSdR6%2_w8?C)0e-D9B8}wAAq7=1iM4&HkKKpY9?Za zhT9_d!fdsNH2Oxmd-bl5YA%@e*|q*UAZM?=<2Y#Fx^WQYKw1d6I^MbxdezY&J^G4c zhhl8m#2cNXicWdIjw!eF{Bj^Vr!F@ZIv4vDtG2h=TWj1yWB|@P&Np4&_rR6iQQ+2| zbw|6$ND{Aq{a?d4gU)&Ph3Wf~#@Ec6jz+#VnlTzYZS0g<^{L4{CB4@0;lD|sGj6Hr zSB$)N{gHdT(;h1xAf-oX3{KEv_qYkXP&RPj>M=7Ouyq%F0)Tz~R|Jy+0D*x3 zM{@{(Nx{G(+c&Xz^>-#Lu1fU`t>6i%8=OoFSQS6t=I`jD5&8O&9B|F~_p!rkeG}%O zH?l@Z9w}i7d@-te&E(q7*XN%@w`w#-1aCS+8P(fgn+yAcXxG1nGk=^q6qTGb-@dj* z>fCHvQ~Z5H5W4vIZW0iuFL{15lLUhRO>vUfm;tlMa9R)#Irl8&jzLFUt={_NrvR+Q z+-lZ8{*K$*kH5z5p183;;>FYJ?+V_1-JATCdL#D}=qLepv|$JR(&Eganpqj>r|;NO zPr&U!WXN0GD_1ZpkgVv%668Jh1`1BEi2eL~LYFSe3n>zRIs|RBONZZrD|~ti|BqHQmkpTJfKs^fxs&7LH%APo7RmpGmjFNXL+?)> zKJywx@sa?Vb9jL{3Sq9BhXFC3!#H^`Fz2@ikgNw|&f>uyp4u1vuZ~CLC>8H+L%YS6 zA#jv0(fc}}*HEu7@XTh)Ub!vrYM*0oqChHx{20>Juzz$?3prrpH$TaFTw8Y;l&H8bnl}r+;kv z(i64SRUdM_Uh>}76&S1spltxS++a(sGXl`1?tplqzTMxsQOm>Vr28KtIyCHu8X}BS z431r*qKmqpjC_vN=2$Xcc?$ed>{>yviDRFy*_6c1EC8{98C$e=Vekv5Xa(GJsR<-{ zs(25gxmKU=cULVesNk_J?C!fYMD>E zi+Lv=c#WFEtbz5=-Jl?Xj}c2?|Ff*;iEcmP!W*S+h=3PjYm>6_cxz+qu zn54V-u^+cFS$$b6TL&O>r+Hzy>X`Crz4rHB3WrKLm+LczUV-Lu%>e|8-r3ooW}w&{ z!>9yC3bVP;6W?hAtjd4`397P!*}V`gkv@ns0}>Q{>4r$bfrgOQx`og_x>dHzn{TgQ 
zD2zwz>egM)Kl>erTnc&EE$-26V5Z%ea-(Sp;*^a83+>JV{bhWg2DO5f&#EbO^J@1(N3pot9vn55i3t8iCP z`KFvgmSB}xiM^#O-RA`-vC1r5d{-0`-^mqO&0CzHG(FeAH$kuEb-fla(Zz%iDc0N{ zn4qtO7<5PX;%4yl(Psr5)I>mUIO$1L;l=P0IN&uCe&-*Ndt%|oGoVhf!qHR+7*NE? z_VV<8oH!Mlws<>cF~l(dvCFde`E@YHvF8X=rhIHtTnjI*NqJFi;5uF~qd5f<2f%># z__D>Qb)8x+Jns>_4tO$2zw5Kmrr_}ORVim%6L(j@m~^z<%|!5$UI55^CJfN4nR(4lcTl}1_dG+jC@wX=G~1H1;}ZfJYZJ}l+1wu0$|AvKUIE-Q2m|v9YL#d zz_;;Wf&);(@e{y!Qs4>$w|)!Xg&f@KeE4RKpamBBK_0#V3mi9f48e4J2=5SB-%x+m z4@&X;!?OpM_%%^n|Lv;zF8$|>2zc)mtO7{X^Su3y>HaT|vfhcCUbTdf7G~BU|1!Nk z3)}$R#`?0Spt4&tQbpiZ%jt)_?|@J-N;(WcgLcDT&35V0{UEa~kg~+->BfKK*T6iA znCc1XaH#ai;@4eg@YK$?U3ZAyeJCM50CIbB4e_>d3Q7<7Jhiiv_#V9K2o`wD%FDUV z=n(-8D3nQq?507P z(J;CdlpYV`MWMSty1b|CG{V4%@0V?F!$b-dUZ_a9D{5eZQ4-KO9QR}Z=skJwswJ37 z5m^^>>CyiaNm>nn@VsEmJW;$Evpk-4m*^1QXC#Qvt=Ov6hJC;v;`-%BWB#`w!gFCkPmw#=nT2FD};+L-01-@6&VOG1j>tq@(vly zPZ>Aeh9a&KDivUaSlDWk;UhgFdjjT7CNLwRWnzlNKd{wfBFVYiE{7lxg}9Ate)M+Z1C}&*_STkcI}d=?b2ohCV|>R@x4mmwTM6ZqQ-3U>Q&gg+5o4Nsreb% zRo<2Wu;C~t+jL8D9#@=FB0=aTrmLI#S;7Tu_`e$xysMIpa+1FFR9*vo^{hC(0!}ZH zU|yA66-ch$useE_$}1D+)rd2>FhQ9Fy-SkPC%M`s`RD3Z%cg!dVE?vNDw73t0d`@Qh;s=UWH`U zlGK8dd-_#LdcP!LZ13un6rmr!+9Bmn^ijJlrGk*6-|+oRaTN^stRbDM{5${?(v%0b zaRgVvq_jyC^%IOhc^dFZ^MGK1aO|(Z${&vWKQe$-Y5Gf=3&&NIxe`S&DH}4?RTnl+ zNt<*?(U+wKo5Arr_PiBo-mDa1RvNH=h&>_A8D75DQM^;ID2x8+Dwvn%%}4{@ON-)3i!hhfMQO^s4DUm*|CHP6k_hxIOaDm^ z&~g=Q$*k_mp$CH5X)=^qS-x~+e1ijTSw`@0WPqPLeMT0rBm2ewP?N39Fi3{?HF{%0 zRtS(KLSx2mi}Ip!eMe#-qTH8+{P7BRQ4;A(Wt_dC{4hQ~azdVuj}PFw4yz~B47vz0 z@_pTIU#=u}e-857AJI#UHby1|yrcy@VO&2N_hmh>=2bH0QF6dP0bl+-QuFl)=4U{_ ze<^|BBTvNqZ^)+9BK^KRi~LJ66^l&`+?!PMHqlH!>B+v-Cq`*lMT(hfYOPs1_P~K! zyR_Qefz09}ODX`pf#lY;1b4wMMPS zrG@xo!MCDs9KN!Yc>Iuc%af!VE6INyy0!K7?>nn086h_Nu*G*b(oTfjjxjs;a63Ko zu+62w&L=y^PaeMg>aSZbcQdj?)wJ5_SD!Nf4!sj|3=a&8yQfk~_s#0g#Xd3DM*krmg@wwRyvS`y{rn2MR{)fn^ z&(;qscYYt3MM*=cvsJp%qH3B48vXw-DEiN6$S}_xCW&sC3h!LdYF2PjE^*SVl2mVh znsVb%7gqcFy2;=B7`oWP$5*FAuLkPW`kD-NW(@|7T>4*bMp%IMI!^|9ua0whIBrD zaH3(_O~FeSn_w-mnKl+WlX`Ms{#JDN3(lR`RmwXrPtV(XpE=%AviF+~zDX-9g8!&5b@=9Be!Cuht*jLQtr071Y363fs)c zqStzz$O>zUvr-DFwSQ`Ra3(jqsnt))`!D)ZflD3TTzu#FAr#~T*{#xH4u zbBlAnRkP{hc&OUzL}D1xRCoI5ZAWS}8?$?ZAqjEmm064Kec z8B43))QqN8=hi2sGwKpDf(PbTH9dpS8?R|IHTQyX3>?Rxm{tdE_tP5w}sPob3Zt=#5F%XMXysoHG3HC(38oiLloQet`9E5zZmq8HzIsOs1zT$Y=T2G(Tk#F;V{<^;kNqCEau3Q?*J+ZS(8n z^WXQBc4R9=$0faVl5`|IfT4C zIe<)D3b>@Ubwt@H!T*iRi2T;>5$KN%QIg|&<+`-Y!s;L%^xU|{L5&x-D!_r2!YN1l z^c^T&F5ykD6fl@@hX+66+P&Y%e>k#d2H)s(1Q*^9zkU-ln#PDRbJS>pmVeUfAB%)ByauLVrjULrE+E|Xt+i3J9QPZLc_ zpx*?;w&Uw+XElq1La;SooV;;k#W`}JD#B2$MfXlWFzRq2-<|9OXg&cJ@3?G`o>b{# zXv*F7lvjs)gtjqHJ2uTXRM_e=Sf6le-8F@QP5O@Jw=vF{aazDSy+`Cb7s4B9l0nAW z&*mag6H~MTgtNf}!W<1c>v!Je8+|Wd*`@Ox))~sZ6Fc}+%$SGB%kKit%Zu$k=hfhR0pBP#7 zFPdx6RBSN7T!e%aP~CML`p;&DhaV~8GY##(^{QK-fDN$V8ctFz<4TM)MW?z$fchLWoF$VLHO%IJ&F)~_tgj;6-BYd)j+ zy4$g99qy#cy+#Rhy9#!smBJpjv19*CcaD4bg%8a5N3nv$=mt;$d+L#dZ;lB$<)5JY z`+cjSX2cxzsr-q6y4iDW=KcnR8}?pi9OpUwx^q}JKlu5CUnM?!H$sA8sr7<^F*IlqVV^9$)*%X#hjuXQdE(59huX-98h3;zbJF+KUG|t zx}CW42qT2*caeSCiCNuZxDfzl5Ug*ygfW@FdX(f zI&P1L*{?fA-rUI$K{IBQ6eI1}ZorXyfXzWg6@72joiJ}jL-{Q1b=8xP&&r3e%iHga zr|z8@+r^Z~1SmPE9d6@W9cGhgl34qLxy)`%N}w{1#nF==evxCi_8^E+^=3k@3;C&= zrZJZn{J1bNso3BTp-Jh{#UJ{5(}4)`Q`vLgdpgkJb)FJ)4qgg_@3C*-fi~VgyTPNs zPJ7);Z2zhmH=8kVB#PT(cC)<2WY`%Gh94RNX6eKUE~9N5GOWBkwZ>Lvi?~{Sg^jkl zd%0JskuTyO+Vz~B{4?;(kq};F5N`+F^h2-h_=$fwt_?FkkI;~TN;W3%GMIhNuTpCL zs4)Bi`c@IM z4~&0Fi@Hf^nD&j-y;K@P7`QVa$1i#W)3mZ}`GJ@G;B#d^i;x+)aDD)UIR5iWK53K` zQIJ%kn_G4c+1RY43XG8q3KqvS6z>Z6@m}03^m%WiJuk;EGk^xYMyF2t6bQXY#YhQk z_~aZVWNpD`|2BH0*#h}@s`1~NBDuoq!Rl(kUykr5%kckNBpD&4G{OyZV_99{RgR7% 
zDl%ZN*n2sHvxZRfOQ`U4f22RisXSB9TF)xDpi)9NVKmKH;%@uzm+yjdCtLS!_%b9# z5Yw17R16LaH%G1QcT;RVpW*ZPoR3&x-9NxGcH5_za1NH?mpUAPx;SrbYeOy8hy2s8kW9h@IcI<+ zM0RF|qYtKN4YN*-s>BSxco%T$y=)Mi`(vHc=`Jub11D<#X_4H5`yD+GUh(|Jq;nQ=V9} z-S!dNWE!UGvv|-+o;;>G0!gxqH63w5M4tc0fRriZ{G+K>+|E3+v8$jWXOrlQ+;Pq1 zXQi-?>#4oRYjqI#4LEsPLfib9v#Be!80wvt7gM#OWo!3ZYKlK3DpJPxWJhiMQk@I1 zaRaD!XPDYbh<0JGkCT&KuH@V96I;6yj*(|zP(n5y7#tGWagSf*lvA^REc;06 z5F@a@Os|iPpy(ePWIabXJ$2x_%Jd0QO9rX7cwhyNUxDp_V_7=M&NtqB~`@SOm9Q98)>#%I{RkQ4`1YsTC>pq^5j+wAML1kWa8bM!NaoUWoi1XcB z82@U67Q3}@K@DS?_&{fqjTS)@#xu^T%H?c6l_)8~L)GrS)gw54)N_rEWt89%+z+L= zRUi+a*4W2qS7=sM>wt1DCVsEPncE`}EM1wkOw2k(ZzKcTc0`Z(A88z5N6f%&9M8Oz zfr{G20_zlpt0JkjP&*C0u;lX)bODU%Q!LfYOpehYLdI%vM1@tLqlqd+h+tvK=tP%y=*4Op{v9G)FY5CMS z98OgmquOJYwxY_iK{kFQ*l(vK&k$lj?u`=IEhe5(!5&x37=NJ;S{`SGBdAu0wfI3HCjf*YB^k`}PL93<5qK@8hNJR>Kk7B*U|PVG=+bSMBXxC^V{-ly8f4q9x@?_D2S z>a8e5xW^4=%nGx-Kb3iEURy(rC92 zH5S3}oFSIr^+0#0Z)z3l#BBar1vU;7&*O(%6w;dTqh+``Ox#4pwQTz{@2&-?Vp24o z1~j7OvMpm;-JL#wu>5#5$1*?~rmL@v6+dF^I3EeKFS~AvYh7sb^0$FxDx`Zz4Oc4e z1J&Dh%_O&%Zl+xOJZ9hwKe~pRVuVMsIB_nv?!vh6_lT9oP~%HB6-R}>`Ci3^I%i-D zDbVBt{HmdR4V*g{PyB-^^$d$ymm9IFN=*${mBSun5=JO4Ela53U}WieQ&uc`&EJ+v zLkycDsGb{6WZeWEix>mdisrC=O|ir(95Fcn{;`zE#%J>|X(d^OmZg)wQjTZY*xHFP zd=NyN*Q}7m@W4diDHK&7Fj03t9Cvl*R7J{tqVR&JPC1-fg5?*T#dw z2@2Fk+;G#j)Wna`zIYYeza*rl@2;JUv%U7$qVc6{dSH=x$ts3ng(jXz7kZjxFCB@3 zpcrZLLNuu|Akx=KoqAU5dON-!F%;8;Gh6-DQnBj}7WcxO5vDKptr<6swJ|Q7ipX$+ zRiGI*Sbi{?NXgK3M6Bz+EDB&O3ac2D#7>$ZpjNXI{hUJIK>0{X1 zIz{+k!ow>ws#I@~NAvWs!gFl!@fk)m3P@I`nxY-T$A;a}!?s9%2!dT!^QI=2?9UQS z3$;~&-=zTTJ}naqLQ zO1itILzO^NkE6mTu!UPFVgwjyiX0A)d0cAjvw^#)p+LM;Jlv{C?N?~qGOZD`)zS2( zo@cdNe*DmiOgekLCl(;d@+5Y1z9K(0y`qKh6Ze*rL z_e5h1d{ez1bX;{hQ{L8Iz9j_<48J@~L9M-Zu5kV&%weJ3{P>rOSv-7enM>!2+Qxmf z2k$~eUzo<18DhR8BN}Z^LqD;TyBClW+c%ZRj=R^M1&T2^Q5Ju28L`NeW7y*Okw~Er zlInAO>(nYL7MC(*NUiq-7gME9%?dNo0eWXR5R86$OELdEjY(`%t00!4A9jyQ$qRG{$hUvn@gFO0yBP$N^{{V5q0?263383I`e}Ab;8}XFG(F)aV(CYS+!K?LylO$%t}|NM4uOO6@~{D zSUuH+R;G{dG_!FU(Y-$TcNpu#9XDO~zU9RLnaE*$SO@vIg_;)o*fN|ySBPr2&NNZct!8*4V+e=|kx z%-~*Oq80HJLa5LntjpqSA*iP!O?oF;^<(XwW9o#{cFJj-tb?p`mm@Q=?9JTSq2F*o zsm78ba7>9YdY6A+NwS;z&D?ZS(*Nz+5nD&ok(jJ3&5_VRubm#d z_^)cqVp8_g+upW^7u%MgDEYONb7T!%>hkPjda9b#&cO74zocBGs8gRgq`; zcQa_{%Y($5r^op9_OBYl611|c&z?cZYB??tIrU>23on*=x3hMwq$(7K`xS?Wlq6%j z;qT<~?>tPjzL04!5_h@znaD{Qi(2XufuE3R5G2d_`HW|M%}`noqi%|JwfQ+bL8sU$ zX7#gal2^T}RmJX6Lq*Yc>jQR;I-W zsgap)ch5<@9->l3bVO=)$TM{37+R-i-upllA}xyU-z6VaPE8=?KOK3#mB*%zWuVvl zQj&bQj8jR#5-gRg$QUNrB|LxC65m~#Kl3!_Z28EEGP=0Xjv9}kqL9>rW_Kn0p;Le5 z6>T_Q+ajqPRT>I=pc62D%{S^tSaWDh({g5HcG>}}BtuIautvdSbbWMqgpra-WGi+b zznJfM?v8Z%yGYaN_h=yj9To0vQp%ZuXuJZS8;{Pikq>*T6^Zaz9-4f$`NIN7P~EcB zjeR`-DpmG(N8Ih4-Jz;etkY|8b}=Wfx(7!ToN0Gw*v<$w5PUry@p|e#pXzaOH1}@Ul_l-*kuB$;;I1dy z*XQ4z&<(w@oYvf1oHBUE`H5Tg`kxcl7Y=Tt6wosH4i~*&qZ<78WcCNPqu!!zJ?I?) 
zM);z>L-+zgxPIV+w-8e#E}X3Tc1z+n`wrWd&~5TlzTj?1S8ZYQ$nyAJI+-x5VT9-0 zXFSJU{sQ_Db0VbkmCyL{JC2}ca`Ej6Wly5%;rUMrdoZOg&HO7hZz4Y)qm+AWEM2cT zNi41Iy|RuGha5gN6uPY2fm4&w2Ge_RQ!ak|Jjw7YQD*~GEz0hNUJJ6y3jRnL*w)=T zxl-{tSGPK}Hb{K>MNrGvlYjOqxb4Yh1Z!6opnd#R`oe}di0$k;Z3#h`8cb`USM}YG z*+QGvc1S^|6p-^$P?B1ZTetav%QoNAT8vVebm3mB-5fX`)-2A*>=}fQ*I%(ZPtUesyr$bv8K_*GrCP4v+jh;P8 ziB}qximtp{*$Bf$EBuD`xP8bh6;zBC%Hg;Q{jYFamfhj?j^S2Yb+c6=eZ$>F%P%{8 zvm-yZh)$o*c$sXT+=GiwN6@Orfu0W4n}dHCIC)>}K{dot0Q<^rTn<GP;i4X*MuDtHE6Gdf?k00vyy0*Rj>OGco^hu^PJ zRwxOHcynviqs+YKB~0c;#V6m}Q|en%xk1$sp}iBj>FEiQuFj$VoYiE+vrBbIp3PVv zy!|q}f8miYF%^v#n}>b81g zdFc3{Qp;ye*!O4qT3$j=?->OCiKuF}V7gT_&m~>Q9WUZt{~r5rAGp@dUa_zLMzw`Y z7v6Z zKkb0I*A*D^ky6CMaVeHFX?(Q;X}b`@YihW8*ryDq@*d1PwsFMEQkpg9C+zC2i3kD8gPKUsG=MF=SIUWvmYB0L zvWItKW{bkCL4xH6A(FxQ-lm;7r*7gi9}qiE0Gk?Rwv>ab4&|Qn(z&5b_jwR%U&4-- z5;hvbVrTKAwCLj02BQfHHqN`zSCyY-k(crkpOZls@5xdb_v*#Q;$EsalB+Ru|Ea!R z2)hN(Zh0;aF);jeMM^D%s(6SL?$AHl*Mv4uJ6pw_tLv2d7GN?_e-CFz5}oc8YQB>H zM=3(*NC|MrI9G5RW$PT+!C8o$UQRkna(|!;B_vblU))~3rhl_c&N6|Qf8&@nWsE6|EQMQSPCHTh|7Dn!;zW&=w zo|S$vALX5w#LFpT??Z7l3E156cC({eS08*3Kh&mEz~8cMg_+;m`^r2@#qfCGBM>wg zm3CQy%GR=9y@+m{nlyZT)}emtWCE$&LvlK3+N8b4c{s+eIT z3mUZ#$Ko~A9fZqI;nfL*K33b03(g?3$2<#D>3W4#1=N&}`{ZI8 zj|4o9O)!?9zJ;v`5`J8?8~^>`(SlO{*g|?a0^Lt z9P_-ZccD1PmRDDJf_uHTEE$&aK*5N!5%!U7n9JM1*s|Lsh7(f?UiY66gJ0|aq!zqK zVR7#(>h+$8$qBr>x}R8?&|zTv^t=9Hi+vhji+}}&j@8+1m(-C;hOm2&L9)7|N>RlI z7nlZfHcAn>BnfX?#7@gq|e!UNU?6;f(>T ze_3JOFI;@iMrXXNw;o26#-$c99UVRpTGi)jo|fsu@Rhb2PhIP7bl*VaC#vs5n>|`U z$7(ss@Gqy5aDVML&^oav#7-i();L5TI;gksRq2$B_mIWb(ijF*;7b@<7-b31-0d4H zeU#%RAKqK&P)3oU6)#Em52(U`Qx69^wjH#7B3dk_157@=d9qv{8tD0gwJ0Sx8)XxS zYN@fGbzZ;PVxKqtQL8zswMhTPe*XzkSZUphQEV5g1?65-SNS1+6y|q7qqs{?aj$jx zw06$wX$L~{Fd{vBvA+NCwXWR9a-9#7nH52p_it&v6(a{^b3d;q|8~4vI=pHvMxMZA zpBOLTN9%@a9vd6n67w414Eo016c(k%)m?raGpDVo<19T>vtf0#cT4|?PO(wo_%&yR4n+-tWnY4J&w)sR!kDqu2WH)`145B!5maJE&LAE}eO*mTY8k)##*B z$#*qtt)NS8dDta;L4Jue&sLv(<=_K*{loa%U(q2MvgI$#%Vyg!>&}7ssm{8VOImp<<543C$1`o(hqlwhOookK?`T7He7c17la>TMCAiV9(oonLB z9P$q-x6O0T-Mz7QuF#`oC|!h;PG$Uj3@!9LvzGcni<^ibEv1W*{}$U#;newv$I_u? 
z2cg~GkY8H4yulLjh1|E>xO(wD^ny{QP(nCGjw9nyQu2~-nzBz@^rRVs2aD{aD;AWM z^A`#y^Njf?B*8Gz+AHTRd{5>QIj(9!Xj_iis0K?NL;PT z#Y!U-;bN$HPE*qfKt)i^%{Zj)YXfTo+F(kaqSuRtayLRsgnh+jG3RtxSxoMvACwfC z8|zh&afm~Tfe%+p2ESJj>M4AWkqn=xsiF;x=5c@KRsB4~0UsPMbi?|F7#Yd52{;(~ zeR#@$m*WOZ{US)KiFve?T!C+{XzH8~LT3G-8KE#zRNwBQQq6gMn(O#lCR_nrk2E6D z)^W@m2xi2cH+QO$3$;kT^U@fH9EF*yLX=;>w6+8v&Ud{1f_llX4NKT zkSycIFPBOmAy()YAYtC-GD4)Bg==}r5I!~7N~dQZE+dZ;4{O|l(rAz}uUuX}w11!r zezj_eak;&Qu=+r%exbgt>cMW=8S+5c2mOxNHyFvAGO^2e;>?IDU-ylEZbywmN zYfsI;)I08vtS00x(Q;`%xsz1(Y%nyV;d+zLUVMFb$_>eKk9=U{;E8-mRU^5DJCf#+ zXy+16@N<~B<@KTGXSWB7+mmH1uGg;h%4(A37}Ez-pUniP=Ha5DDG_Wz0F>ZTmlBZ6 z3r1ZGsrcT5njgAMVsL2d6&c+Yd}p1whk|=Pa-y^A_-2U>5sLMu#=4C1BR zX=JY`j*BJ7L>org;t*m7yEkku+Z5Rca}0WQNG<*4MhNd%4ayDg_S8HEmC~B-;m%>F>>)bhbj>L(LgChp7Nh1u7UMS2s z%r(Bip|Bhb+zqPD_Q_?4Q4Yg!mXL3O;BN<@M<*MOBAoB(F3eZeRoheq_i>GCb62Ba z@!xrwgHi^mFhI1rxVK2F*BZO*@nGSqg;<#j0y}p}jyYhs&Gwf_6{9CZ2Rqqyz7U!h zbTRY7zW-mqRb#Qqqfk;XnB}rFGGtc$zonJIRnN$l#8>^6}3*5g~?7%*Un+3&K#*u6O;bXHT;1D zBS*s=GM9G;YJx1wmA-Rm(Xgy3j^Ne7vXUaDmZz!4k5_XmTjMy9YzaYhMM#B_<<~=l z#$txYpxy3Jl2mGwZrb79 zkeTkDfb9L1MdQ^?dzJeKUs^kI$v>QDW1e(kdB1l>9i_0!yleZ7<^_>2hK;KUSG+o% z(WF=yZzlkclpg+Aum)x ziKQejta?D1{*YOBXv*Q-?m%eHJ?JhsBI8iuJ% zAPja~mm`|bq6GD&KA418J1HXg_s&y#45z|fo3Ea`8xdNgxAx(Cp4Sm5&nK7c16eU* z^Mdmeti&gSWT}ecskr9vf6II2me@;5ygQ|RG31$xF^7CunitLSid$WcDQ%LJ{KKHA*jy%@`cf?O{oTEACK5YoU=h2SByu8?xe)9ltcAW>bKN$LeSh-p+ zvs;brTqPNK$av_64yTw*=JvHjMqT~QNr|V&ncyb}nr35bDUqv75gb9dx{R&7G{P^W zJTfB=b~}ma{1Jm9+u?6?45PzHsCZl28SXk2C6=n=<@Y3V;!+pb-BQqj6E%|)sqp+); zP@FH6?gUA3g#v#cpqKqK+^?)TOBf1to ztnz%yK+|$AOqc961RFa6%yPQjRWq+H>?e>S7Ps^~*X)8HZ zOo8`i42nk0;nC4iyPS*!>DGdkqXn=?scWCat1+&wv5Z6HNLWgoP~O%@upuRV4K~8v zG5DTGNrEoXAl-+dbv107A2h`YvPpoJIkAsV{6B1+XIxVM8}}m>%$1vs;@(@+vVP&f zk(#4S%>fPs+%vT$4lu>NM<%Y^xhmV@z&%S-TjC(KG__q;H~;&7dOzV2ufvyf&iCSQ zeXh^@qneg0IFK(WZ6~6p1=G~@HkTCmst$}%moU^=VI8&2$r?WWP^H2D<-|~%N=wk) z;gDCsR9uj*_i`w|AD^QuANOda!@ScM_#7{Ahqi?hU3eQj3@fnZkCV&KReDCt0o@4H zs-Qc|#3r{efxe?f%LKY?>r7nmZ9Xrmq1w#VVUJ}(u91B0x>nasm7a)*1=O*}$ik;HYV#z`LE)dQ%&vAZlkeHH||}$f;c*Q=1%s zr|Gy7l1Q)L9^8a2o}T_v3z$QcQrW8S4W=fB0PlF+?1{a_bOl;xQ*pk0ju-ie&U`xV ze3Exl9Wgw^R=mYjz7qFpxTGy{AM}IdZyXHFnxVRpfti=6sp>$wob$|f(R%XMsbGJ( zv$an)fj+TsPFiMF+s7Q2+DHr9D8634d>ynK3mjD2Ea9X!oacSQlhW&cap81|4ko3? zJZ0B0HT)c3a-1t&jmm$$DUL*%?&5Cw3r{uF+yedMI8^Ir6W?RNqQTUL0API$l~qw} z902Sk0^`W9jQpupWNJGR*iNM0x%TttQ_!<2TH+d>!)8(!(dtd_rI2{Zedgk{%w(yWI zHICTP7u>-zKR6V0%SsbNlV+%~#YszU-Bi&fAgR<-KH6^j^*qqDe!Q+*Fx5}9q>p$HmT9DK(>+fzav%Rz z0K|KT27tZIOu>n0TIf~;|>*Ip2k`tmbnpr&gxSjpTbXUN|W1}VzNG%XOj z%C=q4pesg*dWh}VuK2derXD!>8qKQi`KGr|HW+^O;R3;M!Cu%rAf4si4Eb>W;ZHKt z_IA++%g)WXM?UQ&dXa=l$xvX|qJVFacQL7nS`HyVtAgQ5@tyu|kHP}Qnuu_0e2xA% zwWC>7(`Ed(|Dvm>TgI)S(uWZ@cV_NIm9v^X{j2I!RjkAYBf{OJE!i=OB~(lS3(Nvg zab8MJH_jDrUHcpshw?MsFnzrJy`1bLw zA@+}}H?coIhL>YqL;@~aLu0K();cVl3kMs?o)7_#6dahU5phnn)g=3T2^LTHExpGq zX)EWjJA*HRnN`8%N{a;nFxDTTAQ-hx{61XWRk`JZ1B0C*V-^1`R1=#^#~V|faFn8f znZq81bx`LE(tIC_YD+BHR6jCD?3O1q#{F7=>gEi+5{A7i6bOAtY9K?On^e7l6BgS$ zBDz#>cEAW(_a{G(b*wzT1#%y_1S8BO-h)xu*F0~hsuIo>)~RaPOQmg2mKa@5~#^Lspge}UnT&hKoU5~-$$C5w z(DzCqM{t-`yXMRbkATZ;fuNcn(ZZPFe(aA0gn1$vx<}Nt%xRNe6+NHcp`kdqh_d2Q zAgk2G8CvnLhBdv3hTNStK#yj^R#TtSzm@4Zb{fJJzr4-_)JFj?W1MzlH7xV?Ptu6< z81tMEVdm(uR~xg?ejzVZ@?Z6oxGL%DFFrz;Y{~SE6^dU$NG795zv{TL>y8FNXz$@o zs_cl(s-ALYV7nW(iVYjuczdVdy0m{S+_!%^DiiR+6ryhl^~;IO)UDJ^jhhKe8o;H` zSg56z+y(DLf`!pNv8RBp;w$$eUl-n3o;dOux`>6@&5a&doDvthRT;KN4R9@G~ zoeum*!y)7M2ziP=eXrL9`{1p2D8I+8jw}sJNDJ6(EkL-V!HBx?CIa*&Q0Vqo8FBaD zq;^SPp}HE4A=y==e1uu1TcxkS?<1wp%~DME_m2&UZo+x0qUbYE-KK|K*Y#voY^Trs6h*btHoEP2T=_%oT! 
zUN~+Qw~jJjxOUXN#e}BIfULa!EBMtD=YHsk_a_aKG@z-&sZX@PGN-Z$L$h6RnS!p; zBeN)@M!7r%@O0Ix%oKY^^%`;C)heRMiiRl-w~LUEyH4LLG5}h)xrhWui%=ppsIpTq z9-E^@g?}#y@s7EgV-H)@A7&02uOZ~Q*?p`kH|)j|tR_ZRC*qM;i0_!Te0PMVGk8D38_o;oQMIY}zZmju-~AkjPSC$oMmBaY_riU1n}sdJI_u{=lC^6Lj_ ziEka{7oy&F%%6B8``WI0AV`REPg9^|*o?<}c2%q?jk@>B&gO;zeLNuz`>|3Jq*mGf zy4PqoSP&i+rWlxC?QPZY4#~yE3sF1_2i-PNe&Nv~fnVIkm|qiljDxds9~&Xc4B`EW zAtt^pDcHDot)Y&@L3eU03pejrg9}~KSTI(tOqd&-%Mi|8Ix&Q+m*|Ug6r!*Uhx8s- znPVa|(3eeb$*v-P=!7?1?5drYBj5(R2_go^jr;|Urq`W&+TfLAqBZt>?)R0R4*bs; zkZ6;1Qn-_VX6><>F>|Pr>;*bsxUs&<^7H>-J2VHBAW5)G{98=OD^g zDHAhyNsU`aqtrAAbPH=A5BW5B!zEi0_gAF2VoP*|WAstoof90% zwHp3`N@k1-A5$_qADvp>Ik~Ys44(h>W(kpe?YV*L9liXMUOuZXEaBXTQDvep5jp-f#S_k(TkBzAm!2tZ(J^N2Y%`N{HEQeDcQ()I8~&5T(cj zySw#$HP2T>4cZUV=OVd}ojLQifA@^bkX{+bcZ|JF{dfBQs9z54A%i^1@GM zc(SAT#-xfn^8kT)fn;)!%op$@zYxsV@b%GvX=B(?e>i2bF;n~{V-m@DE+A;?uv$W< zTew<`{$=+eFrx^@z$0Omo&hDeV+2TZK*ntUCuJTOR6Mm;kNf$+B&W;96GrG3$vUeSs6<*tFbw!@wf#V;WN z!=7v|S}YxopS{vVyu*G9U+w&fpI;5Ph9~u;GfM0MW#k}guvPsd^K0b7%$;Tv1`3DFMdEjLh#&CnA1 zs)c`zKz~Crmt@e>D8?)TlMZIaX<)8l()J_q^(5PSM3%6og_d1%DCWIfJib0h?YMDL zzw%(Bb`x$bUvEk$Mr*$S;rm%QplKC7D@~!-Be1tiG|*)->=Ky&EQ~?-$6A>S_O4JCPz;s~ z=A{vRn-174^69OLCx|m^EV|jXby%JcPX7Kl#dAJK4Rk(%HYdQ z=F&HGJp^CbGx~P$bE`NvL)%JDhgU7#Jk;MFl3uGvPP%VTbAEx&!!ZjW=PT~Esi5#5 zd-3fk#yesBq#L6ejGvY68vxguVa~_MZtV9A!Q5(2__Ao^!aGcV=E=Rh?(Ml-h25>tb% zl)vnv5ZIre=_5doJAC8$sg*}C`4`7%D1inR@O^4!Vk&VG* zQm~IW{^8UkMgq0}Q-8c=)#lFnES?cAHDui>Ph0aq*3=k%6 zCNt!U%6^QXgx4$n-~@_J+<1t<^k2NNVV5q{B~{{uPKQv^&a-JQ8$Zr7%N4hG$<#c` z4^%1prD|jek}?1VoQ6<3v;p6*Drq2m6aB`xw{CrReLXhkDPr^xF=?RcHsbz0?u>hq za-Hj1Dl8G?`Z)7Td9!R@TwDg=n+#w_hO&g{i-q#>XUsdSr0zmKy2%H1KN4B_ywm-5 zcwar>pB1~dqc&U@`xm0Ks|Jyd{uGtchSwzOW?FYb+aU*lT1i{n0OPf$uf_@`%XNP0 zkfYk1=C5A-C}Kd8E;oZ=RCx`5Yw@$S9X%-)eCcqTwC!U0Y z%Zy1Gd>@!u4MmS4&?|85huWC>1`jux&?4lH9x+??AC#B8kIPmVFl#z4!@lXSn1b2h z$F+Ny9SMh7ABG(c);)LQ4#*VF{APh^12d>d#uo&pQJeoXf?Fh+e`U}dB<2NT!0u^Z z&jh2Lq$Nti(%pD9pn%B4lo!o0dN4KKdZ56M5rr4%DviSO?)S&xXqF712g0nDWQUzc zzearg>A_f&L9a@KqlrPCn)@!tX=zugTL}u2bat!}$>JFMj(3JXwzJ#pcS~sjnbq0` zE$@t)G(~Y!ASnKllnkwl)7t3AGMFtCWdo&iQv}lV2-6PbPk=I7Ak0WEClZ^Q;KQy4 zv!kI=kGC%#P;7#4tP?s$ij02ignNnxMO-{j>6B(SfEi^Cn0eoqOEUN^RA8gTLtMlt zYEh+4X4{T)NlPQvAHqNJVC$aD*MUc>!BPIG(&8%2#(L^ObP%Sxzs(aYh(b`lLM1n3 zn2$F*ir{17NGI9g)anv?uJR;-ILiA19siJ7lVW?t;_N5G^#slsDzUoueBa5KgK=N{ zl1y`pG=m_GSwL?4%K$#g00v~xeXH!&qwOiM9)HNEl66iRtabYg4OYJ@ZZz`V3jg=H zk#y3fi@A5T{4wbeWdYTz3PI>uBt8*JAxKc@GJYKb=PclP8#3HlC%P`Up#$NM?)^AQ zGQTSwko1phQ;~q!tLTD(1w(|kgM|QWlc^26JcCJBEjhA)RV1_fUjuriwcDW62XMxm z7G}GT@fse$bDcCJ_)!y_COzJ&)WiDm_jAmHm7%LdNsNV=aUJq@V9}c&__tuEw*&~MS0|rra#x>8!|4RgHJbw0Ww40OC`tw!GFKGr>(*9!w{y1 zB<1QM$}c1$O~$$4p7b+p!;{9oH;0?(%>c7_f+_OXiRn!3M5WQrs#1vHqNe3j53 zvmOe?{&v4DWyjC&^p&#A{;y zH>qSk#1Jk#|H1vkfoi|SIezz^1;gs_@up2u53vN<29@x1k zWGn%baB4iaRp@coyTiTJb@>A)8T0g2pq8U;v0n^lJMJV(`$9&?m44&!azsys=|w?O`$EuN5Bs7vcLpWsUf4u^ z`rfr6e4FyNbRmp7G0{a58qZgx_xpZ0S^4hLv$ta;9rug4CfXO?N@646UmG4XE z<4{(Z-YCRvA(3Zg89!^+%LtUbfKpCw0~yxl-bR9eekOT7RNbo>H%-SB1VG0ZOZ-ZK z5^M-ow}6mbGUS^?DpefOv(4k6-(I>III)9aWhG5*^}|Z%I*u%H(fETu#s;^stJ9gm>fSnY<$)& z7$LvLR@K_1$gk9>cG({)b#p4(6)RrgU%38L=nMq7xqBTsM0$Iz+=?=O$y(Xuoic(t zQy)>z+H1IFQE!_^X#P^pv>7`x;hogPk2o2|PM&u)cW&zl7LZ(8ARGVSG{Q zqBE`l8(nqhUnT;x>kDc9Z7*#y?^+zjAYwwOAk6Xy}yn zKbL<-HL>0)GxGJRHqFH=|A~uSD8-VJybDtq3HH(f%zN=j;#%n;>-Prb%JBLDZLM}z z$t$}f?rHR)o|%i`EXx{%??cX8Z6Cx@%F3JBj2W_;CGL?VkF}R@?wScJaU;@v;qpD}e&CEDA1JZX{{&C06&?8ARuChq#HapMudW?#Gn z0gl=VC2R2Hg3CvF^G;YfZXv^j{jvs1jtv>F;QG^og=_0m6w>A_&RW*d)8b<3L6PBp zKxg|=$W4pga81lHuTPOQqTV z84|7Ps}O&ju7^sW!#Sgy^15b9gF4+(W>t;?ViwBOXa|IRLu5AN?Xe+?-eVuW;j(pS 
zkAt`!-5CFfL1w3sNZdF~D2`Z)uF$VF9N+3X-)t9T zSh{_qS?aO@p#$1-?~GUs+HXeh!k@Y_-?2rvoh$$2*o5Nl)Yu`O3nxCqEkjKN#^eKw z{Z^`Tn^IC9>;=eQ$&C%J!lVsYnOJ0lmkcoAle*Q83`uH&hmLykG|psR@}T+ zpgx&l+;8@r%dugEkLyfy0nNPqpEZ@sZbEYfYyo(9T5%OWOd!5VJJ|}^oc&u zUMLSr$ZaE)5k|Vn<%EtlvMPbT;134+))RGZJf!=Ta`5TiU9vi+EnaG2w{)lhITDb0u%Wp^g<$ zO31hjg%jS;gJIZUGor>fKPz_#%g@;Vf&{6G498}0f_ky|mU^u1T^?mDFrp<;C&KR! z9g5YB#FcdHf4lXzbTXRe|26Ll)Tp3R{- zCGk;`5{Fz`@{mQMljK-VDPEu6?8H`drLSlkwhF>^WhOOhh6Bq4@V9{F*yn2`H`Npz z83yxfro#kLxkVp16<>aq6W))K)Lu9>s{~Ndak$I$CgNup!RdyPwLmuU-lccjo_=;W z6iof!m`f*7ac-19reV6ftu*K0=f7y40q&5-K_T5&>Cxd$#F@walRv1PHt=Q=3>ML5 z=(;cqsXp))XB1^fY|SWzoi|6}8}zK18P}(`_2l@Hi1fxaD;9sZ8sD%T?z1SC-{5Dp zV!boUS5M>p?qa^&Sm;5EhO@UT^ zvoE~Ydms3`F@rd7Ki*kEKHJ!MG24*b8qicb-pD4)+jAomX4Ds=e|X+#Cro*GUUa?a z6Qj3VCv5EyKs8O#`>^o1L6NunEV?>cQMRs{-D&8Z_6i}N7AazLQvU7O)J?6F+m1CK zEL^^xHxy*j52Rb>=UzyL#{UzLNyZ{6m+`{SzFj>t$Oc*FS|9a z3kzo>sLafcXR5&U(Oc>E5IuvWj8VH8to4yIP?aK5RrtbPFg+<>d3soDwEtGJM;AT! zVWv}OlClo)>8smX)p>?iwZ(+!f=9MjvzyF9lo#a}@V&>dOLJ$nQn9jfC0Ca+98AAb zX!75TYJfL$QZD6-9Q``_OZYflRac8CT*GS!`ejp=64xnhOl1m}+XWPgmZ_FwI?ppb?Bj)L*qFOBQ|Qo2D^Qdo;h`g3*)Irk3L1WAEWWV?(c~J!}4yP zhsw^tCr!gdRL1plQ5u?RNEwGZ1G@>tV&zetnm655blZzL4Xd8v!auObs6W^UA(gP> zE_U}`c%z$=9S;n?q;0+tnsHA>ez9S<8)N@o?7soFt^>6@??j;xWG6#tzfSvkjJ$yh ze@$h4`s~Gwh`E^{cqZ=;BO2-|d^Ilo+!Dgr;P6s({OMl>pVtgE+Lz$8g;3kZ7p_i6 z&ZexMixAN?q$&LMR`tV0Wa$o6PsuSI`F}@TtofClk2g4{kat)&iA=n~of-+enI*h8 zY<#Kr-BL|KL|AK75?pR!7!D1Vp*(8)yzyqbCM~qg@6Y3NOp;Zp9lks z+H$>i*N~?i5A1hZ-o=l>l1mjC;a9n+XpF_n{AnH#b$-0-a&f4Z1Jj@63>27w>Q`GJM&uW2ikuZzLKPJj;9;p zZe)F$*D`1<2j-th_|2>1+*8-RUAS|l?HA(j(dtjH|MW4CQ}9Kw#-+?JCRV3EBK^8! zL~@5yl%3lGAeMtIKgqA1rVf4Aj7(6->tuA^RQQo^ofXdmj9{%$l7XhHEooU#lbeq( z+2*x>yauQAA@Q>a=H#oEUkRGA-?KSm!UBk+z6d6_!}0nD<~4F9t20<?;>st0cL~RTvxzNXqDQ z5xw^!-@a>wE-|K&c&>7_j6%OfQeGnk)dlNkuc|!aS6f1axeibFObxc5V&D7B@6a&t z9aLR$^XwTOxjZ9*v~=}lCp-Ent{(M%e2T$BP^J;=7s%bw>9)2{!iNxA^er zEFrmi4t{oh``3oxwMNkCoAMJ!)4lOZ;-B7E@7o=~r-TYJ3lNMqfNe+tO%Bd8A>d2S zAb7;5TdQtaM-%%+MW62A2ateSWKJJ4Tj;xnD7?)dK^aAAUq|e|6VdfX;m3sRfpF%C zgrIG%909r)UB~Pm4i*FC4kNO?WQF56>PoPhFR8qFrMEG%{lFW1W~`uto`7_`6F>&v zgAjEtmG&!1OoWw@5vE5PAc}*hNuxkX)HDRfvoiI z$gk(ZvBP&_f6zrAs>#jJ#{Xt{esf;_)_Qn~(ThTFs%wKZFq4SPha0+1eBvFzeK#4# z4;jEW6qC)TeeGq?wVV2(N?noJfYFyx=}0{@#OydSXMM}|NpzhtU~^EI?~0Ty)Y(0W;9tW6 z4W@nv3SB!{AKTw`r7tlC{)DM(;OBck0c0o-WXOm`Oxtu9t`X0Au+66+F|fq^+jwUl zfv(@qH$28J+GcJl=BEnJJW~Ah(}1ynbbMo%e^!d}Qp82`G+>f3qcfoRFq_d}^@U8! z^4yh>{Tu4}C(jdsuU*v&MQEg~1|QXv_=e1D6l(KAFoz6W%sA+$5KK4x)2|=S$B=+Q zL%OZXAkV0_k`sdy)?Kgu|JFycShyAAQ|rr znIA+XQjmM@LYT30op+A0pYv&j!dX?P#(v8(o8T*fuE}YzRVp0)7&cn=F1={z>o+)M z0K&aP*sYke*DrjwG;b<3zO(x$?7XBChl81&se>N-Y$-h3_D$F4Oq3%Eu!ce}Be}g~ z%%;>F>t9H-thk|9Oc6&o5%2I%w!e=H{kAOpcxfsU9*y0&qcb&;xDzuE&subMB3 z-cud+r>DoXKK*vdYIK=1MrJMjyEBh+IY8ovxjjzg)Y5ZdfazJ^jn048{^^SrxuV}b zhP97w7rx}(o8>SbZxlklPc|TSk}vEMI8xWWE?wE_F4-T*g4eBp+v*W6hEn>Lrmlh# z%=wMbrGJ1*quoiP9H?#sp(9ti2{Dunzr!1Twt6mdOg)*q^E zG~>3OpK4@`WDai1eM4;AxyAv^S%s70&W|HRM^KodOA`qjVb|d?*LE|bQZYR`$v!$2nUk)A3zp^1F&d1i*Q)&~gKUseq*y zjdx!*h*y6ZoCwZGLy=`@KI#?oW=KDJ2<<0cPaqb z75nNTpX?D7bg~5w$klW5<(%bRpStqu?wo;3KYk~zu?%FTxyyM_x(B*`A$Nh(f$%Z>ud4YQH`?!Cc;e!bTy>8@*U=p8D{e9{GxerCO+~R zUfcg=x4dwKy;JMuM0)!2`*G{s-x1A6rGwjT#J)ELaHZ5p=r$D|T4JiS(&@*6z909f z7cYqT))+p@n6Zm7v1MDGkzhPB)gd8X);P6BAwCIhWzn1{gLQXa)oXr1(1kv+q|S

pN-R zKm9f{1S?@i0v4|`?jknw`x=`{whQVYu2;5Ry*Yhp_yK!*+MahB-QT0=W`5XMvDz$% z8+d$!-io;w|GvSHPpr*);F%?o~{o&;y1&L+fCJD#sC zz{XQuqU$MVC&SF-gilx2t4_eBR}%)}%Nv0Ed=4M(v}E#iL*TqYl@9L*Yln~XNQ3cl z;G`5qOuCD?nsfiSI>;Z2X{caVTT|-sS^sJxMrw3NvNSK77t-WkURi3x7dhEA-fh0H zk!ft->Dd$Tb z@PLduhl8!{!|flt+_f5y@AvM8sZ)Qymz-rG?*A0pPM8iHg+C`;bXNI~PZl;NS8#Sc zdNQc+q@Ep#3_&M=@v#sKeih0!@S|UuLenrHzp42+Y5qNU2z%w#8z1|4Wn09n=eDmp zjJ|3A!MmNL?RRqR`#Es z-djZOTRFAc{Zc(sZvPa9J^m{9t~Jl@=`+37?7!1Fe;1%ZXlJm&emKYJ?!X(TY#*zrG&pWKLA_|K^<9@>n6&IwTJ1vOm>*k`Ep&D{@Rdcb*+# zbWsZ=5z#&uWcVuNHV^*kyKpX0_Fjg4^&Hvol`vtF?Qm>yb_@mO{&4dA*c5%l18OzFXvVLjHy%W%)i9r3bR;5Qa zjYyKAM*IOGLw7$O`-__a)4JN>)at3`nOPx67 zC#$Eq&04WBL41c+sL^Zt`L1Mv*O8v(A+f*lopV8VKW($REZbj`?&CwHGa|ZYgAFwf zVnc;%r$V@5+S}xqM?uBi?YumI(;?7nI91jOZ?9rUKVR?Sy2V6%gOV!M7lKuH^e^R< z);7VEZWIxq_3@P?4eH3#-ZDZ&HTTcx?DBRk@OS5(YLTA}AIn&cMgAu?KJO1f+xO~! z!0K(9{=liZo#9V_i~WafBWs)08nNg!?_$f5$csh0S5V&I^#kQ25G*VO4|S4ABbNd3 zd2P*2zteilY=1UF$}?6wgUdo^9tM>XT;6_y6N8pROCz1CLUafn4Xe-_NO>FhZ9sQ^ z#e3Mnl4+DX zQdi?;*1yQsf^+Z$&tm%wqCW&k$_h}SBF5ZGp5x2>mm~IyW!zlfTn{f-3CbEi)xKCA z02TPW)m+9JINs^0lvxxsvAb8rbxh0@kYI$xvU?Q{_@=N5E57$P1HW$ve0~7Qw8$sq~imsO)AEA4f zux^yLHn%rs15zqLc6%OmQ{PLO*w}^~$uGvZ>{+R-xkq>FT8;OegXR%uLYd@Lm;s zO=-6D_mWWq2E?p67s}EHabs#4Rne7QtnguoLqmWtCWqZ7uJp{F^5iX5puR zzbhZB>w6_596O_h0j6o9J8K5L^S8b zF~P|Po50cmI6sp&I zx#BobAX>U>M?N%}IyNS~vZD5czp0pSR1g!z=Q4r6MxK2mh}qBs!mS?ath)U>H@!hr z8s<2?jz4SXNNS*q6)e^2oq}ozTt1601P<;Cfs>Ye9bw^*6~&vRKWSdA+k%|~FF(1} zw6D!e=@M=d{f)O^;kjb%jiKKh>r3 zY2G%x>%^p{VtlLaM`&C7WTZiPgd3hF2Q7Qu=_1bB;zo=j;z}O4&W2Wg{19^&ttSb6 zT{3FAQe1!<>#YgR488}VV? zkH>@>V0DeF<#~OC1}4i`hW&0Y2DL9(iE_Ce4-DI5XFR_&75h%i=JjoAkD?=kmE-SF z8csL7c`r~jcN}iaV^^sJJx5j0d!XL+P*so-pwE~1P*u_FoR{Ek;^yy+^cj!Ta}RgP z`BIB<-yT{?N#Gh;r@88aLFGt#)wjA1vm(cvT7wvW4Iu9tcr*6*yNc7K@JDG$Y7Jf) zku^qd%X}NFce3D?yHD&JdZEdlGuYTf7h6XrBvZhuhe_1LLzrN*agU4&^E`Wb%QgqK z$(P@=g3ENC^pi`jcVEFZxBskZNEY?M+}6?28ugZ5FQCn3<^PWuvS$1p|*sq-e~s$rS+gGK}4O~v0`jbG>MrTEOA zZJ`O;xXdzevY^+;D|xD_C6D4A-KzzI8+6w=!%M{+iRlZK8hllq4av&F+g?lu%XnQ( zNxr-wv`kyz6#Gp+#5Lb2SE1f-YV^eY%Z?C)W@?30X{x?j*foH>_(pfq1BeIQ?pbf# zIe~#s|LHN^vD#EC!IdoGs~mSlh13c!aJx+U-Y;IQI3-Nd<){?l60czs=Nj-*qPxEs zHx>E-8C<$4VSxq5slz})m}{E+$S3l}kV6W=477Xx7e@2jIfTdyXZ9OamX z`A+6$)c&GfR9>0W3h7b3&DR3=Xw+|ujXI@WT{AA7H2D4nrs5dzDqiohp$WJ zp0wOTZHe`gp2^vm-ITfN?lk3w)Zsp>*-JxlKC*V}lA`f*%bvm=#Ric*{hmW>8S$v; zWYY_U(`L)~LLP5B(X{?R`68u)cbFL%R9AW_?tB2t3G-7)cV}E2$LoWsB_E*8(*eRfs0^t;$9^I-yzGfhpF0gCrN2z*-Wj}D2;5j& zA?yY=inXd2UAzAKv5Ke5i4#&NVyD!ftPuUGB}7^YRTkGM+AIbT6PWg)y0|@SzMNV8 ztimr^%UR^)D(Q)12~bTWr~L=8q!m~aDw*p;Jtx>36`q=&-duB)FZ_b+@}hviFQ6zP zon+9l?3XP|XcP^mLI2B7#546{D{ys{Q6$;&=lM_`0o6Md?KNe~xZJ~U5Di43-s{=s zJ7zrO$H!As8@#B3H-V~p!a3e*Ki!1)w%Q<>g-jkGK(Z)t|3+c}e|&ylzdzpsUbvKp zubm;PC&ITe*HqGWJI9|Y77COlbunVR`fYTAw-7vB6(x;Ng^=$DCZg`Ew z;d-WXPdUmQ&{X5@3(Q}k9txpu?2E^RQgP|Ij!`t(Fs^Z%W*tM5jXe?`3E~g6cz8e+ z9Au<<2|&og%Qy>jEQTL_DJp#tn}VjEqe=iq)`E+hF@>`)9UmO1xRzh^-p5<44E!F32c8#9gYD` zM}w+jXl~u0xEPx&7io};;k@ap*$}?O#nipsEVD-G?)Wl5+tV32fgd+OD#3##k&<*P z>PD~?Zwf6goQjL05o0sBYbFLt#t}#Q?n6|srfw!t4VrFnGqDl_8_i_JRAp`mpmM{v zj-DoAr)iSIQ8bHinsqp^A)0oj3D_PBG7ibu4IVpuZ>R#?28%~SN`Ud1ReL{?kAPw- zcfh(}>PQYqCmZ;&nHC;J)49Y2p^R~t&7TZ99c8T(`OI@bb-?~kZ724`3g~AljO_<> zYXT;@0~Ia_s(jA>L87VrrtXH*jIYvC=0L8Qsmn#)t8!{}8t9&>zcmn2)ZOq2td@J9qw;{(PQE`jVt zLHx0x0_JE>Jgq$nlok(aj{}9r(^O(WDioR^+=;Ss27PPV93}7eT3{hSqurmH;|Fo6 zZ=!t5Zdeo16%mh%;evN)i7~*nXS6F9ovkm^dSYqqakP>IT23@A{02>S(3!m=Amo=B zQ6r2IQ*OtXZQKz`Br|3rN*FS*9RKMY()e^J$T*VvBZg)j;?RDHdo4g|S3q2c`|0@S zt*2>jVFNi4Al;G|PRz1bE5mhsQX@tJ(_DlCzJ{Kivym-Rnwh>3kYVx~q)~3uZSCY1 
z4N|!R%DFsrC?4dPaE{lLc4&-d9WnaH3&H=JJsU`6bB5#C#|){gbDd-FEjd;Q zAz4X|V;mzPbkeetN`=y!=J$Ml{Qe2&++Nr9x^BbK0=EqJ)wc!HdDN zX{CX{$h8Y$*mE;Mg{&+-jsXZM0%8&cA%;OzzKIYcATw7X^3fyIPzXoVIcLv>p5W6h zozl5{YU6tCRm7M6{XlBO;hbi`PEp(;NTw$b+-w&%@~5(YMM8w5CkMqPoPvq;P*r~7 zu^`Y}0nrM2%`;hCs-5ZB{8YXPOq0!Ml(ly}oTqeMz$W0e)_-6dK@j@jmBg>{!aJ16 zpCb>PDMq0bQt0S%gaD}v*z^{#}%E_2;LK6~^pPo8+>vYe!N z2QoG5aN4p|BTh;KGtfhT@OJQVggp!SIzd5176-NV$LhYh9(JD>Aq)7TODfUjTFKZmum{P{Y-d?L);7G@R} zO&N=*M)4)Tol0>5Q{O;D(?zsGFKloTEHyraIrz-1Gxs~j!W5-qf4BJrQyt#`UreML zz&xcnM^Z>?!Y7YS;;ekYdE-cM=dCNgEW>5(W1<5!Y!YD;4AlN8)|L`bJ3(jt2Da(h z+NT6G4uD>X(y6DwuPxy0O&}%$!W*;Hyvi$|zoO+INOKh4zXyza$~X$AwTJnz&(F-q z0+XOB-gRN~eX!vdYBF<}BT!IYS&Qa~tZ)=}X16UEdc{Dc4TK?$Cx z&rdzuTaA8{J*xmP!zl-L5DhX|)B2T!y;fo)R~vho7EZc(!KEo!8bTAnFgega~lpi*@&(64!ZG zrE(LtKD*=vPUDqGZ)fFw$#WjmH{tM4$Jjw47hOIWmeziz^pqcTBV%Up3m;Ir z2*Ux(BjC@dx*Wg4#1tStaiL*o5y=^DdNgp6m%=-qtN%7tJ^=pny6E1Fx5>ca*h2na zfb)J}D-y<^rv$XPKj96gE))Eu^~9Szs0kr#s)=A@&uS18NIi0^{blwb=|b2k=$CnL z5bu-d4!YjQ(L$}N0@94lB5jWf#!x|)-vtU3XcsE`Yvd{Ke}#hr&alZj*w-OTQMu>f zRRo@8;-+%~?{)wnf3xTICGm!RV3`Q^U~esd!G=n*Ud!eu+5sQ_X03P$|EHF#MUvdL zD4>lE!zZKt9f}fg6rGd8@qr?+?!#FIb>SRAre@g^SAoPw;L3M^-jSc0;IWncUhn9C zj;dM4XF!#<0lFwG1`B1~k=$Y9C4=X9#G|5WpO^#zH&Svw1cK14qQhA5@Y=qqYW{W* zsBKMsf80;2o*F-1digV-Nd(FnH_@wZnP9 zTWAtqtSv3f3Y3`X$lAcedc{izo=MEgrs*Ct39Zgi%F2cY(D@W3SI!i&0t$D+WFCvG z`k%5(Sp6D8m%^NvYJxA0n(}+&L&K@i3y0w7iR-&VL9cVN9C{M~XfgRR|{I0)r zw8-9f?jU8=#-R@Ou-bs?VZ#gfMQuQcm8%uSB-1cGlITC(A1jk9e|+s zVyi3pRMWEnef?mm>-u?f`1TjbOTVw@_4%~vwng)Qvw!w6DsFLro899KG;k9_zceo9 zBX+o+v%2f1H6M&wQs4Z-dD|LChawlb`I2q%TDaz4_c#U}kX%F>A+7y${OrmKcB>iquWyaNvnwYz;dv?vR})}zseUj z*=<+Wa`O=859-MwU*ZbSQb(gO{0;#OpLzo8czC8pV zO>GWnKo`mr#`t4Xe4+Om)3CY#*YR(t$rbXM`Gpg$vGB>pju6Bh)M$9cx%{_wu#MP9 zhuD$RF7jl(YdyBKUW_98Se-^uE&&y6{7S7**PPuhmz#w>si!~WDc|_ePpY;utzd^^ zF%`iElj98plm8^bG0+PW7T8j>V!rAzbYeDfsrdDNXZh%CU+97#XIoeW;h^JYse^%p z-|QFe1pnI`;X1=51Xw(V9gh9cS^_#p?tAyj+LR-}+4clPqis{TJMiH)6lT?!#e$6v z>G`V}XGLN=d=Egc;+|hdEEUj|lJ~*Q%`I33*r^V_y;Is5+)65~#guNrOO|0z8vnkc zS*aInyk@H?t6>2AR_urJ8$|&KiuQ=U_nv?28+h*B4IgMHt%?8xymlWl(AqKHP^I7I zWmOxARNvvFr&lK4w#*KM!3p09RI79OF8=-7(T{WB=;lf3szAQTU!@nmoW|+a{ZpNc zUM%7wA<-WOL$8ki-Th9ddLLRl>W?b;S#od?<9#qyVU`Wuur0ww|9#~61_2)Q1i<1- z*`w{zoyRE%^k^4W@g-J#wN$V-g+25=i1)-<2`6D6*PrCvx;gIq2IV;LWpwiuBGcs! 
zTWVtfrtMm&elR|UO49_Ykp;o4<7B0hysEEsQn@2)Ui}jO1D$DOY&w2)D*0IqOu=M(vr-G<> zEuNk6^qND98O$;H8Qv%HioOLx?K!Q?zFhd44=MAq>7i-6Nzb9Z$AVWrq4^JT)ux=x z><%|&fG;m2%&()e_l}eTz0xG5ARj^B<#vE~0Gi3O*lZ?vpTCDy;PiV=C#{{vdGBHqni-edUt7}!IMJ_gp+*mAe(z2yXvWpR}bh>-Z}=I z8C2)`q9#B&EGH#DF3qiEd2al>yGVevS>0@fovWx9K%9cF8Qtp{)feXJ>5ww}{#f0V zq(R>AhAEm|pUs<}ILCUS@FB`Zs6M0uaD4^8;EWaL%+RzPyf3jm7ihkMJxXpEf^`N| z9DXn&PZ2Xg_;tQ4e_pQ#(%-UY9%lv02)-AVzC#(>1VQaA-49PLDRcV-3~RR;N+v9$ zGKmeDoK1Sb5{IG#0czQJex5m5Q<(FLf@jDSPuVi+((F+X5z;z_j{#l)M^FV198D9BiM&1i1d9w z!e!EUR=Saw%fsrX zYPq@JI{Na)IFV_w`s1l!Uydfv{(ULzjN1i)gQRf}NvZ8fOl1H|gQw)4ROEdCllxcK zDyB!;`Uk(D`e7n@5ze@r;F7K#qs=q;5bEIJfvq?PTJijlQrPmGn>uAqZtEG1(Q;c% z^cVWt@JNb_bveEHdiK}lZYk`r%bF7-HqeLO>#FmomgZhGLh6vlGB2m2Li2VO5X{Qo8rWH{6$8a4)cuLoCn5YpnzK3tt8wijBiu`9$XF3VYhEwUfRR`Nwka zy%ZGcspctC4O}`ZowVgCfO1hMYqd#9f3kg9Si$5e>61=cc1!E5)Ql3EQdK-sBhVaC zGi%k-8+uMffc4{8grnEubIq#)uL|ug^V8xXrn%ZbE79Y8YBwKFKNR@&d_Z&Q z^4s!%2_#vb!F^?(W}WfmsxvtGbdcu zl6j!4wEl81At6DV6}Y#%7v1BbC4IN28mLPu#(m!eVu6W`7f_#sgyhxXR2)8V)K^2gBI zqQo(dp?M~a@tQZbu2f$$K$-l;Tz1CE#0qz!Ixpf z8^rtYsnK*h2?nkMp)vX_xN|9p*GtIl z3@QgwJ4e*sbGEjZkMVT{z&?JA!@~%+>B5ggK4ei!07R`%@dhl0T+=Ym>)FQVnuIGw zbF~sm+fpJ)rDq)%BP!mjPJ|)G#Ck#yE%u6vXqK~r55@Ngipd9nPHgd8uD!rAD0 zcVjlgpKYEfA*kg@!y$MU4f7^jeDheSdC)}d%iWJ3Nb`>>9CAg21euY zN-=y{gi^J{(YG7Gljg9AoC+@Ava@>K6&eEZ$MLO(`9m0sQE{ZV5z`PG2&()T^45yp5)Hrcw&H%33;$Z#5uOQA@9d)zR_TsL zxrV1LZC_rN&vM?&A;PawABJm;)QN~=ovl)dj}_hxV~Zu;=*_AoL+_CMg$xNs)*Nq4 zLtgU=YY*#!RnOt(g8rFe^?JqIEZAno1Hn13bx_y+8nAB{Q?Nbbn7j$ouqI4ZXs7?o%fV^5QUM|3svAZ0y9|sNPfm~g zV$0|F6V5{q5R=^aYjbzPP{hGoOe{@HR8htX&o{2YS*Cw!byqRy$oKR)7sgZGG3ca| zOonM>DhU4vTZ<$KYcq4Ls-AdS9f=TbpE0)TdF-sQ98}eQN6ji`8usyvA$p}iYt-eb z^`YUfD(gfyn;yEGaGV0dv|ut88Tc#)7qQWMr!?qcp}{e zwFAAAjqsDz!kT<-eW#K|@j{17^q6A8*l=I#-ci{^kW<(G-PYa^x~dZ}nsGK)q30>^ zR+QB^K=BysB21BP_NyDvWR+mS?`1~5T5l0|04Wsb{Bg;AL&T=a1Uu`gSNBB9=jho| zODTMWy_ueQ+BUvjEz(0k_ZZx2fmiTDvted{HIi}CD@(h@ARytNRSWI4W;*03E7tBVt@wr3n4~Inmy5mj zU!zI5_*T@Ae#o-s;byEiLMds)w>(PL=56DG*jgQ-Aj3O=q1$IN5pCKRhBkJqG8G8{M_6 zD|H>kHeDJVKo1w0pMMdqM_jrVb%#XIf8G^rMTTDD);XuG^Txt65TfO$=bM>W7|oZf zQM`UGS!$l>rZh7IGdHTwpk?V>_WbL5X8ARfJ`nm+bWcVR@9RLj@z!eL3S7PCOPhAR z9)SN0m!J!(iu>qjO`cVFueEXap|Qhg<0HfH`M0$xc}5W4_3o#MES%V)bBMq?v_Tt^ z*)x4+8g?0hb9HW#@-(*HTw;*siPb|#*o=`m?=__cK`FlDE8hd*HJ%mfFq9%NGkwLv zIHBDjXZdcLZ1G3ilerT99HYcJ*w7%GioC854*t7df54yF<6(#}`HEyv{ffov4QNe9 z@B)t#owXc&t4#ca!=Qi1!v0x>3@_NFfQ^NPzc0at7ly0{A%*RO#@5gcWKC^kmSe*) z0QwxteS?K7dXaYJcQ?G=;cody492hR3Dn$XrQdc!Pw3I%=G;cXjJ45kZ$_Y$xmTpj zurA%2fKZr_oxb;2#W#;4d4@75-0|UtyG_4hAX&ppVx?eNU@@oYovZ4x5Iva5_ znG#smNSm~^!8 zt>wzsCC(UwBg#lz#aNbOn|3TNs3%r{H}gc$jxYwi=qDeTnOphB6`KLWovmbW^|{(4 zdulj-*q7|Nbk#m;-HrRS^GaFd*_?wnXv^c*E04UB&!RGtNXr3+NkcfR93~<;h?JH4 zwDF_KOSlNbM5BGn82ki*&OJYQ?5j8S9zFo>r+Z(>F!A{K=ZcE?A45%LomaTiE@Qev zHkimGZU4*nn=Z=WTV*(Be$j^zbe0TlQKk(o!xbMvT>bv_~hZ))3Q%Jc~!H9-33KxL7;GP@%>u$+{&L88k*$6ZjQJg;dQIzsTWlfY@Uzl`M@XI&6!CdidpP6sKOR1cd&tuiPk0`roS&2Qm+B|{rP&KQ_+)>oif z)s=P;D_z&#)oy0;7c2cX@-?WO`M8DY@E&nS&}%g;_rGG9NK{7=XAhhKFmQb^RMpVC z7)OvT?e#EEY>I@Gm72+)$D#v$t6-YytCK^GE1fpkUC2Xu-!RN|6(?BhG=j7C3sV?2a6-5BApPZFai`HU%U!BAowXuwvB#O;&P)2}3%ju___xRNXN43OdawKF-mt<&j zGVJ@s^etE7D5v?D#=3>k9 z#?ID>!dWyDv>m+C53i=e_(H0i;S5y9Ozj&D0Vdp{6TVW7He2#(D|#3p`lF^S)HFJj z{HCkM0f}!dc&dQd4AP}nG_IpKF@5m6Z|reyNX($O(#bBT5RCw4x9<;PYV=o2GXi%W zoU3obJj-adZ;WBF!0zqP-)ibC*M3!%LoLaQZ_9GB$#3&W`U}*NNI0#(s1|+w{vkDb z$E>fDAF2|OIQT2vE|fb(-mT`I+fF~mR0Air|Lecy*qS1m6Y+Z@r%()B5|3Ubn8ZRSbW#8GwjTmwe* z0bR}ox!D6_hO@3hm+5$=^{luQo#;z%Gt4v6QDCyN#6zctGi|Du+bWljD2wZht|(>% zuidb7%H-SQ0$nz3B?c$|!;(_o`)ecy~WL>)uoNU%idZ_hWMo`R=)O&UcxMuO8x*JqB0D 
z_vzsQ)wWj8=qN9SN{^t`SIIE-P0wq6p9*6xA+vXOY_s^%C^ zXT#oIVg^3P?4H3o=C$mM7?#(;W-ES;K94lSHlgz1*3NLwj=`O z2fAm(XMA_CxrdO6*^204*_ACB(`sq6{-f(4e5b@NL-Rg$@h{WPKINg{QLm>$R*f2V z1ua*&X7`K_rPu}iaHlOHOVCZI%3-cJ5cN>pXu!5j2wy{;;|X#=De-maR_JyJ@yKckJ?mAL>tIkb0DX+QT0gP-6u(>K>+D8jj7{?DpNr zZ|TG&II3TFM_OQkBt68)kf>Ep&hR{lyWt;T^RCZU^W=i|AGwwJUr!Zu6Z@_=+LS%H zkkwSRnaMsd9dM!Mc8f*CW+csG&WJxNWX%cDcpmZJs14`b*nIdE<*w~?r;F8ZEEM|` zxil)Pr&LKv2HBG4M}73^k4ucfqd1-2v2SO0_zl=Le=D?(SN)O{~%k+0wQPOy2d9cYd;Q+FiG4y;c9us0&u;BL~|kb-z* znuU8URp0!S3jLX!=?aNmGs8?F2wiaHG1|~jfoe07*5eoIwLxz;SMtlK9+6cI=xUfa zinCZiJ(GVfB8Hl^H;l``*Sa4{iFfl!_dRZaO1VMw3Hj=>Izf>lRS<;L&xIJ2p(ip@ zvakHq3$4dobJ6-Jz&ZfXssyyg5Uxvtz3Y81B4XfSfVMr192LYC(q13Ku)BpHW#~)< zXOT2pSbHrAh8mTo*U=36k%x4Og;MQEp7*Yj414DE$RjE7bTG9QBBS2y@bvKqVbnq_9 zH=U)r=z4`aSqwAuwzie5u`Oe4H)_!yMSCU3`CWEfRz`ISN^6S8e8AFEF4&cF4+TSCXO;s>f;89eAX00;7+Ohcf4Xqiq`kAXMWzyOW?>>%8 zs5^NpuXml@HjS}~#+~e*kx`qKx^^JAo9YeFht(hh8H`iyuZ_yNg;GCTYz5?)`d$Qm zx$kd+gh-~ z)-O7?Wu4vzqHk0WxZ?l*L|_!h$v;Y@42C=)0Yh%6Dyge%l)Uh@^hUU& zj70Ip7S`6)fy%ER3(x)dTz~wQ=KB-AjaE0GZ%H8p8oOpaJC?+(?VBu?N!_o6l@+7< z-i95o&;)*gZ&p(c-GeVb^Mbw!HQat;RpDPhO>pUeA&V}k7EZdje)`@`>n-J{&d-*9lb)k@olK04olT!s#7AU&wR`%=Jco1qo}wc;!D*RP-Ld!(eHn0Xj( z6?+??ZN|0+mv0>Og&;Q8TyDcRXdNL()J|dpwV4PzLicA>nP0s_QnVkO?^#|}S+nV2 zSVc&AN0h3y<5H|FqfAV-Zrh!-{eq5ATMktF5 zv>aUzMU*vhsf230=TZwc1FoGVlm?5HN|eqYRQi|ec*bwQ!G3c%dxSe3QIYPt7$uX| z&We+B`dQCrzjb!SRNiUq_r9eZuO5eV!qf(_X+E9R$E_VlX%+PcV=x24OPApCR@3fl zgL|<BX2UvVSn~*I+UYQ?^w5nphk9lB&grHuFFL1`tmWY{{=3a3 zqmQ0vSe5aE4f(0gy8)Q?nj# z&H3e&bI_9)F7m(Z;CIYU%!jgR?BFM+TO-6;iLNPc;j>a*xfqsykxlr=v*}}A61U59)ssr>saaRCFm*KR~1WAA8DiC!1*v#^OmtH z??MfD>9E`*wN7@nKmB;cL??F|ZdnZ}T-I=@&f@GdqL_S^F;57}trhmLGq#sGsuE+O zeDe4h^&67W9R9-b%NgtKKXMPmXS0&(Pt}-BdyXOl%2%dNLzv%mf=$AX8bZye%ss@~ zRF}HtVPmjoP3GwF`>&!Dw8_kFsaDk&@^G`ni{twjf=L5P616wpy_i3KcKK74*kBO< zik@=aU>MutXEbKCER$STRD$#Q$8YS~Q!{RuwX+;OmNVhbT@@zWZR6Y`Su0mFH#e{S zQiLBr!@>bCiSOt0)?gB{pR^DHeXjW{}j}pk!RyIrMFbTg5T8H5ildosZHzSFegXEFrUTL#^e8!bo8*3B> zNk%Kn(U&S~@0X`ox-`4yzA9V|_<~dnV#vLUGqA=IWbUej%l*+?q@eBx3aJ0qn_U?8 zk#a%&ma-N>?VlWbRx6h&AEyzvb;64Jd`>Z3Qd+E58}RulS11~`q@3ph*?x=1MTT{_ zCY;QAS14n9kM!*auja_ieQ5PNRn5pQ}0|_m+ComV(i*&)s7@j;o;WfUW#@q zF$YswsMXUL9#o8C&O>4GkA*t8F&Q59x?6p5Mik<| zrBIw?e!Y9@d%nu}fxo%N^`0Km9q`5jX(B?|c}tl#?L-qK?j_T!L`s!kkEKKt8O+!T zwk_N0u!c1DU|s0GnIXj0sezHwC?&Rg9NeFP*QBsl`no?Io|a2t+k6@bq{@ucvo&js zg?DEgPMp@-iU{9`<8O=UaN3i?OjAhVk9fo7R_*yws@XH$jr967qu;`Vbw!Cplp^(E zRXnAq0aCsuzN9D87Royl$g_{>-|p3AhSVg6oL;_ptgQ{gE?_he9@xZDs!bj{%Lw-1 zyP>Y+UpB(b2VML3I`e;@R{0G2)b=r&`tq--k00$xvp;q~f>h@e*>X@W-svHIJUmI^ z*UIUNC7y_#p(MmXO5(LD5Hyqy#3UL*jfOx&0GcFVZw8USO5q3uuiZ3_s}k;iz~04+b6+?)^echMj0I{7J84pqJ)X(QXPU(#XQzxNI&r2BqIPL~+Mx1%2 zN7;16 zSdM{k@w`r#dyeXPORL%UyXq@Mi=5CDt_jM~M0q5F(qwc(G%jC47xu~FPC`oBK_OU@ z3Oo!4nURn%|2C7WEuuFeq!&X+GA32V0Tp+i>RbWRzgCq`6nRX}(aIib)PwRkNZ?;c z_-Dp(PKZnOo!ZbQtZ4fuCCbUNd<9R#wr1hMUkWj_3DGcM^|wt)tZ8phNA)jAwVu+p zKfvX@pwcCMQfCe1aH|kyfX{mQiNVrl{~krm z)x9b`UN$N~3P*lBhy1u;TNYo(j27vs5q}t$7Fby~VSj{Mt$T_A{9Jv!dkAa+|oc%KAGor1XEL{(w6QkITtVR%@nxih~z zQ#2r=81i<=pw?gEY7>aF(TVbA>7e(DE2kzKXY!T!@3OXJ&DF-WN#YM>?HjIAY9o78 z!urbMj_2X?#kBJkPD5;nz&;V=#b_l*fOr_#5JDk_n$wmb$AL4v@gOS-(i1^1cP@c? 
z1I9973D$xCJ_sEac)^jq@`MtD1w}(C@wv?(r>C^;p4RyXy!>`%P4%#7RL9^|e&+4~ zfBBL7%|~eJlf!ZLcYE*9rt=vK9r#v_<=9(?=sFD%Qy*R&6?@OKeT7deRgB$Af(e7q z_#qV}$o@5mO*TbMKy)ymgA~GGC4uAIPSy9p%|GcY=H1&5z!H2uJmqc-#p$i0N>qK))4tjD6A{lT3Zgx*%37|8Dnw(N3ki}C=8V#shkgK9{-6u) z@(h7z&`;2(e1L_@ZUQ#WMqi)tOTjlks{RfZl{}NyN z4RI6Ed3n^dZ*^!p`~t`KV}n=09Uh>>mnX-I7wQWh4kYI|0|CQAM0Q!l7i8U(H@OOV z<{&@{M!pEnNFX(+1XGNHL3}m8;-Tz?^AOuM;PcHZKa(@GPu=4UId<`BdOk#z54zM# zKoei3ifdgmiGbj209Lh~ZcwEHIxGJ^vML%_o$f1Nq%@0|iC&#wc?x4~^=+-5X^WnVP5HKMK@WBDwH?>M`8erxi4UquTUo6klaQvja3JFY3 z22DG%V;(6akot07t4Fn{7|V!#%7-8~^%6clG^3&%XaY089-87sWRd?ckIAt&7#ZZeC7<3p}$aj<5!+LLoL`>ajO5^65e=7gOb*!ssu6!ED?zQA*`i zHtPd8gl>QE@svv>NR!T-_?G=*rBa?HWs;gpGe5<7sD>jFEWyaLe0Vm!0Kft8=p;Xr zDltj~sdxjE>kzFQ!rRv%nMzcjhcB4{qUQORE&9{6Z_h)+X#&b8_q*T7>M6;16t_4D z#I34|PJ?~lGI+riF7M_2C4ib|W#Y%XTeOxwBilgiOg;y3t$+jtK}@-Z=-JeqMK*t# zoic@Wd~3AZ&N0G!6Fta3;sE)#KXD-s@TobqZnFMZZ0)IEdLusD*&WsU1Fw7*qsGKl&FOd7$s%7j#O3u z15d3)fEcLAi4?+)+V9nf=Y+GXL4=wYN3;R~`MlzXgCYlQNLM&O@+P-=@XUCTpM{gz zve->5IR=a0#F(D^4zj{14`aX)Tec*-+abJT=&6zIE$|Z=WX4d2#LqUw-D|&+y>=h? zl1Zzp1Ls-l*N=KkJ@F8hd9x(C>ffiqb^V~HeO))a-4 zQzd5zei!DzQ)5eX6N*72EU1Fz6~%!@%e;Eir8TYKo=3VZH!#`)8l?l5_D+|~^Syfl z#^A+F)&ZX@H#>l17{S$`a+E|X4Ok17suTeyq(QRY+pOuIn85FUR#X!Sd|;@GN_izbKG5;sMQxuWk6yoFpK6s_?)*{XRLJMaq0Pv1xRRH$X8}%MT>_ ztP=eIQ4jJlBALUPtl~*_!90<7Q{iCWcV1+w&pEWHkOKi7C_K6Q( zl^~?dL;VQNp1tyFCWvqG94P-Jhd3m$@1JW!q_V#swy%(t8He^S@U;2p5k38TZf1!( zfn%3G#}L37qG;ae@thclnV`VTU%9pA$=ZT6mm0a{P>MoiTxut|#}fo6p4t>$^ZE&% zBEf6|@^gnklqBgX#-q_AGzSNW@RDEuATN7@=;YnY``t!GSsY^uRRKr)#`sqA$p1Zf zK7K*yy?sU?Z$OL#nd1@<)ftp}0pz&wFauq**dAyPo_$|{*Dbo?wh;64v5ryAFMq{gPcHwHs@n%wSepagd)QxdAdma7Z1O~IJ?4t^retn@544?JS98wSSD zuz5YEab8mO#y(zbX)HAm3D6TR7ObxrTs(|UyrZT_L2X=>8`IvrDy!_uw&zk%U17*E z?fGc)JO1+%GDP+$bwuKisYBH$l6tdLXbSY4wlbxbmc09RqoUY%tprmX9EdM9P&&Bk zTI|apl^ff}CLS68v=xaf6LBp*1Sin@L(8&yn}SL<#+w5WKyPDE8M?ii2s`Fl>sbo4 z-|#A`cLj0PWR#s}!8^V^?+wDpE#}TQjm^%y==n zV)S4rWZq>!>#20=O1KZseMjv z3U*%8zlcx<#MXaDSoL-UJhUpU4|<3$Z3=b=+Baz|oSgq}Qm(g0JvfvKCll%A_B#6}b8H(u3uF%Doj_`%}%D*Pr!pL7M5 zTeY|PAix<=T2HsN|sK z=yu-HPs>x#JVT2-*Cb6jiSIMV+khOSFm82xAt6Z*AT9cniNUh@6Z&(^6~r1oOxSVQ zpok48uF0*=;Oq;qig^f2>UX_u8m%qA&9Gradm! 
[GIT binary patch data omitted]
zYLGIQx~?yZut^G3N-i}NQx&BX7e?3AMKU-`Zci;NVF6}&qv3U#b?O6X4ZE^Iw^IueOz^MGi%GPLcjV~$##4kY= z=ai(O*~c^^NusymM8moft`D$$@n{RCm+(-4}EN43BVN6;ty&dLAgSUT^A|)^2%+K`Ww&7dJ$!oYg7Gt9))?4xe<+f! z&PZ9u5rjxB$x2fIBI+Et!MdAjul#tdu~NB;UdjdgUTo=sxv7Ax=3wCjAA)Zxo2E9z+!=W`+I3J@=9=<;#Qf$dVI~SDD?>pq zSga))gQjGvTW2Em<-vF7A&G^tjNOkqdW6<65x;5Raolj9L$<{bad$Wt_*N&U`g#PG zPa9&Yk*ZZ8t9qR4Ve&+`*Y{Y%H>{M;&-Bwn{!1PDVY5w7dw|gqrArG|CN3wGL+OL?Hqa3*NUHvS;`y1vTb^^Iu zYh;2xJHGL)!&(3_Z^Wi({u?$8XRTaycC$IO@5&iAwYLxr@J7jvxY}jmi~rwpzpU>y zhD`J56TaLieY7YOI1?$&+J=OVi+RiY1MR54t`Do-5)4t*SY{EiWN&C?*vQ*SUA*dS zDCUldyyHjl5)GF$G#_lW2%6ZB#UWsWc7!<%->|%?1drS9r}_!~O9Pg?j=RJotov+P z>-N<(Vusw1C3^x6M*sw@m7w@QI0TkbBgHd>#G$nX&seKRi>8HX%k8KjCW_tCmM;kf z7E1OG9OE(Myt=%Pj_&gwDTK`Pl>h=-)*;N5#T_`=Y#8>~BiyUwTE-wasOsWLb5rAw zV?kO%5`bS#)(C=e$>41{W?eD@mMUK1hizde9KH`%+-V#&EU8)w&Ps{%f-Q80E@G=s z4MmbsdfihQ-8w?gTHYl%ps^?{c*z__0xV~0V}v}32&Ju`mxpbSGOYlBi;Rvo^vD^< zIblh~pa@DXP|bJ*KJAZ@W6IJW>!Z&v_YCv?6a5whl{~7;rvL1U*CF96%}V#)-z_>i zg~zGT@FfZ;;BNmF{oq#Ecd1&xoEuUV7vvUBUKNd3wbp&JmQ%mZxtw7JoJ0wfZwy=f zeN;8|w8Ziw+It_ud_(%6ULFEKiPAVIbyd2_scQ9bK~_=*(N+HBola!9zdJ#^8pEZimLK$3 zFU#spHi|Bg*VMXG3$AD!X*o7B%6@vMa$kP=gMQ6A^uE}(!5f;gbh$o<*e(7Eq2JHM zUCvRaM*H#LM@W&mW}su8#LACG24g8SD+8K#$zug>$%K|c4hj(J;ZeDA6zd*{d-o@K zu1+j-SF$<#mWaoVC4b{O-l^LOB~?6-(bH9)ub&D!&a`mf+~(JrR3F+5M4U;z*u^DI zF{tk_-044Wz}L0gWR%0=xG#2d6a*h;O_T!9>J)i%2%2_3()_FNzhn+FDvZ6uP8F{f zGhS}RFN&+l={in-lXpJ*>RaxO3;!O-Ck4$N z@$UN{#iZ54Kl!!W|NT{b`R(`L{LjDsF7WZjxCkG!@D$=*um;Au=m=QM${$}KLyvx6 z1T8(a`4@Vn<6N!_Th5xDSmZ;HU09+xPe>~F;tzSv}3Xg~Ll?i$~VOgCYF2d_l*n)OhYo6cp2xm^7<@+8O2 zDb&%c+HkFRF5B(;b;r4mlOOpObKF?BoxH}3KU#0(y4BoqntNyZk-V4ZHXiNlwQcsX z_b}gW=YjLwZ}WA2-U4@Fl8fhC3yT=>conW4zMp_D5)MSz0g8zTfZfq_EDqwWDwz~e z=sHS7!Fp^Fyg&fPL|c+>`*adXCIF6n7mu9f)@bCYZ?iB;Vd6&__9w z@g@tGYXamEDiM2=I}Z?G(lP#G-%STpEz?qJVMs#}oHI_6e)i4xb(%JA_zaqsn#N{syoVI zJP4(sQNA3SC7_t00@O$rFBzoI4D*1f{?FB~h7VW2xH+#f2(yQKz)X1&aj0I?;RuC{YD;RTe8q+;lwaBJryI;&*`&xK0CH~e< zxXggw+S`m$6pWa{(qBNRt&_Hfxg&JBgW~1PwPGdw@~(J8@FzJZGkiE}f_Wp8Tv`Y} zwuSBs`O@#*Nhv1}m;*4zVrGvyThEI92%~k+N8^4bS5%R4QT8|R#3<&&xJ?V-rapKmAD%9{!#Vzy$@1j-~RdVX#4lW1(+~@ z>_VoFP~0NSNrJMNd0jqX3C2>TEoIhdGnQfFhKb9WJ7!5M(!w_El`I{{)KzJxv+1i@ z*DpL;lV$~Ft!33*%l;@m{=eLhSv&vaugeHixa%C9#G(xur}UBy&h^}~O&L~6#U`hw zy7H6Ecysk9&Q3?|maK4J{Z_WlSmU;=)2rs~?CbBI?8vfKTX(W+w%b0-j(_X;oW1k= z`L3J@|IuEkE7Y?m=Pc2;mvcjYU|%jtb!b1QR(s@&+)Kl;FFBvhUVfDqv6=ju`~Ux5 JcuR%v{{U}+io5^- literal 0 HcmV?d00001 From a672e9030055270243e4f705ef492f071e4810e1 Mon Sep 17 00:00:00 2001 From: Nilesh Gajwani Date: Wed, 1 Nov 2023 23:00:48 -0400 Subject: [PATCH 21/28] change matplotlib version to * --- binder/Pipfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/binder/Pipfile b/binder/Pipfile index 4709b49f4f..bcce42480a 100644 --- a/binder/Pipfile +++ b/binder/Pipfile @@ -17,7 +17,7 @@ geopandas="==0.11.1" pyspark="==3.4.0" attrs="*" apache-sedona="==1.5.0" -matplotlib = "==3.5.3" +matplotlib = "*" descartes = "*" keplergl = "==0.3.2" pydeck = "==0.8.0" From 2c0ea5874b2d73684d384a1ff7cc55b7b8664f62 Mon Sep 17 00:00:00 2001 From: Nilesh Gajwani Date: Thu, 2 Nov 2023 00:07:14 -0400 Subject: [PATCH 22/28] change sedona and geotools version in Sedona_OvertureMaps_GeoParquet.ipynb --- binder/Sedona_OvertureMaps_GeoParquet.ipynb | 10759 +++++++++++++++++- 1 file changed, 10731 insertions(+), 28 deletions(-) diff --git a/binder/Sedona_OvertureMaps_GeoParquet.ipynb b/binder/Sedona_OvertureMaps_GeoParquet.ipynb index ba2b6623fa..588cdf415f 100644 --- a/binder/Sedona_OvertureMaps_GeoParquet.ipynb +++ b/binder/Sedona_OvertureMaps_GeoParquet.ipynb @@ 
@@ -2,7 +2,7 @@
  "cells": [
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 1,
    "id": "92984a1c",
    "metadata": {},
    "outputs": [],
@@ -14,7 +14,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 2,
    "id": "e4392353",
    "metadata": {},
    "outputs": [],
@@ -24,17 +24,64 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 3,
    "id": "08c71acb",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "Warning: Ignoring non-Spark config property: fs.s3a.aws.credentials.provider\n",
+      "23/10/31 18:38:20 WARN Utils: Your hostname, Nileshs-MacBook-Pro.local resolves to a loopback address: 127.0.0.1; using 172.24.19.124 instead (on interface en0)\n",
+      "23/10/31 18:38:20 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address\n",
+      "Ivy Default Cache set to: /Users/nileshgajwani/.ivy2/cache\n",
+      "The jars for the packages stored in: /Users/nileshgajwani/.ivy2/jars\n",
+      "org.apache.sedona#sedona-spark-shaded-3.4_2.12 added as a dependency\n",
+      "org.datasyslab#geotools-wrapper added as a dependency\n",
+      ":: resolving dependencies :: org.apache.spark#spark-submit-parent-44adfadb-de72-4666-a002-5a5ad761da2e;1.0\n",
+      "\tconfs: [default]\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      ":: loading settings :: url = jar:file:/Users/nileshgajwani/Downloads/spark-3.4.1-bin-hadoop3/jars/ivy-2.5.1.jar!/org/apache/ivy/core/settings/ivysettings.xml\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "\tfound org.apache.sedona#sedona-spark-shaded-3.4_2.12;1.5.0 in central\n",
+      "\tfound org.datasyslab#geotools-wrapper;1.5.0-28.2 in central\n",
+      ":: resolution report :: resolve 70ms :: artifacts dl 6ms\n",
+      "\t:: modules in use:\n",
+      "\torg.apache.sedona#sedona-spark-shaded-3.4_2.12;1.5.0 from central in [default]\n",
+      "\torg.datasyslab#geotools-wrapper;1.5.0-28.2 from central in [default]\n",
+      "\t---------------------------------------------------------------------\n",
+      "\t| | modules || artifacts |\n",
+      "\t| conf | number| search|dwnlded|evicted|| number|dwnlded|\n",
+      "\t---------------------------------------------------------------------\n",
+      "\t| default | 2 | 0 | 0 | 0 || 2 | 0 |\n",
+      "\t---------------------------------------------------------------------\n",
+      ":: retrieving :: org.apache.spark#spark-submit-parent-44adfadb-de72-4666-a002-5a5ad761da2e\n",
+      "\tconfs: [default]\n",
+      "\t0 artifacts copied, 2 already retrieved (0kB/2ms)\n",
+      "23/10/31 18:38:21 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable\n",
+      "Setting default log level to \"WARN\".\n",
+      "To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).\n"
+     ]
+    }
+   ],
    "source": [
     "config = SedonaContext.builder() .\\\n",
     "    config(\"spark.hadoop.fs.s3a.aws.credentials.provider\", \"org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider\"). \\\n",
     "    config(\"fs.s3a.aws.credentials.provider\", \"org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider\"). \\\n",
     "    config('spark.jars.packages',\n",
-    "           'org.apache.sedona:sedona-spark-shaded-3.0_2.12:1.4.1,'\n",
-    "           'org.datasyslab:geotools-wrapper:1.4.0-28.2'). \\\n",
+    "           'org.apache.sedona:sedona-spark-shaded-3.4_2.12:1.5.0,'\n",
+    "           'org.datasyslab:geotools-wrapper:1.5.0-28.2'). \\\n",
\\\n", " getOrCreate()\n", "\n", "sedona = SedonaContext.create(config)" @@ -45,7 +92,7 @@ "id": "8f3340ee", "metadata": {}, "source": [ - "# State Boundary" + "# State BoundarySedonaContext" ] }, { @@ -61,7 +108,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "id": "045f24b3", "metadata": {}, "outputs": [], @@ -116,10 +163,27 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, "id": "d0c55157", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "23/10/31 18:38:24 WARN MetricsConfig: Cannot locate configuration: tried hadoop-metrics2-s3a-file-system.properties,hadoop-metrics2.properties\n", + " \r" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 15.4 ms, sys: 5.42 ms, total: 20.8 ms\n", + "Wall time: 9.82 s\n" + ] + } + ], "source": [ "%%time\n", "\n", @@ -135,12 +199,26 @@ "execution_count": null, "id": "bb62f16e", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "User Guide: https://docs.kepler.gl/docs/keplergl-jupyter\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + " \r" + ] + } + ], "source": [ "%%time\n", "\n", "map_building = SedonaKepler.create_map(df_building, 'Building')\n", - "\n", "map_building" ] }, @@ -154,10 +232,26 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 6, "id": "a6443d1d", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + " \r" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 29.1 ms, sys: 9.95 ms, total: 39.1 ms\n", + "Wall time: 13.2 s\n" + ] + } + ], "source": [ "%%time\n", "\n", @@ -198,10 +292,26 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "id": "6b9d6296", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + " \r" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 6.31 ms, sys: 2.74 ms, total: 9.04 ms\n", + "Wall time: 3.86 s\n" + ] + } + ], "source": [ "%%time\n", "\n", @@ -234,10 +344,26 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 8, "id": "8541ada7", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + " \r" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 12.3 ms, sys: 4.61 ms, total: 17 ms\n", + "Wall time: 5.03 s\n" + ] + } + ], "source": [ "%%time\n", "\n", @@ -278,10 +404,26 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 9, "id": "224167dc", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + " \r" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 17.2 ms, sys: 5.56 ms, total: 22.8 ms\n", + "Wall time: 7.55 s\n" + ] + } + ], "source": [ "%%time\n", "\n", @@ -294,9 +436,7 @@ "cell_type": "code", "execution_count": null, "id": "e5f8de53", - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "%%time\n", @@ -316,10 +456,10565 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 10, "id": "ba8eff7c", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + " \r" + 
] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 18.5 ms, sys: 6.13 ms, total: 24.6 ms\n", + "Wall time: 7.13 s\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "23/10/31 22:58:32 WARN HeartbeatReceiver: Removing executor driver with no recent heartbeats: 327091 ms exceeds timeout 120000 ms\n", + "23/10/31 22:58:32 WARN SparkContext: Killing executors is not supported by current scheduler.\n", + "23/10/31 22:58:40 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/10/31 22:58:40 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat 
org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat 
scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n",
+ "\t... 8 more\n",
+ "[stray cell output trimmed: from 23/10/31 23:22:37 through 23/11/01 00:23:43 the captured stream repeats the same two Spark log events, 'WARN Executor: Issue communicating with driver in heartbeater' and 'ERROR Inbox: Ignoring error', each an org.apache.spark.SparkException: Exception thrown in awaitResult whose root cause is org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734, followed each time by near-identical scala.concurrent and org.apache.spark.rpc.netty stack frames]\n",
+ "\t... 
8 more\n", + "23/11/01 00:23:43 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 00:58:17 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 00:58:17 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 01:24:39 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat 
scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 01:24:39 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat 
org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 01:42:00 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 
3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat 
[... repeated Spark executor log output elided from the committed notebook cell. The same pair of entries, "WARN Executor: Issue communicating with driver in heartbeater" and "ERROR Inbox: Ignoring error", each an org.apache.spark.SparkException ("Exception thrown in awaitResult") caused by org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734, recurs with identical stack traces at timestamps 23/11/01 01:42:00, 02:13:26, 02:42:06, 03:03:56, and 03:04:06 ...]
8 more\n", + "23/11/01 03:04:06 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 03:26:39 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat 
scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 03:26:39 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat 
org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 03:43:17 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 
3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 03:43:17 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 04:17:23 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\t... 8 more\n",
+ "[... stale notebook cell output, truncated: the identical \"WARN Executor: Issue communicating with driver in heartbeater\" / \"ERROR Inbox: Ignoring error\" stack-trace pair, each rooted in org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734, repeats verbatim at 23/11/01 04:17:23, 04:27:46, 05:02:56, 05:28:38, and 05:45:38 ...]\n",
+ "\t... 
8 more\n", + "23/11/01 05:45:38 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 06:18:12 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 06:18:12 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 06:47:37 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", + "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", + "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", + "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", + "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", + "\t... 
17 more\n", + "23/11/01 06:47:37 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", + "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", + "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", + "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", + "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", + "\t... 17 more\n", + "23/11/01 07:04:36 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat 
org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 07:04:36 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 07:36:29 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 07:36:29 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 07:52:54 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 07:52:54 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 08:26:19 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", + "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", + "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", + "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", + "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", + "\t... 17 more\n", + "23/11/01 08:26:19 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat 
org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", + "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", + "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", + "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", + "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", + "\t... 
17 more\n", + "23/11/01 08:44:00 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 
3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 08:44:00 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 09:17:34 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 09:17:34 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n",
+ "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n",
+ "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n",
+ "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n",
+ "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n",
+ "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n",
+ "\t... 8 more\n",
[... the same "WARN Executor: Issue communicating with driver in heartbeater" / "ERROR Inbox: Ignoring error" stack-trace pair, identical apart from timestamp and callback-frame depth, repeats at 09:44:58, 10:17:21, 10:51:25, and 11:08:49 (ERROR trace); the truncated 11:08:49 WARN trace continues below ...]
+ "23/11/01 11:08:49 WARN Executor: Issue communicating with driver in heartbeater\n",
+ "org.apache.spark.SparkException: Exception thrown in awaitResult: \n",
+ "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n",
+ "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n",
+ "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n",
+ "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n",
+ "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n",
+ "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n",
+ "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n",
+ "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n",
+ "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n",
+ "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n",
+ "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n",
+ "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n",
+ "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n",
+ "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n",
+ "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n",
+ "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n",
+ "\tat java.base/java.lang.Thread.run(Thread.java:829)\n",
+ "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n",
+ "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n",
+ "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n",
+ "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n",
+ "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n",
+ "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n",
+ "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n",
+ "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n",
+ "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n",
+ "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n",
+ "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n",
+ "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n",
+ "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 11:33:51 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 
3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 11:33:51 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 12:05:45 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 12:05:45 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 12:19:25 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\t... 8 more\n", + "[stderr truncated: between 23/11/01 12:19:25 and 13:09:23 the same two entries repeat verbatim, 'ERROR Inbox: Ignoring error' and 'WARN Executor: Issue communicating with driver in heartbeater', each an org.apache.spark.SparkException (Exception thrown in awaitResult) caused by org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734]\n", + "23/11/01 13:09:33 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 13:09:43 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:09:43 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 13:09:53 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 
3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 13:09:53 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:10:03 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n",
+ "[identical scala.concurrent.Promise / org.apache.spark.rpc.netty stack frames condensed; the same heartbeat failure repeats every 10 seconds at 23/11/01 13:10:03, 13:10:13, 13:10:23, 13:10:33, and 13:10:43, each cycle logging WARN Executor: Issue communicating with driver in heartbeater followed by ERROR Inbox: Ignoring error, and each caused by org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734]\n",
+ "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n",
+ "\t... 
8 more\n", + "23/11/01 13:10:43 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:10:53 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 13:10:53 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:11:03 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 13:11:03 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:11:13 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\t... 8 more\n", + "[repeated identical heartbeat failure traces (23/11/01 13:11:13-13:11:53) omitted: WARN Executor: Issue communicating with driver in heartbeater / ERROR Inbox: Ignoring error, each caused by org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734]\n", + "\t... 
8 more\n", + "23/11/01 13:11:53 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:12:33 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat 
org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat 
scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 13:12:33 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + 
"\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:12:43 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat 
scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 8 more\n", + "23/11/01 13:12:43 WARN Executor: Issue communicating with driver in heartbeater\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", + "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", + "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", + "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n", + "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n", + "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n", + "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", + "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", + "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", + "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat 
org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\t... 3 more\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat 
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", + "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", + "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", + "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", + "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", + "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", + "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", + "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", + "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", + "\t... 
8 more\n", + "23/11/01 13:12:53 ERROR Inbox: Ignoring error\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n", + "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", + "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", + "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n", + "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n", + "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", + "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", + "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", + "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", + "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", + "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", + "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", + "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", + "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", + "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", + "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", + "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", + "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", + "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", + "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", + "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", + "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", + "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", + "\tat 
scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n",
+      "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n",
+      "\t... 17 more\n",
+      "23/11/01 13:12:53 WARN Executor: Issue communicating with driver in heartbeater\n",
+      "org.apache.spark.SparkException: Exception thrown in awaitResult: \n",
+      "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n",
+      "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n",
+      "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n",
+      "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n",
+      "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n",
+      "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:641)\n",
+      "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1111)\n",
+      "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:244)\n",
+      "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n",
+      "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2088)\n",
+      "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n",
+      "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n",
+      "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n",
+      "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n",
+      "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n",
+      "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n",
+      "\tat java.base/java.lang.Thread.run(Thread.java:829)\n",
+      "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n",
+      "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:322)\n",
+      "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n",
+      "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n",
+      "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n",
+      "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n",
+      "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:117)\n",
+      "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:116)\n",
+      "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:611)\n",
+      "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:610)\n",
+      "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:648)\n",
+      "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:123)\n",
+      "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n",
+      "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n",
+      "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n",
+      "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n",
+      "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n",
+      "\t... 3 more\n",
+      "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@172.24.19.124:55734\n",
+      "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n",
+      "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n",
+      "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n",
+      "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n",
+      "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n",
+      "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n",
+      "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n",
+      "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n",
+      "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n",
+      "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n",
+      "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n",
+      "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n",
+      "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n",
+      "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n",
+      "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n",
+      "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n",
+      "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n",
+      "\t... 17 more\n",

[the same "WARN Executor: Issue communicating with driver in heartbeater" and "ERROR Inbox: Ignoring error" traces repeat verbatim at 13:13:03, 13:13:13, and 13:13:23; several hundred identical stack-trace lines are elided here]
+      "23/11/01 13:13:23 ERROR Executor: Exit as unable to send heartbeats to driver more than 60 times\n"
+      ]
+     }
+    ],
    "source": [
     "%%time\n",
     "\n",
@@ -343,13 +11038,21 @@
     "\n",
     "map_segment"
    ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "bf6ef3b0-d76a-4ae1-a96e-8077a7a64581",
+   "metadata": {},
+   "outputs": [],
+   "source": []
   }
  ],
  "metadata": {
  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
+   "display_name": "apache-sedona",
   "language": "python",
-   "name": "python3"
+   "name": "apache-sedona"
  },
  "language_info": {
   "codemirror_mode": {
@@ -361,7 +11064,7 @@
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
-   "version": "3.10.12"
+   "version": "3.9.6"
  }
 },
 "nbformat": 4,

From 46282567ed40153cfa4f83148b4cfb91545d9080 Mon Sep 17 00:00:00 2001
From: Nilesh Gajwani
Date: Thu, 2 Nov 2023 01:15:47 -0400
Subject: [PATCH 23/28] temp push removing pydeck from binder pipfile

---
 binder/Pipfile | 1 -
 1 file changed, 1 deletion(-)

diff --git a/binder/Pipfile b/binder/Pipfile
index bcce42480a..7e8f34e667 100644
--- a/binder/Pipfile
+++ b/binder/Pipfile
@@ -20,7 +20,6 @@ apache-sedona="==1.5.0"
 matplotlib = "*"
 descartes = "*"
 keplergl = "==0.3.2"
-pydeck = "==0.8.0"
 ipywidgets = "*"
 jupyterlab-widgets = "*"
 ipykernel = "*"

From 5e8631efb5d19f42c842b02f2e2b7eef32fbde41 Mon Sep 17 00:00:00 2001
From: Nilesh Gajwani
Date: Thu, 2 Nov 2023 01:33:36 -0400
Subject: [PATCH 24/28] Revert "temp push removing pydeck from binder pipfile"

This reverts commit 46282567ed40153cfa4f83148b4cfb91545d9080.
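Before the remaining Pipfile patches, a note on the failure recorded in the committed notebook outputs above: the executor repeatedly fails to re-register with the driver (RpcEndpointNotFoundException for the CoarseGrainedScheduler endpoint) and finally exits after 60 missed heartbeats. That trace usually means the local driver JVM was already gone, in which case no setting helps; when the real cause is slow RPC on an overloaded machine, Spark's standard timeout settings can be raised while building the session. A minimal sketch, assuming the Sedona 1.5 SedonaContext API used in these notebooks and illustrative, untested timeout values:

    from sedona.spark import SedonaContext

    config = SedonaContext.builder(). \
        config('spark.jars.packages',
               'org.apache.sedona:sedona-spark-shaded-3.4_2.12:1.5.0,'
               'org.datasyslab:geotools-wrapper:1.5.0-28.2'). \
        config('spark.executor.heartbeatInterval', '60s'). \
        config('spark.network.timeout', '600s'). \
        getOrCreate()

    sedona = SedonaContext.create(config)

Both keys are standard Spark settings; the heartbeat interval must stay well below the network timeout, which is why the sketch keeps a tenfold gap between them.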
--- binder/Pipfile | 1 + 1 file changed, 1 insertion(+) diff --git a/binder/Pipfile b/binder/Pipfile index 7e8f34e667..bcce42480a 100644 --- a/binder/Pipfile +++ b/binder/Pipfile @@ -20,6 +20,7 @@ apache-sedona="==1.5.0" matplotlib = "*" descartes = "*" keplergl = "==0.3.2" +pydeck = "==0.8.0" ipywidgets = "*" jupyterlab-widgets = "*" ipykernel = "*" From d1bf3b436a22843c74341bfa87858040d74a798e Mon Sep 17 00:00:00 2001 From: Nilesh Gajwani Date: Thu, 2 Nov 2023 12:53:29 -0400 Subject: [PATCH 25/28] constraint jupyterlab-widgets to 1.1.7 --- binder/Pipfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/binder/Pipfile b/binder/Pipfile index bcce42480a..4deb5a85fd 100644 --- a/binder/Pipfile +++ b/binder/Pipfile @@ -22,7 +22,7 @@ descartes = "*" keplergl = "==0.3.2" pydeck = "==0.8.0" ipywidgets = "*" -jupyterlab-widgets = "*" +jupyterlab-widgets = "1.1.7" ipykernel = "*" jupyterlab = "==3.6.4" From a95949392887b31268ce8a9f17db4ee3bae34771 Mon Sep 17 00:00:00 2001 From: Nilesh Gajwani Date: Thu, 2 Nov 2023 14:38:19 -0400 Subject: [PATCH 26/28] constraint jupyterlab-widgets to 1.1.7 --- binder/Pipfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/binder/Pipfile b/binder/Pipfile index 4deb5a85fd..69f7569520 100644 --- a/binder/Pipfile +++ b/binder/Pipfile @@ -22,7 +22,7 @@ descartes = "*" keplergl = "==0.3.2" pydeck = "==0.8.0" ipywidgets = "*" -jupyterlab-widgets = "1.1.7" +jupyterlab-widgets = "==1.1.7" ipykernel = "*" jupyterlab = "==3.6.4" From 9093eb2b5abb0ad2873b1645b2ab39a12f5f64a4 Mon Sep 17 00:00:00 2001 From: Nilesh Gajwani Date: Thu, 2 Nov 2023 17:18:01 -0400 Subject: [PATCH 27/28] print only a part of band in raster notebook --- binder/ApacheSedonaRaster.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/binder/ApacheSedonaRaster.ipynb b/binder/ApacheSedonaRaster.ipynb index 9822141cde..a5cafa3d8b 100644 --- a/binder/ApacheSedonaRaster.ipynb +++ b/binder/ApacheSedonaRaster.ipynb @@ -476,7 +476,7 @@ "outputs": [], "source": [ "band = elevation_raster_df.selectExpr(\"RS_BandAsArray(raster, 1)\").first()[0]\n", - "print(band,) #Print entire band as an array horizontally" + "print(band[500: 520],) #Print a part of a band as an array horizontally" ] }, { From 5b0dd094b86b7aca3ebef1b55595cd7caca0eb8a Mon Sep 17 00:00:00 2001 From: Nilesh Gajwani Date: Thu, 2 Nov 2023 17:18:17 -0400 Subject: [PATCH 28/28] change spark version of sedona jar in SedonaSQL notebook --- binder/ApacheSedonaSQL.ipynb | 421 +++-------------------------------- 1 file changed, 32 insertions(+), 389 deletions(-) diff --git a/binder/ApacheSedonaSQL.ipynb b/binder/ApacheSedonaSQL.ipynb index c277a00a3f..9873e84646 100644 --- a/binder/ApacheSedonaSQL.ipynb +++ b/binder/ApacheSedonaSQL.ipynb @@ -24,7 +24,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": null, "metadata": { "tags": [] }, @@ -40,28 +40,15 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": null, "metadata": { "tags": [] }, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "23/10/30 20:48:53 WARN UDTRegistration: Cannot register UDT for org.locationtech.jts.geom.Geometry, which is already registered.\n", - "23/10/30 20:48:53 WARN UDTRegistration: Cannot register UDT for org.locationtech.jts.index.SpatialIndex, which is already registered.\n", - "23/10/30 20:48:53 WARN UDTRegistration: Cannot register UDT for org.geotools.coverage.grid.GridCoverage2D, which is already registered.\n", - "23/10/30 20:48:53 WARN 
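Stepping back to the two jupyterlab-widgets commits (PATCH 25 and PATCH 26): they carry the same subject because the first pin, "1.1.7", lacks an operator and is not a valid PEP 440 version specifier; the second commit repairs it to "==1.1.7". The behaviour is easy to confirm with the packaging library that pip and pipenv build on; a small sketch (the loop and the printed messages are illustrative):

    from packaging.specifiers import SpecifierSet, InvalidSpecifier

    for spec in ('==1.1.7', '1.1.7'):
        try:
            SpecifierSet(spec)  # parses only operator-prefixed specifiers
            print(spec, '-> accepted')
        except InvalidSpecifier:
            print(spec, '-> rejected: an operator such as == is required')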
SimpleFunctionRegistry: The function st_union_aggr replaced a previously registered function.\n", - "23/10/30 20:48:53 WARN SimpleFunctionRegistry: The function st_envelope_aggr replaced a previously registered function.\n", - "23/10/30 20:48:53 WARN SimpleFunctionRegistry: The function st_intersection_aggr replaced a previously registered function.\n" - ] - } - ], + "outputs": [], "source": [ "config = SedonaContext.builder() .\\\n", " config('spark.jars.packages',\n", - " 'org.apache.sedona:sedona-spark-shaded-3.0_2.12:1.5.0,'\n", + " 'org.apache.sedona:sedona-spark-shaded-3.4_2.12:1.5.0,'\n", " 'org.datasyslab:geotools-wrapper:1.5.0-28.2'). \\\n", " getOrCreate()\n", "\n", @@ -84,27 +71,9 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+-----------------+\n", - "| arealandmark|\n", - "+-----------------+\n", - "|POINT (1.1 101.1)|\n", - "|POINT (2.1 102.1)|\n", - "|POINT (3.1 103.1)|\n", - "|POINT (4.1 104.1)|\n", - "|POINT (5.1 105.1)|\n", - "+-----------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "point_csv_df = sedona.read.format(\"csv\").\\\n", " option(\"delimiter\", \",\").\\\n", @@ -126,27 +95,9 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+----------------+--------------------+\n", - "| name| countyshape|\n", - "+----------------+--------------------+\n", - "| Cuming County|POLYGON ((-97.019...|\n", - "|Wahkiakum County|POLYGON ((-123.43...|\n", - "| De Baca County|POLYGON ((-104.56...|\n", - "|Lancaster County|POLYGON ((-96.910...|\n", - "| Nuckolls County|POLYGON ((-98.273...|\n", - "+----------------+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "polygon_wkt_df = sedona.read.format(\"csv\").\\\n", " option(\"delimiter\", \"\\t\").\\\n", @@ -167,27 +118,9 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+----------------+--------------------+\n", - "| name| countyshape|\n", - "+----------------+--------------------+\n", - "| Cuming County|POLYGON ((-97.019...|\n", - "|Wahkiakum County|POLYGON ((-123.43...|\n", - "| De Baca County|POLYGON ((-104.56...|\n", - "|Lancaster County|POLYGON ((-96.910...|\n", - "| Nuckolls County|POLYGON ((-98.273...|\n", - "+----------------+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "polygon_wkb_df = sedona.read.format(\"csv\").\\\n", " option(\"delimiter\", \"\\t\").\\\n", @@ -208,27 +141,9 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------------------+\n", - "| countyshape|\n", - "+--------------------+\n", - "|POLYGON ((-87.621...|\n", - "|POLYGON ((-85.719...|\n", - "|POLYGON ((-86.000...|\n", - "|POLYGON ((-86.574...|\n", - "|POLYGON ((-85.382...|\n", - "+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "polygon_json_df = sedona.read.format(\"csv\").\\\n", " option(\"delimiter\", \"\\t\").\\\n", @@ -256,36 +171,9 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": null, 
"metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "== Physical Plan ==\n", - "BroadcastIndexJoin pointshape2#614: geometry, LeftSide, LeftSide, Inner, INTERSECTS, ( **org.apache.spark.sql.sedona_sql.expressions.ST_Distance** < 2.0) ST_INTERSECTS(pointshape1#589, pointshape2#614)\n", - ":- SpatialIndex pointshape1#589: geometry, QUADTREE, false, false, 2.0\n", - ": +- Project [ **org.apache.spark.sql.sedona_sql.expressions.ST_Point** AS pointshape1#589, abc AS name1#590]\n", - ": +- FileScan csv [_c0#585,_c1#586] Batched: false, DataFilters: [], Format: CSV, Location: InMemoryFileIndex(1 paths)[file:/Users/nileshgajwani/sedona/binder/data/testpoint.csv], PartitionFilters: [], PushedFilters: [], ReadSchema: struct<_c0:string,_c1:string>\n", - "+- Project [ **org.apache.spark.sql.sedona_sql.expressions.ST_Point** AS pointshape2#614, def AS name2#615]\n", - " +- FileScan csv [_c0#610,_c1#611] Batched: false, DataFilters: [], Format: CSV, Location: InMemoryFileIndex(1 paths)[file:/Users/nileshgajwani/sedona/binder/data/testpoint.csv], PartitionFilters: [], PushedFilters: [], ReadSchema: struct<_c0:string,_c1:string>\n", - "\n", - "\n", - "+-----------------+-----+-----------------+-----+\n", - "| pointshape1|name1| pointshape2|name2|\n", - "+-----------------+-----+-----------------+-----+\n", - "|POINT (1.1 101.1)| abc|POINT (1.1 101.1)| def|\n", - "|POINT (2.1 102.1)| abc|POINT (1.1 101.1)| def|\n", - "|POINT (1.1 101.1)| abc|POINT (2.1 102.1)| def|\n", - "|POINT (2.1 102.1)| abc|POINT (2.1 102.1)| def|\n", - "|POINT (3.1 103.1)| abc|POINT (2.1 102.1)| def|\n", - "+-----------------+-----+-----------------+-----+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "point_csv_df_1 = sedona.read.format(\"csv\").\\\n", " option(\"delimiter\", \",\").\\\n", @@ -327,7 +215,7 @@ }, { "cell_type": "code", - "execution_count": 29, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -341,57 +229,25 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "root\n", - " |-- osm_id: string (nullable = true)\n", - " |-- code: long (nullable = true)\n", - " |-- fclass: string (nullable = true)\n", - " |-- name: string (nullable = true)\n", - " |-- geometry: geometry (nullable = true)\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "osm_points.printSchema()" ] }, { "cell_type": "code", - "execution_count": 31, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------+----+---------+--------------+--------------------+\n", - "| osm_id|code| fclass| name| geometry|\n", - "+--------+----+---------+--------------+--------------------+\n", - "|26860257|2422|camp_site| de Kroon|POINT (15.3393145...|\n", - "|26860294|2406| chalet|Leśne Ustronie|POINT (14.8709625...|\n", - "|29947493|2402| motel| |POINT (15.0946636...|\n", - "|29947498|2602| atm| |POINT (15.0732014...|\n", - "|29947499|2401| hotel| |POINT (15.0696777...|\n", - "+--------+----+---------+--------------+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "osm_points.show(5)" ] }, { "cell_type": "code", - "execution_count": 32, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -400,7 +256,7 @@ }, { "cell_type": "code", - "execution_count": 33, + "execution_count": null, "metadata": 
{}, "outputs": [], "source": [ @@ -417,34 +273,16 @@ }, { "cell_type": "code", - "execution_count": 34, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+--------+----+---------+--------------+--------------------+\n", - "| osm_id|code| fclass| name| geom|\n", - "+--------+----+---------+--------------+--------------------+\n", - "|26860257|2422|camp_site| de Kroon|POINT (250776.778...|\n", - "|26860294|2406| chalet|Leśne Ustronie|POINT (221076.709...|\n", - "|29947493|2402| motel| |POINT (233902.541...|\n", - "|29947498|2602| atm| |POINT (232447.203...|\n", - "|29947499|2401| hotel| |POINT (232208.377...|\n", - "+--------+----+---------+--------------+--------------------+\n", - "only showing top 5 rows\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "transformed_df.show(5)" ] }, { "cell_type": "code", - "execution_count": 35, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -453,7 +291,7 @@ }, { "cell_type": "code", - "execution_count": 36, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -468,49 +306,9 @@ }, { "cell_type": "code", - "execution_count": 37, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+----------+---------+--------------------+\n", - "| id_1| id_2| geom|\n", - "+----------+---------+--------------------+\n", - "| 197624402|197624402|POINT (203703.035...|\n", - "| 197663196|197663196|POINT (203936.327...|\n", - "| 197953474|197953474|POINT (203724.746...|\n", - "|1074233127|262310516|POINT (203524.110...|\n", - "| 262310516|262310516|POINT (203507.730...|\n", - "|1074233123|262310516|POINT (203505.198...|\n", - "| 270281140|270281140|POINT (202809.394...|\n", - "|1074232906|270281140|POINT (202816.420...|\n", - "| 270306609|270306609|POINT (203639.141...|\n", - "|1257728000|270306746|POINT (203730.740...|\n", - "| 270306746|270306746|POINT (203694.827...|\n", - "|1401424769|270306746|POINT (203717.829...|\n", - "| 293896571|293896571|POINT (203064.162...|\n", - "|3256728465|293896571|POINT (203078.302...|\n", - "| 371203685|371203685|POINT (204114.915...|\n", - "| 387403536|387403536|POINT (205969.794...|\n", - "| 387403537|387403537|POINT (204667.758...|\n", - "|2857654988|387403537|POINT (204659.690...|\n", - "| 413542774|413542774|POINT (200735.109...|\n", - "| 448151936|448151936|POINT (203784.389...|\n", - "+----------+---------+--------------------+\n", - "only showing top 20 rows\n", - "\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "23/10/30 20:48:55 WARN JoinQuery: UseIndex is true, but no index exists. Will build index on the fly.\n" - ] - } - ], + "outputs": [], "source": [ "neighbours_within_1000m.show()" ] @@ -524,24 +322,16 @@ }, { "cell_type": "code", - "execution_count": 38, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "23/10/30 20:48:55 WARN JoinQuery: UseIndex is true, but no index exists. Will build index on the fly.\n" - ] - } - ], + "outputs": [], "source": [ "df = neighbours_within_1000m.toPandas()" ] }, { "cell_type": "code", - "execution_count": 39, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -550,156 +340,9 @@ }, { "cell_type": "code", - "execution_count": 40, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "

\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
id_1id_2geom
0197624402197624402POINT (203703.036 418398.613)
1197663196197663196POINT (203936.327 418662.604)
2197953474197953474POINT (203724.747 418602.854)
31074233127262310516POINT (203524.111 417303.619)
4262310516262310516POINT (203507.731 417345.373)
............
6567050437666846635874242POINT (281113.731 517914.616)
6567166358742426635874242POINT (281121.096 517899.875)
6567266358742486635874248POINT (281238.276 518490.396)
6567367367721856736772185POINT (291347.707 557611.803)
6567468174167046817416704POINT (286325.570 557253.517)
\n", - "

65675 rows × 3 columns

\n", - "
" - ], - "text/plain": [ - " id_1 id_2 geom\n", - "0 197624402 197624402 POINT (203703.036 418398.613)\n", - "1 197663196 197663196 POINT (203936.327 418662.604)\n", - "2 197953474 197953474 POINT (203724.747 418602.854)\n", - "3 1074233127 262310516 POINT (203524.111 417303.619)\n", - "4 262310516 262310516 POINT (203507.731 417345.373)\n", - "... ... ... ...\n", - "65670 5043766684 6635874242 POINT (281113.731 517914.616)\n", - "65671 6635874242 6635874242 POINT (281121.096 517899.875)\n", - "65672 6635874248 6635874248 POINT (281238.276 518490.396)\n", - "65673 6736772185 6736772185 POINT (291347.707 557611.803)\n", - "65674 6817416704 6817416704 POINT (286325.570 557253.517)\n", - "\n", - "[65675 rows x 3 columns]" - ] - }, - "execution_count": 40, - "metadata": {}, - "output_type": "execute_result" - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "----------------------------------------\n", - "Exception occurred during processing of request from ('127.0.0.1', 60351)\n", - "Traceback (most recent call last):\n", - " File \"/Library/Developer/CommandLineTools/Library/Frameworks/Python3.framework/Versions/3.9/lib/python3.9/socketserver.py\", line 316, in _handle_request_noblock\n", - " self.process_request(request, client_address)\n", - " File \"/Library/Developer/CommandLineTools/Library/Frameworks/Python3.framework/Versions/3.9/lib/python3.9/socketserver.py\", line 347, in process_request\n", - " self.finish_request(request, client_address)\n", - " File \"/Library/Developer/CommandLineTools/Library/Frameworks/Python3.framework/Versions/3.9/lib/python3.9/socketserver.py\", line 360, in finish_request\n", - " self.RequestHandlerClass(request, client_address, self)\n", - " File \"/Library/Developer/CommandLineTools/Library/Frameworks/Python3.framework/Versions/3.9/lib/python3.9/socketserver.py\", line 747, in __init__\n", - " self.handle()\n", - " File \"/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/pyspark/accumulators.py\", line 281, in handle\n", - " poll(accum_updates)\n", - " File \"/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/pyspark/accumulators.py\", line 253, in poll\n", - " if func():\n", - " File \"/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/pyspark/accumulators.py\", line 257, in accum_updates\n", - " num_updates = read_int(self.rfile)\n", - " File \"/Users/nileshgajwani/.local/share/virtualenvs/binder-Ql50YD1N/lib/python3.9/site-packages/pyspark/serializers.py\", line 596, in read_int\n", - " raise EOFError\n", - "EOFError\n", - "----------------------------------------\n" - ] - } - ], + "outputs": [], "source": [ "gdf" ]

[... base85-encoded GIT binary patch data from the preceding commit omitted ...]

literal 0
HcmV?d00001

From 15f260dad0d0587feab46985b30a81530e9027ea Mon Sep 17 00:00:00 2001
From: Nilesh Gajwani
Date: Wed, 1 Nov 2023 21:23:41 -0400
Subject: [PATCH 20/28] Add test raster to binder

---
 binder/data/raster/test1.tiff | Bin 0 -> 174803 bytes
 1 file changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 binder/data/raster/test1.tiff

diff --git a/binder/data/raster/test1.tiff b/binder/data/raster/test1.tiff
new file mode 100644
index 0000000000000000000000000000000000000000..bebd68232e85a348c1bc1d067ddc95d38ad66c0f
GIT binary patch
literal 174803
[... base85-encoded binary data for test1.tiff omitted ...]
zg#Tw5pSIf*yco5gI1 z0$Vkf7Im=v@&}izw!T2#QZLGcQjDrhZ0%a+byFm$iUVNBlq>VasVIQr;D2G#|Hyj2 z`q`<#k2c;sD23b8 zx{u}P_vg#Kkd|hz^F%T3g`@Ubm)=)ART!wvn*n2_9aCyl8}EbC?mTkYwdHT*@lP|e zm8sLX(A191WInyGBN@bf=;Wm6nuWStr*8w9EguuV7eW&VT_}HyxSZU<4S4qW@6Wahi5dZyhz+$HGv50rv-U^lp-XU9qHQ4NOsp09nLaVa4@fW97 zv*a-(rmR6LCs|fknnqr7U9XaXMEbq@uhZxyNehT@^bC1* zgU7Tc&rn6A4y?D*nZ`Wqr7y5%Bg`c> zo~>}x8r(JA{z&i@faAg-V>{X*Yob4Xo-8a9a>)_2XNfGl6bdubi*A*#eF%;Nr26Yh z1ia~KQx}~+-e>y73O`voI8)r3lcxJihhpW86;hL1`_r=_^`lmT_l4m< zW)+y z!;zQ~zNMF`_IESzM1lPqAj7{B4?3FZjaz8lFIYa ze72EE%*|*ik?vlWl^|oMRm6Eu;)`T6oH-)SamBcTcN zXI-kY-ExV~w6@?PExFmSvYz{j%?*AQKZet9w~G}?skFTiiv6Ha`%}7`g-&&^cCdFMvYV@t8iDDBPSnC8@kxFPQn1#S|1x9vky?sh9yD8}46l zub0kfq<;;>piiZOi>kujHkTDuqCbd7+nrJPSK`wbJ;6m_rAQ%D>b`Rdz9T?7xlFhP z4pmO>gT%46RpdZJiI4es;=@5LI2(7JG-G~|SX*sZGhwSTU}dB@;6&07D#LRORJh%I za0P3~Bk%I2>8DfcV=&A5X?|ZpK}kEpc<-Wn0JP*{Wxb=$NarlBT^QF1_B^f$lwNrC0JtL%NjF#i|wdN|}EIVOPsyU32L7@(f+ma9`3hpC+%yk1Rc}$2dEb zJ6Xh*8rQy5{|QQ4$cM>&p(yU?2q_^}>%>OtghLmZGtu#dqfdJ}K??R_{rBfm3fubk zOU4D8qt?`wH)L}9on}}P(;entEiKC$g?Q>ka$5_9izh4GOC(UDb!vb-w^LyYGM-P+ zpw|qw6Q!S{st?>REpp`HmMLK_tkHTQzr%#u9rI*r!ZDm$OMm_Yeo5Ln)omN~>rt9j z3rpUpV`X?j1)u)49422S8kLxGy+&j=RDOpn5p-58>Z$PTJR)ily0FEZu@dZpI#fwF zTWO^jBP16E&ah=`F3T4TfbkdmX{XhD|tN;s@+!B{Z&Zv$e#W|T0=_o z9i@1#K&DG%jNT0sXH2>tsI}yS<+uf~{$ulT3m=;~-6h&Tr1h$!=0~~}?hC|u`@=bs zq5TEUe>a4Iw?TIW&iqr9{_5c?3wgQM)}@NG!d)*VW^5~t>>Zg33S+DHHL;zZnMTK4 zV1<*|{pUz;(? z^)*Ha^QE71n~*7-$jBVc<>!qKwpUji+u*@np`t`NhWg@M?&7So7 z0Wl8V?qwD!DLD9CL%6GVO{z#9rj#`?Rq?D{;^YTY$FzT{w0{tEJ#ICucR^fOV#(C@ zR|)ct@)9dgHT{>ygeBhIZD>OL>-Mt@Zbf;eT$~e@!TUVQ4xPqoE&rMsRV!i7I(+4H8uC~SH zE6*Wgsgla|@!l=rr@xwQ- z09tlk3qsIWBA;f!eWNUoY4xf&R{6M2*%Q`=_({ykV&_`t&;Zrwc~Mo1bkokf(3tG# zh}VWpsTqUvAKQ&7zn~F7x!82k+Su4w9%Va2HCfRU44kK37foMDoDNRHS1y9sI2F0k z;}vRRZlEnx)E6`J88mrptIYTp-6Z$8pgY5VI5dqEo(YvqYFE(t9*C3sm{3_XAs6Ua zC0&d6Or7^aPlB1l2YImnRpr-83>N27Rg{GAYGtt;N;rBz*xtIrK96pVKk-*l@Z2 ziQFc^C_9H77WyU5SL^x)WX_ET2en8Iq}Y=RbE_}TX1!hv4DZr=J>p*JZf|%Set~<( z7}7CJfl~~i@2L4_IJB)FNl$iDuNg($ zwAGqgA`mpdJm0751=o4@}0e|5Z!CfTAtq8lCW&wO?L*o5f* z^HyPSF<_||c7R$<)E=8wvB-!6WpQeEOwvRHrX%#8LZni;iLjN3PjyMUsRZd}9X9hx z(-?}G6(^)^Wby@~mfAPmb`4%zxkP^0>94byW3aJ%`qO~_K`NJBci_tl9lWAADcjJ^ z3KYhB<>UBNQVm;An1LvAO=5@#x(&9(D&CG^WZ}ndI3?%2#zviC*0Ar-#(dI7U5R5s zo6(;MOG`BAR%;?WW<@;AY`M39?TsCNRILek3#pLlyj|u06{I4W&iTF1&U$yjyfy_# zudHl(-3&M0de?lQBNbS-yprw$t5JnaQi|S z+I6*0>QVF7*QXL1eq_{jY4KwRp4~Hk(!S={?Sp+&ABVo&U*tRp|g`H>7$J zhrdCb@4q-!V%8#V~RQmpH?*4g^fs)5HLXHK{H%;~B zH%~2{EpV?4|8L({!!eY!@$BvX#~Q8I!3x|-&}>%?Y~l+%tQoJmQh{Dq?Sh)bp9*0b zzV2OD5`f4QoIjT`viU&VJHW@*|L2S=r*#o+_+L$7Ohh>hu26CeU^WU+ie{NHln&Nx z;pP{ai&0%ggD$M!Ol#l>oKIU0*B){DKU%A~HJA!A(swG1ldG>|4L;mFd{6{CE6N_u ztI@rDx_;XAe1!1R6wL-?7W<`a*n4G$!a!acu-q`}3J*oxTTeOT-)2i86*EX(^lfxC zh%o7|7Uk{tLgN-Kc64UX1jbmFfDA_im5m|35xW}y;=_g-EQQRYs-?KO2N&ZUWGkIZ zr01L`Mgawe0G%ZbNSXbZM~TLia)H!p6JR{L9QFBJSP6f1tdNA2I@QjRF0<8y%S4+5 zmy63Bo_7W>wcXy4mz3oKHLpeeF9>oNF1qgOpLj5)IPgtb#wkC;ks@lq&RkNQbs!Ed zvZPz-#B}f8!Zo8)UJ3HS(g6MFQQZpFenEGciVlk@BP9)f)4xIH15i*FS84sX(HIUX z7GJb(Zxi&jODZ4|l<5TtDWP<<-MdDg$-NH3_n6?1?3of%aAt{$@gd1!Zo$>X%Vpqf zXi1R$Cx(kn+Jlr!b_XTOC9XswNlJB8J_Uvrzv0K!48eg+$A36}4id@ZN6{rp-k%!m zy&cwSoWR)qRpY_T-n{2F3NLr@9x}+TeRL*adm-o3Tx~Bp=E@k$ z(=?&{cCCtBfo%ZvtQZll8bADi_){k@>F%8L$4erw@ATUqY}aQc$0#mmWk|QCDT_z7 z*emz@-D3R6%847_%p>{2H_Evg;oqSWGkprfWh=>65Gy8eInrE)KiQn@%}ZvN?kvc$ z>@bNB$ZWw&8zNIR)1Ss*fp_Bzr)2!tSA34pvX$>=J7s#kWq4e#NWla4#CNRC{%#{Sc6+0%{>qH#Aj)m#~f2?MZ&HFN{~tqI z{sx92KX59!S?i zm}%}(*7(|6FBg7U&#nP&DnhTke|h!v7W#!I{^xB4y!-0N8>E3qVMW@fIl3C-jB1ES zN>-TR#b67l_91g7Yw)g4@^EXR;7u^U>T|g{U8(bv64tq0>Mf!Qe`D3P-kN%^J8UF} 
zpV^pZ>Hf1%HJ2wv$Gdc#3&Y>is;B9;iV!aEKu!cLi@M?H!kN)4h+dk5^T&D2s;%~jvH+YFXLT;Czi3{cK}_-U^& zAtljS>0O^{_v|7qLP^q#WW=;$n(+^q^*tSNcm;wxRy+*g+aluB*#8wXnSKY4Z zs&X9rJ7OyQsYrWw49jJgM|xoMAHle2FP#%OL;?OE)@z&icZ5&aut-e@5XoA@Oft74 zbH#buOQw1;`ab=(bhEdss-=g~$k{l(oZoNonHB}7in*4y-NE}MJ`-8(p{If+WY2E! z{Ac`+x0WE_9KyG6WPYtN_GQ4sbZnheti48&Vk-bmAqzfisQ^TWjgi!1D?o zl$EpoBL0Zkc#c8x$gv@(IL^m%!`;*^ewBdd9DROeWj@FVvQHU_C!@*(bkYVQc{Z)g zf4~W3C}~`sN?)sT)rL{HNhX?gENj&}&W*KdIAW0sDUUAzE<)Do>xe$F#bgLvsKx56 zk_6343i;ZMG?1#M!yNOFo8a%)I7`|{XD`Sd6VGkT2>evddK_yNXKyMt_i6qLUS%p1Rk3{hgw6BXvCBD~S2*82KfyjG_xIXa!E)r-W zh`%6I2Hw5?-8Hj^hfkCru&&*JAyKbto8NN)%AzmM83QWF^y;_))`o`63S@TsIfCA0#GWh%I5A$t9uz6)$T1_*qTVAt$f&4ez--HAnAP(q|DD7 zMZw($R8GRoPZ#XVnn2>E+C&rwqih=h?Lx{Jm2o$^4}1(!%wAa|JJ7YIomm}TgT-qa ziRxZ$Ubtk0+N9aUcbv~@`+-z{Ite~zcezqlyC(HQd@6ZOhi;WkUPFp`mVk)krRy+> zw0b?y|G{}n6b87rXaYFg?+eh)n^Gp#p0G*!a4ww#IALmT(+F2FB=~+4G}pNmsBkgf z2D>wEsY&h_>nls4Y4HP90ZYM^#qdp-;iw3O{=U%eNOfRk@L;)dVC8!Jk=DS$E%MwA z^JZI7s_h1VGpZ`~QBfmPSI0`hu-*mYolCh<4JV|Z=jQ)QzoFxMHLxZa5$KcW6{C?v zNqgB=V7`J19=F-gO-(I?(hJWfhtX$e6SfcTnBtbjhIBdS`7{G|h?}M(70Tvn@}o%g z%~UjW?k_cZFP^N)hAjFFhqr= zulXeg-^cBl>NivIe5nz1d>G3a#n+A)##ZaL!4f@`4xPuRl+l%kSf6zCN05;%Zft|1 zT3cq*tYf6Mat^umj57&OdWBC=kJ%(EZGDrU#i6)x!4>8CA)g)mANiR=q}bR)V!5IA z@abuM+8(>+lVSP^0%;y$-i_ICqJrlRG{W{}Bl-z7Y2N$Y5I zs)9rfc1_UH7C3X!Cc^yK4rdcYHw7chB;)}pYM|zLT&%{xRRgTm#rALtYfawr`J_k0 z2l}KMHBy2OysoPMz`D?IOiF=!G|S2t4HR@u&~w3=93L@XwM`Ej&)c;8iQ{UEYLN{G z*~h|Smig%nyrkNgt*5oVS4X|pz^Zk`c_S~s(_D-iUJ*W2K;+jZ55Cvh{_V#cftq~* ztPw6hiZbR*x=?!>hwc?tY;~W&8tLq+NdXd%T}Y;v9KUg3Y?6zmNnmiGraIa0Yq0st z@kyK6$wch0b(0tTL40~#jY}~+;4Ivr4(-jD8;8aI!jn6U)f=Shm&2$@ik|fU8Q1%) zwESoq41vXdx80Y!$o(zqL4m3QNLw5v)hk+9+!T-v$VL$JUsaCD+Wdsc}eC$K2c`Kb$wYz*vUYNXK=wh_Q6{jE8QTHnd#GC_qG6@4RgvdxhcQju^51YjyQMyvo_upZ(^fpd z2NY;D7SK3IF0>s(PAufo#{)c@^jUkmeadv&m~Aa{0ftLngn&V%`tfywE2tkXMP~$<;Z4QhioDVuo(G67myl94+^p|(H?BO5KJ(u zHfE>UK8f#qIvB=`pY$Ht@1_$j!$4;fL3LEh2t_iCsUa$0bdhj?;Rl#>WLqCkz;?PL z@nRdal`235#k50@1R-5F8EtEH`#NymBQI}3MRK4}OZ>Z6c-uK^Z?nJvA$(EcFz0G4502WY+~Fy?b2Asud{T(u)ZSNK34b#^q^@4}mK)JU{J653)M z5i|(45M#mTz;smVaiFLF-jm8%K9yyl=E1LlexZi<1b7Lzx^U>{{Eh2Iq8n4}#W@$9uXrp$z32BQM{^Im%> zGf?4pdeC@19}?03=a09ED!1$;+w|=Azq*`t--Lu#32=tf{tN0l%nW`PQh)j*Rht`| zSQ&<<*EhbN;>}vl;`}E*=%*UGU5)Ry;qJ84aI}u$Z28Qa|Ky5NTR?bO`H2DF3J>SX z@7MPI9G-~zbC-KEm!qU$l*7X~*1}kPn|j6cUyxQoP@9z|YgcT3EZ~N$dd%-9`({b_ zz^5wL#XNhfZ!z@8G(%6oK;)C?t{uYx$}kS5Z@zLa6mj38Kg<8mwe~TZms->WaM0nV zx@;7M`)k0F4k;4Q7>FqzPJ&RYwYb>{T2Gt!+k7931R+ZFXa%R}#Uo!Y$u^m;d*9R8 zZ>3E=eaqmvtT_jX`?H9z23@6y(->d|0~LS+LL3xd$z41$+`0U>2lTq`%Mo54YGb%< zW--ZfdRf0RwSap*xo|emys_Y@>q)kVYUQCR(4#0Co6d5#ou82;A1=lJgAOPq;=T|Hs!mj)oOQPlvyX%qKOc1*uYx*EBE zpeBk8!Y-TnmT392oVXrMHUm)gY&h_CU-BV3I8y_Ew=V}$Pkap&xv5X@Yfg4Izb!Ap z_a5_mo<~fHm$M$?gjq7dU%??QNw!0Y_3?lE7ywHtL}=XTf6KU(CtDZ7LdeboD_+e3 zF#W%r@zol|Eao`>)9v&^ZH?_FSNVaQL zW?thZ_c&OtYiocGb*?WDs|BvkZCCV|_k!Cr#pL^+pw{O}%8HrM$kg4dNN(x{YX=^C?auyO=iE&)zI{p{%&Vp2uK zlhq@g6<^4vWCUmCYMG4~$-z&uxxsREV@ErmkviD}k5PMz%Ohqw#(wQMYk>YVn#crw z0w+hD5ol~x`4g@0Nv*7Yl}ZJ}XA;Mxo>6vk zm>N2_3ZZDLII$h@*C=N#F-zfPbfd5QkpK7%nVM+BN!D@rcT$CFQNZU{@h*SrdV=G| zr;%*sj&yKBU5qd6`cg3*)nHsw%rYHwkfQ_aaNq>&4+{Eq|Lboy{HhNmRSaxqKt_Fa z0e?nogjtiW)jQ`dDbGd}=l2SzUQv^7c!Vfc!txa|Ix|8tBT97R(n?g2yt9gqm3*gg z?CEdTW1Vdx{cgJG;m~o{sWIck+ocQ%`e)y=J7+qQSG-gU%X6|$nN1Njojv4Lqhl{;5f5f#Wd zL+WG|oW;}*N`4#m#0I!Eer*vbl#t|^e!pJN=i`xr{^8H>gDp*n4VJJ& zIF4IZRf_%6IA>dRjbBb3M*9vjGyfN3$JZ~}(DESnu-IjH{Cn+>hVt2;2wm2=+-aWg zU$}T|ZjRYMhqfQ_yv$?cHsA<&&iIwlq8zZ}pmD`85us>)7vsCzsg}sc2G56zC)+Ej z(z_Eu#qox$S`8phQ~xN%a&4r2S3tMxrS;(gyy6>B+QpndHSUt?5Nf2fk!rzPm}L;) 
z&MdMGy3H)#fYzS(vs*(%8JSKMS%(<6JXlZ<34uE+qY#1Gh zq*=!bo^nXfC>O0g$?}U8_G37RIsg2Ik~(*0zbCObQBt4f^G|NQND?Zx@8fewCWTp! zP-nRQTQ^?1)U2!|&!J7Y79}b-4j1r>e{*@^^w9;i4G`y}0nYkhk*Ki2t{I~bX(XT; zd-ewFM5le9D-?)VcVo46-KGOw^pK7>S`>@?=ZY}46m=!J&2{DJ#jlz*n0F0PnL$eK zo)@;xxjx`)Djv0Q7Eg}OqD9W-?@Bcr4QQKWY&44qM&#N1-PDtHg(+>a z6eE0;K?Xr@j1h*Ny^uh4GT-8!X9`O~oXKOw&zNPCcl%z*29MllS?Y#RBVYlHhhD68*kp zElc5D-Suq6pzAM0PbiDM(SbZ$l3!0RQ}f-uo69qw2X>RxstGd86&tFw4VhAU$o(2a z&2cVBj#D2Ee$C6NkIlnq24pU($TJ~g#l1AqEWKf|qs#2g<1ARf^9Y^2Rtwmsk8jj>;%ZJJ|!EXyD1?AJ@RzQ&|X1J@Hc z+JVAfG%^M`(HE?I4T4+eiF*{fdZg+#K4tXfRgCQ^#lP8}e|j9n1-XdHJY5Yp?7Jtp zu`XIb$MINZ6?DRpL*^b@+K3gWa zQsqnu7?_^Z&cZ<|s}&XGZU z5_ZIIdv2D}BW}c))*s_-nC*sE9sWN-o<|7E6J>GqG&H z@{{w5m}t;^jjnxTfOT4?pOLsfjn5J!h~%}0nPi2}#l-H2O>*z|zLt*3GDATI#ls6sN>sx~;vAHPE-_?}yfT~#w#p`A%YMR%E z!@D6#hyp)BDJaLj^}#{NJxSjP?WB{}0Op>7-s`rQYs6PCR_3lc-;51^Vy94sCQ4?+ zkxo5D%_@qQ`}Y z)NKlX_1QeN0B&kj>S1AGp(FRydl@W@i(BE*!ErPqUYu~4Hx-;;9g@~F3j#l)uW3W2 zP*7G2Vl|=ez@k{^y7*yYX~=){m`O3P0+}|PF%_DwV*y@O6wda6K+@^RLb@!dJ=ieQ zW>Vl_9B^=w!7C?12^2X9EiuT*lm(H8J0V3Dkf0IzO8TinBl$@sNh&7mXRZW~Zq|Mw zbmp~$jvr-}uaEgo+b*ZDDyQ&w(y?Dv^4w=-=4l|sZN`3M(~4(WpCGVCAkn=+$ZWl7 zasWIv(iPqYP^`NzrBE8R3-Q;3IvJ6F3z5wnj{Uq}aClK=zqOXzs*(Ig&`==V3CK+z zEc<=(RHKew=LINu099lvNL~artSd|spdFU!OUelZ;Zv-~RGtfIzS5eQ*Yuw(7;mls zhKkMg$`3!9)bnK(uG#q8x4HkRod1a|jNlQ~2J|2R)c-+jn!%3D6CR~d4|O3X7vQ%c zIh;iES<7`!6T)U$t)7Aet&yuWP&yZZwF!APB@!09fasHg@8fyrL&X3o^r>f$9@djz4>7!3ql2Hh4}*+O=XxMDx!+|7dP|NcWWiYA zpzh>Qdv^7CurJvZ)|$3@TAar*MGYy1{#(e96S29cLkffr=K{B(@w`?UeiR|Enw;%7 zijCz7psdV;S5h2Kf(;cUmB_`V1QMJqooq^HjFcN{<#6XI%_d-T^ve+5wO@cMjAI>@=!Suc9mHi+OWeB zXy0qtUJ4|+Q6Ve&Kp!FI-5kJQ8W#-`PZRip1d^foomZ)_G9g~$=k`DtH%s`JNwE%7 ze&9c=MMVJiJ=&(T#NZ?Si^OSikK~9(71x_WbXUoCYP#T6)g7394<(z{B^6`H>FKtt z&A87#OdB~#-li6uGBI?Y7ZlYik?l-3V^~W8(1r1520)fPe9+(sl=)v?*mlO$OAzHc z;Fn+4)N-Zjx_PI+DkqRDzCga{dC|OneR#rj`@MqLL&jHbUia^ub{N&p0|L+F$N1pB z2%OkJSDC$VVmuL>^~@jc5}M2wP7`{Brb%I5pnf*e=bMe9u4kG}GBFzT@I}#h)~Ig_ zojI+N{ZMSxUifQc+9j9}$(F{<(yMDrvX1qoIFo4@u%Ha$c zknKySe{t-iG)<0xS6i$L3y%HlXR@l=X*HQV$-w5_d4mDOY>*&DD~BXN3qIno+?7Uc zP{|vap>advLgl*IR_(C-(Y5S${q#m@s0~#Jyek;Bd?VUM>2f1uaS_pAsVYhL-kwOtQw*MNCu-z+(NJ6W89)JkYd2 zoQE59Gasjnx^Hw7geZ`)){8ArKUv*s4L(1ABvvs$*IW9t{s4z1R`YO~Wz{}HRdU+Z z)!};0sW-Jf3m$nT+~7>bo)_<*kuN7CSCyD~{&MOt&7{W_iQW|FZWe~^QiIC6q9RY) zR9B!kx_drpFB}kN_$|y@>-J~0&vnwH!#EFhPD=j%MaG#5ANJdX)Y7kzR z^d8GOh?R&a<~qu8!Ja1oR*tpfp`ZVo(`dpN8cw)mNyX%qt;!19d**k%0EN8}KJ3*G zKM;PIXQ3!`&#M=~=q@OHbw+)fyzrl(hNOY6d}+)uV7`;?Tn{eYsbLN`$FJp&IxvDB zlOWU#lB6G1xCYQv_TjWedvtSW>7>9&eCiHMVi=YQY}YB z#q{D;cV9FhC_ncAUbg+1=1!7;L8x4VdL=nyIlKHEwHN(;;&lRY;mP3b1YXV56=YX^GT`qo2wq~kj_0YLRGH}Jf_^lo}oMYTQGvYJb^ zb4B2KVA{yBW$AhA7*5*7f5O}A)*&{w>TUMLb6f(Ce9f}54(aW{Lhfnt+*>B%qvr^y z2LxEx@@iPR<8-?Y57p%UAjd&0?zXyHV7g&qeeCr!@6Y9LP8CKgC&9n(d^RtAy_V(# zmx>w4iJ9;8OD!MXakpEB2d4_`KZB24MEst?eiSI@&Jv3ym1<9-)N)T9q@K|p5E_#b zwCT3dHesPN>$cy(Sb7xnph2M&J?g17@nwz~fs0#3>ZX-aHJ3Jb<>m{?W&uc~yMj>C9e$dkw&893cuzr#v4CPTYIh+YA|e6dL)K{->boiNfx7&Qg2T z(p!ei*p=Z zQP?&VY8%=eVSGOJX#tHxj*-LC8iDTl`PnhRY-Q+C3mCwd2JfX6^3n|qv!;R`Osxq4 z9u_i+GlL0i@)JS$<4l4C6sSryn?I)gksi@5`BC=r6XF9tkOc7X@ZF^&9&Ufv&hA9Eo+lODL!Gu3V)BcU)kU3!nYfjCa;OuY`oR=FxyHED+VP3T# z^lz>6i{~bft&Vvrl1+nPS&*V!soihelT{2Ru%TIGXAfypSg{(jNnN8Y1U>l|^&aRd zauC>Y7%1gPP(z?HxL_IcPyXZH1sA^l>$=o1-vA+=ZXjfk*BsN$`?a`L7!4GVs0q9N zu5`ZERPvrkPl;!HCcMvLj1Nyfe5F;y>qZ_)3D3&3`_!@semxqm{qp`oqac9C@q5LQ z+&fTL<|ZK2lksNT60tiJhlfp(hg2gBa`9^Ct9y7v39Zf#y(ZpaCJ1o^j7zE>@~g9v zhV*!dD^WB}ym;_za9=h$+6V(7Tb4fcVBKP#IoHjF#;@YuRF?PY&uoO3jiz zF#zpl_6=Yf7 zi>`ZGCLErY^aTB+6SlHmWeylJN|1eVT~|61UguWrn0li5e|2(h6ZE2%fNd8xdAJfd 
zq>8PL%i0hv)V=pc>NLYT5IwwfHUD{(|3r94o&OITY<|b7YhgiLKk!=abz3?G&dY8G z?`&~X=!)caUxdIZ)L?B10u!R>UVijv1t&VwNv?dAM@m${LjaC8$01v;%pv&HCX?!@ z7Sbe0E!BYd@|YFp&g{L7H4I3iGGbXU1vy;~tD+o^d*9#K@Y$m$dafL@H4)!v6}2;&d>gwr+%yqAKmhr%{7dJlE7T$)suZQCE58*zFY^{A#`~)n(R5gRDKCr$iZGV)t6vk4OS zPPK}vI^3nAPDNoW{T9-$lv}PtSJ;GFK5Sm&W98>cA9vymJkjE#1^T+QO$Vs~-;zh# zvmFKys;jzxSG75KV_2y{n`@m>_PWooYldqR_D(o~u+sU_BP3910&7rGoncgS&K#r| z%cM9Gk8@X3VZ@>GP;N{y_RDI;Ajiy*TdhB0bq&9?AV3Zl@TQbh23(4 zYIL)QSD098oa=Hc#qVMCshVHtS1QL2(L^?{G%_u4r`WO@XfAB_y{?AIm3F*V$4cS_ z?aS2?S^;;Qvv_XTO1_VU6!nq*eiiW**-_H7r49-C9;%%u@7V=Dvs9NcCts9PzDTes zFWB){N)N;#-?R#%*{$zp1V% zV7`iNh+doQw8X8H&*mIY=K|WU68jB2bCKV#j;B2{z#7Z(C%)#G4^uy#tQ z-PRQEa~KJ()lvQ$YSh@%mD{@whX5?qJ#Lm^l-=aF_ znW~~bs6NvRm zYd(vlwlJ$FO*y`V)8h%Rnt=T7Bf6(v&YXi;13tD4DVi85Cy0^G2!7}s`k#`zlDwN0 z38Z&QU>MadB_8jeRumZ&G{)trJ}BLzK*EQ{k3d5n%B{a4G`-SD)`t79WR+H!U(G`p zAd%W!=E@g=FxqqfjW7*)KX3WLYLLM?!!skK%T|?7`zBt}308%&A6L(smIvQ-7_Jh6{+u9Je<_9!2Gv&nOWjCjlRe{=uhmXV+opqocDqy;(r zZ+65ZZ-kGSo2X9kUhu+e{5EI);68rNS=MU%qMwPWxt3kcjG4`Fp8?49ZPQ+dc7%rfV;rt7TAlk4N$sJ!~n`E>WS;M$p5wZKLG^vnlpwy7k>f#+IpRk1fzst_;6f zX(!6hIJpnk`2t3}PS5w|>ZrdFQ*cHX?M*$B>ESZ$@6fCv7&qS<_&f_+cO~GQXO+qq zUKaXa5Q|U#f_{-**GtN+S4&z?4T_34U?%aYy%(v?)RcSd!1Z-f#zadz<3Fuyw#F_} zn#xL_fuTgtiRuphyBo4ek_6W?0MSI{vF&Al`MSOyGXiZXX&Q6s4q7u4t?}8PKmQzf z@7UR*9-^bMAN4S2m@TZ_=N^SLN`5{Ww^9BXZ=iuX@<9~M zve|mXHaFtu*Z`<1XS2TI79NkHmh+udE(*h=OL3tB4KddT$TUijBKtbr&)~%BGVDR5 z{^0fT`j2ABzd8cWIN?cyzaXTBd5+eC&GA6*e19y7U31W3@q4SL`Yq zj&?hH!VK5dzcvBud#;ua`ByGD(hvqO`+UR+K@i>aZDc(K>W$e1O%W>geTs#@;&BIU zi`|fi70y3Ukr-tT4bQ~vpw;}$y09zPRpMLAJde~-%KV)%AM}(pgODmeEBb!ft~rOT zEqd|RAK90Tdkt>*`wE=tMolg|*x)Zyxl}g7O+(q&NO+98t12-A#FM^Hifj#R6tMO= zc}W?;Z)s|&s@K=Xr`Q=R(C&uYoAl>%8$E&f(5CTDQz-?c-xvLNvp!hG(WE@4i#QjW zC<`q=VOocPs2&5LpWgy=9q!;-oOt$d>|hqvkv+jbxDxD#M_&bK^0po$T?w$vMAi(^ zMuZJ_kUF+#YFT|!fQ^2QA|gmYg~vt4!g&GUiX;-Yxi)JD+I0M6FQ;_bVtoh?$3uDX z;TAThYEaJasvN7Acom_hSfBdzOQpQB*Hf6z4{H6=!&vEGjfZBF*Iobe?9Xi}r|@Q~ z1)>x-^ss$e();Z@WwKiX@Dh9*-tx%6w`}!s{D&$)oj+xQlpoIL(|rc5s+_{FnBUv5 zsL5+D(@Rx&_a1KFcVQJRGlaU!=XP0X9dz5wr2$Hiy<%d%HBcG*BGQ|m5oU@0_i_495?}16 z&gj2>WbYU=x>nb{S3g>eLL z{H-WDdJvH64+`6`(X&^!v$$ZyGsmy|+?Av*O5KuKCGcU2#FMjjrDv(#={e@qW$uP0;y^iopires2 z{KGD5hQKYa-wjMkUi+m;@)S!b=RKLf-*PNA5B(0>DCCfy`H6znd zRb=ln)_OXh4R75dnB)duY%_8~cZrhkln(a-U zdO-cI@>k`1kri(rs?wp!lB}!rStnPTlbX4plVZjLEtM?aduL`E9&2r7gWTFzBzbpk zbijJJD3*L^6wmc@n?8}6_5$g4Te*dqAs8OJ>z9aF``Z6hicG-`M{ad}oVsl=Q-ZY& z57jXq_`;9$5}5j$RO9BG!a2=Xj^`mMhhI2-33Nq(Za(6yzh;I*(t!BLONd&TA;-J) z__Jt^Mv5aGM^omp>99ZPZM~8w^h#iM9!a?voS3OmF{!^Msg$H1yq2#swh=H#a`rVcX3~y6`#TR4Zm2@9fjTZE%@0ci~NpB zR&;=wbs9?XV;&k^lt+>L2jFl%wCK%*M{6zWD9c;%UeQj~HJN2eO`&}g=}6F)MPfZX zi7SKXY4Ab)yKNi-P_DmPqNN_I@Bx%-IWVJ$TuoW-^-IKZmGkf|2H5^WfNq~pR4+^% z{g$h&>&N4Y^DyRFG2gh3bXVg2%%YEWUZear4rZkAUH6g*DxGB3S8M&3MQt`r!jNcZ zE9ymVOgd=?HLh=JZItNEF`^W8&LSgCd%Gfuo|UVB%b&lc`;HUaixlI_PvHKCW!L-L z{<^IjW9M{9otJsrTC?EUxAR|{uI93p`EK$n;WY-u(4BhlRkZxp0xMg`_Vf4G{%u4~ zLK+(c&tBw_{Bg)>B;+pA(eq1J1s>%^y24&vW@}!m-c$ZBQ6e-E zDl^tvnvBmIO+YLEJk9yp&`ww&grx{aBDd`yCn}sJA(htc!`c0n>~su$`RuDRiksaw z34gt&&?Xij+eUarm?X4}NJRC=VVE~J4B|eWT6H+Ci~%^MO3Q7d+&(rLybqCnzI2eW z`{Oy+pw(4$07{YV4JRUnV&}s=m!^x>>qD-57{u%U+P{YQ_b_zQ>q>G1|L+FkxrVkc zSBs%=FPNF+bwzSblTtv_rO1V-|C)ht9SpEJ|911cu(w{;_K(jvA_%gtok3pbTiIHz z-`CoBz-?DO+WHgfZSQlUF^`%oRjV=P%46*oqtdU_RJ>@`A$}cKrz5;daKD+6uj{FG z;Q_|=$)CbQDL_jjni1~@Z3zY~D-3Jxe5t=LhWTA;Y#eJ*^K4zLt7nrNs((^^Da8We z9$<0{VL*6~!|`6ZU(FiPdlVAP!iVHuSvn8lZKLnb)$i>)0EM&#kS#bdjgNPX$|JM_ z*h(=!Q8mvM_Q%V_BwTRR;C?CQdS?}{y2B5#8_6gP{vrnFaxJU_fZV&E^5-zpX5$;V 
zm1yHo@g3SErDf?DM&L5B{Fr;y?z&oYLj`wR^Jq-@je*@-3fpTWKKZr?`VAb8e=SZB zZ+k3Q>O$<=^SpPPrgzP1`_ZRM3#BBD%a6TDzy1UiVV{TSEsa0^mfq?I+KlQ%vX!4) zZb5+WJTQM7&b|DMfHhW~EZwiv;Mh!-i# zaYbEvp2~yM4ZLm!YPGupIkWB0;gBvN#siUm&m`T(dStAMT4CwQHWX09BY9Vp3?%bj5A=?y>-NiSUKpo>a#5M_)biT&%3@vOZf5mg zj|%IP>PNCZ&j0`=U^CLxqsNqIQJcpa%0|21bRHefZkXvtSv9VIuxutJUB8s{qcY{o z*o%A{+n|%rd>;SC5UMo#>kIQUQ{|)zyz3idUb5(WX7j06Qo2;^2b8WZoemX)m-b=w z^`_sM!SDF*ecubP+Nt=KkcBQY{oS`W8RPN_@9g4jcxCwFjNN~vSJjD2dV^eaH-{QL&i{!%wC-wnJphf6uXp=89_E~VrcuY=|-OPU^J zVs{X?OcHI^xS|a2P0}o6C1B||CGpf2Hr6vvLlK_ZLj0~?hf@^vFI$)}l({NNPn#*s z6st=u8nbgPzhk-VRy32U>yc^dhjE~hajv5I)NA4}J7E}D3s9bEfwLWxJRhB1(fQ}V>0_^8Q9e(=ca{;p@l^Z$rg z#!l+vFE{_T_Sx+RpGl9re_q4QCwio@{voasHj`X-K?z;q{Puk7xiG!W^o0CWH{sPf zD&cOg_H|4c)*zJBJ6ml(Q}q2Gfk3BQeNyH(;XDw_33KjDm3bcb>!X^D=yFni)vwyY zlf!E*-iZi%6qLj>*`j2DoVgUoEN4r47gWV^xZpOBXt&3HlExaEbeRpfC1g{(&u%B=EhJa#tGEUmDvQf&H71BCLR zTFO;ab6*C&Q)^+|t$47;hXyTs)7NNXBmF1SpxPof-%yZXUuS@sn0%l_)4Ac!hp-_E zj_|mY7QYQP62=jznOr>^dPPZp+lc3U{^w^S3L}=YA?5nbwr{H4^y>I@ui8pBy-^5h zve%gcSShfxImmB|9$CN7uGTJ&4U2gvRMk^6Gv3yZGO4QTuMdML z<~F#QbE^Pd*CS(-#WIF>k+X=Y`xsgISZfWe9|ER!7b8^tmhOJ}OZit1a-$`+=pB5X zIo}fMPn!XnjddVCq!qujl3oJkIya>>U%<=ljzW8|eul;d) z1!^_51Qf|X@RHB8IEmdB{d-aj-8C#TZpcrOF8Q+P_`3J(`SqL9)}po1^FhzWGFoy( zd1(LTgZ_uFQ3i|a$o`M!#DgZylJi*0m$J3vqz2D9;&hvp@n5=b2zkPr;W)|CB8lq< zkbd<@z!n#`ahBya9jc=oC7>pRu@24vI~&;w&Ybt`wSg4zYzeO&vTd?&UzX9K4xUCqB5hyMEE(PpYbpr1gmQKUB(a8bs%^si5>rfc>0Y zonth|+-{8*ZVn{nW!{}#_#nyTiIX52eHz8Q09cRLLHgKB%;F|np4qk;7w1;xgD~qc z&+r1ml^>EtgwRX*a?KGXd0FJ;+dDSJ^0<6A+Jb?aWw?@n;Q`@!czPJ2rv~!N%?f1R zd?NW)efc-jH$Zr<{N_SDgkOQm z<$%IP=M(!jqLPMRVZU=`GNBm}O0q9nhz(*vwgPJ34L8ThUC4V|!sjB)A6ziGFaD4O z>e%;CfWWS@2-mqZr85Vdd+biA$ll(lNV=7Op0F)O>EJPZ zHJ>TFc0Fx_lz~0JlEh$c&O$Elo=w@o=YE#qlZn(nl`=jkMRZIa@YNAbg(S19dCok$ zdBNbDj%Z%wrKsUx!r9bv!)JJbbCxWYao+UeecI;9dgyoeY1F&aR;VIBdYMaN!Vc;c zXHruPlxUL8Kz#NeAGh)-QR+SKVl)t97i&Ij^#p|qBkhL7W5j8=6X~v{Ma#Y);Dt2I zWu4gCmlP4xil7YzV}kpOb25}<8_-p8;yU= z>SGFORcsuyf^d@6_T*B3({tNVCP>n^N`c3o0FMqn6kkd{R8QWK(tB2=;-~l8?zDO2 z*NnH%9`I&2f3n0knj*NzXS+|f{dkEt<8<(UBo*Fjm7HC@1?_FfiYl*1k232PDl)V2 zdS}nDh3VDlD!Q@@_r4Nic_*fjs*{NB9H`-S|YgRLG<;1 zmJ7kdb(Y^mmaS%q_3s7Q4Uo!GKGVi@XGgnEM!BHV9`mf{y%SZ#$){EUvuf;u;Ej)3 zu*2e7UySgLUwl`E-U+%mP(at6_>tn#$J?e4-k+%m*?aat{dzo~qRP&n6!GfLEefFP zVS6_wXhOqTu5za^EiF~Zz3tw#+`XnoFljR_n%^+Nwe>SF_2=UPj1aB;*^~ZAfMY7k zAc<=G>am~3af3#+50Ubs9rx$w1<{pcg92*y9lh;;58L%qV*#n!4*(2`O4UN9E+!NA zm3mmlbX=EC0H;fTNr!*a?pHhRG%ok+KM7j}ke^;A*Oo$KHr1I2MEJ|+!zCC~0r`pJ zCKpiJ>j6#nneNq!G`B42x)YcX6T@Bo{n;m?=M58Tba-m(vqRJD`FR<43*aoVFIvYt zF4pzxAQ4eo+F8Aa|H~$Cs=5~`OyvkQ`7?HS;an8nLVO~_I}`p?8>A{6yVo~*i7Vkv zb!R^0reFs9oN%a?S_}smzFoE+iHQG(KvYp-Z6>^jnP#gR2ja$kZxh|~U|?W8kBy+? z7AVTDWc39CK6Lyrt^a6X){B%MLsY&#O0Q_uH`Ysjt}T3=-zI1z0=X)+GkDKWL(S_A zWPdAjlvmWlhsC7!1+ zB^>t?0PB#0RbA6gj%SbK>S!z9ni9iVubY_sr4 zHRE1WCv^1bvBPx0p?CXyGZ>Nw?+a`I%J8uRDKU8pjp^BhFJL_^m8rq6P6Ec?qru9U zAI|0Zn51GC<@y?1*W?Nr8PEKl=O+n(Ni}H^AA~#v>8_?i5PkY^J;)_lsJPD@Qrs2Q zOJ=tfBD*naA>FWCo2mcNi1`N1{l~j9$PfflrUM!jWUI3LuT{oGJ1yW%4=ChIe5fCoV=Y5D+9oK$l-}Ky>{Y8jT{w#=fp3JSoSMw1Xh>W zlW9Yn4DC`&@6LhO8AXnix>rt~k^nc8!GL6wfsytEeBRDuk^TB4pHRsv2NR&;Jqs78 zQyMVBhi~IIb^F&Ku<+S1vD~1KY9gU+prI{0-tO{0gZU54p28eJ4T#ZG!KBFjbZaYo zMFFIzMBd1v$ypSAz;NDa`Xac=knH)6!t4;*NU(&N92bf4%!Q$#i;(c?pdZ9LMhT1h z{j`nQt@dasl`RDMMdtL1Ltj^@8%8C1ojYf=H;_{&9258Af)jO`Kb;~%&~Cw!F;aEH zS!dMSL4<^tjwMnR8DCDMQL!LacXXMxj@MV!H*=jJUq4GoF10W}1s;DD2T!{B#jaY44@g=B^U<&4i zk_UV!dyCxgxm#ANQ-?~aiOfVlECr7Uq9~c+TVV0XLK_m9{8R&$K?hr-eMjG|yniarhRQ(M|Tamt0HCavEtMa4E7f6Q$C6nGajF zpURG=kdYn3@U6@#eC}7=xIrF@_D|BR0zSN>BoyCa@E3`Ve*^oTG1)aDDn>@8o^0N! 
zVE@i&z9-K>aB}nGtrbf$9DIx!@jqw6inBTt$zddmw^xEVtxx8Y0py}Ov=UchaDG(^ zR6j(v;ek1TGGJC!T{R&qhZNgC%x>RdHd^T$0glCrp&O!3*hV&tVA4j6^CpWS&e!f~ z52`VC%7c=R{d7*nwq?k@Ei){vi!4S0UFBFo3fMZ)Uq!AilWM#d>LPVDcWb2dizqcM z)iR-E-;+W~CyG~{GGj>kjGAN(U{+8LSthN9^_Fb#S%;CD{xTFdOe+_B48tDFx8G|k z*5X_JRmMoi7e`WWz8o{t06INrzMw-5uFNUAqcguLM?6XP?R`TObes82bDS*w;sWVG zc#2+=jgRa7kW+XM|K@PlRrQ^iy1AL&Vg_J(vGH>TR}0v&<<#_Cjnw)4Cbz8I48QgV z%@z-I-7B)t?B0aC;&6gQbDh+7L)yq&aTn|3#+bHA2c-}KbGRlmnCFRyy=6>X@!&bK zr*K)RwrOpmIZ}}{(+=FHCQLyT&OzA0&+L2!ECZxpDt;6iDuo zo%z$V;op{*E@td!9Bc2BvK>7YPBS2#8=t2Z>YghfJr58~q3pbsSBmKGy)ZH}`&HWb7cQ=^klq)(uog0d zlLSAClF~%tkQ`PKrl{~<-+vRtO(dXAj$McS7Ss3S4r02na+X){AQZz?LjTW%QoV^C zb;+Z)AD>KZ4oa3iP~l}n@wjAA{4mY7tl|3A7^MNwPP@^q||9s;(v-ZX8&#KYP0{N z-fuu*(PKZ8pFREbOA+Z62r5N*VU!KD||pO?@KnDk5uF9xA^ zjvekUM}Y8>xBB~bG_Slf(-thv-*OEpJgAEs`ph_grTIhgSe_5`9+`c=_a^!t<^k)O z!2W1}B1Wb0H*i6B;16W+%F$xYL}hq2eX?5QXLW(yVd`OcuqR8y5N{9T*Xj=jNoKTnw z5aRu=@cK(O-Qj?McNiRyO&JUcdCtj5oIrK9$h zwRcv+T5Q66_q2nQt8#qf!ESYagD3la-dSn?B*5YXKJSj`+|KXMt7jB*`(Fo@a+6J- zMLeYrFFQj;oMx7+4y1A$QUm-}slja(&OznaEpzbtc%#A5l9!?^RvfHSMtLM~qd4c6 zVOg<1dFuEinQTCH5RUhzxjefNGjLgy8_0|bi6J1~Ox~{|8Hn(pEiQ?^vHrTm>rbgG z@#37eHxJx>;OjnlZ=!x#DbDeK^{W>m4))uKW3qXjv6ala0=Rcts2S_?Bb-KRztg6m z7EMdP&7dlQ@d_yB@)&fOHsrp&?xjf`eg#lY5!I$O7^)m=-yR(OE9u1d{H%}EIG<x+Qgf74FOeZ_Y z*$9$Laq*aT`db1cTbt7zQ?i^8PWG3Ypi+P+#L0>>8C3Z-t(wd_3#nkhHO=P5?de5> zB#W>Dv5F+bndw;!tmy4~=Nu2}lTh9JAI|^&-=|Q5^uoyCduO?9!?~&Cs@{6bC>%5^ zpjtF&Gb)L(w*O647hMq*`m*ts(aVrJeLAb-qiE(?ftj5aV1Kd1_%i9$DrUG0`r|Y~ zgGe5VRx^>o;UOtc@F#34$^>n#)NZ16Zq9*V(fqb)qZ%HMBmLGzqdPzK89uu4zgv z=dX^{XKzL#*^iLZU$MQEvhVuY$r#Vlf}kFTu7C^_@Eu!pO0w4`ihrC`EQE>!BTeh-I!p}U|WD0)U2 z30=;}xP%#wP7F3UWlvD$q zLVi`h*Q;e|T~JaJ%r?FZc9Lrm1GH#}ryM*nh9s+*C6(c_LuoT~*%7je#%CDb%SOgA zNsQ|C7D3PP@SH!YIS*3{D!=JRqX3@q*+jZb%I*-r6PoizDy_-6|C+%NB;zWxh2tV* zuz4dMq*>;k>Xc~q3ORscCapvehLsfFSKPsV*e+|DmFX{f<-H)+&X-nw!p(Y3Gt!*Xc{kcdW-O_Z^Ke62M&hPxr)2DsYzi@Nu z1X7rDNF}SP{GOp7B&T>AWJ|F?V+a={L{EeA&aa%?6DM+W$>oz}Gu1hB^z4YZXyVP& z$%tjAkw{Ssz9t1>Kj9>sY{D^{NKC=7qvsLb9rGgQZifGA9K{=}^iZWObPGjZhLfprlA!l}F}+6@ zScYK{v$N8XNoUu`B~(yw1fcKd#nE4}#u}nd`9B@b5p4g5wBq8@#3|1Y8`L>t5@5=_ zzZ4}RY}C^QF%qa~`fyL&5g>Kw30D%2c+aNB6zVGWUAvaO&ku3xKrZV%b#KJBN`C!+ zdKp0%6pwWz2iBeHS6!zSdbwP6Km^OFxUNMFlZx~J?{rbH@GRk`j{=ybsp^#fGjuNg zO#OcxpSf*}W-g6XyW8e|za*D!m}|@>mc-0m2uZpzvu18Hb4}$k*N9w7QeWm0QXxr~ zUtjkqNjKg5{`?8&@i^yv&UwAw@8@%idZ@k9dpS4>YW!MD*`&=^g6em*+T!2{@ydB} zfUP+r;kL(Ozdb4;?>_lUL}@Aw%1wCR-@j^{8Q^EV;w*&qlcJ^OS1~9dK+}AaEQ&TD zM9o~(3W0|@eYe_O`v(sczLOq9Bsm*9hYAzkXt+hE?F!pE)7n3hF`C*6ZyG&V`XVS` zcs>j^L&eM>Z69*>WdWdv*0kYM6LV@V7Wp_6 zdTtp%yJt}K%vEM0Kk$a7iOefn?vjz`{%Z9?(ewc=67J<}wJ8404BKPLgIt(_%A0Vg z#@cNk;xC+Ryr0{gQTpN*cn2b+mdHj7F8Vb|z|o;;4A04Aw5)*(M*nt_Y%k}=>%v^c zn1qA*VLi4__}vHJyD2YOBW&-K) z)bk^sG=FsudT7(%^;x-=wN_G6BY1!$8C<$wL8^|Ua6#QR269!Yes3Z>M2Cs4|xO0%Px{LD8 z3Hi~_JY`L{)S8qY6e<08Cgebz&HH)Ff{(A^!;RmLh$bU+(EpP4ZkWMqZfUp=j=D9u z+V+Z{UJ$SLI!zDMTCx=iG|XUgkJIWbvq@*9wGA$>QA)aTE0ztOSqEND#~8=o#gzax zCo8h=SlJW4sfw#5-FhkwR=ZY|!(n8WcgJJtb4D7g^E*&bQ7iTD*4VU%BU}uqBLJ0O zB`Ru+bc0UnerJ2;xHujj`)Z6bJYS#aQ6ZC@%x7CWDuir?%4~f*?%nrmpVVfhyVZ}b zWgB(?sMwnE*&DA@zz}#CGD!VaUD~p6@QYw$}JoS!=(XCpV<9XE~;KyiRCC+ zSw*Ybi>&R|Ed=e!%mA?HBt~-v2Benzf0hoN_K(0!Tf{!GioxjSB6=2k$*Y`th|Ik6$J`ek@M^Bs(A^I;jaz)5-c(MP79iGuI;E(af4ziy@eFU5e}`}S6M-GiGq>n3v+9k&HdQ?lFedyEpazL}0_Q+qj|m4ueYRoP-{viW|3N{(ek7U$Kqdz70&vGDu-8C4a_Nvu?6WtFP;jJZIS?}nZ(znN;!Sq-pPwJ8 zki?;x+a6AgL*e(RoN;&0TsKxE?qUKVo^|~`$;6;%CDSo8tGN}heRuGaTS4q8_Oky< zz74xaJ0Zn9>_w5ZCZ2WEhT~!mn(HRL_5m*1=e-3$w))uf1t^$#rl^9|91W z*>Nu-Ov&Ky!DV=L10-JZ6MIosfjMqA?1ILy!qEQOiyRBbZRa@Pu^O2yu37Cn4=2oh 
z7&=8UbmSPx$Ud(f9ScaDT!S1Jcpxo6j`VB!+r=yODKSC%S#v0H!qX*>f9m|an<=it z9w!?uVq?-fT!TyoxyF$%#0KNj+(hqs5{U`;nRx{OSuS{!8jg8Uo9mBUUTzROcGmQn zhb}IQsU7kzH$FruocJ`&<_2bMA{v+1Ms~Jh?f%hvJKXT9Gh8tq^eP!`al2s!oN4pi z5q7lUe3K+g3yFu<2A;q<+UKVoUS2jI1pRi$n>c(CJdpX`;c1e=LsR-p2H$#+-Bhmo z+tenm;-Kfm4AbhC3!+6A$!$}mnNK&)TH7J_4}RJSVp>V(|D*mbBc&snq2^L=y&9NE z3JWp(y2&FhXek!Gw5eq@aqd3QZB)}1zmroFJ(ha3KioSp4zS$fvw;S`coy-YM&^wP zhoaOjP(j9QwWmZIy5JFAG;f?>z(lEi4t2X09<7axHp#NETh}o`QP3+ERn!2yQm zGumPEC@jrqP0sj@n?04KR{hY_Knc4}Y-7HSbfA8gZUjj8nj?hnB;1Y$8OjO*QO*)v ztV%y=B97NrZESlr0ua$=%?;Z~afKyw6CoM}xoCK3nIUE8aTm8e#L!@%=B5F@!)w$O z^R|Sw2Q!}kGJz#edL78?GQoMiO7FVkZs9tSwgHkkyrQ@3#EY|1r!|_Y@nnm!#gb$P z1O|2t7rA+4TBPl%aft9!6iPoTk*2NHw`z1uLVWLFFfB$j@`ano)SLO0ITF7)tiV~m zAT{E?GMdf+>v?(DGPw^k>8!T1wj?Ho{TkIi=M>sunX|RYFeuZK1;a?6wg1Q_HC_fs zo9b*^ZYUwWW-?h2eW5*IWbyp6fyoeD*1po5^2G+Sb&UM?IHnyp^2(4BX`OHm5IVBe z&_{4%J%xNUb?Ay-?q6K}{aCsY&z(KXr4jW)2p<((U=);0CuOm5xoEO>`E6H?+ruKe zuX*S3`1y0o+;6qQe77UgBP+xYZK`#z5&7*d4x$wD+4LNr+;GjDM!;}wIYib#p45q# z83_RX)J7y5WWB-}Ob@NY@YJ$aXCJP=pZWE~7zc4D9e45&wTj1z-lnW-}uuB^|9 zbzC&kA^?u(W$O%&B0}AaQ2Tz|usM)h>QvB2zv%MuafbGcjM1n+=5Y$Hka8>T_sEC0 zr6PcZWGXpKY$H8}<9fIO7;dx*hbfcGCr}0v{mh!P)(&D(%rShykarCo2(k-v6E{FUHxSR{ZciRkN_oC7ul--q`QwXLRwF70s@lhF*5LC>dEk#z)3zLV?q>@pG*oq8E>fQ$#s#=pTYcV1lFmeJZO&RW)Wf4 z7_o{wKiy-Z6b)WAi8}8p@z<#~Tw`e2d34i)OuDFbHUkX;9qLIiD)Ce#WoGH*u0am3 zn|c^b@M23mT;R~`n+D0Ru>^{1ceXL8-u}HqAPHlOt%zSGWpG9gVE~+qeb4GR zGEV*hXZAazm)A3I$=}~}M)ELX_GxJyEZ!ngT5Qew>e&1zd~A32>UJ#0a#vjRH8+iO zgB&V(3(*-H*U|eYmz2-u*&l8{LZq11jkzx`%b+3W9nQ-+K5}QPYNZrqWzvriReYhY z9WuZj61T25iz?&br{4*(T~8v>5+6=T{zWeiIXrg2zJI~ zhFKY9?PQG7UecSP`KL-*dbbC&C1ELRZUI4A9eFq15G3fYOy8?RTwIM#PtogAAoa+A z3ta|?jGhdB}_e#9nyhY32-Jn1;l7JO|J*kJaEwMmK zDHi+^7NW%-2ex%DWi-8E8|*_$PxXSpwN_ToLaKZ4@h zWe3PeJg}SyaxLY5+ejGdd zsh$*#f|xx|TTK0F-EXYO#{X=IM|<1(L4tAOK%+r`?N*;n_jj#JZxMr|%!Y^-7m`@L-80P-P@D4l?4dGfO4ih*t52?tR>W7*Ty5m-CuEZU|J)c&PI6Cb- zyHu+aP1*uR+;>k1cCn}s5&w-_;5IUy1Q=D|O;t#&b_bwhYcH-0C*6`^m9w9O4~ zEp6Lv=j^7+ctbT7jsztew$pRio#iP-$ychu)m34&wB?B7*v&md3paKV(TuZstP&)- z`9K`%HVIVA88Lnwh>pE)cSbFK^(VZFUGUdYF*+B&`Y~eAG3z1_vueG&MSxGBn~2Yz zDxMpW`j&*FSS}@Jrrk_7a7@oKNw3vujyZ0BF<+axV(_uu+~q%|aB_-mmvMA`Lp=5J zA`c-?^TLZl2~ZAN$r(?xihxjEW@=8j2u&;J8aiJFd|kRp`)0e&pPS6$Tk;st ztB!w_P+YVjr#H?oHpbB*_Or^H5EuGZ`ADZ)P!X5EnyJ8`g}2=rLd=?03_kzs2y*5N z>x&x~Ea>8RqMR;_KM0yo4B_eQ1yfLJ%SZ*-+OL8*lu-~r^08GBlu z%H{&B^N;8`N@~~Gu=U+hp0s=50l0@jc1M3(Q;3|(OGAjAw*%Puz`e%$sSJ>d<_N)6 z6CKqV?ICw4vsYx1XD50fM=q!95=4t4tj#Wydw1Y%g0jrcwWl!TuBeHW zdpNu!c$85$LkY!o-g0Pi3Cd++G#stnlPH}cs0r<%0<-q0Ngdu)rrFqoY^rIpdvAdX zEZe6qzjLFk`(R!at*hCkvA0A>%=9O5EJOVzLQR?;QgVDl_i|Lma{z35X&X*XNZ5uQ@z=a3S>GB&kW?@+7ePNcp$~{nYIkXqWfNXP$KBY32Wbk9J@fA-u-My*J3YI|A<3oG<0q50KVy? 
zPXbF`_8K)81tK~kCSEG0Y|M^TjnnOEsiVCzhLuvDaT4HUNT|hAeiAiW0bD9?$IL4H zTmq1M?&?&l;s~Kt4ndj?4y{mm9jto+r2<9k_{r7XNj6ar`jG@?RAkliJ)d%ND?Q?D zo(4Y9Z5pWPEvxiY)-bJ2X)Z>6D9V>F8rgDQkWz>-`2{0S-Y+0ogdoCUIu;s1mp*Z| z*VMa7SvyB^D0t7%)80#@SI?(?Vh|4@5AF_ATc05LtSF1Ux=Eyfm*8*dlf!UWO}l+Q z`mI%X)hg7d0Rg$h4Hys3HX`AJAId5E;xr$E#o{FSL{BVrpShsM<&6L53pOKtygST$ zXQ@+L{D{%t6<@5YxP-QN*KwlNld~VE2Yz{nrgr8>tjl^#zD9HIf}I@f?g;%!Qd$oG z38cqWDt|di>d4_)P-E4+=my888#Gx9akSivV{T18>)HwTHMyvdr*f8GC<8^CyCBz* zg7nt%uOmnEBqlr^!i=S=0Ehn<!TwnlI&_q8U}!h3^Si>>E$nf}Ps2Bq zJGxdU{jCqEN3J8WSRv0@drN{?zAPI0sLbdZNrx zb$_A8c+0@$MU0nN06#;?_?Mih=vNGK}&qJ3IbRuJ8?y{}7^walyZYTd9pHB=4`~_?8=0*qs3VIT>g;b$Y)Hi?twI{V$gxv+myjank9sdnK4zG7(O{(yg&Mp zk(enxGNRa3RDxfQEYJV8w$lq2oQm<7-1CMJOR1J}RiI=HI~}%)paPEY)+1HE_8_I- zIS?8SCQ$&h`i?jD$>5v^eUeIC#=PQ@T9L)|wVKVBn+%xTjGf6ntMGCpP2SU*O*|D) z$p^Li6sc74rdn(#&YyzW7COToQ5qka^fxSzb~_UL>{dJbHr>~sZl3tawm%S)|l z(Xu~@aqw71?vmQM|A=0h4INwVntRdQPhJB*tQ@A!seMYMOr_@rG;KnZd=dYHp47Cj zvwEY8d&_xBFi~QC-Osq#37N>YJ$i9m&7-r+{?*$NOLj4Fl>*CWFV7*FfkjM>2A^zQ zpJV;@SiJ-7FQ0hZcyn|OWGeydF z1v|^@0U_c-xB48d{U=%sqX6Ce7Q4z=N-1(DG-SUsveqvO6C1Ok%V2nZZdt!^`>4|Q zukzCgh$Ms#Z_x-)f($OW5ha!9Ow%&Z)hF2Jhy2S6e$e1dY{)LMB-rZQ0D{{aTG|II z1ly3wLLGa_8 zT4}+vaKJ-_yGU|n6%FSZeT>dNrGxF{&+n7)MVJ#6WddGYA%3YJR%`Ctk!4>=E+m*^K}hZD_f& zD*r{QI%!6sCG?s>lZE|+m6)vn&EZEr%kIb`1=EC0!poiyO0BLFesaJWi;$ca{oqkm z>IO8jm-{`aU;6?_n|HA%LeaR_nUDtKPpQupr;pxJ@hp6RFDYtDmz$iITC5j*-j5=q z0QN#yRc4d_4rY};*mz0+eL0{G2QQ+EM(?(G>FdK;Bfp$-LrSEZ`P36*ZH7OOg4rgWZcja8A zTU&XTJFjDuIIv%;`rtUD(R_%tr*K1(0$^N)=kMKv7Yr^+H+{B*E$y;rbb3x-**F87 z1W3qF8cFU~BQ{GDGjG+Z$bLVS1CLRL^WlwG^PpD=Y1_Mq(w8uU{O|YgmhWntGtD;` zgn3t6_vPf28xEiT)kBPc0Qz@gSd0Eu{o4^%54gGT0a*3>o1b^?aOx$wRJ@%?!F!+_ z4VSY%J5U$|{c$m?#k>HlX+qy;w_$}!n9^Dr&V$@8|Gsl6vPHp<2fo;zb@u zJox;TarwV6d-paMX+Z*?nlNPk1YL3|0W!0OD11_~Tmnw#9SAs-f0|HIp>EW=rz`ps zl=%6dkQu^3CwU|2h)=oLCG~OBQ9nysLE&Eco(SwG`H2aP=Kqc;kjc0)a(hCHCi^{@ zaa|An#ACrzb^1)UT}gAxDUTI@Be~cIf{mP1tn^w0xh^EH+LXL(@Ia*f%>nTeXw54k|9!^PBxdk6pNM{+ked_5rIbkUUt%^{ueenEe zCoHp@SNMMa3zqjw&#H*`z1MR(hCy9^Z+iCE*ugdoQN)|&L8`>qQIg4t7Cv)%*@-iOg zfp_KdJBkQ!D1S?Btu8ADdiy(n|LtuA=EqY+r=VXv@9pMEpOH1(EB$VrTOOC){?MfQ zqk?~D#m2-F=(I4~jmNEonr2lY|2t@TOVFL7VTD)!VgN}M-taWNnCch zCCN9q@(T(AS=IS=IsP}IP}S{c$z(%*&YaY-keVNa(PuiW31*_FNKH zsdAOI>Rj_ki)(q&Adgr0^wzoLDvT-o;zknhVNPXH+PZQ;Tc;dJ1x%pAV@`;v0y z+uY2&7g4UsAJrf9%2mJbf6?45df435Kc#yS8OkTi=ytC&xDO1k^#@%v`OMFIS%nI? 
z`J=1nhbCcZAwN%3@%tYE%}ve{6JhWKq?^UK^;DuOKRCX(l06hX6$`KTz4?f z|0$^_6ZGU6qHI-F3{$f@x4Zg+!?_`rrtX;aCad4^~|=aN7Sa94QO8XR^FKO@EX;<*GEtoY5-(0~CJB2~`2Z&be8tL`IQ z;2<-%J^5oZg*8HTO6zUqvhS`)t!+aNpvMmx)!i1UR*aPD(G(zBv=;3akCU*cAq{hnOq^3DdC{JhW)4SidC-}d@3lzB_G8xY9O{e2P&q<|v z!WwP~ZnVS|yh|#)w|?kCb6Qi>!Ta^&&AUTG3J0P_9dxQ#{LdZsptljS+2?S|(xt9xfp#w*@8Q@b%MN}Ewj`1>{c zV|Ei4v?VM?LUv7FRS|d$zp@@OXvt2f%-hs%Od4!^o})Mwk=V!wTQ%1RmT@5jo*)W* zpuG76X-+H)6gs7o5!tW479o#kZ9-tJry?haT>m?}W8;(%xB9bBt4H$o`YEeZ=6sYp zMe?f(7LO+zJhb&}6e9<7uc+WYOj>Tu?DZ%?t*sO2ld6^vjLYYDFJ|v)^awG$&MbWZ z8weLtuE^`TpU-|NfBPy-8{JqK3g5bW3Au}B`Bd)nlPfPKrrSR3A{o>VJph*UZa$N6 z1~$n(MFAvVZ~;R`E)Y(@!%z+AEZee)#&2}-!sq(c?%0)()E&d5fS)Bo=8n)0R!n8p z6y|QXnpP~jJSpGkN+LnCX;c@3ZK3IyTVhf`mo8LtePemtp)#)Aa#!^QNKThS+l2im-y4TEIm;d5nY*Zm=h=hc^ zH_IrToge|8^Kag9PhUgc1j#uap-nPrfF!h>ADlL`ZHQH@jdX}(&UV+0GrM4u_w&r^ zoaG{5aIkN~t8KVT-vO8%7O98c`24Kp9rUF& zetygoT$LF7BC>La4T|qG8FQ6;NR*^k)#23(hWZj($yqOtA>r@;ej7CoN>@W}cTqq?6CX zFS_$dSZQP9#Ah5tCi|6yqBV}-@D@w}86}29(+UfX*t+(fassT3UYch$GTZyAqJVD| zCu~>;ZeAjyE4G9n`^j&{^&e`idRx}kk^#1iDR_}$RBxJJ<>Uf*uTnPe2)$0804I5^ z5a_gghU%_E*RM4n+T9{CJ$hXl{k&DX#^-Ee{GR(i{$9B>LYO#J#QE5QJY10TWxH{_ zVn=wCz!t5zR#Ru^-R)g)q&?&Fq@^cGmxQdIQFO2hOJ&aJc`#l%|Jj7#SMZL5@1<2E z*4dE2fnGcf$_fDUMNgIpq^&xun!?I<9%TvC))DvanhJhh`moFs$UF_HG5(rJK!xsc zb|2m)?T57!`=L0Z(952dm&PB`E-UjZ&Us?V&v}BeB8&URfm`Jktqz~=?!2Rxo}aO4 z4uL2-I&JuoT77;!hp7ajD-!xuI4SpNzC z3h`Y{uFBs(T?grEc{+5APflrFT>q^fYi;eP%4#nuI>K8}BzV{ST0!Hp`Ph7;uLN+N zG0&?n#eo=;o}B=)G>(h4OOFY5){xs{oDe$-Ro{tFS+~=T{OG9tn3xR@LM@3pW^bP2 zK)EDhh}n}Fk4;A0nhpp!9k75TTy$YhAql^4DC9EE^9H#X#&+Y?K4M_MI+)emLxUFN z6eIE1kw$7C90=ajw(qrr#XeW40$a-|=ryI}GS9L0Tr-j(Kn<2s`(jK(r8+X`<}{t| zD*j0nLz9S|mlWVC`^=T_dAdkCD=*`w3FmHFrFW2Gk|wo}YCLij|89s<=35prPvtiP zbiQ)j=CWOl@}VT<<>o8d3pBKJ_EFQKiA*Z5muR;(1TRi);}sefEVLx4e*fz-6&$no zlg`0WWmXPPE&_);c+3uX$WWcu?;tHig6%hLC#aSFGD9yamV)jD9{As-4e4_L_AF1~ z2{;K{u3x!Wa?%Y&OVJNPnW=ziD$BNZj_hhx^f&W`0W)nL{fJVT|H;EmcoZ|?ilL8& zTHc;rn8Y5-bm{Qv@_w2mF0$l)VzP^G1yq~;KUsCjX6lqW%C`}B?yhCF@~(3dI>#jj zzLC0NsA_1%p z@0l*O=r7HXKUFOOp1=+!A9ox3!ofwB?>Fk3cqF()bV;uhqV>FvBO|U3+imscSU;;j zQDl|(>e9pALTB#OWxea$Q_5CsvYJtp$v{icHB~9<{;7oBqA8H9v1mAUXD=XAg)!-) zqOtO0zwdFsgx!Y|ky>zJENX_HyZ9GAk~HZoRp{Yjj&M5=d{AWPJO~4#~h*rGVAhHGsa2Zg6D=j*3(M(w|Ubn z{9WXVFsQb)i$wi?Mx|CI#_YLTzt3}gJ|(ujs(!*e8$?sR4J~87Th_McwGp<4=AOYw zX}nbAsp3D{SCn(lJKq?w=pQ=uTC2;nT`k#S)8v(b9}uWaqG2MIKiaN4iWS5O5;8w` zNdV#Q{BpeYc+J=1Fk0l%8jx*VA+pN>5%~Tl#4prRY5~JeLJNgXeil!xcN5rkY`gd^qB=N=i3D1nLhqwn(1}A z?fD-T*Y>-ufym{%kavW9SZBwAWDwAOc&~XKI`|Z9C#VLYIg2g9p z#ker<#Dm!ZyP`q>LQcoe>uwdVRx&HIur0L4voD1+%?ui>8VI)$7wE9qK7`fom*|g-y37LXezx;?KSIz%@TmFa&w`Q z^CV^ERIC(GdR<;2Vu)a+f`H{GY#Od5F4 z{yp_%bXMcZ-N$>heqShJc%!4%3-_LBL<2IUtSTjRdlH0&Kqz1wWpJkW-Lfu|eqc=8 zeF;49aG&qy068+wV#i3PcsMKm@U2V?@86eOSI@kFW>o47Gmu&5^+Y%m@lS6ljgwgh zp?R?-?o<0e{bi!K#p>hxpqLggw`4gB*euf3u+r? 
z_ta@E6qN>Lds+^97+svtW`7CA+ToUTL@X}BC3zXu^A5xfA4xzu_L_`vEtl(sZ`ob$ z^|?@wI}iH*aqqE5R{mWHx`#S%=86~yw<(Z_K4YH-avh(;^N^9(?_^bI4l(wvtL=Z2 zhKcxcZYx16K?lvaX&kzCa;aAr8+iuWep@Oi8pz728ER&=V1|NF3_l*rq5;%+05qgok-YtjM!vWxc)_Onz~90XlmiArgvT>QuS%8b&CwxCNvULJzS)`P zVd>VpQpf8FjIO2|7q0$U5RrzYYO3^0@i8E3~@hR-V^{Z}|K zB)|Ps*hL9PijPy5@%0X0X}1@;H2MyffOG{v#I20D0?ny9w^oJMDN+-`GzK=E*0}Jh zWRz&;ygXaZe)Divh5BQt`p{=t5@$f{c1GmpwBCjFVsdlEM$XhwXo=OXD?~K@WSGrr z{e(SX#7U%40m*kt-%bc-*Ep)-vcgey3B<%nG+PJV(2h+^VUqZtrHV9IMg}=VkH=RG zcvaZCbRrXGjQ)8v-+<7z4~K>7)KG>HBU!{nL?%hCuIbdB`0w)A|hm3qlmQqA*sU>FQa96`>nQIXu72C(|1A1tGi(O^yNH!^>S(INdj6P8R}B~mC-VJss% zvW4X`6+O7mVQbmAG5WK7StRRA@x#Vh7AN01|KnoKSVUt0WVWQOR|ca&WzAPHqxTF{ z8fHxRF3_Dqc#$7dsH|?H)6O=jzHCIb8aCO$uFHVJ!?)!QA}8Q&?9&O0qgvLGgoJ33 zu2jewMXfYU?eBsUmAn1_q1>MIyUiFGhFzY9<|$6?lb$X1)S zuTl)2mDt#v<$tuE3u8y_d1wga95-j#-Qs-E=?KyfTnZe51=< zW@>1W)D=H3VhA-o_-!1}#~|;dgBL6#GS(MmqA&I6;;^n}+m(WqNQ^tBIm@GzoLgrm zS3EfCIT1a)%MKv*3>w_!yKxnn-ZiHsjbt+lh#O`F>R}$U*jgh75?(2#;hQcbb2_vU z?6!v2I|O&epqtrAO<(!%$%{HWs&MP3i3XIJ0e`;EAr6^h5XpgmY7VMM5~7$nkq;kL zpx`Ky7H+w?{U8fcy*`d?Kp zzPozjDV9S}Y>rpe)K$uL6PJ=v_0NEqr&^7N9=Uu3;!I<&9H|YKQ0aRSH!DR#*o^ zodti%=)i>=Wjf;!R0fxqI?0mD9C}sR>_Z@_X{~LJZ$t`X#l4TI=Nt_5hxo;BdQQg> z)1|D!AK(0S+$0929XJJuogA6*71znUVv-CMoofzjlj0T~!k>9tYvF!-{il_*FOTZcck!8Qu8c>{``CHPaG0dnka>0~Xwd<-hlfTDuc`u<_r z493CZKHLg1vEDj`Z&5OvlCIaY`ge|qWlX9HA>fl8 zZLOoGMZ3O)jLMRisX;N^H1Atg7*3naBX(%GrB3tq4V7X?YoJ{1)l{>W>Ix+gjbCRh3x#>-*UH}T~`Drn=Y1;;(~0X6OaTCa$Is0(_e>W z!dp(k=AQ~+*c{I2OOyRqZORI>ju1$)Sy|hrL!K}2+4&LuhZl1lR|4cI8ni`+N+vo4 zs2dk2=N*<*HQMIhl5yIVg{g}Oz3o)JSl?NU7Xe%DjuEOCEpkA@)0ub9JV>o~a_r^C zEwXy7v1Y=pu(SGV?$|w^hrQ*2sXp4zESl)Li@&db?q{vUT$2&!+}q=4$3ue&M)Q5m>-K}bkXfU*g~p4>e@s%{A>dO^6D^N7-%vDx~d83mMIze za5LvVND}pp6H!iE0Su;Cg~FrH z_~2maj4adi6Dt8`kf@X9VrhgDQe<|1O7o1;8WnFcS@$-N!B>pWMaxhemUD$QA2qT( zhB6sHR~c9DN1FMH!A_eV^|TA(*b5U<#_z|`&-eM3rR-uf^H}?Kth5!fY5Lf}-F=&< z#V&e{EfI=4pB7I6HF%9lz^mSaUPH$oxtZ);j&`hB)Ew!^CwZPB88A?{a=-MZkUh{? zGj|&;>y>>v4-hqKtvA^4EVCx(JCR{Pp7G6QKPXf9Eg2&|6Ic5=H8b(V8?!Dl>Gb6C zwCHO-)_Bnqbrf$gfO#c|OqDhr+m{gO9j^ui=E*wxSgwRt;35@*0l!iE($c+2+I?pf zZauH#TRZ%5WW8C2T=`3FBr#5#M(Wt2A0N}TT@EMi8Se1?GaeKfATtKU0yRoW!XJO9 z@k?`hO^Q}smfAb!d31vJ8*2~osvhID-&vpMLyhrD+oeH%rXjCKPCRswOVO%5@L@2z zY2=W5S7q~b)K{;(FYKPMqp4Natwz)$;1OMrg;GHk z(<}Wd9w!W83|0wy(W`$nl`O`{7)!x1@0;};1u5QIZ9{wa}vg?_o>P5m8g zC7VW2%w-~(bDY3Kp7KZsD>k`~;qsAJsVGXhxcXFS9_ka}9TS*O;?h z57qa82Q`Qy6TGVg1&yi5dl}n_Fbq7EX4R|0nOLzS_-a})U*o@TIVMkj2g8(Md zJ217WJ1rl1&SUotgOR$?u*MNcD`SVzSy~tMx=)lv_?M_Yl4z}~cD)Mvt&udn`K_v2 z?ttV(_|^!NjS^_o)V%}F_U&SZfS@eU=gLmqftkrOY0C>r^7G&s1EvPy<=Y|S~(R8vJ`goziUw#q|Y|3zfx7g8Z5plWYdpE~_R-{CJN5dRN=!`nTGqmK+uA0Iwc;V_ZJr9kLO1 z+mh>p-wtYlaA!ArTgZq(<$~XcN&!2>QWA@P$qOpvMNI_#uay&Ya3tsLe!U#>^MAc^ z>>(w2UMYLJF4xNAN{{bQNisAQBV(qo`1`3_yg!szXo+Z^9Az7>Ogy(ZGoaCVLitdN zT>tYU*B2}pD1RWfBKWhca zTWct*Z;T-a>zc56(}K-0&b?QTqsH)ch?cnY?~@!H!BO+6E!h5ZIxx0(_XD_~z? z+r^z?td(=G8Fu7uILKliL`u(BYicF0sA9z@;3vEWVglZM=Uxug_%l|0fBy=k1NJE~ zChuqb!St*MmVg&My!ME_HWY8qgrn7(q!OJcZuNYuwB#G+g=m}H)5IjegoKCeHOo0= zXZXon^#DxWaEIDjp8NyLBPi7ngiZlvWc(AiTb4j&MG z$=WA~X5?3ZBd*ukDt}7zm)E{)2pKCYHaZ*2jfR{KZ~CBI#vOyOZss)haCXL#q`=V? 
zhqG4?WL5#%SN*@KU4vfaid!0aWrT>v!s3G@I2QL=^$0$smh52B>6!|qJa&-c4peHp zrwi>dO{8iwvG>D)OzY*m*@`EXCv&{i3*FoVC5tnOIl(j;E&elA3%ktGfdfv`)4_wm zG&H2&P${!)v9IHL``A)J)jY^J1)MbQ*nOQ^ zEC^+1W#wLc!j0_l; zkkz%3lmWEk=)LTPWLABZ?yeIbu(hlxonh`PRq&W1DD{kReUv4Lv>J9k8>=I2(yokp zddl4e*SacgUEKAV^nZrV!!N1+Z{xTD(ZGSYilu^k=PbpAIC5obW{PI6%vGuND@9a5 zR8ZU_z&&$iuF93UDm61T>v3gj-K93})?JT(!a1+=`kZsVpX++Rm&?7?T!(OZ*u94f zNk_Fpbo;O#Z`gCM{--q6wqD5zi?=4$9NRGIMVMKgm-;Ghwmp8gAoywL`0@kocbh-^ z5Nf(U#gSFd96Pwfi!<5r|7m^9Qy7s&xoTEP`*K4udYv~N=DquZV( zBG`B2Etwx3HyC9hLob9Mp%t6^QiO+&T==RD@l?4mJll-7U1^dC_WmNg>Vc@<=J)r)F-K+~W9BlFy;z$<=I$ z#tqTj*WgheeEaFtAo31u=n>m4yw#>Fss*N}opi~kNwoiWrE92*DMNqmxL9jT&}_Dc z9mN_TIS(ZM`&LtZDLlmDM5~#_$Gc;Z)9fXR{X-J3Iu-Kqsk*D9K*w3a zGoS)lR0qs#Y;g&Ae|Gym&U63DvFAT7VLL-`!n%t?9`gBX%6gGucYpxk3D5`l?psnIIdQP1L(_M0yvBLCV zK5e%jSrEi|7rrXaJ0BqGIW!FbgaBks(;(*}RRzK7Wta4D^b)n(0Fm1!^|;N8_MOR? zrojjDe`&AU-Cu|Y$U@iBGKkr+$g$1Z7KARr4cu!OHpJXvu8~eTOXy&w^m@#<;PXY_ zcNtARlJ4S0$Vma>OclimXetEVKLdfyP*K_?W$!E7&&tOsaVP|3AXAE0$>C)-^Ky)J zn-8fL1+kYTfrX5wuD-xge4;``^YDtbz?a9N0xvn zYuq%WT~j}-MQXf_0TRr49+U-A5Jt=E`XPF0-xTyLxCfrbxj|a4ul7P!cf?_iDtR+CdrRa$C@W0h=jO^4SG$NU= zkW&aGnCmr*??jX*rxcbrfYnD~`qgqHL{QQi!(3FCxCWb0$_i|HTw>Q;hBz`!$^WGf zYL@~n5|n=hADQUjG%hn5?^FW14h^6dsosxffzWmzj>gR_9~o}GI!pa+{&MM&X%maV zVP#-XHq0V_A|rP(1D-TqhTdco{hDo!bB0L5_r zeI}5ltSdMxjNE84Az@3zMDa&q?M-rnE3lriEGM;l?~AF(R8Y^)lKA2NMogdXXLkF_ z!#0-guU&CdAz5M@-K~eS_n9_JFH4H(zrK|X0_EHMhXj2AAzvChEkdjw!G%qPd<&FQ zGPtwHTelmbLUVS1HcPf5)JnHlYXbpOdMe?lB~0U zi8IYsuE6ilGZPmItYgHXTG-@=*(QsFA=kJDS4@SfXskA)rn#&yE6QmG%W zgV9N|l9W~P?H7vgVHLWC8nnC|x6Oise9*)vX9JHTAjCb*oAmdqJo(g8_ATgsT~SO% z7BYt!lcxdT>J)d21K%@Zo6J+^B;&&+1Pl#hGP~IyY{KgDOujjAJu=#EHR_Uc`u@>J z-J&hqfy36zW?sN&(v?izsoaZ(YdqNYvq1((O>buWY!4_hoHnncM4vtRqoGvRMRJa! z^&OI8BmDx%rbRvj_dHXaDZjnA3ZwhAYe~Sv?m4@kH$;}w>oePuLg~eYE!?+n@z zF2^-y2Ujaknw|p&0hGgC^(eM@f(}H%sbOfLDvWKOYI$U}(mEDd1?^ynZ&!ewKy8+p zKej>K^k-hUEVt=w0XgaAOwepmRl}kV7r`(g5Ij#nQjrrSK~s8`wcD#8Q)BS>Jq@9g ztRbnK6qLN<%MpQBL zKxq64y3kx!SeXO2;@(U~R!pvVpZ)3ap6sySS%H0aKIJ9uO(%iVS?lu^`9X9*H`DqW ze47b31!exwX32vJ41SgXzYGI^Ko3`#_Z8UjVob)x+tbP5q&ExumPM(eaznFNcMo;> z;4)+#v7RIzQ%dJn-3B#j1-=~C)5w7iIqG(z8}4S0+{}u*DBE}~%Yy<udnEZY8jg5h0X-Nl6T^nMNfI|7U5`6_ptHs~5U`WS8cU2%zOYc)i+w`NnpgdRRD6DW;hnVtcc6uQUz%`6~gYdqYedua7P&5_#(c?vqMVuKFCeVM5wrG23ZbuNOg)zVXj}d1U=(vh>6O z4`BH~KCUk5LFV^coj;C3CRAR`uq0uh;CBBREsM&d9#y6M$(eaE3Zf#q(ti;|IV=j42aWjRoocKO_rqbOxryWpz?p=)fPZL3l7LW`2$wa zW?KI0vgFTG^!8r$Sg|9{{$M8va;SMlKGuDWBms<+kqRl<1!PB6$;oRE>>J1$-CzzB z>F%_n>nq0(siDbcOWEi$AJ%=%!-nhf2yszOQx>#i$#45pgNN85=sWK1r8k7K`Fz7QCfmx|J!eU8}^P2u19Z)8=UZ?S5@l#k zJZy?COSU$!m8G{Uzc)N$!FCO)5#l@0P!Yh65Mn_%s=t*_zSF_>65dR6GMR&{P7dy^XzuX%uKQ73&jJ-8< z7d|vBSk@Olc0v{2khqIDptareW-J@SL`Csej|`Yr?r&JE=A!`KS;Ve>9d%4gU zxuf`+gcy;iwUg#KCZdAwWzHR+H0x2$;_~yeM?_)2)XHNO6t^!*Xk02%T+xSCZ^is! 
zs#JO+vYb6bI@pW~5QYJ1$Yzy^XfH$c7eurspTiwudn@zkYZ+}D8QnkbvHWOrdc2Dn zxwsambkhz-4bxdHc z!S!ay4-20v$y?{(KYfR9Lff|PelN_r8#cE|`^0;AqGtl=P!&INDSu?6b!pqP?L|i0 z^&DCa1cHgUU+zhok{Z+!{kPJ^cR!oVtAtXc?*&Hh4Ku#7TdbnD!!#TFVfDQSq^;}9(rEW?zn`;CcG*s|kVg$Te@garX9 z04e}lHc7InPZ3NKtU6Q<7?kREU59e=GWFThWP%4P0|}V^m@3zlM?W1)%XnhmdPYbE zyB;TnR~EH3;>&1}%JB+MW7lTTNk*+Gi9~gy7})uy*LM0s%HM;HvgTbwO_Q4M<`a(Y zT^x{#bt*;qWV+F4qgQx6J`)6~IZw@CkB2K0gxMBci)rOfACIn1BS2Wp%U}2&BI0_5 zG@l;Ruo^i!EwqZ8@;=->yL`ic-P><8c;*ErvvVmozyoNz_64+Jt4|tv_Fu8jy42l( z;mV1C=xdb2_(BkooGWfM?on2^v#KwAygrfBtJ>`2QP+^>ODch-`NjO(6%9s1xeW(V zmFUSPLRANTo3MwXx?4slEUMcGnoGWrb00^+iNg2`-@ik`i;~UNs7M{q{C=%-$I{laO`ew~_1p5Ds4~rTZO0iholDv#$h3MY=JhP(GB#yKQv`!G4K)Ya z^m^LkqySd%-sIXxljLd@?GL4-7JNvcK0(}e!@k{awhx4Gf+^e_Zps5}E#PK+U^GFY zavhqsfb%7hdvhw~_9)_p|3CH$CS7y2Sgy@=mGq`Dy?e*Ytx)hZ8u{MRmQ& z1i6})9&&Nb4#Ky?}94YIW!!;r!Y+g`NXu<)Pb50~)Nam7eZw={Q(SvGPoX zrBL%`MER3&^5%^kiYVBsx)Dq68v)G#t(vI!5&DMSmHSFMb{x*bt6~4r@S16$p~=$t zW1|Tr!3P#k-2OT0y?yf~kZClm*PF)!s$hPU$W(*XbPcE(+=G*t z5R29}gNU2Yc1w-xT;Lt+^|tjN|1cE^x}EtL%R-le^R&{RT*3q7@#6pFN?Sv?e_! z=$#^9YUMD%f^9so&ziJFYX;E>RZ*3aiFjB>v8z%I;Z4?r=GDi$Rx+JkQdMi$Wvd(h z$bTMBQXEZ&rad4)$Ht!$`-{*-1e98*N^)9}k_hiRg6y0-BW{+K#_@0AfOYNosWMuj zK2JQ{#?Ax;;&VTwl)z^PLdI9_JAayGfgD3qo+nAz(F9}QDnd%C?q>*glbXo98*bta zAQzb%U)obp#ynFN?y*)jjCer_8X>NcljMjM8uWfA6OAV37#U?&bf8p@VI8-`rKX06Aao4NK=-|7i1-ak&bdl5dj(v!B-b*o+gYHc`dlQ(f1#m@dAYgcRo(l6yC zI2*NnpRAeG%PgIcf5(aw<~ja2fopf)vCG8Tu#?!@ZI`NH2FW%IU$9zTLW`Ur`BDk^ z%uSk)tF+3SP+O$OWsq}iOOT(A?dq1ol>e>lwFt{m^hXtXxMDeCH7??R4acQ^_MOz| zsFe+mK!1#rMCv1kCqFz#UvwUU;tS7=AX*eAm;|BPa6{<0N5S1@dwYX;VK5~n^5PFo z9m9^B6El8(S-P}pLCMUw?$1s)@C1|r1ciC)jN-ZnE=NBX2fX;{WKdWuejnrQLM&Ty z9yI|T4J+@wjfxGpSejA8P%VH8euCc{j69R{>-wXH4ueN;`u({7-b^Llbs3i>%XJJo zjq;a_KFE+%lP2HQ`$}sYUGi;+f!e>kTXOO36#+;76{&G3UheLT*5+-$&w&)HfvHzV zPS9mibC;0j)CGGt3|e-_ukF7Kb_=gqV-SjLo#$dJ^YmXs|!AL z>038*Q*T*rIX6m+4tA5$XwCHzIr+~Ue-F*;rDQD$4%+LUzj8sYYO;Lh^$)-WCW3>l zPZc#Iuid{LF}?N96wBIeS?DuW+9(dfT%|Gj_Y$rod$>D!ME+xXX+XD(Ix z@|O_R;4jq1r}ia7HicTJpQP0-O)) z+S;O_*Ih9QrLL0B*yn{vUa-re~0fZiLL`U6uTa|Dzk3Y(bXPL3d%R4w0H|&vkwu35%l$PJgtO znMr}#KPhLD(OPz)O}23qQ3P5uC5WJcP0R?j6<@vGms6Ae_HI`9mZi!^YT8tXS7->vC#@j_6UcFkCM}LHz^w zH9-?ci9qu;KN7H|A;c3HS4R;VL3&Q`yOeye{ zu3@yzG7OJDx~XkD>9tM0mPWZg787}KYgwJ4^Ul~2HfUH^t@#LUK=Zq=t{?enj>uj~ zuX%%Ou~NY#93SD7U%X+g`sKJ?tKy?(rJ;cbvxGZsQHsOM%4>c~L!{|8s{Ug^^~?_c z%d0Bud*O!&qm1tJgyOJpW5arnhH2Hb2;KbP6q67Bk{A8)jKt6S&L>D-> z(Ece5O1avPZc)Y>Yl(@V#MEJkl*|oKT7B399v`fOV&Tp`0#2d z`Jt_-V_bEGkZNBoJYjg+!+w0s(Zul4RxpTRW>f93j#RY;4Advw6a1{L;ejXn7&zxc zg0%_n65-98Khvg>wK*D}_-TSl79v|jhQ;~daT`Ztmkf&h3f`gDs&l_yg}&EpHkHp4;-`k083@a)NVgWee)n&_1J;E z!jPZh&~?3dM95G?IOz}dLg^_CCxrHNQLS79uDURqfEUDNXxzBzmGl^VC;Uo*!yh!Ri$9 zFc5T*oeyg=YXY31g@hNi0D%T>yG6U!)aEsPzI`&AXWtE9rJeo5KFKKg)_Qj{N= zoO(-?jIan6G{aN48g(58Hs3@w--K=Qp={u(VQ=+|?x!#MyV*2qHd=g40}Nw)wnS(b z`H9bV<&HghE-fBcDi+mKU}A81C-Z+RKEmy03b{f<6~_gwD9$>eSksW`aeXsfICV4so;Nx?*LjTwWg2cHA31>5~4R%tJ(Gi zsL4q@MNv?KksXmYE+lDARK0SE>7LewCYvgu;aTs$mAb!{kKWdf-fXDS_QEQ;#VSKL z<4KJ-jMxvbVkbze`sdz#M^rK7r~VOrbj#mq>y*fA;DtAz-nSwttNEeYXyHx#JC6vo zm3D~g86mv-#2#iO!E`uTNb}&sMwICjVv9}kaoa)@2emV4C$2x^-#RG&dc?ST{SkcY z%Zc%R6(A7V>7_z7EXsB>{JgljX{b*L`PX^Sbn~}qc!(7??p~usJi$(KV^L?MUM=GR z#uaxUdoh%5grWliDc=x{s>dJc35+t8*8`2m|G-k;DDS(qetqQnwlzifgCTldn?OeL zX~pc|V7Q$om%s(PcSKe+WC!(pWTXIEYRqz0+0Dfo@v zmtn%&o2OuHu4JZK6zZtTZ^}ha#jRpABj#jSp9#U=BV$YXT?Z_E@j*($6O>;d5@29A zpovMnmHnYD>qb9A(lAVh|IFW?^ca>I9~W3{or26AQLW~C1+Sk_84-^4)EwCU^68-X z*06sCS!!fh$&Z&M1_^O)VT9F@(v682%!dmESg0!hWJXHb)-9j)wJ0)Qw>^U)nDf#8 z(4sc^?{MMOC*P^+-R=(E=A9v-ltXuwtKoQ$5ej-4 
zgmSy*c{=vx=Ub_gSN}C2M+y=>PKMYg3H={@1R*HIak(u0zmOMMqN#Dy$%h77OtFGm zO0Xw60AH9Ag9Q{f<+RmJl@Y%6p?4kYPi0X910Jta-aft@^ih8B%GsF9GCT&@jjRzU zkW|=(TAyZTXq(Bi&R_jy66|d&$Wa;TihO$2!?(+D=?=hA>7-3UD`W3aR>*aEd)OE8e7U^;XnO5LL8=i4Z>Npc0lpcf%^Iet240i+s~! zv`{iZRAooWp<1RBi?*o8-i#syhOe{P*WCCCKi&VR{L}Wz$QyXNTlr)4v2`CAeij`4 z&0+7&O&fSm)(xvb&RLwMkxe~u-QQ?Lv&I&nVykas6pG&xYmj(exAXl-aASy2T(^M2HqMUAKosnxAPmGTiJOrm6@Udo@j-l1IwrMhMTRA>xsY8m)ZCYXAC0i}k zJeIJ2tTjXVAoU6wR9bKBzW&Xa^2NBL__+GL(glSI2~e%(BbyyCA* z-xeLW5N>a7*N@x`3@rSKqF737JtNhu(gOdbb_mtm!?8uX1H-%aq2sN+rfs5Dx%QCT zpzmCp^FS3{{wDnK=!>GKZ@tC@gViKYn7M$ORqbp8<3}aaxLuZ*a$VaKT4?)%amNT!USeoWN|iEB0KaTT;u<2mAQry2Adv zd#V&nBrP-16dh-JyJ8(e8S`xFD)C zo2TZw8q_e^d1mlLOiP`aZ}zFn%HlWm%uVyd2Z_(zh66pGA-8k|Zyv0h=u}}8<6VFK zXm-#SZxBQn*oZErlUS0H-4pB&1F-QZ^L^%4o-{4Q59%19s#T?7 zFeCaOChnKx-f>YeZQ*+73vg7`Pdo*kr{;kc5;kg&KIi07V1qM{P*&XZI(1)6kUXc` z*cJcO@1xk5AvrIfaP!S@GSc?1T9H?aK6J;rt{tZENEi9RtM za-BAQDDOTsKQIK#F-kLZvokAK9TGh=CH*`>dF?*+2?guE>#EX-$!Rrs?EfkZowtoX zM=0YnUI=z@)CA@&G`B3@_rn z&@6l-uyxcw&uQ}7?-`YZe<1m*1J9r0H~k6XIu5u(ba`)$y}gDUirjd0VMPoeseX|Um&dIdHEiHe|AWbuVO?bi8gR}@aEjlWHg z;dQz@q{Ykl5Rj*YU5DWY;1B1vB4Sj(82sqDBN&+J(EWNTN}ui>gB27${vdbb1pedE zKgJJGl=9e9H9zEgPGRzr*57><)XSiA=HxR>(}{B;u3B=7VRe$;!F8I$>tDrtL0s5t z_j_}bfmzSYGOUD&JRZyZjUz`3U0st5&V_6_a!OyP>5>5>I$As$9AqA`KL0cX zr_?s)rxoZD>po+J1PBLPORtUt^ig;z{u0m@qnPtJ#15UuZWRGR84jnnk&ll87@_)c z0PCDTdc%cuJ8aO$zvt)ei8j(D&m^)b!A4_d3_ZMr=@Vpiy~R;0l-`ht$?6l*F{uJ! z^bji}K!d*%ZEp~~vYYt#w!8M#;AI%DJZDOrNSMuecX+Dq6f^c2>>^IsH(VnT`Qij< zHAx-I!)ZxV+`j#6O!K1=v>66hEjew;$o^hxl{lpPloWJ^iiSRxLr0YFX?LZm`cn89JvO zcMnk=WOA#xT4i8f{qs~;wYrh|$3Msp4_(xe2oB>3*Q;6D832C!9cYQ3mwGrI#?rr| zf`9cYJYc7KH*KPUVKRSef<{K<1~He5&}%t>4`;w{%N)*XmxVh7CUjU$F7^H|-T?&w zq}$<`=cE?x^|H`YqWV)E)B(>Iy2=9bUB1{3W9DL>MqH$|@vQNIRE~Zsu#1YurF8WU z(!;u5$v@fwURHR8wWWPR8B}^yf1$24E^oz+kTTfhy}AZ z_CT$Sq4DNm%u$UeBO>W~hORcX$dBMKfLXn^;rq=(8uHz&wd>Z3m{Y8&Io~Tmn)GbC z=4L>4Y}vVJ9^+6j=d2lLF+ulBQ@`UeKk&+A0C+qh_odPMDN&qYp+aSwFmREUZ>;h3 zhd(Bmvg%^0?X4L`E7(BXA)?i<90EP1#&xG9`<|fpT$(W5&KHoY@*sOife?4A6;3hr zQKMy*Y>dtMOZ(bShf#{ZvH01FSMf2G6X<2B z;AD-Tq1j;wT}nQM>Le3yoH+S)rDHH5?O2i^B^*aF{i+Jhv)0m6UvoIY^6+tLSPIL~ z^*G$TI4a>MCBr6<-iwckJiE_KW&nrpdGrL!vj}>dDPJn$2QLY-KB-Z3Z%bP(oSBdP zi`JR=S48Eyz*3u;**_OV2juEQ5zQ{X)e6c?)KF7yey$L zeg(xueXK4lp^>_QT}uL@0YdqRzB}G9e{6I4hpIxe{AW`1BAp4*d{0eBCzEn`t?XCC zNYu?!d=())s?0^-Mq{J>d4Xz-u%R$5QhxJ4%=X}ymN|=uUchohBlNcHx2QAg!#CY+ zFMPx0L4MT)$(*DP7Kx~)I?#QO8BNQ}jLu69bHcIQu@-MAv0pg&vysu`?faSCQHU+G zB;@4ZXzJ~i6hTdwPK@_2z|o4e#f3fwr+VINCY-$YJ#qqq=0hP{PxcVPcrjf zlD082Lkb$BmeYA0{m0Jb`}CQ`NvQ?Vr$lpZ{+-^3m|%Fqli>7hgP$7XRRvZbm(Cvv zP0#j^uVuNAjj-8aFQ(pWsh$#K=*7HN$@A-~dA1awnvo8oKbpkbuC~awd{5P(aJ>BX z&!{)fuX3*>)ZJ<5uqvSDB8S!cL-wQ3Hdq%tSjR4A6W{1!zc)U+G8*j#<2hmTEZeyW zH|bGdjtfUiRe6SS+jrzI&zd$NsSMk4UUnSSjjF0IE}|fiMa!_5>_Z}tE?ZFzz^6m? 
z!M)_TOa1@^UtcZ00nAJWs@k?IV{7uPJsjuIS;@@}DGPXlN9{=m#No;?B0cZ>T@{JM zKSiqEv(sFH>E3zM(WQ={0-4io)F*j5US9uq_C2`I+h?{m{%SON=ywze_q;gx+oEug zuHDRkfq7Phdl#LR&{sr?gFdIFy89#aLOKl9N75XnL{%^RbBd&}Zd>CW;AMhmoYg1$ z*bu*+J>}NCC;Lq_73TclhNff^8>4%+q&;3mn>^Z#OgJ5Cov2fvDlUIT;lZMaA0)Id zb=A{{%h{T^JQeb}>Vk5vyPiaa!HoUIizmv65fKFv0^ii)53&wLp&D`9hX8x+2&f}x zQ~E1h|8zo_v)`Wt**LF)zSx?aQ8|r-3o}b_ml`CUC~kihBMlEyXEz{4P~qYDtEsYn zha__6{@aEx+DN@3+eYd}!X@c#IaYRNc&GIt_@%dAKJUL7jSCbT8gztDl&JJY*v=p8 zKybWweyQMOSn7ubf$2PVmp;&mI3b<9EWIyY!(F{n)qc)T#*pY-yALD|)oSs*5M|T=ZepIo3jjs4$WYn&UJDI(^OcMs8EimkXyHISi1xLc#Ym3CC(DlgJo*q#&GR ziyE@-y6-K$4!4o+rKqM~!eT$$yQJeCbQBx57jmGE>cYMo?kyWE>wpS6oo4#6kT%cP z2dM=;>>M92o}oxMElTKPAN)Y8^eR|JEYUbfGwkbezDKCJVZy6bCB zVNq$&1+%H=;@g(#VLz!zSMd~%>A^*`{zU5BPi8}|9nLEND4(HI*H z;Ps|1VJt_mqSop^P`&BhWU{5+lmb9YywQ@o^pz$eZXyA?wa zEIPfFpMV$tln8}cCrBNyTN#&byP{6C3yqnR)Mu6Eu}_{ zlCC(nScv|gnfpYe<07-EN+t=+aeM%z$du@rKiH9j6z4jyT5>vHj(x@z&va5zVz99E zF_NpAV}%WDCtGS%{`$Rg4Xy;C%V0g66DJH_%`Fw2kz_q(?_`x)-^Q7i0=cu5+`=*e z1P}VM&}j#aK9Rqjc3j{WKS?DY)-LJpb!7P{yE9Rc^@9UTpks!+9C>yPGweXtP#6&Z zLmoD95jYtEg5@z8zIx%3z`aM=q>>}6S=EYFN8i6W@v{q9T9yBKq;L-pUE~0hBUNV8 zl{W5HRy6_FFv`p#2}fHd><3iHx)L%(%S)DUOU-wWk~f=HMq^aCr6sc=#SKw?4Yl$I z%SU|-D z}}cIK}j=pv;};|DRzXo;^k9Y`re94o;zHl9(rIZgrzM(I*<1?vnQf~EDrRNhRo84 zS+CPW`U|XrPvMR;H1K8NZldL9Ve6QeuskJbq=M&x+ zMCQIAHWJQ`fEq?}9KCckcGSRAW_y;*PV0h(I~2EoHrBFC;_0#fNM$J3IobbNteO~SCNZt!0E0ipoCww0`uC)DZwrekEc38!KVq{ou zi7_Ky_NJ#@RCvwz9AGwU2nwAL&|V_K^|}qq1sVub%8k+VDiu(Y=n36rkfTmz$1|N0 z^Max6GpzlGcD9WFY#3IVr=?awO&Pa?6mgEB)HOh^^X|m(7kvSH3HRrUUusq^-Dlpt z@t|2$ah_?BvTu>9@McQ0%FjGkn%p-PM(m-Mnx?kS=2d{T2YmjBvwF3CIMBo*R_vkb zAQP8Tt~y(-4yo(-rTX~By(i}P@9(`8ek((x_kfb3bNU2jQ#Cu0#q?&yyW3CcTzdA5 zH#_z{Fb2T@fq|(5ihX<<0UU{S^f zruRd-jK#^%fQ2ARPe*T%#z+eRPz?OuGNvU$gF$i5_GG%rA5FU)lktF1oDj|FiN7*)mGb8}v>e2L0N`9^a zF99O(?HVBC@(-W0F~$sTFGJ)VL?T1@kVo;$pB>8_bF}ujuv#KGc)H>8Ip3e$JR5n_ z*RE%gx7RT)`$}WD{Q+@APHK~6a~=D_FO3_23Jo*E6%$k#X(eWg%#UWhsGX9oHRyNC zA}j8ZdXg@yL8!R4-!u#{d`VeP`9bx|vS2m(&BGwahD4yUJ(bv{MBG}7R2ng+2fbF> z1Dtfc?6c{UyE=RLUe~b2%DHiDTrNO+kT?F;1Hrmz{ZO(kTYH=(RDSlz1mI}fe_7Qm zNly?o5W(Vk7`1+Mc0S8AS~)62(k`Tdve?p~HNL)ezfWiFX4mG0Zuzc{n+?0b7T?wj z1cpN0Us`KZ-CY0pxR477 z2R}&m;CabFGtwgy^M&}W`+P;I(3L3h@;@iTd`n+HJdyoo>|tuFBsS0^qtTlCfS)ii zXEHc)mQRn%%Xb??E3dKuBlpfe2rcy%!NrT+6qp8U9t+~S*aR|NXzhC|K8RP>0zYN9 z@1~tA?Lw^N_Ac{+Fg!aAnCJ{I{|$tGCy9Nx2LW<4=MwG??hXuP zn{MyBLf`lMl;nQ(Ra^Kd0S!nGyC@)$5COX*A^jrzV;Y&ufEZ~UMTvGGnR(y)t3@{g zDAp{v=#U%@$PO6hs<*dVbV}7SX1@Es0FXcb8h2^4zp~zf8KjFkv#&|pZY?l=75o97 z_~V@C5T)gf%F|T4(4~+<-%_{wdFOvYw1{WySjt>i3rIJ6JCHhel!b6>R4_X&{O(NU z+!_r)B}eM9vTS)XAqj_YOA-O$4)j3DaMCf-CCyvc^_Zt(2FFJefY4EpHDJ0B9BleH zaLlBnnMyzuGTg$JrW1mV@^Zp$Uimtd{N38x=Du))iF0D`iq$@nt*m=|D4On`^QbY#Wr5ZRY z2#okSClRBF7B+SvdH{4B)ukS!ihlw3^a%$im!bChJKkBS?;P6TNwjJe`+qb%rMq=| zkPos$Q01;7TGhCr4E?iQ<>U4K6>2D`hn-UIuvGbUH!)H@t!lD<3ni?(Qk#Koc6Q}p z8|R9(RAVQjwM?Qp88f07L>I}@Hi(3hV)pqci@8f1He^SdUlu_gB*^@yuUiijIR%@W zd5z@H;nMgm80S$mgq$&+p(S8)(fvKsMmj|TOSzK@ykrBgbz~t;qniY<>0!Z=)g_len1xg)^nIt8rifN)8m5NS zJ3OaLfLz!yQ9cqx3e+-6x?*Fv!p&e^MATdo9c_+OvAjZ7FD=Y8eY&fCw@2drlFH1P zh;g=h7{zVBuI2vx9bRer)zTLHO2`zO8u%FQLE%KGNYN%g81*_erallcJS&)KK2af7 z)ZeS>v}}R)|=jr z6Wk2{`bq+4+*nI^6-)F}&Z&JR{hwk!^$!V0SDdMtRbPqnbu-`dfSo zJNbW3mKTddT+hDRL0bgjVQ4bJduk`!Yj=G7j^P(7aJ|FM9x+;Gchto>C|iwq@t0o=_X>Z>xScZj(%ncU7N4Yy`2g(UWfgy zkAvGupj1q-)q{D@e}Cg89$q}xl1}h`c5IO&({A$DLXV{3Qxf82yT94*kwKDUqtZo5 zmb{ly=|%hdnYLlgoORqaw{^S~E1`#43kgdP`wQn0?~h{+zZK)#Xs!vP0`TtFxAly$5F-cwFPI2 z)i~jF$0O9Y=X}TIuZ~IcUt%?VSdZqKGbTVNb*@0N6vp=Vc(7G4a_qF;OOsRSVNZ(% z-D#Y#BK3)leyOaU4fVPgW@zhl6>LpWVPcOJop;XUIqys%1$Y`muSv*Z-01YNXr7#j 