
Commit

reverting unwanted spark test _ replace changes
manu-sj committed Jun 17, 2024
1 parent 9891900 commit 6ebd9f4
Showing 1 changed file with 13 additions and 13 deletions.
python/tests/engine/test_spark.py: 26 changes (13 additions & 13 deletions)
@@ -5,7 +5,7 @@
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-#    http://www.apache.org/licenses/LICENSE_2.0
+#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
@@ -937,7 +937,7 @@ def test_save_stream_dataframe(self, mocker, backend_fixtures):
mock_spark_engine_online_fg_to_avro.return_value.withColumn.return_value.writeStream.outputMode.return_value.format.return_value.option.call_args[
0
][1]
== f"/Projects/test_project_name/Resources/{self._get_spark_query_name(project_id, fg)}_checkpoint"
== f"/Projects/test_project_name/Resources/{self._get_spark_query_name(project_id, fg)}-checkpoint"
)
assert (
mock_spark_engine_online_fg_to_avro.return_value.withColumn.return_value.writeStream.outputMode.return_value.format.return_value.option.return_value.options.call_args[
@@ -1053,7 +1053,7 @@ def test_save_stream_dataframe_query_name(self, mocker, backend_fixtures):
mock_spark_engine_online_fg_to_avro.return_value.withColumn.return_value.writeStream.outputMode.return_value.format.return_value.option.call_args[
0
][1]
== "/Projects/test_project_name/Resources/test_query_name_checkpoint"
== "/Projects/test_project_name/Resources/test_query_name-checkpoint"
)
assert (
mock_spark_engine_online_fg_to_avro.return_value.withColumn.return_value.writeStream.outputMode.return_value.format.return_value.option.return_value.options.call_args[
@@ -1293,7 +1293,7 @@ def test_save_stream_dataframe_await_termination(self, mocker, backend_fixtures)
mock_spark_engine_online_fg_to_avro.return_value.withColumn.return_value.writeStream.outputMode.return_value.format.return_value.option.call_args[
0
][1]
== f"/Projects/test_project_name/Resources/{self._get_spark_query_name(project_id, fg)}_checkpoint"
== f"/Projects/test_project_name/Resources/{self._get_spark_query_name(project_id, fg)}-checkpoint"
)
assert (
mock_spark_engine_online_fg_to_avro.return_value.withColumn.return_value.writeStream.outputMode.return_value.format.return_value.option.return_value.options.call_args[
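The three hunks above all revert the streaming checkpoint directory suffix from `_checkpoint` back to `-checkpoint`. For orientation, here is a minimal sketch of the kind of `writeStream` call the mocks stand in for; the query name, the sink format, and the project path layout are assumptions inferred from the expected strings, not the engine's actual write path:

```python
from pyspark.sql import SparkSession

spark = SparkSession.builder.getOrCreate()
stream_df = spark.readStream.format("rate").load()  # toy source, just for the sketch

query_name = "test_query_name"  # hypothetical; real names come from _get_spark_query_name
checkpoint_dir = f"/Projects/test_project_name/Resources/{query_name}-checkpoint"

query = (
    stream_df.writeStream.outputMode("append")
    .format("console")  # placeholder sink for the sketch
    .queryName(query_name)
    .option("checkpointLocation", checkpoint_dir)  # the option the assertions inspect
    .start()
)
```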
@@ -2456,7 +2456,7 @@ def test_time_series_split_date(self, mocker):
d = {
"col_0": [1, 2],
"col_1": ["test_1", "test_2"],
"event_time": ["2017_03_04", "2017_03_05"],
"event_time": ["2017-03-04", "2017-03-05"],
}
df = pd.DataFrame(data=d)

@@ -2516,7 +2516,7 @@ def test_time_series_split_timestamp(self, mocker):
d = {
"col_0": [1, 2],
"col_1": ["test_1", "test_2"],
"event_time": ["2017_03_04", "2017_03_05"],
"event_time": ["2017-03-04", "2017-03-05"],
}
df = pd.DataFrame(data=d)
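Both fixtures above restore the `event_time` strings to ISO-8601 dates, which the blanket hyphen-to-underscore replacement had corrupted. A quick illustration of why the hyphenated form matters once those strings are parsed as dates (a sketch using pandas, which the tests already import):

```python
import pandas as pd

# The ISO-8601 form parses cleanly against an explicit format...
pd.to_datetime("2017-03-04", format="%Y-%m-%d")  # Timestamp('2017-03-04 00:00:00')

# ...while the underscored variant does not match and raises ValueError.
try:
    pd.to_datetime("2017_03_04", format="%Y-%m-%d")
except ValueError as err:
    print(err)  # e.g. time data "2017_03_04" doesn't match format "%Y-%m-%d"
```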

@@ -3809,7 +3809,7 @@ def __init__(self, label, index):
"double": ["1"],
"timestamp": [1641340800000],
"boolean": ["False"],
"date": ["2022_01_27"],
"date": ["2022-01-27"],
"binary": ["1"],
"array<string>": [["123"]],
"struc": [LabelIndex("0", "1")],
@@ -4212,11 +4212,11 @@ def test_setup_s3_hadoop_conf(self, mocker):
"fs.s3a.secret.key", s3_connector.secret_key
)
mock_pyspark_getOrCreate.return_value.sparkContext._jsc.hadoopConfiguration.return_value.set.assert_any_call(
"fs.s3a.server_side_encryption_algorithm",
"fs.s3a.server-side-encryption-algorithm",
s3_connector.server_encryption_algorithm,
)
mock_pyspark_getOrCreate.return_value.sparkContext._jsc.hadoopConfiguration.return_value.set.assert_any_call(
"fs.s3a.server_side_encryption_key", s3_connector.server_encryption_key
"fs.s3a.server-side-encryption-key", s3_connector.server_encryption_key
)
mock_pyspark_getOrCreate.return_value.sparkContext._jsc.hadoopConfiguration.return_value.set.assert_any_call(
"fs.s3a.aws.credentials.provider",
@@ -4514,7 +4514,7 @@ def test_setup_gcp_hadoop_conf(self, mocker):

content = (
'{"type": "service_account", "project_id": "test", "private_key_id": "123456", '
'"private_key": "_____BEGIN PRIVATE KEY_____test_____END PRIVATE KEY_____", '
'"private_key": "-----BEGIN PRIVATE KEY-----test-----END PRIVATE KEY-----", '
'"client_email": "test@project.iam.gserviceaccount.com"}'
)
credentialsFile = "keyFile.json"
@@ -4563,7 +4563,7 @@ def test_setup_gcp_hadoop_conf(self, mocker):
)
mock_pyspark_getOrCreate.return_value.sparkContext._jsc.hadoopConfiguration.return_value.set.assert_any_call(
"fs.gs.auth.service.account.private.key",
"_____BEGIN PRIVATE KEY_____test_____END PRIVATE KEY_____",
"-----BEGIN PRIVATE KEY-----test-----END PRIVATE KEY-----",
)
mock_pyspark_getOrCreate.return_value.sparkContext._jsc.hadoopConfiguration.return_value.unset.assert_any_call(
"fs.gs.encryption.algorithm"
@@ -4586,7 +4586,7 @@ def test_setup_gcp_hadoop_conf_algorithm(self, mocker):

content = (
'{"type": "service_account", "project_id": "test", "private_key_id": "123456", '
'"private_key": "_____BEGIN PRIVATE KEY_____test_____END PRIVATE KEY_____", '
'"private_key": "-----BEGIN PRIVATE KEY-----test-----END PRIVATE KEY-----", '
'"client_email": "test@project.iam.gserviceaccount.com"}'
)
credentialsFile = "keyFile.json"
@@ -4650,7 +4650,7 @@ def test_setup_gcp_hadoop_conf_algorithm(self, mocker):
)
mock_pyspark_getOrCreate.return_value.sparkContext._jsc.hadoopConfiguration.return_value.set.assert_any_call(
"fs.gs.auth.service.account.private.key",
"_____BEGIN PRIVATE KEY_____test_____END PRIVATE KEY_____",
"-----BEGIN PRIVATE KEY-----test-----END PRIVATE KEY-----",
)

def test_get_unique_values(self):
