@@ -20,6 +20,7 @@
 
 import functools
 from contextlib import contextmanager
+import json
 import os
 from typing import (
     Any,
@@ -1071,10 +1072,28 @@ def xor(df1: PySparkDataFrame, df2: PySparkDataFrame) -> PySparkDataFrame:
 
 
 def is_ansi_mode_enabled(spark: SparkSession) -> bool:
-    return (
-        ps.get_option("compute.ansi_mode_support", spark_session=spark)
-        and spark.conf.get("spark.sql.ansi.enabled") == "true"
-    )
+    if is_remote():
+        from pyspark.sql.connect.session import SparkSession as ConnectSession
+        from pyspark.pandas.config import _key_format, _options_dict
+
+        client = cast(ConnectSession, spark).client
+        (ansi_mode_support, ansi_enabled) = client.get_config_with_defaults(
+            (
+                _key_format("compute.ansi_mode_support"),
+                json.dumps(_options_dict["compute.ansi_mode_support"].default),
+            ),
+            ("spark.sql.ansi.enabled", None),
+        )
+        if ansi_enabled is None:
+            ansi_enabled = spark.conf.get("spark.sql.ansi.enabled")
+            # Explicitly set the default value to reduce the roundtrip for the next time.
+            spark.conf.set("spark.sql.ansi.enabled", ansi_enabled)
+        return json.loads(ansi_mode_support) and ansi_enabled.lower() == "true"
+    else:
+        return (
+            ps.get_option("compute.ansi_mode_support", spark_session=spark)
+            and spark.conf.get("spark.sql.ansi.enabled").lower() == "true"
+        )
 
 
 def _test() -> None:
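For context, a minimal sketch of how the updated helper is exercised on a classic (non-Connect) session. The import path pyspark.pandas.utils is an assumption inferred from the surrounding code; the helper returns True only when both the pandas-on-Spark option and the SQL conf are enabled, and the conf value is now compared case-insensitively.

from pyspark.sql import SparkSession
import pyspark.pandas as ps
from pyspark.pandas.utils import is_ansi_mode_enabled  # assumed module path

spark = SparkSession.builder.getOrCreate()

# Both switches must be on for the helper to return True.
ps.set_option("compute.ansi_mode_support", True)
spark.conf.set("spark.sql.ansi.enabled", "TRUE")  # case-insensitive match after this change

assert is_ansi_mode_enabled(spark)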
|
|