Skip to content

Commit

Permalink
Fix more cases
Browse files Browse the repository at this point in the history
  • Loading branch information
Chong Gao committed Nov 29, 2023
1 parent bb1cbca commit 094c533
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 4 deletions.
6 changes: 3 additions & 3 deletions integration_tests/src/main/python/json_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -183,12 +183,11 @@ def test_json_date_formats_round_trip(spark_tmp_path, date_format, v1_enabled_li
"'T'HH:mm[:ss]",
"'T'HH:mm"]

not_utc_allow=['BatchScanExec'] if is_not_utc() else []

not_utc_allow_for_test_json_scan = ['BatchScanExec', 'FileSourceScanExec'] if is_not_utc() else []
@pytest.mark.parametrize('ts_part', json_supported_ts_parts)
@pytest.mark.parametrize('date_format', json_supported_date_formats)
@pytest.mark.parametrize('v1_enabled_list', ["", "json"])
@allow_non_gpu(*not_utc_allow)
@allow_non_gpu(*not_utc_allow_for_test_json_scan)
def test_json_ts_formats_round_trip(spark_tmp_path, date_format, ts_part, v1_enabled_list):
full_format = date_format + ts_part
data_gen = TimestampGen()
Expand Down Expand Up @@ -284,6 +283,7 @@ def do_read(spark):
@pytest.mark.parametrize('allow_non_numeric_numbers', ["true", "false"])
@pytest.mark.parametrize('allow_numeric_leading_zeros', ["true"])
@pytest.mark.parametrize('ansi_enabled', ["true", "false"])
@allow_non_gpu(*not_utc_allow_for_test_json_scan)
def test_basic_json_read(std_input_path, filename, schema, read_func, allow_non_numeric_numbers, allow_numeric_leading_zeros, ansi_enabled, spark_tmp_table_factory):
updated_conf = copy_and_update(_enable_all_types_conf,
{'spark.sql.ansi.enabled': ansi_enabled,
Expand Down
4 changes: 3 additions & 1 deletion integration_tests/src/main/python/schema_evolution_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
from conftest import is_not_utc
from data_gen import *
from datetime import date, datetime, timezone
from marks import ignore_order
from marks import ignore_order, allow_non_gpu
import pytest
from spark_session import is_databricks_runtime, is_databricks113_or_later

Expand Down Expand Up @@ -60,8 +60,10 @@ def get_ddl(col_gen_pairs):
"""Given a list of (column_name, data_generator) pairs, returns the corresponding DDL string"""
return ', '.join([f"{c} {g.data_type.simpleString()}" for c, g in col_gen_pairs])

non_utc_allow_for_test_column_add_after_partition = ['DataWritingCommandExec'] if is_not_utc() else []
@ignore_order(local=True)
@pytest.mark.parametrize("format", _formats)
@allow_non_gpu(*non_utc_allow_for_test_column_add_after_partition)
def test_column_add_after_partition(spark_tmp_table_factory, format):
# Databricks 10.4 appears to be missing https://issues.apache.org/jira/browse/SPARK-39417
# so avoid generating nulls for numeric partitions
Expand Down

0 comments on commit 094c533

Please sign in to comment.