@@ -66,14 +66,15 @@ def test_create_table_properties(make_mocked_engine_adapter: t.Callable):
     )


+@pytest.mark.parametrize("wap_enabled", [True, False])
 def test_replace_query_table_properties_not_exists(
-    mocker: MockerFixture, make_mocked_engine_adapter: t.Callable
+    mocker: MockerFixture, make_mocked_engine_adapter: t.Callable, wap_enabled: bool
 ):
     mocker.patch(
         "sqlmesh.core.engine_adapter.spark.SparkEngineAdapter.table_exists",
         return_value=False,
     )
-    adapter = make_mocked_engine_adapter(SparkEngineAdapter)
+    adapter = make_mocked_engine_adapter(SparkEngineAdapter, wap_enabled=wap_enabled)

     columns_to_types = {
         "cola": exp.DataType.build("INT"),
@@ -89,10 +90,13 @@ def test_replace_query_table_properties_not_exists(
         table_properties={"a": exp.convert(1)},
     )

-    assert to_sql_calls(adapter) == [
+    expected_sql_calls = [
         "CREATE TABLE IF NOT EXISTS `test_table` USING ICEBERG PARTITIONED BY (`colb`) TBLPROPERTIES ('a'=1) AS SELECT CAST(`cola` AS INT) AS `cola`, CAST(`colb` AS STRING) AS `colb`, CAST(`colc` AS STRING) AS `colc` FROM (SELECT 1 AS `cola`, '2' AS `colb`, '3' AS `colc`) AS `_subquery`",
-        "INSERT INTO `test_table` SELECT * FROM `test_table`",
     ]
+    if wap_enabled:
+        expected_sql_calls.append("INSERT INTO `test_table` SELECT * FROM `test_table`")
+
+    assert to_sql_calls(adapter) == expected_sql_calls


 def test_replace_query_table_properties_exists(
@@ -825,13 +829,16 @@ def test_wap_publish(make_mocked_engine_adapter: t.Callable, mocker: MockerFixtu
     )


-def test_create_table_iceberg(mocker: MockerFixture, make_mocked_engine_adapter: t.Callable):
+@pytest.mark.parametrize("wap_enabled", [True, False])
+def test_create_table_iceberg(
+    mocker: MockerFixture, make_mocked_engine_adapter: t.Callable, wap_enabled: bool
+):
     mocker.patch(
         "sqlmesh.core.engine_adapter.spark.SparkEngineAdapter.table_exists",
         return_value=False,
     )

-    adapter = make_mocked_engine_adapter(SparkEngineAdapter)
+    adapter = make_mocked_engine_adapter(SparkEngineAdapter, wap_enabled=wap_enabled)

     columns_to_types = {
         "cola": exp.DataType.build("INT"),
@@ -846,10 +853,13 @@ def test_create_table_iceberg(mocker: MockerFixture, make_mocked_engine_adapter:
         storage_format="ICEBERG",
     )

-    assert to_sql_calls(adapter) == [
+    expected_sql_calls = [
         "CREATE TABLE IF NOT EXISTS `test_table` (`cola` INT, `colb` STRING, `colc` STRING) USING ICEBERG PARTITIONED BY (`colb`)",
-        "INSERT INTO `test_table` SELECT * FROM `test_table`",
     ]
+    if wap_enabled:
+        expected_sql_calls.append("INSERT INTO `test_table` SELECT * FROM `test_table`")
+
+    assert to_sql_calls(adapter) == expected_sql_calls


 def test_comments_hive(mocker: MockerFixture, make_mocked_engine_adapter: t.Callable):
@@ -973,7 +983,7 @@ def test_create_table_with_wap(make_mocked_engine_adapter: t.Callable, mocker: M
973983 "sqlmesh.core.engine_adapter.spark.SparkEngineAdapter.table_exists" ,
974984 return_value = False ,
975985 )
976- adapter = make_mocked_engine_adapter (SparkEngineAdapter )
986+ adapter = make_mocked_engine_adapter (SparkEngineAdapter , wap_enabled = True )
977987
978988 adapter .create_table (
979989 "catalog.schema.table.branch_wap_12345" ,