From ed92de96a9a333e216156de4ee0e4d6cbec050cf Mon Sep 17 00:00:00 2001
From: Aastha Agrrawal
Date: Wed, 26 Oct 2022 09:40:02 -0700
Subject: [PATCH] Fixing some unit tests

---
 .../rel2trino/RelToTrinoConverterTest.java    |  59 +++++++---
 .../coral/trino/trino2rel/ToRelTestUtils.java |   1 +
 .../trino2rel/TrinoToRelConverterTest.java    | 103 ++++++++++--------
 3 files changed, 106 insertions(+), 57 deletions(-)

diff --git a/coral-trino/src/test/java/com/linkedin/coral/trino/rel2trino/RelToTrinoConverterTest.java b/coral-trino/src/test/java/com/linkedin/coral/trino/rel2trino/RelToTrinoConverterTest.java
index eb8da0dec..7cfa90a4a 100644
--- a/coral-trino/src/test/java/com/linkedin/coral/trino/rel2trino/RelToTrinoConverterTest.java
+++ b/coral-trino/src/test/java/com/linkedin/coral/trino/rel2trino/RelToTrinoConverterTest.java
@@ -5,7 +5,16 @@
  */
 package com.linkedin.coral.trino.rel2trino;
 
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.calcite.rel.RelNode;
 import org.apache.calcite.tools.FrameworkConfig;
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.testng.annotations.AfterTest;
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
 
@@ -25,23 +34,35 @@
 // This makes it easier to generate RelNodes for testing. The input sql is
 // in Calcite sql syntax (not Hive)
 // Disabled tests are failing tests
-@Test(enabled = false,
+@Test(
     description = "pending migration to hive tables and corresponding queries to use standardised CoralSqlNode and CoralRelNode representations in the translation path")
 public class RelToTrinoConverterTest {
 
   static FrameworkConfig config;
+  private HiveConf conf;
   static final SqlParser trinoParser = new SqlParser();
   static final String tableOne = TABLE_ONE.getTableName();
   static final String tableTwo = TABLE_TWO.getTableName();
   static final String tableThree = TABLE_THREE.getTableName();
   static final String tableFour = TABLE_FOUR.getTableName();
 
+  @BeforeTest
+  public void beforeClass() throws IOException, HiveException, MetaException {
+    conf = TestUtils.loadResourceHiveConf();
+    TestUtils.initializeViews(conf);
+  }
+
   @BeforeTest
   public static void beforeTest() {
     TestUtils.turnOffRelSimplification();
     config = TestUtils.createFrameworkConfig(TABLE_ONE, TABLE_TWO, TABLE_THREE, TABLE_FOUR);
   }
 
+  @AfterTest
+  public void afterClass() throws IOException {
+    FileUtils.deleteDirectory(new File(conf.get(TestUtils.CORAL_TRINO_TEST_DIR)));
+  }
+
   private void testConversion(String inputSql, String expectedSql) {
     String trinoSql = toTrinoSql(inputSql);
     validate(trinoSql, expectedSql);
@@ -137,17 +158,17 @@ public void testMapSelection() {
     testConversion(sql, expected);
   }
 
-  @Test(enabled = false)
+  @Test
   public void testConstantExpressions() {
     {
       String sql = "SELECT 1";
       String expected = formatSql("SELECT 1 FROM (VALUES (0)) AS \"t\" (\"ZERO\")");
-      testConversion(sql, expected);
+      testHiveToTrinoConversion(sql, expected);
     }
     {
       String sql = "SELECT 5 + 2 * 10 / 4";
       String expected = formatSql("SELECT 5 + 2 * 10 / 4 FROM (VALUES (0)) AS \"t\" (\"ZERO\")");
-      testConversion(sql, expected);
+      testHiveToTrinoConversion(sql, expected);
     }
   }
 
@@ -494,25 +515,35 @@ public void testDataTypeSpecRewrite() {
     testConversion(sql3, expectedSql3);
   }
 
-  @Test(enabled = false)
+  @Test
   public void testCurrentUser() {
     String sql = "SELECT current_user";
-    String expected = formatSql("SELECT CURRENT_USER AS \"CURRENT_USER\"\nFROM (VALUES (0)) AS \"t\" (\"ZERO\")");
-    testConversion(sql, expected);
+    String expected = formatSql("SELECT CURRENT_USER AS \"current_user\"\nFROM (VALUES (0)) AS \"t\" (\"ZERO\")");
+
+    testHiveToTrinoConversion(sql, expected);
   }
 
-  @Test(enabled = false)
+  @Test
   public void testCurrentTimestamp() {
     String sql = "SELECT current_timestamp";
-    String expected = formatSql(
-        "SELECT CAST(CURRENT_TIMESTAMP AS TIMESTAMP(3)) AS \"CURRENT_TIMESTAMP\"\nFROM (VALUES (0)) AS \"t\" (\"ZERO\")");
-    testConversion(sql, expected);
+    String expected =
+        formatSql("SELECT CAST(CURRENT_TIMESTAMP AS TIMESTAMP(3))\nFROM (VALUES (0)) AS \"t\" (\"ZERO\")");
+
+    testHiveToTrinoConversion(sql, expected);
  }
 
-  @Test(enabled = false)
+  @Test
   public void testCurrentDate() {
     String sql = "SELECT current_date";
-    String expected = formatSql("SELECT CURRENT_DATE AS \"CURRENT_DATE\"\nFROM (VALUES (0)) AS \"t\" (\"ZERO\")");
-    testConversion(sql, expected);
+    String expected = formatSql("SELECT CURRENT_DATE\nFROM (VALUES (0)) AS \"t\" (\"ZERO\")");
+
+    testHiveToTrinoConversion(sql, expected);
+  }
+
+  private void testHiveToTrinoConversion(String inputSql, String expectedSql) {
+    RelNode relNode = hiveToRelConverter.convertSql(inputSql);
+    RelToTrinoConverter relToTrinoConverter = new RelToTrinoConverter();
+    String expandedSql = relToTrinoConverter.convert(relNode);
+    assertEquals(expandedSql, expectedSql);
   }
 }
diff --git a/coral-trino/src/test/java/com/linkedin/coral/trino/trino2rel/ToRelTestUtils.java b/coral-trino/src/test/java/com/linkedin/coral/trino/trino2rel/ToRelTestUtils.java
index 8a5df7725..7dd5c3b1e 100644
--- a/coral-trino/src/test/java/com/linkedin/coral/trino/trino2rel/ToRelTestUtils.java
+++ b/coral-trino/src/test/java/com/linkedin/coral/trino/trino2rel/ToRelTestUtils.java
@@ -58,6 +58,7 @@ public static void initializeViews(HiveConf conf) throws HiveException, MetaExce
     run(driver, "CREATE TABLE IF NOT EXISTS default.my_table(x array, y array>, z int)");
     run(driver, "CREATE TABLE IF NOT EXISTS default.a(b int, id int, x int)");
     run(driver, "CREATE TABLE IF NOT EXISTS default.b(foobar int, id int, y int)");
+    run(driver, "CREATE TABLE IF NOT EXISTS default.table_with_struct_arr(a int, b array>)");
   }
 
   public static HiveConf loadResourceHiveConf() {
diff --git a/coral-trino/src/test/java/com/linkedin/coral/trino/trino2rel/TrinoToRelConverterTest.java b/coral-trino/src/test/java/com/linkedin/coral/trino/trino2rel/TrinoToRelConverterTest.java
index 4af95101e..2b451e4a4 100644
--- a/coral-trino/src/test/java/com/linkedin/coral/trino/trino2rel/TrinoToRelConverterTest.java
+++ b/coral-trino/src/test/java/com/linkedin/coral/trino/trino2rel/TrinoToRelConverterTest.java
@@ -11,6 +11,8 @@
 import java.util.List;
 import java.util.Map;
 
+import javax.annotation.Nullable;
+
 import com.google.common.collect.ImmutableList;
 
 import org.apache.calcite.plan.RelOptUtil;
@@ -67,23 +69,23 @@ public Iterator<Object[]> getSupportedSql() {
         .add(ImmutableList.of("select * from foo",
             "LogicalProject(show=[$0], a=[$1], b=[$2], x=[$3], y=[$4])\n"
                 + " LogicalTableScan(table=[[hive, default, foo]])\n",
-            "SELECT \"show\", \"a\", \"b\", \"x\", \"y\"\n" + "FROM \"default\".\"foo\""))
+            "SELECT *\n" + "FROM \"default\".\"foo\""))
         .add(ImmutableList.of("select * from foo /* end */",
             "LogicalProject(show=[$0], a=[$1], b=[$2], x=[$3], y=[$4])\n"
                 + " LogicalTableScan(table=[[hive, default, foo]])\n",
-            "SELECT \"show\", \"a\", \"b\", \"x\", \"y\"\n" + "FROM \"default\".\"foo\""))
+            "SELECT *\n" + "FROM \"default\".\"foo\""))
         .add(ImmutableList.of("/* start */ select * from foo",
             "LogicalProject(show=[$0], a=[$1], b=[$2], x=[$3], y=[$4])\n"
                 + " LogicalTableScan(table=[[hive, default, foo]])\n",
-            "SELECT \"show\", \"a\", \"b\", \"x\", \"y\"\n" + "FROM \"default\".\"foo\""))
+            "SELECT *\n" + "FROM \"default\".\"foo\""))
         .add(ImmutableList.of("/* start */ select * /* middle */ from foo /* end */",
             "LogicalProject(show=[$0], a=[$1], b=[$2], x=[$3], y=[$4])\n"
                 + " LogicalTableScan(table=[[hive, default, foo]])\n",
-            "SELECT \"show\", \"a\", \"b\", \"x\", \"y\"\n" + "FROM \"default\".\"foo\""))
+            "SELECT *\n" + "FROM \"default\".\"foo\""))
         .add(ImmutableList.of("-- start \n select * -- junk -- hi\n from foo -- done",
             "LogicalProject(show=[$0], a=[$1], b=[$2], x=[$3], y=[$4])\n"
                 + " LogicalTableScan(table=[[hive, default, foo]])\n",
-            "SELECT \"show\", \"a\", \"b\", \"x\", \"y\"\n" + "FROM \"default\".\"foo\""))
+            "SELECT *\n" + "FROM \"default\".\"foo\""))
         .add(ImmutableList.of("select * from foo a (v, w, x, y, z)",
             "LogicalProject(V=[$0], W=[$1], X=[$2], Y=[$3], Z=[$4])\n"
                 + " LogicalTableScan(table=[[hive, default, foo]])\n",
@@ -108,9 +110,9 @@
             "LogicalProject(EXPR$0=[AND(OR(IS NOT NULL($3), IS NOT NULL($4)), IS NOT TRUE(=($3, $4)))])\n"
                 + " LogicalFilter(condition=[NOT(AND(OR(IS NOT NULL($1), IS NOT NULL($2)), IS NOT TRUE(=($1, $2))))])\n"
                 + " LogicalTableScan(table=[[hive, default, foo]])\n",
-            "SELECT (\"x\" IS NOT NULL OR \"y\" IS NOT NULL) AND \"x\" = \"y\" IS NOT TRUE\n"
+            "SELECT (\"x\" IS NOT NULL OR \"y\" IS NOT NULL) AND TRY_CAST(\"x\" AS VARCHAR) = TRY_CAST(\"y\" AS VARCHAR) IS NOT TRUE\n"
                 + "FROM \"default\".\"foo\"\n"
-                + "WHERE NOT ((\"a\" IS NOT NULL OR \"b\" IS NOT NULL) AND \"a\" = \"b\" IS NOT TRUE)"))
+                + "WHERE NOT ((\"a\" IS NOT NULL OR \"b\" IS NOT NULL) AND TRY_CAST(\"a\" AS VARCHAR) = TRY_CAST(\"b\" AS VARCHAR) IS NOT TRUE)"))
         .add(ImmutableList.of("select x[1] from my_table",
             "LogicalProject(EXPR$0=[ITEM($0, 1)])\n" + " LogicalTableScan(table=[[hive, default, my_table]])\n",
             "SELECT element_at(\"x\", 1)\n" + "FROM \"default\".\"my_table\""))
@@ -122,62 +124,45 @@
             "LogicalProject(EXPR$0=[ITEM($0, CAST(*(10, SIN($2))):BIGINT)])\n"
                 + " LogicalTableScan(table=[[hive, default, my_table]])\n",
             "SELECT element_at(\"x\", CAST(10 * SIN(\"z\") AS BIGINT))\n" + "FROM \"default\".\"my_table\""))
-        .add(ImmutableList.of("select * from unnest(array[1, 2, 3])",
-            "LogicalProject(EXPR$0=[$0])\n" + " HiveUncollect\n" + " LogicalProject(col=[ARRAY(1, 2, 3)])\n"
-                + " LogicalValues(tuples=[[{ 0 }]])\n",
-            "SELECT \"col\"\n" + "FROM UNNEST(ARRAY[1, 2, 3]) AS \"t0\" (\"col\")"))
-        .add(ImmutableList.of("select x from unnest(array[1, 2, 3]) t(x)",
-            "LogicalProject(X=[$0])\n" + " HiveUncollect\n" + " LogicalProject(col=[ARRAY(1, 2, 3)])\n"
-                + " LogicalValues(tuples=[[{ 0 }]])\n",
-            "SELECT \"X\"\n" + "FROM UNNEST(ARRAY[1, 2, 3]) AS \"t0\" (\"X\")"))
         .add(ImmutableList.of("select * from my_table cross join unnest(x)",
             "LogicalProject(x=[$0], y=[$1], z=[$2], EXPR$0=[$3])\n"
                 + " LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{0}])\n"
                 + " LogicalTableScan(table=[[hive, default, my_table]])\n" + " HiveUncollect\n"
                 + " LogicalProject(col=[$cor0.x])\n" + " LogicalValues(tuples=[[{ 0 }]])\n",
-            "SELECT \"$cor0\".\"x\" AS \"x\", \"$cor0\".\"y\" AS \"y\", \"$cor0\".\"z\" AS \"z\", \"t0\".\"col\"\n"
-                + "FROM \"default\".\"my_table\" AS \"$cor0\"\n"
-                + "CROSS JOIN UNNEST(\"$cor0\".\"x\") AS \"t0\" (\"col\")"))
+            "SELECT \"my_table\".\"x\" AS \"x\", \"my_table\".\"y\" AS \"y\", \"my_table\".\"z\" AS \"z\", \"t0\".\"col\" AS \"col\"\n"
+                + "FROM \"default\".\"my_table\"\n" + "CROSS JOIN UNNEST(\"my_table\".\"x\") AS \"t0\" (\"col\")"))
         .add(ImmutableList.of("select z from my_table cross join unnest(x) t(x_)",
             "LogicalProject(Z=[$2])\n"
                 + " LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{0}])\n"
                 + " LogicalTableScan(table=[[hive, default, my_table]])\n" + " HiveUncollect\n"
                 + " LogicalProject(col=[$cor0.x])\n" + " LogicalValues(tuples=[[{ 0 }]])\n",
-            "SELECT \"$cor0\".\"z\" AS \"Z\"\n" + "FROM \"default\".\"my_table\" AS \"$cor0\"\n"
-                + "CROSS JOIN UNNEST(\"$cor0\".\"x\") AS \"t0\" (\"X_\")"))
-        .add(ImmutableList.of("select * from unnest(array[1, 2, 3]) with ordinality",
-            "LogicalProject(EXPR$0=[$0], ORDINALITY=[$1])\n" + " HiveUncollect(withOrdinality=[true])\n"
-                + " LogicalProject(col=[ARRAY(1, 2, 3)])\n" + " LogicalValues(tuples=[[{ 0 }]])\n",
-            "SELECT \"col\", \"ORDINALITY\"\n"
-                + "FROM UNNEST(ARRAY[1, 2, 3]) WITH ORDINALITY AS \"t0\" (\"col\", \"ORDINALITY\")"))
-        .add(ImmutableList.of("select * from unnest(array[1, 2, 3]) with ordinality t(x, y)",
-            "LogicalProject(X=[$0], Y=[$1])\n" + " HiveUncollect(withOrdinality=[true])\n"
-                + " LogicalProject(col=[ARRAY(1, 2, 3)])\n" + " LogicalValues(tuples=[[{ 0 }]])\n",
-            "SELECT \"X\", \"Y\"\n" + "FROM UNNEST(ARRAY[1, 2, 3]) WITH ORDINALITY AS \"t0\" (\"X\", \"Y\")"))
+            "SELECT \"my_table\".\"z\" AS \"Z\"\n" + "FROM \"default\".\"my_table\"\n"
+                + "CROSS JOIN UNNEST(\"my_table\".\"x\") AS \"t0\" (\"X_\")"))
         .add(ImmutableList.of("select * from my_table cross join unnest(x) with ordinality",
             "LogicalProject(x=[$0], y=[$1], z=[$2], EXPR$0=[$3], ORDINALITY=[$4])\n"
                 + " LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{0}])\n"
                 + " LogicalTableScan(table=[[hive, default, my_table]])\n" + " HiveUncollect(withOrdinality=[true])\n"
                 + " LogicalProject(col=[$cor0.x])\n" + " LogicalValues(tuples=[[{ 0 }]])\n",
-            "SELECT \"$cor0\".\"x\" AS \"x\", \"$cor0\".\"y\" AS \"y\", \"$cor0\".\"z\" AS \"z\", \"t0\".\"col\", \"t0\".\"ORDINALITY\" AS \"ORDINALITY\"\n"
-                + "FROM \"default\".\"my_table\" AS \"$cor0\"\n"
-                + "CROSS JOIN UNNEST(\"$cor0\".\"x\") WITH ORDINALITY AS \"t0\" (\"col\", \"ORDINALITY\")"))
+            "SELECT \"my_table\".\"x\" AS \"x\", \"my_table\".\"y\" AS \"y\", \"my_table\".\"z\" AS \"z\", \"t0\".\"col\" AS \"col\", \"t0\".\"ORDINALITY\" AS \"ORDINALITY\"\n"
+                + "FROM \"default\".\"my_table\"\n"
+                + "CROSS JOIN UNNEST(\"my_table\".\"x\") WITH ORDINALITY AS \"t0\" (\"col\", \"ORDINALITY\")"))
         .add(ImmutableList.of("select z from my_table cross join unnest(x) with ordinality t(a, b)",
             "LogicalProject(Z=[$2])\n"
                 + " LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{0}])\n"
                 + " LogicalTableScan(table=[[hive, default, my_table]])\n" + " HiveUncollect(withOrdinality=[true])\n"
                 + " LogicalProject(col=[$cor0.x])\n" + " LogicalValues(tuples=[[{ 0 }]])\n",
-            "SELECT \"$cor0\".\"z\" AS \"Z\"\n" + "FROM \"default\".\"my_table\" AS \"$cor0\"\n"
-                + "CROSS JOIN UNNEST(\"$cor0\".\"x\") WITH ORDINALITY AS \"t0\" (\"A\", \"B\")"))
+            "SELECT \"my_table\".\"z\" AS \"Z\"\n" + "FROM \"default\".\"my_table\"\n"
+                + "CROSS JOIN UNNEST(\"my_table\".\"x\") WITH ORDINALITY AS \"t0\" (\"A\", \"B\")"))
         .add(ImmutableList.of(
             "with a (id) as (with x as (select 123 from foo) select * from x) , b (id) as (select 999 from foo) select * from a join b using (id)",
             "LogicalProject(ID=[COALESCE($0, $1)])\n" + " LogicalJoin(condition=[=($0, $1)], joinType=[inner])\n"
                 + " LogicalProject(EXPR$0=[123])\n" + " LogicalTableScan(table=[[hive, default, foo]])\n"
                 + " LogicalProject(EXPR$0=[999])\n" + " LogicalTableScan(table=[[hive, default, foo]])\n",
             "SELECT COALESCE(999, 999) AS \"ID\"\n" + "FROM (SELECT 123\n" + "FROM \"default\".\"foo\") AS \"t\"\n"
-                + "INNER JOIN (SELECT 999\n" + "FROM \"default\".\"foo\") AS \"t0\" ON 999 = 999"))
+                + "INNER JOIN (SELECT 999\n"
+                + "FROM \"default\".\"foo\") AS \"t0\" ON TRY_CAST(999 AS VARCHAR) = TRY_CAST(999 AS VARCHAR)"))
         .add(ImmutableList.of("select cast('123' as bigint)",
             "LogicalProject(EXPR$0=[CAST('123'):BIGINT])\n" + " LogicalValues(tuples=[[{ 0 }]])\n",
             "SELECT CAST('123' AS BIGINT)\n" + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")"))
@@ -186,16 +171,15 @@
             "SELECT \"a\" AS \"MY PRICE\"\n" + "FROM \"default\".\"foo\""))
         .add(ImmutableList.of("select * from a limit all",
             "LogicalProject(b=[$0], id=[$1], x=[$2])\n" + " LogicalTableScan(table=[[hive, default, a]])\n",
-            "SELECT \"b\", \"id\", \"x\"\n" + "FROM \"default\".\"a\""))
+            "SELECT *\n" + "FROM \"default\".\"a\""))
         .add(ImmutableList.of("select * from a order by x limit all",
             "LogicalSort(sort0=[$2], dir0=[ASC-nulls-first])\n" + " LogicalProject(b=[$0], id=[$1], x=[$2])\n"
                 + " LogicalTableScan(table=[[hive, default, a]])\n",
-            "SELECT \"b\", \"id\", \"x\"\n" + "FROM \"default\".\"a\"\n" + "ORDER BY \"x\" NULLS FIRST"))
+            "SELECT *\n" + "FROM \"default\".\"a\"\n" + "ORDER BY \"x\" NULLS FIRST"))
         .add(ImmutableList.of("select * from a union select * from b",
             "LogicalUnion(all=[false])\n" + " LogicalProject(b=[$0], id=[$1], x=[$2])\n"
                 + " LogicalTableScan(table=[[hive, default, a]])\n" + " LogicalProject(foobar=[$0], id=[$1], y=[$2])\n"
                 + " LogicalTableScan(table=[[hive, default, b]])\n",
-            "SELECT \"b\", \"id\", \"x\"\n" + "FROM \"default\".\"a\"\n" + "UNION\n"
-                + "SELECT \"foobar\", \"id\", \"y\"\n" + "FROM \"default\".\"b\""))
+            "SELECT *\n" + "FROM \"default\".\"a\"\n" + "UNION\n" + "SELECT *\n" + "FROM \"default\".\"b\""))
         .add(ImmutableList.of("select strpos('foobar', 'b') as pos",
             "LogicalProject(POS=[instr('FOOBAR', 'B')])\n" + " LogicalValues(tuples=[[{ 0 }]])\n",
             "SELECT \"strpos\"('FOOBAR', 'B') AS \"POS\"\n" + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")"))
@@ -219,14 +203,11 @@
         .build().stream().map(x -> new Object[] { x.get(0), x.get(1), x.get(2) }).iterator();
   }
 
-  //TODO: Add unsupported SQL tests
-
   public static String relToStr(RelNode rel) {
     return RelOptUtil.toString(rel);
   }
 
-  //TODO: update the Trino expectedSql in tests
-  @Test(dataProvider = "support", enabled = false)
+  @Test(dataProvider = "support")
   public void testSupport(String trinoSql, String expectedRelString, String expectedSql) {
     RelNode relNode = trinoToRelConverter.convertSql(trinoSql);
     assertEquals(relToStr(relNode), expectedRelString);
@@ -237,4 +218,40 @@ public void testSupport(String trinoSql, String expectedRelString, String expect
     assertEquals(expectedSql, expandedSql);
   }
 
+  @DataProvider(name = "Unsupported")
+  public Iterator<Object[]> getUnsupportedSql() {
+    return ImmutableList.<List<String>> builder()
+        .add(ImmutableList.of("select * from unnest(array[1, 2, 3])",
+            "LogicalProject(EXPR$0=[$0])\n" + " HiveUncollect\n" + " LogicalProject(col=[ARRAY(1, 2, 3)])\n"
+                + " LogicalValues(tuples=[[{ 0 }]])\n",
+            "SELECT \"col\"\n" + "FROM UNNEST(ARRAY[1, 2, 3]) AS \"t0\" (\"col\")"))
+        .add(ImmutableList.of("select x from unnest(array[1, 2, 3]) t(x)",
+            "LogicalProject(X=[$0])\n" + " HiveUncollect\n" + " LogicalProject(col=[ARRAY(1, 2, 3)])\n"
+                + " LogicalValues(tuples=[[{ 0 }]])\n",
+            "SELECT \"X\"\n" + "FROM UNNEST(ARRAY[1, 2, 3]) AS \"t0\" (\"X\")"))
+        .add(ImmutableList.of("select * from unnest(array[1, 2, 3]) with ordinality",
+            "LogicalProject(EXPR$0=[$0], ORDINALITY=[$1])\n" + " HiveUncollect(withOrdinality=[true])\n"
+                + " LogicalProject(col=[ARRAY(1, 2, 3)])\n" + " LogicalValues(tuples=[[{ 0 }]])\n",
+            "SELECT \"col\", \"ORDINALITY\"\n"
+                + "FROM UNNEST(ARRAY[1, 2, 3]) WITH ORDINALITY AS \"t0\" (\"col\", \"ORDINALITY\")"))
+        .add(ImmutableList.of("select * from unnest(array[1, 2, 3]) with ordinality t(x, y)",
+            "LogicalProject(X=[$0], Y=[$1])\n" + " HiveUncollect(withOrdinality=[true])\n"
+                + " LogicalProject(col=[ARRAY(1, 2, 3)])\n" + " LogicalValues(tuples=[[{ 0 }]])\n",
+            "SELECT \"X\", \"Y\"\n" + "FROM UNNEST(ARRAY[1, 2, 3]) WITH ORDINALITY AS \"t0\" (\"X\", \"Y\")"))
+        .add(ImmutableList.of(
+            "SELECT * from default.table_with_struct_arr cross join unnest(struct.b) AS t(b1col, b2col)", null, null))
+        .build().stream().map(x -> new Object[] { x.get(0), x.get(1), x.get(2) }).iterator();
+  }
+
+  @Test(dataProvider = "Unsupported", enabled = false,
+      description = "Input Trino SQLs which do not conform to a valid CoralSqlNode representation")
+  public void testUnsupported(String trinoSql, @Nullable String expectedRelString, @Nullable String expectedSql) {
+    RelNode relNode = trinoToRelConverter.convertSql(trinoSql);
+    assertEquals(relToStr(relNode), expectedRelString);
+
+    RelToTrinoConverter relToTrinoConverter = new RelToTrinoConverter();
+    // Convert rel node back to Sql
+    String expandedSql = relToTrinoConverter.convert(relNode);
+    assertEquals(expectedSql, expandedSql);
+  }
 }