From bda4426f2a3f63c40cf1465121b6725d44d720dd Mon Sep 17 00:00:00 2001
From: mzitnik
Date: Tue, 9 Jul 2024 17:12:40 +0300
Subject: [PATCH] Fix spotless format

---
 .../org/apache/spark/sql/clickhouse/SparkTest.scala    |  3 +--
 .../sql/clickhouse/single/ClickHouseSingleSuite.scala  | 11 +++++++++--
 .../clickhouse/single/SparkClickHouseSingleTest.scala  |  1 -
 3 files changed, 10 insertions(+), 5 deletions(-)

diff --git a/spark-3.5/clickhouse-spark-it/src/test/scala/org/apache/spark/sql/clickhouse/SparkTest.scala b/spark-3.5/clickhouse-spark-it/src/test/scala/org/apache/spark/sql/clickhouse/SparkTest.scala
index 471a3a48..6fcd03c3 100644
--- a/spark-3.5/clickhouse-spark-it/src/test/scala/org/apache/spark/sql/clickhouse/SparkTest.scala
+++ b/spark-3.5/clickhouse-spark-it/src/test/scala/org/apache/spark/sql/clickhouse/SparkTest.scala
@@ -48,11 +48,10 @@ trait SparkTest extends QueryTest with SharedSparkSession {
     .set("spark.sql.codegen.wholeStage", "false")
     .set("spark.sql.shuffle.partitions", "2")
 
-  def runClickHouseSQL(sql: String, options: Map[String, String] = cmdRunnerOptions): DataFrame = {
+  def runClickHouseSQL(sql: String, options: Map[String, String] = cmdRunnerOptions): DataFrame =
     // spark.conf.getAll.foreach(println)
     // println("--------------------")
     spark.executeCommand(classOf[ClickHouseCommandRunner].getName, sql, options)
-  }
 
   def autoCleanupTable(
     database: String,
diff --git a/spark-3.5/clickhouse-spark-it/src/test/scala/org/apache/spark/sql/clickhouse/single/ClickHouseSingleSuite.scala b/spark-3.5/clickhouse-spark-it/src/test/scala/org/apache/spark/sql/clickhouse/single/ClickHouseSingleSuite.scala
index 08470154..f009df0b 100644
--- a/spark-3.5/clickhouse-spark-it/src/test/scala/org/apache/spark/sql/clickhouse/single/ClickHouseSingleSuite.scala
+++ b/spark-3.5/clickhouse-spark-it/src/test/scala/org/apache/spark/sql/clickhouse/single/ClickHouseSingleSuite.scala
@@ -25,7 +25,8 @@ class ClickHouseSingleSuite extends SparkClickHouseSingleTest {
   import testImplicits._
   override protected def beforeAll(): Unit = {
     super.beforeAll()
-    Seq("db_t1",
+    Seq(
+      "db_t1",
       "db_t2",
       "db_part",
       "db_part_date",
@@ -35,7 +36,13 @@ class ClickHouseSingleSuite extends SparkClickHouseSingleTest {
       "db_multi_sort_col",
       "db_trunc",
       "db_del",
-      "db_rw", "db_metadata_col", "db_agg_col", "db_cor", "cache_db", "runtime_db").foreach {
+      "db_rw",
+      "db_metadata_col",
+      "db_agg_col",
+      "db_cor",
+      "cache_db",
+      "runtime_db"
+    ).foreach {
       database =>
         println(s"Drop database $database")
         runClickHouseSQL("DROP DATABASE IF EXISTS _" + database)
diff --git a/spark-3.5/clickhouse-spark-it/src/test/scala/org/apache/spark/sql/clickhouse/single/SparkClickHouseSingleTest.scala b/spark-3.5/clickhouse-spark-it/src/test/scala/org/apache/spark/sql/clickhouse/single/SparkClickHouseSingleTest.scala
index 36f42974..540d8527 100644
--- a/spark-3.5/clickhouse-spark-it/src/test/scala/org/apache/spark/sql/clickhouse/single/SparkClickHouseSingleTest.scala
+++ b/spark-3.5/clickhouse-spark-it/src/test/scala/org/apache/spark/sql/clickhouse/single/SparkClickHouseSingleTest.scala
@@ -48,7 +48,6 @@ trait SparkClickHouseSingleTest extends SparkTest with ClickHouseSingleMixIn {
       .set("spark.clickhouse.write.format", "json")
       .set("spark.sql.catalog.clickhouse.option.ssl", isCloud.toString)
 
-
   override def cmdRunnerOptions: Map[String, String] = Map(
     "host" -> clickhouseHost,
     "http_port" -> clickhouseHttpPort.toString,