Remove assume(isOnPrem, "This test is only for on prem version")
mzitnik committed Jul 14, 2024
1 parent e369345 commit df04d02
Showing 6 changed files with 0 additions and 9 deletions.
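
For context: in ScalaTest, assume (available on every suite via org.scalatest.Assertions) cancels a test rather than failing it when its condition is false, so each guarded test below was reported as canceled whenever isOnPrem was false. Removing the guard makes all six suites run unconditionally. A minimal sketch of the difference, using illustrative names that are not from this repository:

import org.scalatest.funsuite.AnyFunSuite

class GuardSketchSuite extends AnyFunSuite {
  private val isOnPrem = false // stand-in for the connector's real flag

  test("guarded") {
    // a false condition throws TestCanceledException: reported as canceled, not failed
    assume(isOnPrem, "This test is only for on prem version")
    assert(1 + 1 == 2) // unreachable while isOnPrem is false
  }

  test("unguarded") {
    assert(1 + 1 == 2) // always executes; this commit moves the suites to this behavior
  }
}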

ClickHouseClusterReadSuite.scala

@@ -22,7 +22,6 @@ import org.apache.spark.sql.execution.datasources.v2.BatchScanExec
 class ClickHouseClusterReadSuite extends SparkClickHouseClusterTest {
 
   test("clickhouse metadata column - distributed table") {
-    assume(isOnPrem, "This test is only for on prem version")
     withSimpleDistTable("single_replica", "db_w", "t_dist", true) { (_, db, tbl_dist, _) =>
       assert(READ_DISTRIBUTED_CONVERT_LOCAL.defaultValueString == "true")
 
@@ -49,7 +48,6 @@ class ClickHouseClusterReadSuite extends SparkClickHouseClusterTest {
   }
 
   test("push down aggregation - distributed table") {
-    assume(isOnPrem, "This test is only for on prem version")
     withSimpleDistTable("single_replica", "db_agg_col", "t_dist", true) { (_, db, tbl_dist, _) =>
       checkAnswer(
         spark.sql(s"SELECT COUNT(id) FROM $db.$tbl_dist"),
@@ -89,7 +87,6 @@
   }
 
   test("runtime filter - distributed table") {
-    assume(isOnPrem, "This test is only for on prem version")
     withSimpleDistTable("single_replica", "runtime_db", "runtime_tbl", true) { (_, db, tbl_dist, _) =>
       spark.sql("set spark.clickhouse.read.runtimeFilter.enabled=false")
       checkAnswer(
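
Every test touched by this commit hands its body to a loan-pattern fixture such as withSimpleDistTable, whose call sites are visible above but whose definition is not part of this diff. A hedged sketch of how such a helper is typically shaped (the body below is placeholder logic, not the connector's SparkClickHouseClusterTest implementation):

object FixtureSketch {
  // Sketch only: the real helper issues ClickHouse DDL; println stands in for it here.
  def withSimpleDistTable(cluster: String, db: String, tblDist: String, writeData: Boolean)(
    f: (String, String, String, String) => Unit
  ): Unit = {
    val tblLocal = s"${tblDist}_local" // assumption: local table name derived from the distributed one
    try {
      println(s"create $db.$tblDist on cluster $cluster")             // placeholder for CREATE TABLE ... ON CLUSTER
      if (writeData) println(s"insert sample rows into $db.$tblDist") // placeholder for seeding data
      f(cluster, db, tblDist, tblLocal)                               // loan the created names to the test body
    } finally {
      println(s"drop tables of $db on cluster $cluster")              // cleanup runs even if the body throws
    }
  }
}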

ClusterDeleteSuite.scala

@@ -17,7 +17,6 @@ package org.apache.spark.sql.clickhouse.cluster
 class ClusterDeleteSuite extends SparkClickHouseClusterTest {
 
   test("truncate distribute table") {
-    assume(isOnPrem, "This test is only for on prem version")
     withSimpleDistTable("single_replica", "db_truncate", "tbl_truncate", true) { (_, db, tbl_dist, _) =>
       assert(spark.table(s"$db.$tbl_dist").count() === 4)
       spark.sql(s"TRUNCATE TABLE $db.$tbl_dist")
@@ -26,7 +25,6 @@ class ClusterDeleteSuite extends SparkClickHouseClusterTest {
   }
 
   test("delete from distribute table") {
-    assume(isOnPrem, "This test is only for on prem version")
     withSimpleDistTable("single_replica", "db_delete", "tbl_delete", true) { (_, db, tbl_dist, _) =>
       assert(spark.table(s"$db.$tbl_dist").count() === 4)
       spark.sql(s"DELETE FROM $db.$tbl_dist WHERE m = 1")

ClusterPartitionManagementSuite.scala

@@ -19,7 +19,6 @@ import org.apache.spark.sql.Row
 class ClusterPartitionManagementSuite extends SparkClickHouseClusterTest {
 
   test("distribute table partition") {
-    assume(isOnPrem, "This test is only for on prem version")
     withSimpleDistTable("single_replica", "db_part", "tbl_part", true) { (_, db, tbl_dist, _) =>
       checkAnswer(
         spark.sql(s"SHOW PARTITIONS $db.$tbl_dist"),

ClusterShardByRandSuite.scala

@@ -19,7 +19,6 @@ import org.apache.spark.sql.Row
 class ClusterShardByRandSuite extends SparkClickHouseClusterTest {
 
   test("shard by rand()") {
-    assume(isOnPrem, "This test is only for on prem version")
     val cluster = "single_replica"
     val db = "db_rand_shard"
     val tbl_dist = "tbl_rand_shard"

ClusterTableManagementSuite.scala

@@ -17,7 +17,6 @@ package org.apache.spark.sql.clickhouse.cluster
 class ClusterTableManagementSuite extends SparkClickHouseClusterTest {
 
   test("create or replace distribute table") {
-    assume(isOnPrem, "This test is only for on prem version")
     autoCleanupDistTable("single_replica", "db_cor", "tbl_cor_dist") { (cluster, db, _, tbl_local) =>
       def createLocalTable(): Unit = spark.sql(
         s"""CREATE TABLE $db.$tbl_local (

TPCDSClusterSuite.scala

@@ -34,7 +34,6 @@ class TPCDSClusterSuite extends SparkClickHouseClusterTest {
     .set("spark.clickhouse.write.format", "json")
 
   test("Cluster: TPC-DS sf1 write and count(*)") {
-    assume(isOnPrem, "This test is only for on prem version")
     withDatabase("tpcds_sf1_cluster") {
       spark.sql("CREATE DATABASE tpcds_sf1_cluster WITH DBPROPERTIES (cluster = 'single_replica')")
 
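
Note that the diff never shows how isOnPrem itself is defined. Purely as a hypothetical illustration of such a gate (the trait and the CLICKHOUSE_DEPLOYMENT variable are invented for this sketch, not taken from the repository):

// Hypothetical sketch, not the connector's code.
trait DeploymentGate {
  // invented env var: a cloud-backed test run would set it to "cloud"
  def isOnPrem: Boolean = !sys.env.get("CLICKHOUSE_DEPLOYMENT").contains("cloud")
}

With the guard gone, these six suites now execute against whichever deployment the integration environment targets.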
