From e1fb1d7e063af7e8eb6e992c800902aff6e19e15 Mon Sep 17 00:00:00 2001
From: Kent Yao
Date: Thu, 9 May 2024 08:37:07 -0700
Subject: [PATCH] [SPARK-48216][TESTS] Remove overrides of
 DockerJDBCIntegrationSuite.connectionTimeout to make related tests
 configurable

### What changes were proposed in this pull request?

This PR removes the per-suite overrides of DockerJDBCIntegrationSuite.connectionTimeout so that the connection timeout used by the related tests is configurable. (An illustrative sketch of what a configurable timeout could look like follows the patch.)

### Why are the changes needed?

The database Docker containers sometimes need more time to bootstrap. The timeout should be configurable so that failures like the following can be avoided:

```scala
[info] org.apache.spark.sql.jdbc.DB2IntegrationSuite *** ABORTED *** (3 minutes, 11 seconds)
[info]   The code passed to eventually never returned normally. Attempted 96 times over 3.003998157633333 minutes. Last failure message: [jcc][t4][2030][11211][4.33.31] A communication error occurred during operations on the connection's underlying socket, socket input stream,
[info]   or socket output stream. Error location: Reply.fill() - insufficient data (-1). Message: Insufficient data. ERRORCODE=-4499, SQLSTATE=08001. (DockerJDBCIntegrationSuite.scala:215)
[info]   org.scalatest.exceptions.TestFailedDueToTimeoutException:
[info]   at org.scalatest.enablers.Retrying$$anon$4.tryTryAgain$2(Retrying.scala:219)
[info]   at org.scalatest.enablers.Retrying$$anon$4.retry(Retrying.scala:226)
[info]   at org.scalatest.concurrent.Eventually.eventually(Eventually.scala:313)
[info]   at org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:312)
```

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

Passing GA (GitHub Actions).

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #46505 from yaooqinn/SPARK-48216.

Authored-by: Kent Yao
Signed-off-by: Dongjoon Hyun
---
 .../scala/org/apache/spark/sql/jdbc/DB2IntegrationSuite.scala | 4 ----
 .../org/apache/spark/sql/jdbc/DB2KrbIntegrationSuite.scala    | 3 ---
 .../org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala    | 4 ----
 .../org/apache/spark/sql/jdbc/v2/DB2IntegrationSuite.scala    | 3 ---
 .../spark/sql/jdbc/v2/MsSqlServerIntegrationSuite.scala       | 4 ----
 .../org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala  | 4 ----
 .../org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala | 4 ----
 7 files changed, 26 deletions(-)

diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2IntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2IntegrationSuite.scala
index aca174cce1949..4ece4d2088f4b 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2IntegrationSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2IntegrationSuite.scala
@@ -21,8 +21,6 @@ import java.math.BigDecimal
 import java.sql.{Connection, Date, Timestamp}
 import java.util.Properties
 
-import org.scalatest.time.SpanSugar._
-
 import org.apache.spark.sql.{Row, SaveMode}
 import org.apache.spark.sql.catalyst.util.DateTimeTestUtils._
 import org.apache.spark.sql.internal.SQLConf
@@ -41,8 +39,6 @@ import org.apache.spark.tags.DockerTest
 class DB2IntegrationSuite extends DockerJDBCIntegrationSuite {
   override val db = new DB2DatabaseOnDocker
 
-  override val connectionTimeout = timeout(3.minutes)
-
   override def dataPreparation(conn: Connection): Unit = {
     conn.prepareStatement("CREATE TABLE tbl (x INTEGER, y VARCHAR(8))").executeUpdate()
     conn.prepareStatement("INSERT INTO tbl VALUES (42,'fred')").executeUpdate()
diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2KrbIntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2KrbIntegrationSuite.scala
index abb683c064955..4899de2b2a14c 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2KrbIntegrationSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2KrbIntegrationSuite.scala
@@ -24,7 +24,6 @@ import javax.security.auth.login.Configuration
 import com.github.dockerjava.api.model.{AccessMode, Bind, ContainerConfig, HostConfig, Volume}
 import org.apache.hadoop.security.{SecurityUtil, UserGroupInformation}
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod.KERBEROS
-import org.scalatest.time.SpanSugar._
 
 import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
 import org.apache.spark.sql.execution.datasources.jdbc.connection.{DB2ConnectionProvider, SecureConnectionProvider}
@@ -68,8 +67,6 @@ class DB2KrbIntegrationSuite extends DockerKrbJDBCIntegrationSuite {
     }
   }
 
-  override val connectionTimeout = timeout(3.minutes)
-
   override protected def setAuthentication(keytabFile: String, principal: String): Unit = {
     val config = new SecureConnectionProvider.JDBCConfiguration(
       Configuration.getConfiguration, "JaasClient", keytabFile, principal, true)
diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
index 496498e5455b4..1eee65986fccd 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
@@ -22,8 +22,6 @@ import java.sql.{Connection, Date, Timestamp}
 import java.time.{Duration, Period}
 import java.util.{Properties, TimeZone}
 
-import org.scalatest.time.SpanSugar._
-
 import org.apache.spark.sql.{DataFrame, Row, SaveMode}
 import org.apache.spark.sql.catalyst.util.CharVarcharUtils
 import org.apache.spark.sql.catalyst.util.DateTimeTestUtils._
@@ -68,8 +66,6 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with SharedSpark
 
   override val db = new OracleDatabaseOnDocker
 
-  override val connectionTimeout = timeout(7.minutes)
-
   private val rsOfTsWithTimezone = Seq(
     Row(BigDecimal.valueOf(1), new Timestamp(944046000000L)),
     Row(BigDecimal.valueOf(2), new Timestamp(944078400000L))
diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2IntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2IntegrationSuite.scala
index 6c1b7fdd1be5a..3642094d11b29 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2IntegrationSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2IntegrationSuite.scala
@@ -20,8 +20,6 @@ package org.apache.spark.sql.jdbc.v2
 import java.sql.Connection
 import java.util.Locale
 
-import org.scalatest.time.SpanSugar._
-
 import org.apache.spark.SparkConf
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog
@@ -52,7 +50,6 @@ class DB2IntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCTest {
   override val catalogName: String = "db2"
Option[String] = Some("DB2INST1") override val db = new DB2DatabaseOnDocker - override val connectionTimeout = timeout(3.minutes) override def sparkConf: SparkConf = super.sparkConf .set("spark.sql.catalog.db2", classOf[JDBCTableCatalog].getName) diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MsSqlServerIntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MsSqlServerIntegrationSuite.scala index 65f7579de8205..b1b8aec5ad337 100644 --- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MsSqlServerIntegrationSuite.scala +++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MsSqlServerIntegrationSuite.scala @@ -19,8 +19,6 @@ package org.apache.spark.sql.jdbc.v2 import java.sql.Connection -import org.scalatest.time.SpanSugar._ - import org.apache.spark.{SparkConf, SparkSQLFeatureNotSupportedException} import org.apache.spark.sql.AnalysisException import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog @@ -68,8 +66,6 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JD .set("spark.sql.catalog.mssql.pushDownAggregate", "true") .set("spark.sql.catalog.mssql.pushDownLimit", "true") - override val connectionTimeout = timeout(7.minutes) - override def tablePreparation(connection: Connection): Unit = { connection.prepareStatement( "CREATE TABLE employee (dept INT, name VARCHAR(32), salary NUMERIC(20, 2), bonus FLOAT)") diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala index 4997d335fda6b..22900c7bbcc8b 100644 --- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala +++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala @@ -19,8 +19,6 @@ package org.apache.spark.sql.jdbc.v2 import java.sql.{Connection, SQLFeatureNotSupportedException} -import org.scalatest.time.SpanSugar._ - import org.apache.spark.{SparkConf, SparkSQLFeatureNotSupportedException} import org.apache.spark.sql.AnalysisException import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog @@ -68,8 +66,6 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCTest .set("spark.sql.catalog.mysql.pushDownLimit", "true") .set("spark.sql.catalog.mysql.pushDownOffset", "true") - override val connectionTimeout = timeout(7.minutes) - private var mySQLVersion = -1 override def tablePreparation(connection: Connection): Unit = { diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala index a011afac17720..b35018ec16dce 100644 --- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala +++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala @@ -20,8 +20,6 @@ package org.apache.spark.sql.jdbc.v2 import java.sql.Connection import java.util.Locale -import org.scalatest.time.SpanSugar._ - import org.apache.spark.{SparkConf, SparkRuntimeException} import org.apache.spark.sql.{AnalysisException, Row} import 
 import org.apache.spark.sql.catalyst.util.CharVarcharUtils.CHAR_VARCHAR_TYPE_STRING_METADATA_KEY
@@ -91,8 +89,6 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCTes
     .set("spark.sql.catalog.oracle.pushDownLimit", "true")
     .set("spark.sql.catalog.oracle.pushDownOffset", "true")
 
-  override val connectionTimeout = timeout(7.minutes)
-
   override def tablePreparation(connection: Connection): Unit = {
     connection.prepareStatement(
       "CREATE TABLE employee (dept NUMBER(32), name VARCHAR2(32), salary NUMBER(20, 2)," +
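
For readers who want to see the shape of the fix on the base-suite side, below is a minimal sketch of how a shared Docker JDBC test trait could read the connection timeout from a JVM system property instead of relying on hard-coded per-suite overrides. The trait name `ConfigurableDockerTimeout`, the property key `spark.test.docker.connectionTimeout`, and the 5-minute default are illustrative assumptions, not the actual Spark implementation changed by this patch.

```scala
// Minimal sketch, NOT the code changed by this patch: it only illustrates one way
// to make a ScalaTest timeout configurable. The property key and default are assumptions.
import org.scalatest.concurrent.Eventually
import org.scalatest.time.SpanSugar._

trait ConfigurableDockerTimeout extends Eventually {
  // Timeout in minutes, read from -Dspark.test.docker.connectionTimeout=<n>;
  // falls back to 5 minutes when the property is absent or not a number.
  private val timeoutMinutes: Long =
    sys.props.get("spark.test.docker.connectionTimeout")
      .flatMap(v => scala.util.Try(v.toLong).toOption)
      .getOrElse(5L)

  // One shared timeout; concrete suites no longer need to override it.
  protected val connectionTimeout = timeout(timeoutMinutes.minutes)
}
```

With something along these lines in the shared base suite, a slow DB2 or Oracle container can be accommodated by passing a larger value on the command line (for example `-Dspark.test.docker.connectionTimeout=10`) rather than editing each integration suite.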