Skip to content

Commit

Permalink
[SPARK-48216][TESTS] Remove overrides DockerJDBCIntegrationSuite.conn…
Browse files Browse the repository at this point in the history
…ectionTimeout to make related tests configurable

### What changes were proposed in this pull request?

This PR removes the overrides of DockerJDBCIntegrationSuite.connectionTimeout so that the connection timeout of the related tests remains configurable.

### Why are the changes needed?

The database Docker containers sometimes require more time to bootstrap. The connection timeout should be configurable to avoid failures like:

```text
[info] org.apache.spark.sql.jdbc.DB2IntegrationSuite *** ABORTED *** (3 minutes, 11 seconds)
[info]   The code passed to eventually never returned normally. Attempted 96 times over 3.003998157633333 minutes. Last failure message: [jcc][t4][2030][11211][4.33.31] A communication error occurred during operations on the connection's underlying socket, socket input stream,
[info]   or socket output stream.  Error location: Reply.fill() - insufficient data (-1).  Message: Insufficient data. ERRORCODE=-4499, SQLSTATE=08001. (DockerJDBCIntegrationSuite.scala:215)
[info]   org.scalatest.exceptions.TestFailedDueToTimeoutException:
[info]   at org.scalatest.enablers.Retrying$$anon$4.tryTryAgain$2(Retrying.scala:219)
[info]   at org.scalatest.enablers.Retrying$$anon$4.retry(Retrying.scala:226)
[info]   at org.scalatest.concurrent.Eventually.eventually(Eventually.scala:313)
[info]   at org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:312)
```

### Does this PR introduce _any_ user-facing change?
no

### How was this patch tested?

Passed GitHub Actions CI.

### Was this patch authored or co-authored using generative AI tooling?
no

Closes apache#46505 from yaooqinn/SPARK-48216.

Authored-by: Kent Yao <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
  • Loading branch information
yaooqinn authored and dongjoon-hyun committed May 9, 2024
1 parent 21333f8 commit e1fb1d7
Show file tree
Hide file tree
Showing 7 changed files with 0 additions and 26 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,6 @@ import java.math.BigDecimal
import java.sql.{Connection, Date, Timestamp}
import java.util.Properties

import org.scalatest.time.SpanSugar._

import org.apache.spark.sql.{Row, SaveMode}
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils._
import org.apache.spark.sql.internal.SQLConf
Expand All @@ -41,8 +39,6 @@ import org.apache.spark.tags.DockerTest
class DB2IntegrationSuite extends DockerJDBCIntegrationSuite {
override val db = new DB2DatabaseOnDocker

override val connectionTimeout = timeout(3.minutes)

override def dataPreparation(conn: Connection): Unit = {
conn.prepareStatement("CREATE TABLE tbl (x INTEGER, y VARCHAR(8))").executeUpdate()
conn.prepareStatement("INSERT INTO tbl VALUES (42,'fred')").executeUpdate()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@ import javax.security.auth.login.Configuration
import com.github.dockerjava.api.model.{AccessMode, Bind, ContainerConfig, HostConfig, Volume}
import org.apache.hadoop.security.{SecurityUtil, UserGroupInformation}
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod.KERBEROS
import org.scalatest.time.SpanSugar._

import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
import org.apache.spark.sql.execution.datasources.jdbc.connection.{DB2ConnectionProvider, SecureConnectionProvider}
Expand Down Expand Up @@ -68,8 +67,6 @@ class DB2KrbIntegrationSuite extends DockerKrbJDBCIntegrationSuite {
}
}

override val connectionTimeout = timeout(3.minutes)

override protected def setAuthentication(keytabFile: String, principal: String): Unit = {
val config = new SecureConnectionProvider.JDBCConfiguration(
Configuration.getConfiguration, "JaasClient", keytabFile, principal, true)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,6 @@ import java.sql.{Connection, Date, Timestamp}
import java.time.{Duration, Period}
import java.util.{Properties, TimeZone}

import org.scalatest.time.SpanSugar._

import org.apache.spark.sql.{DataFrame, Row, SaveMode}
import org.apache.spark.sql.catalyst.util.CharVarcharUtils
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils._
Expand Down Expand Up @@ -68,8 +66,6 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with SharedSpark

override val db = new OracleDatabaseOnDocker

override val connectionTimeout = timeout(7.minutes)

private val rsOfTsWithTimezone = Seq(
Row(BigDecimal.valueOf(1), new Timestamp(944046000000L)),
Row(BigDecimal.valueOf(2), new Timestamp(944078400000L))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,6 @@ package org.apache.spark.sql.jdbc.v2
import java.sql.Connection
import java.util.Locale

import org.scalatest.time.SpanSugar._

import org.apache.spark.SparkConf
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog
Expand Down Expand Up @@ -52,7 +50,6 @@ class DB2IntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCTest {
override val catalogName: String = "db2"
override val namespaceOpt: Option[String] = Some("DB2INST1")
override val db = new DB2DatabaseOnDocker
override val connectionTimeout = timeout(3.minutes)

override def sparkConf: SparkConf = super.sparkConf
.set("spark.sql.catalog.db2", classOf[JDBCTableCatalog].getName)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,6 @@ package org.apache.spark.sql.jdbc.v2

import java.sql.Connection

import org.scalatest.time.SpanSugar._

import org.apache.spark.{SparkConf, SparkSQLFeatureNotSupportedException}
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog
Expand Down Expand Up @@ -68,8 +66,6 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JD
.set("spark.sql.catalog.mssql.pushDownAggregate", "true")
.set("spark.sql.catalog.mssql.pushDownLimit", "true")

override val connectionTimeout = timeout(7.minutes)

override def tablePreparation(connection: Connection): Unit = {
connection.prepareStatement(
"CREATE TABLE employee (dept INT, name VARCHAR(32), salary NUMERIC(20, 2), bonus FLOAT)")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,6 @@ package org.apache.spark.sql.jdbc.v2

import java.sql.{Connection, SQLFeatureNotSupportedException}

import org.scalatest.time.SpanSugar._

import org.apache.spark.{SparkConf, SparkSQLFeatureNotSupportedException}
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog
Expand Down Expand Up @@ -68,8 +66,6 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCTest
.set("spark.sql.catalog.mysql.pushDownLimit", "true")
.set("spark.sql.catalog.mysql.pushDownOffset", "true")

override val connectionTimeout = timeout(7.minutes)

private var mySQLVersion = -1

override def tablePreparation(connection: Connection): Unit = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,6 @@ package org.apache.spark.sql.jdbc.v2
import java.sql.Connection
import java.util.Locale

import org.scalatest.time.SpanSugar._

import org.apache.spark.{SparkConf, SparkRuntimeException}
import org.apache.spark.sql.{AnalysisException, Row}
import org.apache.spark.sql.catalyst.util.CharVarcharUtils.CHAR_VARCHAR_TYPE_STRING_METADATA_KEY
Expand Down Expand Up @@ -91,8 +89,6 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCTes
.set("spark.sql.catalog.oracle.pushDownLimit", "true")
.set("spark.sql.catalog.oracle.pushDownOffset", "true")

override val connectionTimeout = timeout(7.minutes)

override def tablePreparation(connection: Connection): Unit = {
connection.prepareStatement(
"CREATE TABLE employee (dept NUMBER(32), name VARCHAR2(32), salary NUMBER(20, 2)," +
Expand Down

0 comments on commit e1fb1d7

Please sign in to comment.