
Commit

Merge branch 'master' into KYUUBI-5594-approach2
AngersZhuuuu authored Jan 19, 2024
2 parents 014ef3b + b037325 commit d785d5f
Showing 33 changed files with 458 additions and 226 deletions.
4 changes: 3 additions & 1 deletion bin/beeline
@@ -19,7 +19,9 @@
## Kyuubi BeeLine Entrance
CLASS="org.apache.hive.beeline.KyuubiBeeLine"

export KYUUBI_HOME="$(cd "$(dirname "$0")"/..; pwd)"
if [ -z "${KYUUBI_HOME}" ]; then
KYUUBI_HOME="$(cd "`dirname "$0"`"/..; pwd)"
fi

. "${KYUUBI_HOME}/bin/load-kyuubi-env.sh" -s

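Note: this hunk, and the matching ones in bin/kyuubi, bin/kyuubi-admin, bin/kyuubi-ctl, and bin/kyuubi-zk-cli below, replaces the unconditional export with a guard so that a KYUUBI_HOME already set in the environment is no longer clobbered. A minimal sketch of the new behavior, assuming a hypothetical install at /opt/kyuubi:

    #!/usr/bin/env bash
    # Hypothetical: the operator exports KYUUBI_HOME before running a launcher.
    export KYUUBI_HOME=/opt/kyuubi
    # The guard derives KYUUBI_HOME from the script location only when unset or empty.
    if [ -z "${KYUUBI_HOME}" ]; then
      KYUUBI_HOME="$(cd "$(dirname "$0")"/..; pwd)"
    fi
    echo "${KYUUBI_HOME}"   # prints /opt/kyuubi, not the script-derived path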
4 changes: 3 additions & 1 deletion bin/kyuubi
@@ -62,7 +62,9 @@ function kyuubi_rotate_log() {
fi
}

export KYUUBI_HOME="$(cd "$(dirname "$0")"/..; pwd)"
if [ -z "${KYUUBI_HOME}" ]; then
KYUUBI_HOME="$(cd "`dirname "$0"`"/..; pwd)"
fi

if [[ $1 == "start" ]] || [[ $1 == "run" ]]; then
. "${KYUUBI_HOME}/bin/load-kyuubi-env.sh"
4 changes: 3 additions & 1 deletion bin/kyuubi-admin
@@ -19,7 +19,9 @@
## Kyuubi Admin Control Client Entrance
CLASS="org.apache.kyuubi.ctl.cli.AdminControlCli"

export KYUUBI_HOME="$(cd "$(dirname "$0")"/..; pwd)"
if [ -z "${KYUUBI_HOME}" ]; then
KYUUBI_HOME="$(cd "`dirname "$0"`"/..; pwd)"
fi

. "${KYUUBI_HOME}/bin/load-kyuubi-env.sh" -s

4 changes: 3 additions & 1 deletion bin/kyuubi-ctl
@@ -19,7 +19,9 @@
## Kyuubi Control Client Entrance
CLASS="org.apache.kyuubi.ctl.cli.ControlCli"

export KYUUBI_HOME="$(cd "$(dirname "$0")"/..; pwd)"
if [ -z "${KYUUBI_HOME}" ]; then
KYUUBI_HOME="$(cd "`dirname "$0"`"/..; pwd)"
fi

. "${KYUUBI_HOME}/bin/load-kyuubi-env.sh" -s

5 changes: 3 additions & 2 deletions bin/kyuubi-zk-cli
@@ -19,8 +19,9 @@
## Zookeeper Shell Client Entrance
CLASS="org.apache.kyuubi.shaded.zookeeper.ZooKeeperMain"

export KYUUBI_HOME="$(cd "$(dirname "$0")"/..; pwd)"

if [ -z "${KYUUBI_HOME}" ]; then
KYUUBI_HOME="$(cd "`dirname "$0"`"/..; pwd)"
fi
. "${KYUUBI_HOME}/bin/load-kyuubi-env.sh" -s

if [[ -z ${JAVA_HOME} ]]; then
5 changes: 3 additions & 2 deletions bin/load-kyuubi-env.sh
@@ -17,8 +17,9 @@
#


export KYUUBI_HOME="${KYUUBI_HOME:-"$(cd "$(dirname "$0")"/.. || exit; pwd)"}"

if [ -z "${KYUUBI_HOME}" ]; then
export KYUUBI_HOME="$(cd "$(dirname "$0")"/.. || exit; pwd)"
fi
export KYUUBI_CONF_DIR="${KYUUBI_CONF_DIR:-"${KYUUBI_HOME}"/conf}"

silent=0
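Note: unlike the launcher scripts, load-kyuubi-env.sh already honored a pre-set value through ${KYUUBI_HOME:-...} default expansion; the rewrite switches it to the same explicit -z guard for consistency. For an unset or empty variable the two idioms agree, as this sketch shows:

    unset KYUUBI_HOME
    echo "${KYUUBI_HOME:-/fallback}"                    # /fallback
    [ -z "${KYUUBI_HOME}" ] && KYUUBI_HOME="/fallback"
    echo "${KYUUBI_HOME}"                               # /fallback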
2 changes: 1 addition & 1 deletion docs/configuration/settings.md
@@ -33,7 +33,7 @@ You can configure the Kyuubi properties in `$KYUUBI_HOME/conf/kyuubi-defaults.co

| Key | Default | Meaning | Type | Since |
|-----------------------------------------------|-------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------|-------|
| kyuubi.authentication | NONE | A comma-separated list of client authentication types.<ul> <li>NOSASL: raw transport.</li> <li>NONE: no authentication check.</li> <li>KERBEROS: Kerberos/GSSAPI authentication.</li> <li>CUSTOM: User-defined authentication.</li> <li>JDBC: JDBC query authentication.</li> <li>LDAP: Lightweight Directory Access Protocol authentication.</li></ul>The following tree describes the catalog of each option.<ul> <li><code>NOSASL</code></li> <li>SASL <ul> <li>SASL/PLAIN</li> <ul> <li><code>NONE</code></li> <li><code>LDAP</code></li> <li><code>JDBC</code></li> <li><code>CUSTOM</code></li> </ul> <li>SASL/GSSAPI <ul> <li><code>KERBEROS</code></li> </ul> </li> </ul> </li></ul> Note that: for SASL authentication, KERBEROS and PLAIN auth types are supported at the same time, and only the first specified PLAIN auth type is valid. | set | 1.0.0 |
| kyuubi.authentication | NONE | A comma-separated list of client authentication types.<ul> <li>NOSASL: raw transport.</li> <li>NONE: no authentication check.</li> <li>KERBEROS: Kerberos/GSSAPI authentication.</li> <li>CUSTOM: User-defined authentication.</li> <li>JDBC: JDBC query authentication.</li> <li>LDAP: Lightweight Directory Access Protocol authentication.</li></ul>The following tree describes the catalog of each option.<ul> <li><code>NOSASL</code></li> <li>SASL <ul> <li>SASL/PLAIN</li> <ul> <li><code>NONE</code></li> <li><code>LDAP</code></li> <li><code>JDBC</code></li> <li><code>CUSTOM</code></li> </ul> <li>SASL/GSSAPI <ul> <li><code>KERBEROS</code></li> </ul> </li> </ul> </li></ul> Note that: for SASL authentication, KERBEROS and PLAIN auth types are supported at the same time, and only the first specified PLAIN auth type is valid. | seq | 1.0.0 |
| kyuubi.authentication.custom.class | &lt;undefined&gt; | User-defined authentication implementation of org.apache.kyuubi.service.authentication.PasswdAuthenticationProvider | string | 1.3.0 |
| kyuubi.authentication.jdbc.driver.class | &lt;undefined&gt; | Driver class name for JDBC Authentication Provider. | string | 1.6.0 |
| kyuubi.authentication.jdbc.password | &lt;undefined&gt; | Database password for JDBC Authentication Provider. | string | 1.6.0 |
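Note: the only change in this row is the Type column, set to seq, matching the documented semantics that order is significant ("only the first specified PLAIN auth type is valid"). An illustrative kyuubi-defaults.conf entry (values hypothetical):

    # An ordered list: LDAP is the effective PLAIN auth type, CUSTOM is ignored.
    kyuubi.authentication=LDAP,CUSTOM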
Changed file (name inferred, header truncated): table_command_spec.json
@@ -1398,11 +1398,7 @@
"classname" : "org.apache.spark.sql.execution.command.InsertIntoDataSourceDirCommand",
"tableDescs" : [ ],
"opType" : "QUERY",
"queryDescs" : [ {
"fieldName" : "query",
"fieldExtractor" : "LogicalPlanQueryExtractor",
"comment" : ""
} ],
"queryDescs" : [ ],
"uriDescs" : [ {
"fieldName" : "storage",
"fieldExtractor" : "CatalogStorageFormatURIExtractor",
@@ -1625,11 +1621,7 @@
"comment" : ""
} ],
"opType" : "QUERY",
"queryDescs" : [ {
"fieldName" : "query",
"fieldExtractor" : "LogicalPlanQueryExtractor",
"comment" : ""
} ],
"queryDescs" : [ ],
"uriDescs" : [ ]
}, {
"classname" : "org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand",
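Note: both hunks empty the queryDescs array of an insert-command entry in this generated spec, so the authz plugin no longer extracts the source query of these commands for input (SELECT) privilege checks; the uriDescs entry checking the target storage path is untouched. This mirrors the TableCommands.scala change further down.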
Changed file (name inferred, header truncated): AccessResource.scala
@@ -38,7 +38,6 @@ class AccessResource private (val objectType: ObjectType, val catalog: Option[St
val columnStr = getColumn
if (columnStr == null) Nil else columnStr.split(",").filter(_.nonEmpty)
}
def getUrl: String = getValue("url")
}

object AccessResource {
Changed file (name inferred, header truncated): RuleEliminateTypeOf.scala
@@ -25,10 +25,8 @@ import org.apache.kyuubi.plugin.spark.authz.rule.expression.TypeOfPlaceHolder

object RuleEliminateTypeOf extends Rule[LogicalPlan] {
override def apply(plan: LogicalPlan): LogicalPlan = {
plan.transformUp { case p =>
p.transformExpressionsUp {
case toph: TypeOfPlaceHolder => TypeOf(toph.expr)
}
plan.transformExpressionsUp {
case toph: TypeOfPlaceHolder => TypeOf(toph.expr)
}
}
}
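Note: the nested traversal (transformUp over every plan node, then transformExpressionsUp within each node) is collapsed into a single transformExpressionsUp call on the plan; the rewrite of TypeOfPlaceHolder back to TypeOf itself is unchanged.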
Changed file (name inferred, header truncated): PermanentViewMarker.scala
@@ -21,11 +21,14 @@ import org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation
import org.apache.spark.sql.catalyst.catalog.CatalogTable
import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, Cast}
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, LogicalPlan, Project, Statistics}
import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, LogicalPlan, Project, Statistics, View}
import org.apache.spark.sql.catalyst.trees.TreeNodeTag

case class PermanentViewMarker(child: LogicalPlan, catalogTable: CatalogTable)
extends LeafNode with MultiInstanceRelation {

private val PVM_NEW_INSTANCE_TAG = TreeNodeTag[Unit]("__PVM_NEW_INSTANCE_TAG")

override def output: Seq[Attribute] = child.output

override def argString(maxFields: Int): String = ""
@@ -38,6 +41,18 @@ case class PermanentViewMarker(child: LogicalPlan, catalogTable: CatalogTable)
val projectList = child.output.map { case attr =>
Alias(Cast(attr, attr.dataType), attr.name)(explicitMetadata = Some(attr.metadata))
}
this.copy(child = Project(projectList, child), catalogTable = catalogTable)
val newProj = Project(projectList, child)
newProj.setTagValue(PVM_NEW_INSTANCE_TAG, ())

this.copy(child = newProj, catalogTable = catalogTable)
}

override def doCanonicalize(): LogicalPlan = {
child match {
case p @ Project(_, view: View) if p.getTagValue(PVM_NEW_INSTANCE_TAG).contains(true) =>
view.canonicalized
case _ =>
child.canonicalized
}
}
}
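Note: PermanentViewMarker.newInstance() wraps the view's child in an extra Project of casted aliases, which changes the plan's canonical form. The new PVM_NEW_INSTANCE_TAG marks that synthetic Project, and the added doCanonicalize() looks through it so the marker canonicalizes to the underlying View. A hedged sketch of why the canonical form matters (sameResultSketch is illustrative, not Kyuubi or Spark API):

    import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan

    // Spark treats two plans as semantically equal (for cache lookup, exchange
    // reuse, and similar matching) when their canonicalized forms compare equal.
    def sameResultSketch(p1: LogicalPlan, p2: LogicalPlan): Boolean =
      p1.canonicalized == p2.canonicalized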
Changed file (name inferred, header truncated): PrivilegesBuilderSuite.scala
@@ -1475,16 +1475,7 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
.queryExecution.analyzed
val (in, out, operationType) = PrivilegesBuilder.build(plan, spark)
assert(operationType === QUERY)
assert(in.size === 1)
val po0 = in.head
assert(po0.actionType === PrivilegeObjectActionType.OTHER)
assert(po0.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
assertEqualsIgnoreCase(reusedDb)(po0.dbname)
assert(po0.objectName equalsIgnoreCase reusedPartTable.split("\\.").last)
assert(po0.columns === Seq("key", "pid", "value"))
checkTableOwner(po0)
val accessType0 = ranger.AccessType(po0, operationType, isInput = true)
assert(accessType0 === AccessType.SELECT)
assert(in.size === 0)

assert(out.size == 1)
val po1 = out.head
@@ -1526,18 +1517,7 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
val plan = sql(sqlStr).queryExecution.analyzed
val (inputs, outputs, operationType) = PrivilegesBuilder.build(plan, spark)
assert(operationType === QUERY)
assert(inputs.size == 1)
inputs.foreach { po =>
assert(po.actionType === PrivilegeObjectActionType.OTHER)
assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
assert(po.catalog.isEmpty)
assertEqualsIgnoreCase(reusedDb)(po.dbname)
assertEqualsIgnoreCase(reusedTableShort)(po.objectName)
assert(po.columns === Seq("key", "value"))
checkTableOwner(po)
val accessType = ranger.AccessType(po, operationType, isInput = true)
assert(accessType === AccessType.SELECT)
}
assert(inputs.size === 0)

assert(outputs.size === 1)
outputs.foreach { po =>
@@ -1606,16 +1586,7 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
.queryExecution.analyzed
val (in, out, operationType) = PrivilegesBuilder.build(plan, spark)
assert(operationType === QUERY)
assert(in.size === 1)
val po0 = in.head
assert(po0.actionType === PrivilegeObjectActionType.OTHER)
assert(po0.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
assertEqualsIgnoreCase(reusedDb)(po0.dbname)
assert(po0.objectName equalsIgnoreCase reusedPartTable.split("\\.").last)
assert(po0.columns === Seq("key", "pid", "value"))
checkTableOwner(po0)
val accessType0 = ranger.AccessType(po0, operationType, isInput = true)
assert(accessType0 === AccessType.SELECT)
assert(in.size === 0)

assert(out.size == 1)
val po1 = out.head
@@ -1631,6 +1602,7 @@
test("InsertIntoHiveDirCommand") {
val tableDirectory = getClass.getResource("/").getPath + "table_directory"
val directory = File(tableDirectory).createDirectory()
sql("set spark.sql.hive.convertMetastoreInsertDir=false")
val plan = sql(
s"""
|INSERT OVERWRITE DIRECTORY '${directory.path}'
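Note: the trimmed assertions follow from the spec changes in this commit: the affected insert commands no longer report input privilege objects, so the input-set assertions drop from 1 to 0. The added set spark.sql.hive.convertMetastoreInsertDir=false keeps Spark from converting INSERT OVERWRITE DIRECTORY into the datasource writer, so the last test still exercises InsertIntoHiveDirCommand.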
Changed file (name inferred, header truncated): TableCommands.scala
@@ -567,7 +567,7 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
"logicalRelation",
classOf[LogicalRelationTableExtractor],
actionTypeDesc = Some(actionTypeDesc))
TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(queryQueryDesc))
TableCommandSpec(cmd, Seq(tableDesc))
}

val InsertIntoHiveTable = {
@@ -585,9 +585,8 @@

val InsertIntoDataSourceDir = {
val cmd = "org.apache.spark.sql.execution.command.InsertIntoDataSourceDirCommand"
val queryDesc = queryQueryDesc
val uriDesc = UriDesc("storage", classOf[CatalogStorageFormatURIExtractor])
TableCommandSpec(cmd, Nil, queryDescs = Seq(queryDesc), uriDescs = Seq(uriDesc))
TableCommandSpec(cmd, Nil, uriDescs = Seq(uriDesc))
}

val SaveIntoDataSourceCommand = {
@@ -610,6 +609,13 @@
TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(queryDesc))
}

val InsertIntoHiveDirCommand = {
val cmd = "org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand"
val queryDesc = queryQueryDesc
val uriDesc = UriDesc("storage", classOf[CatalogStorageFormatURIExtractor])
TableCommandSpec(cmd, Nil, queryDescs = Seq(queryDesc), uriDescs = Seq(uriDesc))
}

val LoadData = {
val cmd = "org.apache.spark.sql.execution.command.LoadDataCommand"
val actionTypeDesc = overwriteActionTypeDesc.copy(fieldName = "isOverwrite")
@@ -723,8 +729,7 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
InsertIntoDataSourceDir,
SaveIntoDataSourceCommand,
InsertIntoHadoopFsRelationCommand,
InsertIntoDataSourceDir.copy(classname =
"org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand"),
InsertIntoHiveDirCommand,
InsertIntoHiveTable,
LoadData,
MergeIntoTable,
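Note: InsertIntoDataSourceDir and the LogicalRelation-based insert spec above it now declare no query descriptor, leaving only the storage-URI write check, while InsertIntoHiveDirCommand gets its own spec (previously a copy of InsertIntoDataSourceDir) that keeps the query descriptor alongside the URI descriptor. These specs are what the regenerated JSON spec shown above serializes.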
Changed file (name inferred, header truncated): RangerSparkExtensionSuite.scala
@@ -760,7 +760,8 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
s"""INSERT OVERWRITE DIRECTORY '/tmp/test_dir'
| USING parquet
| SELECT * FROM $db1.$table;""".stripMargin)))
assert(e.getMessage.contains(s"does not have [select] privilege on [$db1/$table/id]"))
assert(e.getMessage.contains(
s"does not have [write] privilege on [[/tmp/test_dir, /tmp/test_dir/]]"))
}
}

@@ -1089,8 +1090,7 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
|INSERT OVERWRITE DIRECTORY '$path'
|USING parquet
|SELECT * FROM $db1.$table1""".stripMargin)))(
s"does not have [select] privilege on [$db1/$table1/id,$db1/$table1/scope], " +
s"[write] privilege on [[$path, $path/]]")
s"does not have [write] privilege on [[$path, $path/]]")
}
}
}
@@ -1131,8 +1131,7 @@
|INSERT OVERWRITE DIRECTORY '$path'
|USING parquet
|SELECT * FROM $db1.$table1""".stripMargin)))(
s"does not have [select] privilege on [$db1/$table1/id,$db1/$table1/scope], " +
s"[write] privilege on [[$path, $path/]]")
s"does not have [write] privilege on [[$path, $path/]]")

doAs(admin, sql(s"SELECT * FROM parquet.`$path`".stripMargin).explain(true))
interceptEndsWith[AccessControlException](
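Note: the expected AccessControlException messages change accordingly: with no input privilege objects generated for the source query of an INSERT OVERWRITE DIRECTORY, the first missing privilege reported is the [write] privilege on the target directory rather than [select] on the source columns.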
