[KYUUBI #5707][AUTHZ] Support merge into path-based table for Delta Lake in Authz

# 🔍 Description
## Issue References 🔗

This pull request fixes #5707

## Describe Your Solution 🔧

Add `uriDescs` to the command spec of `org.apache.spark.sql.delta.commands.MergeIntoCommand`, so that when the MERGE target is a path-based Delta table its location is checked for the write privilege. The `SubqueryAlias` table/URI extractors are adjusted accordingly: a path identifier is reported as a URI instead of a catalog table.
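
For context, a minimal illustration (assuming an active `SparkSession` named `spark`; the path and table names are placeholders, not taken from this PR) of a MERGE whose target is a path-based Delta table — previously the command spec had an empty `uriDescs`, so no URI resource was derived for such a target:

```scala
// Illustration only: the MERGE target is addressed by location, not by a catalog table name.
// The path and table names below are placeholders.
spark.sql(
  """MERGE INTO delta.`/tmp/delta/target_table` AS target
    |USING default.source_table AS source
    |ON target.id = source.id
    |WHEN MATCHED THEN UPDATE SET target.name = source.name
    |WHEN NOT MATCHED THEN INSERT *
    |""".stripMargin)
```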

## Types of changes 🔖

- [ ] Bugfix (non-breaking change which fixes an issue)
- [x] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to change)

## Test Plan 🧪

#### Behavior Without This Pull Request ⚰️

#### Behavior With This Pull Request 🎉

#### Related Unit Tests
`org.apache.kyuubi.plugin.spark.authz.ranger.DeltaCatalogRangerSparkExtensionSuite`: `test("merge into path-based table")`

---

# Checklists
## 📝 Author Self Checklist

- [x] My code follows the [style guidelines](https://kyuubi.readthedocs.io/en/master/contributing/code/style.html) of this project
- [x] I have performed a self-review
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] I have made corresponding changes to the documentation
- [x] My changes generate no new warnings
- [x] I have added tests that prove my fix is effective or that my feature works
- [ ] New and existing unit tests pass locally with my changes
- [x] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html)

## 📝 Committer Pre-Merge Checklist

- [x] Pull request title is okay.
- [x] No license issues.
- [x] Milestone correctly set?
- [x] Test coverage is ok
- [x] Assignees are selected.
- [x] Minimum number of approvals
- [x] No changes are requested

**Be nice. Be informative.**

Closes #5708 from zml1206/KYUUBI-5707.

Closes #5707

45ab4d4 [zml1206] fix
679f735 [zml1206] Support merge into path-based table for Delta Lake in Authz

Authored-by: zml1206 <[email protected]>
Signed-off-by: Kent Yao <[email protected]>
zml1206 authored and yaooqinn committed Nov 20, 2023
1 parent 71b0376 commit d4dd1cb
Showing 5 changed files with 64 additions and 13 deletions.

Command spec JSON, `MergeIntoCommand` entry:

```diff
@@ -2198,7 +2198,11 @@
     "fieldName" : "source",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
   } ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "target",
+    "fieldExtractor" : "SubqueryAliasURIExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.delta.commands.OptimizeTableCommand",
   "tableDescs" : [ {
```
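
A rough sketch, with a simplified stand-in type rather than Kyuubi's actual spec model, of what this new `uriDescs` entry expresses: read the command's `target` field, extract a URI from it with `SubqueryAliasURIExtractor`, and treat the result as an output (write) resource:

```scala
// Simplified stand-in for the descriptor encoded in the JSON above (not Kyuubi's real class).
final case class UriDesc(fieldName: String, fieldExtractor: String, isInput: Boolean)

object MergeIntoUriDescSketch extends App {
  val mergeTargetUri = UriDesc(
    fieldName = "target",                         // which field of MergeIntoCommand to read
    fieldExtractor = "SubqueryAliasURIExtractor", // how to turn that field into a URI
    isInput = false)                              // false => checked for write, not read
  println(mergeTargetUri)
}
```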

`SubqueryAliasTableExtractor`:

```diff
@@ -257,9 +257,12 @@ class ResolvedIdentifierTableExtractor extends TableExtractor {
 class SubqueryAliasTableExtractor extends TableExtractor {
   override def apply(spark: SparkSession, v1: AnyRef): Option[Table] = {
     v1.asInstanceOf[SubqueryAlias] match {
-      case SubqueryAlias(_, SubqueryAlias(identifier, _))
-          if !isPathIdentifier(identifier.name, spark) =>
-        lookupExtractor[StringTableExtractor].apply(spark, identifier.toString())
+      case SubqueryAlias(_, SubqueryAlias(identifier, _)) =>
+        if (isPathIdentifier(identifier.name, spark)) {
+          None
+        } else {
+          lookupExtractor[StringTableExtractor].apply(spark, identifier.toString())
+        }
       case SubqueryAlias(identifier, _) if !isPathIdentifier(identifier.name, spark) =>
         lookupExtractor[StringTableExtractor].apply(spark, identifier.toString())
       case _ => None
```

`SubqueryAliasURIExtractor`:

```diff
@@ -109,11 +109,15 @@ class IdentifierURIExtractor extends URIExtractor {
 
 class SubqueryAliasURIExtractor extends URIExtractor {
   override def apply(spark: SparkSession, v1: AnyRef): Seq[Uri] = v1 match {
-    case SubqueryAlias(_, SubqueryAlias(identifier, _))
-        if isPathIdentifier(identifier.name, spark) =>
-      Seq(identifier.name).map(Uri)
+    case SubqueryAlias(_, SubqueryAlias(identifier, _)) =>
+      if (isPathIdentifier(identifier.name, spark)) {
+        Seq(identifier.name).map(Uri)
+      } else {
+        Nil
+      }
     case SubqueryAlias(identifier, _) if isPathIdentifier(identifier.name, spark) =>
       Seq(identifier.name).map(Uri)
     case _ => Nil
   }
 }
 
```
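
As a reading aid, here is a self-contained sketch (simplified stand-ins, not Kyuubi's extractor API) of the dispatch the two extractors now perform between them: a path-like inner identifier is surfaced as a URI for a location-level check, while a regular identifier is still surfaced as a catalog table:

```scala
// Standalone sketch of the table-vs-URI dispatch; `looksLikePath` is an assumed stand-in
// for Kyuubi's isPathIdentifier helper and is not the real implementation.
object MergeTargetDispatchSketch extends App {
  sealed trait Resource
  final case class TableResource(name: String) extends Resource
  final case class UriResource(path: String) extends Resource

  private def looksLikePath(name: String): Boolean =
    name.startsWith("/") || name.contains("://")

  def resolveMergeTarget(innerIdentifier: String): Resource =
    if (looksLikePath(innerIdentifier)) UriResource(innerIdentifier) // -> uriDescs, write privilege on the path
    else TableResource(innerIdentifier)                              // -> tableDescs, table-level privilege

  println(resolveMergeTarget("/tmp/delta/target_table")) // UriResource(/tmp/delta/target_table)
  println(resolveMergeTarget("default.target_table"))    // TableResource(default.target_table)
}
```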

`DeltaCommands` spec:

```diff
@@ -41,13 +41,8 @@ object DeltaCommands extends CommandSpecs[TableCommandSpec] {
 
   val MergeIntoCommand = {
     val cmd = "org.apache.spark.sql.delta.commands.MergeIntoCommand"
-    val actionTypeDesc = ActionTypeDesc(actionType = Some(UPDATE))
-    val tableDesc = TableDesc(
-      "target",
-      classOf[SubqueryAliasTableExtractor],
-      actionTypeDesc = Some(actionTypeDesc))
     val queryDesc = QueryDesc("source")
-    TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(queryDesc))
+    DeleteCommand.copy(classname = cmd, queryDescs = Seq(queryDesc))
   }
 
   val OptimizeTableCommand = {
```
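
The switch to `DeleteCommand.copy(...)` means `MergeIntoCommand` reuses `DeleteCommand`'s target handling and only overrides the class name plus a `source` query descriptor. A minimal sketch of that copy-based reuse with simplified stand-in types (not Kyuubi's real spec classes):

```scala
// Simplified stand-ins (not Kyuubi's real TableCommandSpec/TableDesc/UriDesc) illustrating
// how case-class copy reuses one command's descriptors for another command.
object SpecReuseSketch extends App {
  final case class CommandSpec(
      classname: String,
      tableDescs: Seq[String],
      uriDescs: Seq[String],
      queryDescs: Seq[String] = Nil)

  val deleteCommand = CommandSpec(
    classname = "org.apache.spark.sql.delta.commands.DeleteCommand",
    tableDescs = Seq("target table descriptor"),
    uriDescs = Seq("target uri descriptor"))

  // MergeIntoCommand keeps DeleteCommand's target descriptors, swaps the class name,
  // and adds the "source" query descriptor.
  val mergeIntoCommand = deleteCommand.copy(
    classname = "org.apache.spark.sql.delta.commands.MergeIntoCommand",
    queryDescs = Seq("source query descriptor"))

  println(mergeIntoCommand)
}
```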

`DeltaCatalogRangerSparkExtensionSuite`:

```diff
@@ -387,6 +387,51 @@ class DeltaCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
       }
     }
   }
+
+  test("merge into path-based table") {
+    withSingleCallEnabled {
+      withCleanTmpResources(Seq(
+        (s"$namespace1.$table2", "table"),
+        (s"$namespace1", "database"))) {
+        doAs(admin, sql(s"CREATE DATABASE IF NOT EXISTS $namespace1"))
+        doAs(admin, sql(createTableSql(namespace1, table2)))
+        withTempDir(path => {
+          doAs(admin, sql(createPathBasedTableSql(path)))
+          val mergeIntoSql =
+            s"""
+               |MERGE INTO delta.`$path` AS target
+               |USING $namespace1.$table2 AS source
+               |ON target.id = source.id
+               |WHEN MATCHED THEN
+               |  UPDATE SET
+               |    id = source.id,
+               |    name = source.name,
+               |    gender = source.gender,
+               |    birthDate = source.birthDate
+               |WHEN NOT MATCHED
+               |  THEN INSERT (
+               |    id,
+               |    name,
+               |    gender,
+               |    birthDate
+               |  )
+               |  VALUES (
+               |    source.id,
+               |    source.name,
+               |    source.gender,
+               |    source.birthDate
+               |  )
+               |""".stripMargin
+          interceptContains[AccessControlException](
+            doAs(someone, sql(mergeIntoSql)))(
+            s"does not have [select] privilege on [$namespace1/$table2/id," +
+              s"$namespace1/$table2/name,$namespace1/$table2/gender," +
+              s"$namespace1/$table2/birthDate], [write] privilege on [[$path, $path/]]")
+          doAs(admin, sql(mergeIntoSql))
+        })
+      }
+    }
+  }
 }
 
 object DeltaCatalogRangerSparkExtensionSuite {
```
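
Note that `createPathBasedTableSql` is an existing helper in the suite and is not part of this diff; it is expected to create a Delta table at the given location with the columns the test references (`id`, `name`, `gender`, `birthDate`). The DDL below is only an assumed sketch of its shape:

```scala
object PathBasedTableDdlSketch {
  // Assumed shape of the createPathBasedTableSql helper used by the test above; the real
  // helper lives elsewhere in DeltaCatalogRangerSparkExtensionSuite and may differ in details.
  def createPathBasedTableSqlSketch(path: String): String =
    s"""
       |CREATE TABLE IF NOT EXISTS delta.`$path` (
       |  id INT,
       |  name STRING,
       |  gender STRING,
       |  birthDate TIMESTAMP
       |)
       |USING DELTA
       |""".stripMargin
}
```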
