Skip to content

Commit

Permalink
[KYUUBI #5628][AUTHZ] Support path privilege check for resource command
Browse files Browse the repository at this point in the history
### _Why are the changes needed?_
To close #5628
Support path privilege check for resource command

- AddJarsCommand
- AddFilesCommand
- AddArchivesCommand

### _How was this patch tested?_
- [x] Add some test cases that check the changes thoroughly including negative and positive cases if possible

- [ ] Add screenshots for manual tests if appropriate

- [ ] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request

### _Was this patch authored or co-authored using generative AI tooling?_
No

Closes #5629 from AngersZhuuuu/SPDI-5628.

Closes #5628

77ea993 [Angerszhuuuu] update
8116045 [Angerszhuuuu] Update table_command_spec.json
00194c2 [Angerszhuuuu] Merge branch 'master' into SPDI-5628
0480565 [Angerszhuuuu] Update RangerSparkExtensionSuite.scala
888360b [Angerszhuuuu] Merge branch 'master' into SPDI-5628
af0dba0 [Angerszhuuuu] Update RangerSparkExtensionSuite.scala
980b551 [Angerszhuuuu] update
da12dc2 [Angerszhuuuu] Merge branch 'master' into SPDI-5628
70f4330 [Angerszhuuuu] Update TableCommands.scala
b03addc [Angerszhuuuu] [KYUUBI #5628][AUTHZ] Support path privilege check for resource command

Authored-by: Angerszhuuuu <[email protected]>
Signed-off-by: Cheng Pan <[email protected]>
  • Loading branch information
AngersZhuuuu authored and pan3793 committed Nov 8, 2023
1 parent 245beb5 commit 0210d54
Show file tree
Hide file tree
Showing 7 changed files with 96 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -19,3 +19,4 @@ org.apache.kyuubi.plugin.spark.authz.serde.CatalogStorageFormatURIExtractor
org.apache.kyuubi.plugin.spark.authz.serde.BaseRelationFileIndexURIExtractor
org.apache.kyuubi.plugin.spark.authz.serde.OptionsUriExtractor
org.apache.kyuubi.plugin.spark.authz.serde.StringURIExtractor
org.apache.kyuubi.plugin.spark.authz.serde.StringSeqURIExtractor
Original file line number Diff line number Diff line change
Expand Up @@ -626,6 +626,56 @@
"opType" : "QUERY",
"queryDescs" : [ ],
"uriDescs" : [ ]
}, {
"classname" : "org.apache.spark.sql.execution.command.AddArchivesCommand",
"tableDescs" : [ ],
"opType" : "ADD",
"queryDescs" : [ ],
"uriDescs" : [ {
"fieldName" : "paths",
"fieldExtractor" : "StringSeqURIExtractor",
"isInput" : true
} ]
}, {
"classname" : "org.apache.spark.sql.execution.command.AddFileCommand",
"tableDescs" : [ ],
"opType" : "ADD",
"queryDescs" : [ ],
"uriDescs" : [ {
"fieldName" : "path",
"fieldExtractor" : "StringURIExtractor",
"isInput" : true
} ]
}, {
"classname" : "org.apache.spark.sql.execution.command.AddFilesCommand",
"tableDescs" : [ ],
"opType" : "ADD",
"queryDescs" : [ ],
"uriDescs" : [ {
"fieldName" : "paths",
"fieldExtractor" : "StringSeqURIExtractor",
"isInput" : true
} ]
}, {
"classname" : "org.apache.spark.sql.execution.command.AddJarCommand",
"tableDescs" : [ ],
"opType" : "ADD",
"queryDescs" : [ ],
"uriDescs" : [ {
"fieldName" : "path",
"fieldExtractor" : "StringURIExtractor",
"isInput" : true
} ]
}, {
"classname" : "org.apache.spark.sql.execution.command.AddJarsCommand",
"tableDescs" : [ ],
"opType" : "ADD",
"queryDescs" : [ ],
"uriDescs" : [ {
"fieldName" : "paths",
"fieldExtractor" : "StringSeqURIExtractor",
"isInput" : true
} ]
}, {
"classname" : "org.apache.spark.sql.execution.command.AlterTableAddColumnsCommand",
"tableDescs" : [ {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ object OperationType extends Enumeration {
type OperationType = Value
// According to https://scalameta.org/scalafmt/docs/known-issues.html
// format: off
val ALTERDATABASE, ALTERDATABASE_LOCATION, ALTERTABLE_ADDCOLS, ALTERTABLE_ADDPARTS,
val ADD, ALTERDATABASE, ALTERDATABASE_LOCATION, ALTERTABLE_ADDCOLS, ALTERTABLE_ADDPARTS,
ALTERTABLE_RENAMECOL, ALTERTABLE_REPLACECOLS, ALTERTABLE_DROPPARTS, ALTERTABLE_RENAMEPART,
ALTERTABLE_RENAME, ALTERTABLE_PROPERTIES, ALTERTABLE_SERDEPROPERTIES, ALTERTABLE_LOCATION,
ALTERVIEW_AS, ALTERVIEW_RENAME, ANALYZE_TABLE, CREATEDATABASE, CREATETABLE,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,8 @@ object AccessType extends Enumeration {

type AccessType = Value

val NONE, CREATE, ALTER, DROP, SELECT, UPDATE, USE, READ, WRITE, ALL, ADMIN, INDEX = Value
val NONE, CREATE, ALTER, DROP, SELECT, UPDATE, USE, READ, WRITE, ALL, ADMIN, INDEX, TEMPUDFADMIN =
Value

def apply(obj: PrivilegeObject, opType: OperationType, isInput: Boolean): AccessType = {
if (obj.privilegeObjectType == DFS_URI || obj.privilegeObjectType == LOCAL_URI) {
Expand All @@ -35,6 +36,7 @@ object AccessType extends Enumeration {

obj.actionType match {
case PrivilegeObjectActionType.OTHER => opType match {
case ADD => TEMPUDFADMIN
case CREATEDATABASE if obj.privilegeObjectType == DATABASE => CREATE
case CREATEFUNCTION if obj.privilegeObjectType == FUNCTION => CREATE
case CREATETABLE | CREATEVIEW | CREATETABLE_AS_SELECT
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,12 @@ class StringURIExtractor extends URIExtractor {
}
}

/**
 * Extracts every element of a `Seq[String]` field as a [[Uri]], e.g. the
 * `paths` field of Spark's AddJarsCommand / AddFilesCommand / AddArchivesCommand.
 */
class StringSeqURIExtractor extends URIExtractor {
  override def apply(v1: AnyRef): Seq[Uri] = {
    val rawPaths = v1.asInstanceOf[Seq[String]]
    rawPaths.map(p => Uri(p))
  }
}

class CatalogStorageFormatURIExtractor extends URIExtractor {
override def apply(v1: AnyRef): Seq[Uri] = {
v1.asInstanceOf[CatalogStorageFormat].locationUri.map(uri => Uri(uri.getPath)).toSeq
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -609,7 +609,25 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_PROPERTIES)
}

// Spec for ADD ARCHIVE(S): every supplied path requires read privilege on its URI.
val AddArchivesCommand = {
  val commandClass = "org.apache.spark.sql.execution.command.AddArchivesCommand"
  // `paths` is a Seq[String]; each entry is checked as an input URI.
  val pathsUriDesc = UriDesc("paths", classOf[StringSeqURIExtractor], isInput = true)
  TableCommandSpec(commandClass, Nil, ADD, uriDescs = Seq(pathsUriDesc))
}

// For spark-3.1, which only has the single-path AddFileCommand variant.
val AddFileCommand = {
  val commandClass = "org.apache.spark.sql.execution.command.AddFileCommand"
  // `path` is a single String checked as an input URI.
  val pathUriDesc = UriDesc("path", classOf[StringURIExtractor], isInput = true)
  TableCommandSpec(commandClass, Nil, ADD, uriDescs = Seq(pathUriDesc))
}

override def specs: Seq[TableCommandSpec] = Seq(
AddArchivesCommand,
AddArchivesCommand.copy(classname = "org.apache.spark.sql.execution.command.AddFilesCommand"),
AddArchivesCommand.copy(classname = "org.apache.spark.sql.execution.command.AddJarsCommand"),
AddFileCommand,
AddFileCommand.copy(classname = "org.apache.spark.sql.execution.command.AddJarCommand"),
AddPartitions,
DropPartitions,
RenamePartitions,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1155,4 +1155,21 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
}
}
}

// Verifies that ADD JAR / ADD FILE / ADD ARCHIVE performed by an unauthorized
// user fails with an AccessControlException naming the [read] privilege on the
// resource path (both the plain path and its trailing-slash form are reported).
test("Add resource command") {
  withTempDir { path =>
    withSingleCallEnabled {
      // ADD ARCHIVE appears to exist only on Spark 3.2+ — TODO confirm
      // against the Spark version matrix covered by this suite.
      val supportedCommand = if (isSparkV32OrGreater) {
        Seq("JAR", "FILE", "ARCHIVE")
      } else {
        Seq("JAR", "FILE")
      }
      supportedCommand.foreach { cmd =>
        // `someone` has no Ranger policy granting read on the temp dir,
        // so each command must be rejected with the expected message.
        interceptContains[AccessControlException](
          doAs(someone, sql(s"ADD $cmd $path")))(
          s"does not have [read] privilege on [[$path, $path/]]")
      }
    }
  }
}
}

0 comments on commit 0210d54

Please sign in to comment.