diff --git a/.github/PULL_REQUEST_TEMPLATE b/.github/PULL_REQUEST_TEMPLATE index 6de5d0adccd..072722cc39a 100644 --- a/.github/PULL_REQUEST_TEMPLATE +++ b/.github/PULL_REQUEST_TEMPLATE @@ -29,28 +29,10 @@ Please include a summary of the change and which issue is fixed. Please also inc --- -# Checklists -## 📝 Author Self Checklist +# Checklist 📝 -- [ ] My code follows the [style guidelines](https://kyuubi.readthedocs.io/en/master/contributing/code/style.html) of this project -- [ ] I have performed a self-review -- [ ] I have commented my code, particularly in hard-to-understand areas -- [ ] I have made corresponding changes to the documentation -- [ ] My changes generate no new warnings -- [ ] I have added tests that prove my fix is effective or that my feature works -- [ ] New and existing unit tests pass locally with my changes -- [ ] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html) - -## 📝 Committer Pre-Merge Checklist - -- [ ] Pull request title is okay. -- [ ] No license issues. -- [ ] Milestone correctly set? -- [ ] Test coverage is ok -- [ ] Assignees are selected. -- [ ] Minimum number of approvals -- [ ] No changes are requested +- [ ] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html) **Be nice. Be informative.** diff --git a/.github/labeler.yml b/.github/labeler.yml index e76dad43902..717c996eef1 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -129,8 +129,7 @@ '.dockerignore', 'bin/docker-image-tool.sh', 'docker/**/*', - 'integration-tests/kyuubi-kubernetes-it/**/*', - 'tools/spark-block-cleaner/**/*' + 'integration-tests/kyuubi-kubernetes-it/**/*' ] "module:metrics": @@ -164,8 +163,7 @@ - changed-files: - any-glob-to-any-file: [ 'externals/kyuubi-spark-sql-engine/**/*', - 'extensions/spark/**/*', - 'tools/spark-block-cleaner/**/*' + 'extensions/spark/**/*' ] "module:extensions": diff --git a/.github/workflows/gluten.yml b/.github/workflows/gluten.yml index 23b4f0d3bbc..45f4edf545e 100644 --- a/.github/workflows/gluten.yml +++ b/.github/workflows/gluten.yml @@ -99,11 +99,6 @@ jobs: with: path: gluten/package/target/ key: gluten_package_${{ steps.date.outputs.date }} - - name: Cache Gluten Package - uses: actions/cache@v3 - with: - path: gluten/package/target/ - key: gluten_package - name: Setup JDK 8 uses: actions/setup-java@v4 with: diff --git a/.github/workflows/license.yml b/.github/workflows/license.yml index cc1ab623630..0aef73441ba 100644 --- a/.github/workflows/license.yml +++ b/.github/workflows/license.yml @@ -44,7 +44,7 @@ jobs: check-latest: false - run: >- build/mvn org.apache.rat:apache-rat-plugin:check - -Ptpcds -Pspark-block-cleaner -Pkubernetes-it + -Ptpcds -Pkubernetes-it -Pspark-3.1 -Pspark-3.2 -Pspark-3.3 -Pspark-3.4 -Pspark-3.5 - name: Upload rat report if: failure() diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index 289e32c14b0..b1083cfa218 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -356,9 +356,9 @@ jobs: uses: ./.github/actions/cache-engine-archives - name: Build and test Trino with maven w/o linters run: | - TEST_MODULES="kyuubi-server,externals/kyuubi-trino-engine,externals/kyuubi-spark-sql-engine,externals/kyuubi-download,integration-tests/kyuubi-trino-it" - ./build/mvn ${MVN_OPT} -pl ${TEST_MODULES} -am -Pflink-provided -Phive-provided clean install -DskipTests - ./build/mvn -Dmaven.javadoc.skip=true -Drat.skip=true 
-Dscalastyle.skip=true -Dspotless.check.skip -pl ${TEST_MODULES} -am -Pflink-provided -Phive-provided test -Dtest=none -DwildcardSuites=org.apache.kyuubi.it.trino.operation.TrinoOperationSuite,org.apache.kyuubi.it.trino.server.TrinoFrontendSuite + TEST_MODULES="externals/kyuubi-trino-engine,integration-tests/kyuubi-trino-it" + ./build/mvn ${MVN_OPT} -pl ${TEST_MODULES} -am clean install -DskipTests + ./build/mvn ${MVN_OPT} -pl ${TEST_MODULES} test - name: Upload test logs if: failure() uses: actions/upload-artifact@v3 diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml index 5b8b6a7048d..0c3dd1e6082 100644 --- a/.github/workflows/style.yml +++ b/.github/workflows/style.yml @@ -34,7 +34,7 @@ jobs: strategy: matrix: profiles: - - '-Pflink-provided,hive-provided,spark-provided,spark-block-cleaner,spark-3.5,spark-3.4,spark-3.3,spark-3.2,tpcds,kubernetes-it' + - '-Pflink-provided,hive-provided,spark-provided,spark-3.5,spark-3.4,spark-3.3,spark-3.2,tpcds,kubernetes-it' steps: - uses: actions/checkout@v4 @@ -65,7 +65,7 @@ jobs: if: steps.modules-check.conclusion == 'success' && steps.modules-check.outcome == 'failure' run: | MVN_OPT="-DskipTests -Dorg.slf4j.simpleLogger.defaultLogLevel=warn -Dmaven.javadoc.skip=true -Drat.skip=true -Dscalastyle.skip=true -Dspotless.check.skip" - build/mvn clean install ${MVN_OPT} -Pflink-provided,hive-provided,spark-provided,spark-block-cleaner,spark-3.2,tpcds + build/mvn clean install ${MVN_OPT} -Pflink-provided,hive-provided,spark-provided,spark-3.2,tpcds build/mvn clean install ${MVN_OPT} -pl extensions/spark/kyuubi-extension-spark-3-1 -Pspark-3.1 build/mvn clean install ${MVN_OPT} -pl extensions/spark/kyuubi-extension-spark-3-3,extensions/spark/kyuubi-spark-connector-hive -Pspark-3.3 build/mvn clean install ${MVN_OPT} -pl extensions/spark/kyuubi-extension-spark-3-4 -Pspark-3.4 diff --git a/.gitignore b/.gitignore index dcf808e6752..8f109875a08 100644 --- a/.gitignore +++ b/.gitignore @@ -80,3 +80,7 @@ conf/kyuubi-env.sh kyuubi-server/gen kyuubi-server/src/main/antlr4/org/apache/kyuubi/sql/*.tokens kyuubi-server/src/main/antlr4/org/apache/kyuubi/sql/gen/ + +# For draw.io +.$*.bkp +.$*.dtmp diff --git a/LICENSE-binary b/LICENSE-binary index b225b2c6288..970d7c7c080 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -214,7 +214,7 @@ commons-logging:commons-logging org.apache.commons:commons-lang3 org.apache.derby:derby com.google.errorprone:error_prone_annotations -net.jodah:failsafe +dev.failsafe:failsafe com.jakewharton.fliptables:fliptables com.github.mifmif:generex io.grpc:grpc-api @@ -225,6 +225,7 @@ io.grpc:grpc-netty io.grpc:grpc-protobuf-lite io.grpc:grpc-protobuf io.grpc:grpc-stub +io.grpc:grpc-util com.google.code.gson:gson com.google.guava:failureaccess com.google.guava:guava diff --git a/NOTICE b/NOTICE index 8370cdd7e07..8755991fe65 100644 --- a/NOTICE +++ b/NOTICE @@ -1,7 +1,35 @@ Apache Kyuubi -Copyright 2021-2023 The Apache Software Foundation. +Copyright 2021-2024 The Apache Software Foundation. This product includes software developed at The Apache Software Foundation (https://www.apache.org/). +Apache Hive +Copyright 2008-2019 The Apache Software Foundation + +Apache Iceberg +Copyright 2017-2022 The Apache Software Foundation + +Apache Parquet MR +Copyright 2014-2024 The Apache Software Foundation + +This project includes code from Kite, developed at Cloudera, Inc. with +the following copyright notice: + +| Copyright 2013 Cloudera Inc. 
+| +| Licensed under the Apache License, Version 2.0 (the "License"); +| you may not use this file except in compliance with the License. +| You may obtain a copy of the License at +| +| http://www.apache.org/licenses/LICENSE-2.0 +| +| Unless required by applicable law or agreed to in writing, software +| distributed under the License is distributed on an "AS IS" BASIS, +| WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +| See the License for the specific language governing permissions and +| limitations under the License. + +Apache Spark +Copyright 2014 and onwards The Apache Software Foundation. diff --git a/NOTICE-binary b/NOTICE-binary index 40ec15010c4..a7b9e72e48d 100644 --- a/NOTICE-binary +++ b/NOTICE-binary @@ -1,10 +1,36 @@ Apache Kyuubi -Copyright 2021-2023 The Apache Software Foundation. +Copyright 2021-2024 The Apache Software Foundation. This product includes software developed at The Apache Software Foundation (https://www.apache.org/). +Apache Iceberg +Copyright 2017-2022 The Apache Software Foundation + +Apache Parquet MR +Copyright 2014-2024 The Apache Software Foundation + +This project includes code from Kite, developed at Cloudera, Inc. with +the following copyright notice: + +| Copyright 2013 Cloudera Inc. +| +| Licensed under the Apache License, Version 2.0 (the "License"); +| you may not use this file except in compliance with the License. +| You may obtain a copy of the License at +| +| http://www.apache.org/licenses/LICENSE-2.0 +| +| Unless required by applicable law or agreed to in writing, software +| distributed under the License is distributed on an "AS IS" BASIS, +| WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +| See the License for the specific language governing permissions and +| limitations under the License. + +Apache Spark +Copyright 2014 and onwards The Apache Software Foundation. + # Notices for Eclipse GlassFish This content is produced and maintained by the Eclipse GlassFish project. diff --git a/bin/beeline b/bin/beeline index 3581f6dd6bd..f277efa440f 100755 --- a/bin/beeline +++ b/bin/beeline @@ -19,7 +19,9 @@ ## Kyuubi BeeLine Entrance CLASS="org.apache.hive.beeline.KyuubiBeeLine" -export KYUUBI_HOME="$(cd "$(dirname "$0")"/..; pwd)" +if [ -z "${KYUUBI_HOME}" ]; then + KYUUBI_HOME="$(cd "`dirname "$0"`"/..; pwd)" +fi . "${KYUUBI_HOME}/bin/load-kyuubi-env.sh" -s diff --git a/bin/kyuubi b/bin/kyuubi index 9132aae39e8..17ab717e5ee 100755 --- a/bin/kyuubi +++ b/bin/kyuubi @@ -62,7 +62,9 @@ function kyuubi_rotate_log() { fi } -export KYUUBI_HOME="$(cd "$(dirname "$0")"/..; pwd)" +if [ -z "${KYUUBI_HOME}" ]; then + KYUUBI_HOME="$(cd "`dirname "$0"`"/..; pwd)" +fi if [[ $1 == "start" ]] || [[ $1 == "run" ]]; then . "${KYUUBI_HOME}/bin/load-kyuubi-env.sh" diff --git a/bin/kyuubi-admin b/bin/kyuubi-admin index a1f176ec157..8a148d159f7 100755 --- a/bin/kyuubi-admin +++ b/bin/kyuubi-admin @@ -19,7 +19,9 @@ ## Kyuubi Admin Control Client Entrance CLASS="org.apache.kyuubi.ctl.cli.AdminControlCli" -export KYUUBI_HOME="$(cd "$(dirname "$0")"/..; pwd)" +if [ -z "${KYUUBI_HOME}" ]; then + KYUUBI_HOME="$(cd "`dirname "$0"`"/..; pwd)" +fi . 
"${KYUUBI_HOME}/bin/load-kyuubi-env.sh" -s diff --git a/bin/kyuubi-ctl b/bin/kyuubi-ctl index 16809c0754b..0214737d46a 100755 --- a/bin/kyuubi-ctl +++ b/bin/kyuubi-ctl @@ -19,7 +19,9 @@ ## Kyuubi Control Client Entrance CLASS="org.apache.kyuubi.ctl.cli.ControlCli" -export KYUUBI_HOME="$(cd "$(dirname "$0")"/..; pwd)" +if [ -z "${KYUUBI_HOME}" ]; then + KYUUBI_HOME="$(cd "`dirname "$0"`"/..; pwd)" +fi . "${KYUUBI_HOME}/bin/load-kyuubi-env.sh" -s diff --git a/bin/kyuubi-zk-cli b/bin/kyuubi-zk-cli index f503c3e5a5e..c85f47c4c5d 100755 --- a/bin/kyuubi-zk-cli +++ b/bin/kyuubi-zk-cli @@ -19,8 +19,9 @@ ## Zookeeper Shell Client Entrance CLASS="org.apache.kyuubi.shaded.zookeeper.ZooKeeperMain" -export KYUUBI_HOME="$(cd "$(dirname "$0")"/..; pwd)" - +if [ -z "${KYUUBI_HOME}" ]; then + KYUUBI_HOME="$(cd "`dirname "$0"`"/..; pwd)" +fi . "${KYUUBI_HOME}/bin/load-kyuubi-env.sh" -s if [[ -z ${JAVA_HOME} ]]; then diff --git a/bin/load-kyuubi-env.sh b/bin/load-kyuubi-env.sh index 4d6f72ddf3e..779ac2bea5a 100755 --- a/bin/load-kyuubi-env.sh +++ b/bin/load-kyuubi-env.sh @@ -16,9 +16,7 @@ # limitations under the License. # - export KYUUBI_HOME="${KYUUBI_HOME:-"$(cd "$(dirname "$0")"/.. || exit; pwd)"}" - export KYUUBI_CONF_DIR="${KYUUBI_CONF_DIR:-"${KYUUBI_HOME}"/conf}" silent=0 diff --git a/build/dist b/build/dist index 2ea702b61af..cde515dc64a 100755 --- a/build/dist +++ b/build/dist @@ -330,14 +330,6 @@ for jar in $(ls "$DISTDIR/jars/"); do fi done -# Copy kyuubi tools -if [[ -f "$KYUUBI_HOME/tools/spark-block-cleaner/target/spark-block-cleaner_${SCALA_VERSION}-${VERSION}.jar" ]]; then - mkdir -p "$DISTDIR/tools/spark-block-cleaner/kubernetes" - mkdir -p "$DISTDIR/tools/spark-block-cleaner/jars" - cp -r "$KYUUBI_HOME"/tools/spark-block-cleaner/kubernetes/* "$DISTDIR/tools/spark-block-cleaner/kubernetes/" - cp "$KYUUBI_HOME/tools/spark-block-cleaner/target/spark-block-cleaner_${SCALA_VERSION}-${VERSION}.jar" "$DISTDIR/tools/spark-block-cleaner/jars/" -fi - # Copy Kyuubi Spark extension SPARK_EXTENSION_VERSIONS=('3-1' '3-2' '3-3' '3-4' '3-5') # shellcheck disable=SC2068 diff --git a/charts/kyuubi/templates/kyuubi-configmap.yaml b/charts/kyuubi/templates/kyuubi-configmap.yaml index 62413567d76..0f838857e40 100644 --- a/charts/kyuubi/templates/kyuubi-configmap.yaml +++ b/charts/kyuubi/templates/kyuubi-configmap.yaml @@ -24,7 +24,6 @@ metadata: data: {{- with .Values.kyuubiConf.kyuubiEnv }} kyuubi-env.sh: | - #!/usr/bin/env bash {{- tpl . $ | nindent 4 }} {{- end }} kyuubi-defaults.conf: | diff --git a/charts/kyuubi/templates/kyuubi-spark-configmap.yaml b/charts/kyuubi/templates/kyuubi-spark-configmap.yaml new file mode 100644 index 00000000000..5794c429f55 --- /dev/null +++ b/charts/kyuubi/templates/kyuubi-spark-configmap.yaml @@ -0,0 +1,40 @@ +{{/* + Licensed to the Apache Software Foundation (ASF) under one or more + contributor license agreements. See the NOTICE file distributed with + this work for additional information regarding copyright ownership. + The ASF licenses this file to You under the Apache License, Version 2.0 + (the "License"); you may not use this file except in compliance with + the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. +*/}} + +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ .Release.Name }}-spark + labels: + {{- include "kyuubi.labels" . | nindent 4 }} +data: + {{- with .Values.sparkConf.sparkEnv }} + spark-env.sh: | + {{- tpl . $ | nindent 4 }} + {{- end }} + {{- with .Values.sparkConf.sparkDefaults }} + spark-defaults.conf: | + {{- tpl . $ | nindent 4 }} + {{- end }} + {{- with .Values.sparkConf.log4j2 }} + log4j2.properties: | + {{- tpl . $ | nindent 4 }} + {{- end }} + {{- with .Values.sparkConf.metrics }} + metrics.properties: | + {{- tpl . $ | nindent 4 }} + {{- end }} diff --git a/charts/kyuubi/templates/kyuubi-statefulset.yaml b/charts/kyuubi/templates/kyuubi-statefulset.yaml index 309ef8ec9c4..a79b5be9ab8 100644 --- a/charts/kyuubi/templates/kyuubi-statefulset.yaml +++ b/charts/kyuubi/templates/kyuubi-statefulset.yaml @@ -62,9 +62,14 @@ spec: {{- with .Values.args }} args: {{- tpl (toYaml .) $ | nindent 12 }} {{- end }} - {{- with .Values.env }} - env: {{- tpl (toYaml .) $ | nindent 12 }} - {{- end }} + env: + - name: KYUUBI_CONF_DIR + value: {{ .Values.kyuubiConfDir }} + - name: SPARK_CONF_DIR + value: {{ .Values.sparkConfDir }} + {{- with .Values.env }} + {{- tpl (toYaml .) $ | nindent 12 }} + {{- end }} {{- with .Values.envFrom }} envFrom: {{- tpl (toYaml .) $ | nindent 12 }} {{- end }} @@ -105,6 +110,8 @@ spec: volumeMounts: - name: conf mountPath: {{ .Values.kyuubiConfDir }} + - name: conf-spark + mountPath: {{ .Values.sparkConfDir }} {{- with .Values.volumeMounts }} {{- tpl (toYaml .) $ | nindent 12 }} {{- end }} @@ -115,6 +122,9 @@ spec: - name: conf configMap: name: {{ .Release.Name }} + - name: conf-spark + configMap: + name: {{ .Release.Name }}-spark {{- with .Values.volumes }} {{- tpl (toYaml .) 
$ | nindent 8 }} {{- end }} diff --git a/charts/kyuubi/values.yaml b/charts/kyuubi/values.yaml index 044668040f3..31d802fd4f4 100644 --- a/charts/kyuubi/values.yaml +++ b/charts/kyuubi/values.yaml @@ -152,12 +152,13 @@ monitoring: # $KYUUBI_CONF_DIR directory kyuubiConfDir: /opt/kyuubi/conf -# Kyuubi configurations files +# Kyuubi configuration files kyuubiConf: # The value (templated string) is used for kyuubi-env.sh file # See example at conf/kyuubi-env.sh.template and https://kyuubi.readthedocs.io/en/master/configuration/settings.html#environments for more details kyuubiEnv: ~ # kyuubiEnv: | + # #!/usr/bin/env bash # export JAVA_HOME=/usr/jdk64/jdk1.8.0_152 # export SPARK_HOME=/opt/spark # export FLINK_HOME=/opt/flink @@ -179,6 +180,46 @@ kyuubiConf: # See example at conf/log4j2.xml.template https://kyuubi.readthedocs.io/en/master/configuration/settings.html#logging for more details log4j2: ~ +# $SPARK_CONF_DIR directory +sparkConfDir: /opt/spark/conf +# Spark configuration files +sparkConf: + # The value (templated string) is used for spark-env.sh file + # See example at https://github.com/apache/spark/blob/master/conf/spark-env.sh.template and Spark documentation for more details + sparkEnv: ~ + # sparkEnv: | + # #!/usr/bin/env bash + # export JAVA_HOME=/usr/jdk64/jdk1.8.0_152 + # export SPARK_LOG_DIR=/opt/spark/logs + # export SPARK_LOG_MAX_FILES=5 + + # The value (templated string) is used for spark-defaults.conf file + # See example at https://github.com/apache/spark/blob/master/conf/spark-defaults.conf.template and Spark documentation for more details + sparkDefaults: ~ + # sparkDefaults: | + # spark.submit.deployMode=cluster + # spark.kubernetes.container.image=apache/spark:3.5.0 + # spark.kubernetes.authenticate.driver.serviceAccountName=spark + # spark.kubernetes.file.upload.path=s3a://kyuubi/spark + # # S3 dependencies + # spark.jars.packages=org.apache.hadoop:hadoop-aws:3.3.4,com.amazonaws:aws-java-sdk-bundle:1.12.262 + # spark.driver.extraJavaOptions=-Divy.cache.dir=/tmp -Divy.home=/tmp + # # S3A configuration + # spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem + # spark.hadoop.fs.s3a.endpoint=http://object-storage:80 + # spark.hadoop.fs.s3a.access.key=****** + # spark.hadoop.fs.s3a.secret.key=******** + # spark.hadoop.fs.s3a.path.style.access=true + # spark.hadoop.fs.s3a.fast.upload=true + + # The value (templated string) is used for log4j2.properties file + # See example at https://github.com/apache/spark/blob/master/conf/log4j2.properties.template and Spark documentation for more details + log4j2: ~ + + # The value (templated string) is used for metrics.properties file + # See example at https://github.com/apache/spark/blob/master/conf/metrics.properties.template and Spark documentation for more details + metrics: ~ + # Command to launch Kyuubi server (templated) command: ~ # Arguments to launch Kyuubi server (templated) diff --git a/dev/dependencyList b/dev/dependencyList index 4089d963d7a..0a60d8b73d6 100644 --- a/dev/dependencyList +++ b/dev/dependencyList @@ -17,7 +17,7 @@ HikariCP/4.0.3//HikariCP-4.0.3.jar ST4/4.3.4//ST4-4.3.4.jar -animal-sniffer-annotations/1.21//animal-sniffer-annotations-1.21.jar +animal-sniffer-annotations/1.23//animal-sniffer-annotations-1.23.jar annotations/4.1.1.4//annotations-4.1.1.4.jar antlr-runtime/3.5.3//antlr-runtime-3.5.3.jar antlr4-runtime/4.9.3//antlr4-runtime-4.9.3.jar @@ -33,20 +33,21 @@ commons-lang/2.6//commons-lang-2.6.jar commons-lang3/3.13.0//commons-lang3-3.13.0.jar 
commons-logging/1.1.3//commons-logging-1.1.3.jar derby/10.14.2.0//derby-10.14.2.0.jar -error_prone_annotations/2.14.0//error_prone_annotations-2.14.0.jar -failsafe/2.4.4//failsafe-2.4.4.jar +error_prone_annotations/2.20.0//error_prone_annotations-2.20.0.jar +failsafe/3.3.2//failsafe-3.3.2.jar failureaccess/1.0.1//failureaccess-1.0.1.jar flatbuffers-java/1.12.0//flatbuffers-java-1.12.0.jar fliptables/1.0.2//fliptables-1.0.2.jar -grpc-api/1.53.0//grpc-api-1.53.0.jar -grpc-context/1.53.0//grpc-context-1.53.0.jar -grpc-core/1.53.0//grpc-core-1.53.0.jar -grpc-grpclb/1.53.0//grpc-grpclb-1.53.0.jar -grpc-netty/1.53.0//grpc-netty-1.53.0.jar -grpc-protobuf-lite/1.53.0//grpc-protobuf-lite-1.53.0.jar -grpc-protobuf/1.53.0//grpc-protobuf-1.53.0.jar -grpc-stub/1.53.0//grpc-stub-1.53.0.jar -gson/2.9.0//gson-2.9.0.jar +grpc-api/1.60.1//grpc-api-1.60.1.jar +grpc-context/1.60.1//grpc-context-1.60.1.jar +grpc-core/1.60.1//grpc-core-1.60.1.jar +grpc-grpclb/1.60.1//grpc-grpclb-1.60.1.jar +grpc-netty/1.60.1//grpc-netty-1.60.1.jar +grpc-protobuf-lite/1.60.1//grpc-protobuf-lite-1.60.1.jar +grpc-protobuf/1.60.1//grpc-protobuf-1.60.1.jar +grpc-stub/1.60.1//grpc-stub-1.60.1.jar +grpc-util/1.60.1//grpc-util-1.60.1.jar +gson/2.10.1//gson-2.10.1.jar guava/32.0.1-jre//guava-32.0.1-jre.jar hadoop-client-api/3.3.6//hadoop-client-api-3.3.6.jar hadoop-client-runtime/3.3.6//hadoop-client-runtime-3.3.6.jar @@ -91,10 +92,10 @@ jersey-hk2/2.39.1//jersey-hk2-2.39.1.jar jersey-media-json-jackson/2.39.1//jersey-media-json-jackson-2.39.1.jar jersey-media-multipart/2.39.1//jersey-media-multipart-2.39.1.jar jersey-server/2.39.1//jersey-server-2.39.1.jar -jetcd-api/0.7.3//jetcd-api-0.7.3.jar -jetcd-common/0.7.3//jetcd-common-0.7.3.jar -jetcd-core/0.7.3//jetcd-core-0.7.3.jar -jetcd-grpc/0.7.3//jetcd-grpc-0.7.3.jar +jetcd-api/0.7.7//jetcd-api-0.7.7.jar +jetcd-common/0.7.7//jetcd-common-0.7.7.jar +jetcd-core/0.7.7//jetcd-core-0.7.7.jar +jetcd-grpc/0.7.7//jetcd-grpc-0.7.7.jar jetty-client/9.4.52.v20230823//jetty-client-9.4.52.v20230823.jar jetty-http/9.4.52.v20230823//jetty-http-9.4.52.v20230823.jar jetty-io/9.4.52.v20230823//jetty-io-9.4.52.v20230823.jar @@ -167,8 +168,8 @@ okhttp/3.12.12//okhttp-3.12.12.jar okio/1.15.0//okio-1.15.0.jar osgi-resource-locator/1.0.3//osgi-resource-locator-1.0.3.jar paranamer/2.8//paranamer-2.8.jar -perfmark-api/0.25.0//perfmark-api-0.25.0.jar -proto-google-common-protos/2.9.0//proto-google-common-protos-2.9.0.jar +perfmark-api/0.26.0//perfmark-api-0.26.0.jar +proto-google-common-protos/2.22.0//proto-google-common-protos-2.22.0.jar protobuf-java-util/3.21.7//protobuf-java-util-3.21.7.jar protobuf-java/3.21.7//protobuf-java-3.21.7.jar scala-library/2.12.18//scala-library-2.12.18.jar @@ -191,9 +192,9 @@ swagger-core/2.2.1//swagger-core-2.2.1.jar swagger-integration/2.2.1//swagger-integration-2.2.1.jar swagger-jaxrs2/2.2.1//swagger-jaxrs2-2.2.1.jar swagger-models/2.2.1//swagger-models-2.2.1.jar -trino-client/363//trino-client-363.jar -units/1.6//units-1.6.jar -vertx-core/4.3.2//vertx-core-4.3.2.jar -vertx-grpc/4.3.2//vertx-grpc-4.3.2.jar +trino-client/411//trino-client-411.jar +units/1.7//units-1.7.jar +vertx-core/4.5.1//vertx-core-4.5.1.jar +vertx-grpc/4.5.1//vertx-grpc-4.5.1.jar zjsonpatch/0.3.0//zjsonpatch-0.3.0.jar zstd-jni/1.5.5-1//zstd-jni-1.5.5-1.jar diff --git a/dev/kyuubi-tpcds/README.md b/dev/kyuubi-tpcds/README.md index a9a6487aa12..717c1b0edbc 100644 --- a/dev/kyuubi-tpcds/README.md +++ b/dev/kyuubi-tpcds/README.md @@ -48,14 +48,15 @@ $SPARK_HOME/bin/spark-submit \ Support options: -| key | 
default | description | -|-------------|------------------------|---------------------------------------------------------------| -| db | none(required) | the TPC-DS database | -| benchmark | tpcds-v2.4-benchmark | the name of application | -| iterations | 3 | the number of iterations to run | -| breakdown | false | whether to record breakdown results of an execution | -| filter | a | filter on the name of the queries to run, e.g. q1-v2.4 | -| results-dir | /spark/sql/performance | dir to store benchmark results, e.g. hdfs://hdfs-nn:9870/pref | +| key | default | description | +|-------------|------------------------|-------------------------------------------------------------------------------| +| db | none(required) | the TPC-DS database | +| benchmark | tpcds-v2.4-benchmark | the name of application | +| iterations | 3 | the number of iterations to run | +| breakdown | false | whether to record breakdown results of an execution | +| results-dir | /spark/sql/performance | dir to store benchmark results, e.g. hdfs://hdfs-nn:9870/pref | +| include | none(optional) | name of the queries to run, use comma to split multiple names, e.g. q1,q2 | +| exclude | none(optional) | name of the queries to exclude, use comma to split multiple names, e.g. q2,q4 | Example: the following command to benchmark TPC-DS sf10 with exists database `tpcds_sf10`. @@ -65,17 +66,52 @@ $SPARK_HOME/bin/spark-submit \ kyuubi-tpcds_*.jar --db tpcds_sf10 ``` -We also support run one of the TPC-DS query: +We also support run specified SQL collections of the TPC-DS query: ```shell $SPARK_HOME/bin/spark-submit \ --class org.apache.kyuubi.tpcds.benchmark.RunBenchmark \ - kyuubi-tpcds_*.jar --db tpcds_sf10 --filter q1-v2.4 + kyuubi-tpcds_*.jar --db tpcds_sf10 --include q1,q2 ``` The result of TPC-DS benchmark like: -| name | minTimeMs | maxTimeMs | avgTimeMs | stdDev | stdDevPercent | -|---------|-----------|------------|------------|----------|----------------| -| q1-v2.4 | 50.522384 | 868.010383 | 323.398267 | 471.6482 | 145.8413108576 | +| name | minTimeMs | maxTimeMs | avgTimeMs | stdDev | stdDevPercent | +|---------|--------------|--------------|------------------|------------------|------------------| +| q1-v2.4 | 8329.884508 | 14159.307004 | 10537.235825 | 3161.74253777417 | 30.0054263782615 | +| q2-v2.4 | 16600.979609 | 18932.613523 | 18137.6516166666 | 1331.06332796139 | 7.33867512781137 | +If you want to exclude some SQL, you can use exclude: + +```shell +$SPARK_HOME/bin/spark-submit \ + --class org.apache.kyuubi.tpcds.benchmark.RunBenchmark \ + kyuubi-tpcds_*.jar --db tpcds_sf10 --exclude q2,q4 +``` + +The result of TPC-DS benchmark like: + +| name | minTimeMs | maxTimeMs | avgTimeMs | stdDev | stdDevPercent | +|----------|--------------|--------------|------------------|------------------|-------------------| +| q1-v2.4 | 8329.884508 | 14159.307004 | 10537.235825 | 3161.74253777417 | 30.0054263782615 | +| q3-v2.4 | 3841.009061 | 4685.16345 | 4128.583224 | 482.102016761038 | 11.6771781166603 | +| q5-v2.4 | 39405.654981 | 48845.359253 | 43530.6847113333 | 4830.98802198401 | 11.0978911864583 | +| q6-v2.4 | 2998.962221 | 7793.096796 | 4658.37355366666 | 2716.310089792 | 58.3102677039276 | +| ... | ... | ... | ... | ... | ... 
| +| q99-v2.4 | 11747.22389 | 11900.570288 | 11813.018609 | 78.9544389266673 | 0.668368022941351 | + +When both include and exclude exist simultaneously, the final SQL collections executed is include minus exclude: + +```shell +$SPARK_HOME/bin/spark-submit \ + --class org.apache.kyuubi.tpcds.benchmark.RunBenchmark \ + kyuubi-tpcds_*.jar --db tpcds_sf10 --include q1,q2,q3,q4,q5 --exclude q2,q4 +``` + +The result of TPC-DS benchmark like: + +| name | minTimeMs | maxTimeMs | avgTimeMs | stdDev | stdDevPercent | +|----------|--------------|--------------|------------------|------------------|-------------------| +| q1-v2.4 | 8329.884508 | 14159.307004 | 10537.235825 | 3161.74253777417 | 30.0054263782615 | +| q3-v2.4 | 3841.009061 | 4685.16345 | 4128.583224 | 482.102016761038 | 11.6771781166603 | +| q5-v2.4 | 39405.654981 | 48845.359253 | 43530.6847113333 | 4830.98802198401 | 11.0978911864583 | \ No newline at end of file diff --git a/dev/kyuubi-tpcds/src/main/scala/org/apache/kyuubi/tpcds/benchmark/RunBenchmark.scala b/dev/kyuubi-tpcds/src/main/scala/org/apache/kyuubi/tpcds/benchmark/RunBenchmark.scala index 3e2106cff2b..80f74229415 100644 --- a/dev/kyuubi-tpcds/src/main/scala/org/apache/kyuubi/tpcds/benchmark/RunBenchmark.scala +++ b/dev/kyuubi-tpcds/src/main/scala/org/apache/kyuubi/tpcds/benchmark/RunBenchmark.scala @@ -26,11 +26,11 @@ import org.apache.spark.sql.functions._ case class RunConfig( db: String = null, benchmarkName: String = "tpcds-v2.4-benchmark", - filter: Option[String] = None, iterations: Int = 3, breakdown: Boolean = false, resultsDir: String = "/spark/sql/performance", - queries: Set[String] = Set.empty) + include: Set[String] = Set.empty, + exclude: Set[String] = Set.empty) // scalastyle:off /** @@ -54,9 +54,6 @@ object RunBenchmark { opt[String]('b', "benchmark") .action { (x, c) => c.copy(benchmarkName = x) } .text("the name of the benchmark to run") - opt[String]('f', "filter") - .action((x, c) => c.copy(filter = Some(x))) - .text("a filter on the name of the queries to run") opt[Boolean]('B', "breakdown") .action((x, c) => c.copy(breakdown = x)) .text("whether to record breakdown results of an execution") @@ -66,11 +63,16 @@ object RunBenchmark { opt[String]('r', "results-dir") .action((x, c) => c.copy(resultsDir = x)) .text("dir to store benchmark results, e.g. hdfs://hdfs-nn:9870/pref") - opt[String]('q', "queries") + opt[String]("include") .action { case (x, c) => - c.copy(queries = x.split(",").map(_.trim).filter(_.nonEmpty).toSet) + c.copy(include = x.split(",").map(_.trim).filter(_.nonEmpty).toSet) } - .text("name of the queries to run, use , split multiple name") + .text("name of the queries to run, use comma to split multiple names, e.g. q1,q2") + opt[String]("exclude") + .action { case (x, c) => + c.copy(exclude = x.split(",").map(_.trim).filter(_.nonEmpty).toSet) + } + .text("name of the queries to exclude, use comma to split multiple names, e.g. 
q2,q4") help("help") .text("prints this usage text") } @@ -96,19 +98,18 @@ object RunBenchmark { println(config.db) sparkSession.sql(s"use ${config.db}") - val allQueries = config.filter.map { f => - benchmark.tpcds2_4Queries.filter(_.name contains f) - } getOrElse { - benchmark.tpcds2_4Queries - } - - val runQueries = - if (config.queries.nonEmpty) { - allQueries.filter(q => config.queries.contains(q.name.split('-')(0))) + var runQueries = + if (config.include.nonEmpty) { + benchmark.tpcds2_4Queries.filter(q => config.include.contains(q.name.split('-')(0))) } else { - allQueries + benchmark.tpcds2_4Queries } + // runQueries = include - exclude + if (config.exclude.nonEmpty) { + runQueries = runQueries.filterNot(q => config.exclude.contains(q.name.split('-')(0))) + } + println("== QUERY LIST ==") runQueries.foreach(q => println(q.name)) diff --git a/dev/reformat b/dev/reformat index 7ad26ae2e17..fe05408ccf4 100755 --- a/dev/reformat +++ b/dev/reformat @@ -20,7 +20,7 @@ set -x KYUUBI_HOME="$(cd "`dirname "$0"`/.."; pwd)" -PROFILES="-Pflink-provided,hive-provided,spark-provided,spark-block-cleaner,spark-3.5,spark-3.4,spark-3.3,spark-3.2,spark-3.1,tpcds,kubernetes-it" +PROFILES="-Pflink-provided,hive-provided,spark-provided,spark-3.5,spark-3.4,spark-3.3,spark-3.2,spark-3.1,tpcds,kubernetes-it" # python style checks rely on `black` in path if ! command -v black &> /dev/null diff --git a/docker/playground/.env b/docker/playground/.env index e8446fd56c9..c09540fc700 100644 --- a/docker/playground/.env +++ b/docker/playground/.env @@ -28,3 +28,5 @@ SPARK_VERSION=3.4.2 SPARK_BINARY_VERSION=3.4 SPARK_HADOOP_VERSION=3.3.4 ZOOKEEPER_VERSION=3.6.3 +PROMETHEUS_VERSION=2.45.2 +GRAFANA_VERSION=10.0.10 diff --git a/docker/playground/README.md b/docker/playground/README.md index 66dca2af0ab..04fdebb5274 100644 --- a/docker/playground/README.md +++ b/docker/playground/README.md @@ -34,6 +34,8 @@ Kyuubi supply some built-in dataset, after Kyuubi started, you can run the follo - MinIO: http://localhost:9001 - PostgreSQL localhost:5432 (username: postgres, password: postgres) - Spark UI: http://localhost:4040 (available after Spark application launching by Kyuubi, port may be 4041, 4042... 
if you launch more than one Spark applications) +- Prometheus: http://localhost:9090 +- Grafana: http://localhost:3000 (username: admin, password: admin) ### Shutdown diff --git a/docker/playground/compose.yml b/docker/playground/compose.yml index 362b3505be1..35a794609e6 100644 --- a/docker/playground/compose.yml +++ b/docker/playground/compose.yml @@ -81,3 +81,28 @@ services: - metastore - minio - zookeeper + + prometheus: + # leave `v` here for match prometheus docker image tag + image: prom/prometheus:v${PROMETHEUS_VERSION} + container_name: prometheus + hostname: prometheus + ports: + - 9090:9090 + - 8123:8123 + volumes: + - ./prometheus/prometheus.yml:/etc/prometheus/prometheus.yml + + grafana: + image: grafana/grafana-oss:${GRAFANA_VERSION} + container_name: grafana + hostname: grafana + ports: + - 3000:3000 + environment: + - GF_AUTH_ANONYMOUS_ENABLED=true + - GF_AUTH_ANONYMOUS_ORG_ROLE=Admin + volumes: + - ./grafana/datasource/prometheus.yaml:/etc/grafana/provisioning/datasources/prometheus.yaml + depends_on: + - prometheus diff --git a/docker/playground/conf/kyuubi-defaults.conf b/docker/playground/conf/kyuubi-defaults.conf index e4a674634d4..75c28850fe0 100644 --- a/docker/playground/conf/kyuubi-defaults.conf +++ b/docker/playground/conf/kyuubi-defaults.conf @@ -27,6 +27,9 @@ kyuubi.session.engine.idle.timeout=PT5M kyuubi.operation.incremental.collect=true kyuubi.operation.progress.enabled=true +kyuubi.metrics.reporters=PROMETHEUS +kyuubi.metrics.prometheus.port=10019 + kyuubi.engine.session.initialize.sql \ show namespaces in tpcds; \ show namespaces in tpch; \ diff --git a/tools/spark-block-cleaner/kubernetes/docker/entrypoint.sh b/docker/playground/grafana/datasource/prometheus.yaml old mode 100755 new mode 100644 similarity index 79% rename from tools/spark-block-cleaner/kubernetes/docker/entrypoint.sh rename to docker/playground/grafana/datasource/prometheus.yaml index 953a80334a5..c030390b01c --- a/tools/spark-block-cleaner/kubernetes/docker/entrypoint.sh +++ b/docker/playground/grafana/datasource/prometheus.yaml @@ -1,4 +1,3 @@ -#!/bin/bash # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with @@ -16,8 +15,14 @@ # limitations under the License. # -# entrypoint for spark-block-cleaner +apiVersion: 1 -# shellcheck disable=SC2046 -exec /usr/bin/tini -s -- java -cp "${CLASS_PATH}:${CLEANER_CLASSPATH}" \ - org.apache.kyuubi.tools.KubernetesSparkBlockCleaner +datasources: +- name: prometheus + type: prometheus + url: http://prometheus:9090 + orgId: 1 + isDefault: false + access: server + editable: true + version: 1 diff --git a/tools/spark-block-cleaner/kubernetes/docker/Dockerfile b/docker/playground/prometheus/prometheus.yml similarity index 61% rename from tools/spark-block-cleaner/kubernetes/docker/Dockerfile rename to docker/playground/prometheus/prometheus.yml index 95a7b2cf8aa..91218e3744b 100644 --- a/tools/spark-block-cleaner/kubernetes/docker/Dockerfile +++ b/docker/playground/prometheus/prometheus.yml @@ -14,21 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -FROM eclipse-temurin:8-jdk-focal -RUN apt-get update && \ - apt install -y tini && \ - rm -rf /var/cache/apt/* && \ - mkdir /data && \ - mkdir -p /opt/block-cleaner && \ - mkdir -p /log/cleanerLog - -COPY jars /opt/block-cleaner -COPY tools/spark-block-cleaner/jars /opt/block-cleaner -COPY tools/spark-block-cleaner/kubernetes/docker/entrypoint.sh /opt/entrypoint.sh - -RUN chmod +x /opt/entrypoint.sh - -ENV CLEANER_CLASSPATH /opt/block-cleaner/* - -ENTRYPOINT ["/opt/entrypoint.sh"] +scrape_configs: + - job_name: kyuubi + static_configs: + - targets: ['kyuubi:10019'] diff --git a/docs/_static/css/custom.css b/docs/_static/css/custom.css new file mode 100644 index 00000000000..9352af86567 --- /dev/null +++ b/docs/_static/css/custom.css @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +table.docutils { + width: 100%; + margin-top: 10px; + margin-bottom: 10px; + border: 0; + border-collapse: collapse; + table-layout: auto; +} +table.docutils th { + padding: 1px 8px 1px 5px; + border-top: 1px; + border-left: 1px; + border-right: 1px; + border-bottom: 1px solid #aaa; +} +table.docutils td { + word-break: break-word; + min-width: 10%; +} +table.docutils tr:hover { + background: #efefef; +} +table.docutils tbody tr:nth-child(2n) { + background: #9EBCE21E; +} +table.docutils td:nth-child(1) { + width: 25%; + word-break: break-all; + font-weight: 500; +} diff --git a/docs/client/cli/trino_cli.md b/docs/client/cli/trino_cli.md index 68ebd830020..a2a1469a83b 100644 --- a/docs/client/cli/trino_cli.md +++ b/docs/client/cli/trino_cli.md @@ -21,7 +21,7 @@ The Trino CLI provides a terminal-based, interactive shell for running queries. 
## Start Kyuubi Trino Server -First we should configure the trino protocol and the service port in the `kyuubi.conf` +First we should configure the trino protocol and the service port in the `kyuubi-defaults.conf` ``` kyuubi.frontend.protocols TRINO @@ -30,11 +30,11 @@ kyuubi.frontend.trino.bind.port 10999 #default port ## Install -Download [trino-cli-363-executable.jar](https://repo1.maven.org/maven2/io/trino/trino-jdbc/363/trino-jdbc-363.jar), rename it to `trino`, make it executable with `chmod +x`, and run it to show the version of the CLI: +Download [trino-cli-411-executable.jar](https://repo1.maven.org/maven2/io/trino/trino-jdbc/411/trino-jdbc-411.jar), rename it to `trino`, make it executable with `chmod +x`, and run it to show the version of the CLI: ``` -wget https://repo1.maven.org/maven2/io/trino/trino-jdbc/363/trino-jdbc-363.jar -mv trino-jdbc-363.jar trino +wget https://repo1.maven.org/maven2/io/trino/trino-jdbc/411/trino-jdbc-411.jar +mv trino-jdbc-411.jar trino chmod +x trino ./trino --version ``` diff --git a/docs/client/jdbc/trino_jdbc.md b/docs/client/jdbc/trino_jdbc.md index 0f91c4337e6..aad6ebd1fc8 100644 --- a/docs/client/jdbc/trino_jdbc.md +++ b/docs/client/jdbc/trino_jdbc.md @@ -33,7 +33,7 @@ kyuubi.frontend.trino.bind.port 10999 #default port ## Install Trino JDBC -Download [trino-jdbc-363.jar](https://repo1.maven.org/maven2/io/trino/trino-jdbc/363/trino-jdbc-363.jar) and add it to the classpath of your Java application. +Download [trino-jdbc-411.jar](https://repo1.maven.org/maven2/io/trino/trino-jdbc/411/trino-jdbc-411.jar) and add it to the classpath of your Java application. The driver is also available from Maven Central: @@ -41,7 +41,7 @@ The driver is also available from Maven Central: <dependency> <groupId>io.trino</groupId> <artifactId>trino-jdbc</artifactId> - <version>363</version> + <version>411</version> </dependency> ``` diff --git a/docs/conf.py b/docs/conf.py index d75f819b3c2..eaac1acedef 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -126,7 +126,8 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
-html_static_path = [] +html_static_path = ['_static'] +html_css_files = ["css/custom.css"] htmlhelp_basename = 'Recommonmarkdoc' github_doc_root = 'https://github.com/apache/kyuubi/tree/master/docs/' diff --git a/docs/configuration/settings.md b/docs/configuration/settings.md index 20a1bf8d93f..b203b0bdacb 100644 --- a/docs/configuration/settings.md +++ b/docs/configuration/settings.md @@ -33,7 +33,7 @@ You can configure the Kyuubi properties in `$KYUUBI_HOME/conf/kyuubi-defaults.co | Key | Default | Meaning | Type | Since | |-----------------------------------------------|-------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------|-------| -| kyuubi.authentication | NONE | A comma-separated list of client authentication types.The following tree describes the catalog of each option. Note that: for SASL authentication, KERBEROS and PLAIN auth types are supported at the same time, and only the first specified PLAIN auth type is valid. | set | 1.0.0 | +| kyuubi.authentication | NONE | A comma-separated list of client authentication types.The following tree describes the catalog of each option. Note that: for SASL authentication, KERBEROS and PLAIN auth types are supported at the same time, and only the first specified PLAIN auth type is valid. | seq | 1.0.0 | | kyuubi.authentication.custom.class | <undefined> | User-defined authentication implementation of org.apache.kyuubi.service.authentication.PasswdAuthenticationProvider | string | 1.3.0 | | kyuubi.authentication.jdbc.driver.class | <undefined> | Driver class name for JDBC Authentication Provider. | string | 1.6.0 | | kyuubi.authentication.jdbc.password | <undefined> | Database password for JDBC Authentication Provider. | string | 1.6.0 | @@ -148,6 +148,7 @@ You can configure the Kyuubi properties in `$KYUUBI_HOME/conf/kyuubi-defaults.co | kyuubi.engine.flink.initialize.sql | SHOW DATABASES | The initialize sql for Flink engine. It fallback to `kyuubi.engine.initialize.sql`. | seq | 1.8.1 | | kyuubi.engine.flink.java.options | <undefined> | The extra Java options for the Flink SQL engine. Only effective in yarn session mode. | string | 1.6.0 | | kyuubi.engine.flink.memory | 1g | The heap memory for the Flink SQL engine. Only effective in yarn session mode. | string | 1.6.0 | +| kyuubi.engine.hive.deploy.mode | LOCAL | Configures the hive engine deploy mode, The value can be 'local', 'yarn'. In local mode, the engine operates on the same node as the KyuubiServer. In YARN mode, the engine runs within the Application Master (AM) container of YARN. 
| string | 1.9.0 | | kyuubi.engine.hive.event.loggers | JSON | A comma-separated list of engine history loggers, where engine/session/operation etc events go. | seq | 1.7.0 | | kyuubi.engine.hive.extra.classpath | <undefined> | The extra classpath for the Hive query engine, for configuring location of the hadoop client jars and etc. | string | 1.6.0 | | kyuubi.engine.hive.java.options | <undefined> | The extra Java options for the Hive query engine | string | 1.6.0 | @@ -193,6 +194,7 @@ You can configure the Kyuubi properties in `$KYUUBI_HOME/conf/kyuubi-defaults.co | kyuubi.engine.trino.connection.truststore.password | <undefined> | The truststore password used for connecting to trino cluster | string | 1.8.0 | | kyuubi.engine.trino.connection.truststore.path | <undefined> | The truststore path used for connecting to trino cluster | string | 1.8.0 | | kyuubi.engine.trino.connection.truststore.type | <undefined> | The truststore type used for connecting to trino cluster | string | 1.8.0 | +| kyuubi.engine.trino.connection.user | <undefined> | The user used for connecting to trino cluster | string | 1.9.0 | | kyuubi.engine.trino.event.loggers | JSON | A comma-separated list of engine history loggers, where engine/session/operation etc events go. | seq | 1.7.0 | | kyuubi.engine.trino.extra.classpath | <undefined> | The extra classpath for the Trino query engine, for configuring other libs which may need by the Trino engine | string | 1.6.0 | | kyuubi.engine.trino.java.options | <undefined> | The extra Java options for the Trino query engine | string | 1.6.0 | @@ -204,7 +206,15 @@ You can configure the Kyuubi properties in `$KYUUBI_HOME/conf/kyuubi-defaults.co | kyuubi.engine.user.isolated.spark.session | true | When set to false, if the engine is running in a group or server share level, all the JDBC/ODBC connections will be isolated against the user. Including the temporary views, function registries, SQL configuration, and the current database. Note that, it does not affect if the share level is connection or user. | boolean | 1.6.0 | | kyuubi.engine.user.isolated.spark.session.idle.interval | PT1M | The interval to check if the user-isolated Spark session is timeout. | duration | 1.6.0 | | kyuubi.engine.user.isolated.spark.session.idle.timeout | PT6H | If kyuubi.engine.user.isolated.spark.session is false, we will release the Spark session if its corresponding user is inactive after this configured timeout. | duration | 1.6.0 | +| kyuubi.engine.yarn.app.name | <undefined> | The YARN app name when the engine deploy mode is YARN. | string | 1.9.0 | +| kyuubi.engine.yarn.cores | 1 | kyuubi engine container core number when the engine deploy mode is YARN. | int | 1.9.0 | +| kyuubi.engine.yarn.java.options | <undefined> | The extra Java options for the AM when the engine deploy mode is YARN. | string | 1.9.0 | +| kyuubi.engine.yarn.memory | 1024 | kyuubi engine container memory in mb when the engine deploy mode is YARN. | int | 1.9.0 | +| kyuubi.engine.yarn.priority | <undefined> | kyuubi engine yarn priority when the engine deploy mode is YARN. | int | 1.9.0 | +| kyuubi.engine.yarn.queue | default | kyuubi engine yarn queue when the engine deploy mode is YARN. | string | 1.9.0 | +| kyuubi.engine.yarn.stagingDir | <undefined> | Staging directory used while submitting kyuubi engine to YARN, It should be a absolute path in HDFS. | string | 1.9.0 | | kyuubi.engine.yarn.submit.timeout | PT30S | The engine submit timeout for YARN application. 
| duration | 1.7.2 | +| kyuubi.engine.yarn.tags | <undefined> | kyuubi engine yarn tags when the engine deploy mode is YARN. | seq | 1.9.0 | ### Event @@ -248,7 +258,6 @@ You can configure the Kyuubi properties in `$KYUUBI_HOME/conf/kyuubi-defaults.co | kyuubi.frontend.thrift.binary.ssl.disallowed.protocols | SSLv2,SSLv3 | SSL versions to disallow for Kyuubi thrift binary frontend. | set | 1.7.0 | | kyuubi.frontend.thrift.binary.ssl.enabled | false | Set this to true for using SSL encryption in thrift binary frontend server. | boolean | 1.7.0 | | kyuubi.frontend.thrift.binary.ssl.include.ciphersuites || A comma-separated list of include SSL cipher suite names for thrift binary frontend. | seq | 1.7.0 | -| kyuubi.frontend.thrift.http.allow.user.substitution | true | Allow alternate user to be specified as part of open connection request when using HTTP transport mode. | boolean | 1.6.0 | | kyuubi.frontend.thrift.http.bind.host | <undefined> | Hostname or IP of the machine on which to run the thrift frontend service via http protocol. | string | 1.6.0 | | kyuubi.frontend.thrift.http.bind.port | 10010 | Port of the machine on which to run the thrift frontend service via http protocol. | int | 1.6.0 | | kyuubi.frontend.thrift.http.compression.enabled | true | Enable thrift http compression via Jetty compression support | boolean | 1.6.0 | diff --git a/docs/contributing/code/building.md b/docs/contributing/code/building.md index bfa6a46caed..049d7afb026 100644 --- a/docs/contributing/code/building.md +++ b/docs/contributing/code/building.md @@ -84,7 +84,7 @@ For the Scala version for Spark engines, the server will look up the `SPARK_SCAL ## Building With Apache dlcdn Site -By default, we use `https://archive.apache.org/dist/` to download the built-in release packages of engines, +By default, we use `closer.lua` to download the built-in release packages of engines, such as Spark or Flink. But sometimes, you may find it hard to reach, or the download speed is too slow, then you can define the `apache.archive.dist` by `-Pmirror-cdn` to accelerate to download speed. diff --git a/docs/deployment/spark/gluten.md b/docs/deployment/spark/gluten.md index 8f6bcdef7af..371b74edfd6 100644 --- a/docs/deployment/spark/gluten.md +++ b/docs/deployment/spark/gluten.md @@ -18,7 +18,7 @@ # Gluten -Gluten is a Spark plugin developed by Intel, designed to accelerate Apache Spark with native libraries. Currently, only CentOS 7/8 and Ubuntu 20.04/22.04, along with Spark 3.2/3.3/3.4, are supported. Users can employ the following methods to utilize the Gluten with Velox native libraries. +[Gluten](https://oap-project.github.io/gluten/) is a Spark plugin developed by Intel, designed to accelerate Apache Spark with native libraries. Currently, only CentOS 7/8 and Ubuntu 20.04/22.04, along with Spark 3.2/3.3/3.4, are supported. Users can employ the following methods to utilize the Gluten with Velox native libraries. ## Building(with velox Backend) @@ -40,11 +40,11 @@ You can use Gluten to accelerate Spark by following steps. 
### Installing -add gluten jar: `copy /path/to/gluten/package/target/gluten-velox-bundle-spark3.x_2.12-*.jar $SPARK_HOME/jars/` or specified to `spark.jars` configuration +Add the gluten jar: `copy /path/to/gluten/package/target/gluten-velox-bundle-spark3.x_2.12-*.jar $SPARK_HOME/jars/`, or specify it via the `spark.jars` configuration ### Configure -add config into `spark-defaults.conf`: +Add the following minimal configuration into `spark-defaults.conf`: ```properties spark.plugins=io.glutenproject.GlutenPlugin @@ -53,3 +53,4 @@ spark.memory.offHeap.enabled=true spark.shuffle.manager=org.apache.spark.shuffle.sort.ColumnarShuffleManager ``` +More configuration options can be found in the Gluten [Configuration](https://oap-project.github.io/gluten/Configuration.html) doc. diff --git a/docs/extensions/server/configuration.rst b/docs/extensions/server/configuration.rst index 94b1da9fbb2..97f50342691 100644 --- a/docs/extensions/server/configuration.rst +++ b/docs/extensions/server/configuration.rst @@ -53,8 +53,8 @@ We have a custom class ``CustomSessionConfAdvisor``: .. code-block:: java - @Override - public class CustomSessionConfAdvisor { + public class CustomSessionConfAdvisor implements SessionConfAdvisor { + @Override Map<String, String> getConfOverlay(String user, Map<String, String> sessionConf) { if ("uly".equals(user)) { return Collections.singletonMap("spark.driver.memory", "1G"); diff --git a/docs/imgs/kyuubi_layers.drawio b/docs/imgs/kyuubi_layers.drawio index 95dc80ef9a2..1b1d2e0bed7 100644 --- a/docs/imgs/kyuubi_layers.drawio +++ b/docs/imgs/kyuubi_layers.drawio @@ -1,155 +1,158 @@ diff --git a/docs/imgs/kyuubi_layers.drawio.png b/docs/imgs/kyuubi_layers.drawio.png index f3a8c059989..b1b68b308c6 100644 Binary files a/docs/imgs/kyuubi_layers.drawio.png and b/docs/imgs/kyuubi_layers.drawio.png differ diff --git a/docs/quick_start/quick_start.rst b/docs/quick_start/quick_start.rst index 85a215aad8e..c95919e33c1 100644 --- a/docs/quick_start/quick_start.rst +++ b/docs/quick_start/quick_start.rst @@ -36,23 +36,23 @@ For quick start deployment, we need to prepare the following stuffs: These essential components are JVM-based applications. So, the JRE needs to be pre-installed and the ``JAVA_HOME`` is correctly set to each component. 
- ================ ============ ==================== =========================================== + ================ ============ ==================== ======================================================= Component Role Version Remarks - ================ ============ ==================== =========================================== + ================ ============ ==================== ======================================================= **Java** JRE 8/11/17 Officially released against JDK8 **Kyuubi** Gateway \ |release| \ - Kyuubi Server Engine lib - Kyuubi Engine Beeline - Kyuubi Hive Beeline **Spark** Engine 3.1 to 3.5 A Spark distribution **Flink** Engine 1.16/1.17/1.18 A Flink distribution - **Trino** Engine >=363 A Trino cluster + **Trino** Engine N/A A Trino cluster allows to access via trino-client v411 **Doris** Engine N/A A Doris cluster **Hive** Engine - 2.1-cdh6/2.3/3.1 - A Hive distribution Metastore - N/A - An optional and external metadata store, whose version is decided by engines **Zookeeper** HA >=3.4.x **Disk** Storage N/A N/A - ================ ============ ==================== =========================================== + ================ ============ ==================== ======================================================= The other internal or external parts listed in the above sheet can be used individually or all together. For example, you can use Kyuubi, Spark and Flink to build a streaming diff --git a/docs/tools/spark_block_cleaner.md b/docs/tools/spark_block_cleaner.md index 4a1f20ff884..12e6cbee57d 100644 --- a/docs/tools/spark_block_cleaner.md +++ b/docs/tools/spark_block_cleaner.md @@ -17,119 +17,5 @@ # Kubernetes Tools Spark Block Cleaner -## Requirements - -You'd better have cognition upon the following things when you want to use spark-block-cleaner. - -* Read this article -* An active Kubernetes cluster -* [Kubectl](https://kubernetes.io/docs/reference/kubectl/overview/) -* [Docker](https://www.docker.com/) - -## Scenes - -When you're using Spark On Kubernetes with Client mode and don't use `emptyDir` for Spark `local-dir` type, you may face the same scenario that executor pods deleted without clean all the Block files. It may cause disk overflow. - -Therefore, we chose to use Spark Block Cleaner to clear the block files accumulated by Spark. - -## Principle - -When deploying Spark Block Cleaner, we will configure volumes for the destination folder. Spark Block Cleaner will perceive the folder by the parameter `CACHE_DIRS`. - -Spark Block Cleaner will clear the perceived folder in a fixed loop(which can be configured by `SCHEDULE_INTERVAL`). And Spark Block Cleaner will select folder start with `blockmgr` and `spark` for deletion using the logic Spark uses to create those folders. - -Before deleting those files, Spark Block Cleaner will determine whether it is a recently modified file(depending on whether the file has not been acted on within the specified time which configured by `FILE_EXPIRED_TIME`). Only delete files those beyond that time interval. - -And Spark Block Cleaner will check the disk utilization after clean, if the remaining space is less than the specified value(control by `FREE_SPACE_THRESHOLD`), will trigger deep clean(which file expired time control by `DEEP_CLEAN_FILE_EXPIRED_TIME`). - -## Usage - -Before you start using Spark Block Cleaner, you should build its docker images. - -### Build Block Cleaner Docker Image - -In the `KYUUBI_HOME` directory, you can use the following cmd to build docker image. 
- -```shell -docker build ./tools/spark-block-cleaner/kubernetes/docker -``` - -### Modify spark-block-cleaner.yml - -You need to modify the `${KYUUBI_HOME}/tools/spark-block-cleaner/kubernetes/spark-block-cleaner.yml` to fit your current environment. - -In Kyuubi tools, we recommend using `DaemonSet` to start, and we offer default yaml file in daemonSet way. - -Base file structure: - -```yaml -apiVersion -kind -metadata - name - namespace -spec - select - template - metadata - spce - containers - - image - - volumeMounts - - env - volumes -``` - -You can use affect the performance of Spark Block Cleaner through configure parameters in containers env part of `spark-block-cleaner.yml`. - -```yaml -env: - - name: CACHE_DIRS - value: /data/data1,/data/data2 - - name: FILE_EXPIRED_TIME - value: 604800 - - name: DEEP_CLEAN_FILE_EXPIRED_TIME - value: 432000 - - name: FREE_SPACE_THRESHOLD - value: 60 - - name: SCHEDULE_INTERVAL - value: 3600 -``` - -The most important thing, configure volumeMounts and volumes corresponding to Spark local-dirs. - -For example, Spark use /spark/shuffle1 as local-dir, you can configure like: - -```yaml -volumes: - - name: block-files-dir-1 - hostPath: - path: /spark/shuffle1 -``` - -```yaml -volumeMounts: - - name: block-files-dir-1 - mountPath: /data/data1 -``` - -```yaml -env: - - name: CACHE_DIRS - value: /data/data1 -``` - -### Start daemonSet - -After you finishing modifying the above, you can use the following command `kubectl apply -f ${KYUUBI_HOME}/tools/spark-block-cleaner/kubernetes/spark-block-cleaner.yml` to start daemonSet. - -## Related parameters - -| Name | Default | unit | Meaning | -|------------------------------|-------------------------|---------|-----------------------------------------------------------------------------------------------------------------------| -| CACHE_DIRS | /data/data1,/data/data2 | | The target dirs in container path which will clean block files. | -| FILE_EXPIRED_TIME | 604800 | seconds | Cleaner will clean the block files which current time - last modified time more than the fileExpiredTime. | -| DEEP_CLEAN_FILE_EXPIRED_TIME | 432000 | seconds | Deep clean will clean the block files which current time - last modified time more than the deepCleanFileExpiredTime. | -| FREE_SPACE_THRESHOLD | 60 | % | After first clean, if free Space low than threshold trigger deep clean. | -| SCHEDULE_INTERVAL | 3600 | seconds | Cleaner sleep between cleaning. | - +**Note**: +This tool has been removed since Kyuubi 1.9.0. diff --git a/extensions/spark/kyuubi-spark-authz-shaded/src/main/resources/META-INF/NOTICE b/extensions/spark/kyuubi-spark-authz-shaded/src/main/resources/META-INF/NOTICE index 9afa0f86d1c..c90e9a7a88d 100644 --- a/extensions/spark/kyuubi-spark-authz-shaded/src/main/resources/META-INF/NOTICE +++ b/extensions/spark/kyuubi-spark-authz-shaded/src/main/resources/META-INF/NOTICE @@ -1,5 +1,5 @@ Apache Kyuubi -Copyright 2021-2023 The Apache Software Foundation. +Copyright 2021-2024 The Apache Software Foundation. This product includes software developed at The Apache Software Foundation (https://www.apache.org/). 
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json index b555bbcf8be..973d13a0e72 100644 --- a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json +++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json @@ -1398,11 +1398,7 @@ "classname" : "org.apache.spark.sql.execution.command.InsertIntoDataSourceDirCommand", "tableDescs" : [ ], "opType" : "QUERY", - "queryDescs" : [ { - "fieldName" : "query", - "fieldExtractor" : "LogicalPlanQueryExtractor", - "comment" : "" - } ], + "queryDescs" : [ ], "uriDescs" : [ { "fieldName" : "storage", "fieldExtractor" : "CatalogStorageFormatURIExtractor", @@ -1625,11 +1621,7 @@ "comment" : "" } ], "opType" : "QUERY", - "queryDescs" : [ { - "fieldName" : "query", - "fieldExtractor" : "LogicalPlanQueryExtractor", - "comment" : "" - } ], + "queryDescs" : [ ], "uriDescs" : [ ] }, { "classname" : "org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand", diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/AccessResource.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/AccessResource.scala index 858dc1c3733..7772c86b788 100644 --- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/AccessResource.scala +++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/AccessResource.scala @@ -38,7 +38,6 @@ class AccessResource private (val objectType: ObjectType, val catalog: Option[St val columnStr = getColumn if (columnStr == null) Nil else columnStr.split(",").filter(_.nonEmpty) } - def getUrl: String = getValue("url") } object AccessResource { diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/Authorization.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/Authorization.scala index d1494266e85..d682b71d923 100644 --- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/Authorization.scala +++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/Authorization.scala @@ -43,7 +43,7 @@ object Authorization { val KYUUBI_AUTHZ_TAG = TreeNodeTag[Unit]("__KYUUBI_AUTHZ_TAG") - private def markAllNodesAuthChecked(plan: LogicalPlan): LogicalPlan = { + def markAllNodesAuthChecked(plan: LogicalPlan): LogicalPlan = { plan.transformDown { case p => p.setTagValue(KYUUBI_AUTHZ_TAG, ()) p diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminatePermanentViewMarker.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminatePermanentViewMarker.scala index 003521c727b..a0a6d5b6246 100644 --- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminatePermanentViewMarker.scala +++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminatePermanentViewMarker.scala @@ -37,7 +37,7 @@ case class RuleEliminatePermanentViewMarker(sparkSession: SparkSession) extends } // For each SubqueryExpression's PVM, we should mark as resolved to // avoid check privilege of PVM's internal Subquery. 
- Authorization.markAuthChecked(ret) + Authorization.markAllNodesAuthChecked(ret) ret } } @@ -52,8 +52,9 @@ case class RuleEliminatePermanentViewMarker(sparkSession: SparkSession) extends } } if (matched) { - Authorization.markAuthChecked(eliminatedPVM) - sparkSession.sessionState.optimizer.execute(eliminatedPVM) + Authorization.markAllNodesAuthChecked(eliminatedPVM) + val optimized = sparkSession.sessionState.optimizer.execute(eliminatedPVM) + Authorization.markAllNodesAuthChecked(optimized) } else { eliminatedPVM } diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminateTypeOf.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminateTypeOf.scala index 0f3ae136c4a..668f45fbd37 100644 --- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminateTypeOf.scala +++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminateTypeOf.scala @@ -25,10 +25,8 @@ import org.apache.kyuubi.plugin.spark.authz.rule.expression.TypeOfPlaceHolder object RuleEliminateTypeOf extends Rule[LogicalPlan] { override def apply(plan: LogicalPlan): LogicalPlan = { - plan.transformUp { case p => - p.transformExpressionsUp { - case toph: TypeOfPlaceHolder => TypeOf(toph.expr) - } + plan.transformExpressionsUp { + case toph: TypeOfPlaceHolder => TypeOf(toph.expr) } } } diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/permanentview/PermanentViewMarker.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/permanentview/PermanentViewMarker.scala index fc52adc0458..cb233b4651f 100644 --- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/permanentview/PermanentViewMarker.scala +++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/permanentview/PermanentViewMarker.scala @@ -21,11 +21,14 @@ import org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation import org.apache.spark.sql.catalyst.catalog.CatalogTable import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, Cast} import org.apache.spark.sql.catalyst.plans.QueryPlan -import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, LogicalPlan, Project, Statistics} +import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, LogicalPlan, Project, Statistics, View} +import org.apache.spark.sql.catalyst.trees.TreeNodeTag case class PermanentViewMarker(child: LogicalPlan, catalogTable: CatalogTable) extends LeafNode with MultiInstanceRelation { + private val PVM_NEW_INSTANCE_TAG = TreeNodeTag[Unit]("__PVM_NEW_INSTANCE_TAG") + override def output: Seq[Attribute] = child.output override def argString(maxFields: Int): String = "" @@ -38,6 +41,18 @@ case class PermanentViewMarker(child: LogicalPlan, catalogTable: CatalogTable) val projectList = child.output.map { case attr => Alias(Cast(attr, attr.dataType), attr.name)(explicitMetadata = Some(attr.metadata)) } - this.copy(child = Project(projectList, child), catalogTable = catalogTable) + val newProj = Project(projectList, child) + newProj.setTagValue(PVM_NEW_INSTANCE_TAG, ()) + + this.copy(child = newProj, catalogTable = catalogTable) + } + + override def doCanonicalize(): LogicalPlan = { + child match { + case p @ Project(_, view: View) if p.getTagValue(PVM_NEW_INSTANCE_TAG).contains(true) => + 
view.canonicalized + case _ => + child.canonicalized + } } } diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala index 8a7bc452293..952db091444 100644 --- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala +++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala @@ -438,6 +438,9 @@ abstract class HudiCallProcedureTableExtractor extends TableExtractor { ( s"$PROCEDURE_CLASS_PATH.ShowClusteringProcedure", ProcedureArgsInputOutputPair(input = Some("table"))), + ( + s"$PROCEDURE_CLASS_PATH.ShowCommitsProcedure", + ProcedureArgsInputOutputPair(input = Some("table"))), ( s"$PROCEDURE_CLASS_PATH.ShowCommitExtraMetadataProcedure", ProcedureArgsInputOutputPair(input = Some("table"))), diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala index 214a0375485..673a2e43726 100644 --- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala +++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala @@ -1483,16 +1483,7 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite { .queryExecution.analyzed val (in, out, operationType) = PrivilegesBuilder.build(plan, spark) assert(operationType === QUERY) - assert(in.size === 1) - val po0 = in.head - assert(po0.actionType === PrivilegeObjectActionType.OTHER) - assert(po0.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW) - assertEqualsIgnoreCase(reusedDb)(po0.dbname) - assert(po0.objectName equalsIgnoreCase reusedPartTable.split("\\.").last) - assert(po0.columns === Seq("key", "value", "pid")) - checkTableOwner(po0) - val accessType0 = ranger.AccessType(po0, operationType, isInput = true) - assert(accessType0 === AccessType.SELECT) + assert(in.size === 0) assert(out.size == 1) val po1 = out.head @@ -1534,18 +1525,7 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite { val plan = sql(sqlStr).queryExecution.analyzed val (inputs, outputs, operationType) = PrivilegesBuilder.build(plan, spark) assert(operationType === QUERY) - assert(inputs.size == 1) - inputs.foreach { po => - assert(po.actionType === PrivilegeObjectActionType.OTHER) - assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW) - assert(po.catalog.isEmpty) - assertEqualsIgnoreCase(reusedDb)(po.dbname) - assertEqualsIgnoreCase(reusedTableShort)(po.objectName) - assert(po.columns === Seq("key", "value")) - checkTableOwner(po) - val accessType = ranger.AccessType(po, operationType, isInput = true) - assert(accessType === AccessType.SELECT) - } + assert(inputs.size === 0) assert(outputs.size === 1) outputs.foreach { po => @@ -1614,16 +1594,7 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite { .queryExecution.analyzed val (in, out, operationType) = PrivilegesBuilder.build(plan, spark) assert(operationType === QUERY) - assert(in.size === 1) - val po0 = in.head - assert(po0.actionType === PrivilegeObjectActionType.OTHER) - assert(po0.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW) - 
assertEqualsIgnoreCase(reusedDb)(po0.dbname) - assert(po0.objectName equalsIgnoreCase reusedPartTable.split("\\.").last) - assert(po0.columns === Seq("key", "value", "pid")) - checkTableOwner(po0) - val accessType0 = ranger.AccessType(po0, operationType, isInput = true) - assert(accessType0 === AccessType.SELECT) + assert(in.size === 0) assert(out.size == 1) val po1 = out.head @@ -1639,6 +1610,7 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite { test("InsertIntoHiveDirCommand") { val tableDirectory = getClass.getResource("/").getPath + "table_directory" val directory = File(tableDirectory).createDirectory() + sql("set spark.sql.hive.convertMetastoreInsertDir=false") val plan = sql( s""" |INSERT OVERWRITE DIRECTORY '${directory.path}' diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala index aced937b9a6..d75411066c3 100644 --- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala +++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala @@ -567,7 +567,7 @@ object TableCommands extends CommandSpecs[TableCommandSpec] { "logicalRelation", classOf[LogicalRelationTableExtractor], actionTypeDesc = Some(actionTypeDesc)) - TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(queryQueryDesc)) + TableCommandSpec(cmd, Seq(tableDesc)) } val InsertIntoHiveTable = { @@ -585,9 +585,8 @@ object TableCommands extends CommandSpecs[TableCommandSpec] { val InsertIntoDataSourceDir = { val cmd = "org.apache.spark.sql.execution.command.InsertIntoDataSourceDirCommand" - val queryDesc = queryQueryDesc val uriDesc = UriDesc("storage", classOf[CatalogStorageFormatURIExtractor]) - TableCommandSpec(cmd, Nil, queryDescs = Seq(queryDesc), uriDescs = Seq(uriDesc)) + TableCommandSpec(cmd, Nil, uriDescs = Seq(uriDesc)) } val SaveIntoDataSourceCommand = { @@ -610,6 +609,13 @@ object TableCommands extends CommandSpecs[TableCommandSpec] { TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(queryDesc)) } + val InsertIntoHiveDirCommand = { + val cmd = "org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand" + val queryDesc = queryQueryDesc + val uriDesc = UriDesc("storage", classOf[CatalogStorageFormatURIExtractor]) + TableCommandSpec(cmd, Nil, queryDescs = Seq(queryDesc), uriDescs = Seq(uriDesc)) + } + val LoadData = { val cmd = "org.apache.spark.sql.execution.command.LoadDataCommand" val actionTypeDesc = overwriteActionTypeDesc.copy(fieldName = "isOverwrite") @@ -723,8 +729,7 @@ object TableCommands extends CommandSpecs[TableCommandSpec] { InsertIntoDataSourceDir, SaveIntoDataSourceCommand, InsertIntoHadoopFsRelationCommand, - InsertIntoDataSourceDir.copy(classname = - "org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand"), + InsertIntoHiveDirCommand, InsertIntoHiveTable, LoadData, MergeIntoTable, diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala index b6b9b6f31a5..f7b556686cb 100644 --- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala +++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala @@ -618,4 +618,30 @@ class HudiCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite { doAs(admin, sql(dropIndex)) } } + + test("ShowCommitsProcedure") { + withCleanTmpResources(Seq((s"$namespace1.$table1", "table"), (namespace1, "database"))) { + doAs(admin, sql(s"CREATE DATABASE IF NOT EXISTS $namespace1")) + doAs( + admin, + sql( + s""" + |CREATE TABLE IF NOT EXISTS $namespace1.$table1(id int, name string, city string) + |USING HUDI + |OPTIONS ( + | type = 'mor', + | primaryKey = 'id', + | 'hoodie.datasource.hive_sync.enable' = 'false' + |) + |PARTITIONED BY(city) + |TBLPROPERTIES ('hoodie.datasource.write.precombine.field' = 'id') + |""".stripMargin)) + + val showCommitsSql = s"CALL SHOW_COMMITS(table => '$namespace1.$table1', limit => 10)" + interceptEndsWith[AccessControlException] { + doAs(someone, sql(showCommitsSql)) + }(s"does not have [select] privilege on [$namespace1/$table1]") + doAs(admin, sql(showCommitsSql)) + } + } } diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala index 9dd9613d8f9..43333ea7763 100644 --- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala +++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala @@ -757,7 +757,8 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite { s"""INSERT OVERWRITE DIRECTORY '/tmp/test_dir' | USING parquet | SELECT * FROM $db1.$table;""".stripMargin))) - assert(e.getMessage.contains(s"does not have [select] privilege on [$db1/$table/id]")) + assert(e.getMessage.contains( + s"does not have [write] privilege on [[/tmp/test_dir, /tmp/test_dir/]]")) } } @@ -1080,8 +1081,7 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite { |INSERT OVERWRITE DIRECTORY '$path' |USING parquet |SELECT * FROM $db1.$table1""".stripMargin)))( - s"does not have [select] privilege on [$db1/$table1/id,$db1/$table1/scope], " + - s"[write] privilege on [[$path, $path/]]") + s"does not have [write] privilege on [[$path, $path/]]") } } } @@ -1122,8 +1122,7 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite { |INSERT OVERWRITE DIRECTORY '$path' |USING parquet |SELECT * FROM $db1.$table1""".stripMargin)))( - s"does not have [select] privilege on [$db1/$table1/id,$db1/$table1/scope], " + - s"[write] privilege on [[$path, $path/]]") + s"does not have [write] privilege on [[$path, $path/]]") doAs(admin, sql(s"SELECT * FROM parquet.`$path`".stripMargin).explain(true)) interceptEndsWith[AccessControlException]( diff --git a/externals/kyuubi-download/pom.xml b/externals/kyuubi-download/pom.xml index b21e3e5a223..e9ac586295a 100644 --- a/externals/kyuubi-download/pom.xml +++ b/externals/kyuubi-download/pom.xml @@ -51,7 +51,7 @@ compile ${spark.archive.download.skip} - ${spark.archive.mirror}/${spark.archive.name} + ${spark.archive.mirror}/${spark.archive.name}?action=download @@ -62,7 +62,7 @@ compile ${flink.archive.download.skip} - ${flink.archive.mirror}/${flink.archive.name} + ${flink.archive.mirror}/${flink.archive.name}?action=download @@ -73,7 +73,7 @@ compile 
${hive.archive.download.skip} - ${hive.archive.mirror}/${hive.archive.name} + ${hive.archive.mirror}/${hive.archive.name}?action=download diff --git a/externals/kyuubi-flink-sql-engine/src/main/resources/META-INF/NOTICE b/externals/kyuubi-flink-sql-engine/src/main/resources/META-INF/NOTICE index 74a23577f6c..52aa955541d 100644 --- a/externals/kyuubi-flink-sql-engine/src/main/resources/META-INF/NOTICE +++ b/externals/kyuubi-flink-sql-engine/src/main/resources/META-INF/NOTICE @@ -1,9 +1,35 @@ Apache Kyuubi -Copyright 2021-2023 The Apache Software Foundation. +Copyright 2021-2024 The Apache Software Foundation. This product includes software developed at The Apache Software Foundation (https://www.apache.org/). +Apache Iceberg +Copyright 2017-2022 The Apache Software Foundation + +Apache Parquet MR +Copyright 2014-2024 The Apache Software Foundation + +This project includes code from Kite, developed at Cloudera, Inc. with +the following copyright notice: + +| Copyright 2013 Cloudera Inc. +| +| Licensed under the Apache License, Version 2.0 (the "License"); +| you may not use this file except in compliance with the License. +| You may obtain a copy of the License at +| +| http://www.apache.org/licenses/LICENSE-2.0 +| +| Unless required by applicable law or agreed to in writing, software +| distributed under the License is distributed on an "AS IS" BASIS, +| WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +| See the License for the specific language governing permissions and +| limitations under the License. + +Apache Spark +Copyright 2014 and onwards The Apache Software Foundation. + -------------------------------------------------------------------------------- This binary artifact contains Jackson diff --git a/externals/kyuubi-hive-sql-engine/src/main/resources/META-INF/NOTICE b/externals/kyuubi-hive-sql-engine/src/main/resources/META-INF/NOTICE index c9ffbb70ac1..72c8eff760f 100644 --- a/externals/kyuubi-hive-sql-engine/src/main/resources/META-INF/NOTICE +++ b/externals/kyuubi-hive-sql-engine/src/main/resources/META-INF/NOTICE @@ -1,9 +1,35 @@ Apache Kyuubi -Copyright 2021-2023 The Apache Software Foundation. +Copyright 2021-2024 The Apache Software Foundation. This product includes software developed at The Apache Software Foundation (https://www.apache.org/). +Apache Iceberg +Copyright 2017-2022 The Apache Software Foundation + +Apache Parquet MR +Copyright 2014-2024 The Apache Software Foundation + +This project includes code from Kite, developed at Cloudera, Inc. with +the following copyright notice: + +| Copyright 2013 Cloudera Inc. +| +| Licensed under the Apache License, Version 2.0 (the "License"); +| you may not use this file except in compliance with the License. +| You may obtain a copy of the License at +| +| http://www.apache.org/licenses/LICENSE-2.0 +| +| Unless required by applicable law or agreed to in writing, software +| distributed under the License is distributed on an "AS IS" BASIS, +| WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +| See the License for the specific language governing permissions and +| limitations under the License. + +Apache Spark +Copyright 2014 and onwards The Apache Software Foundation. 
+ -------------------------------------------------------------------------------- This binary artifact contains diff --git a/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/HiveSQLEngine.scala b/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/HiveSQLEngine.scala index f22e281fbaa..4e0787039bc 100644 --- a/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/HiveSQLEngine.scala +++ b/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/HiveSQLEngine.scala @@ -45,7 +45,10 @@ class HiveSQLEngine extends Serverable("HiveSQLEngine") { super.start() // Start engine self-terminating checker after all services are ready and it can be reached by // all servers in engine spaces. - backendService.sessionManager.startTerminatingChecker(() => stop()) + backendService.sessionManager.startTerminatingChecker(() => { + selfExited = true + stop() + }) } override protected def stopServer(): Unit = { @@ -151,7 +154,8 @@ object HiveSQLEngine extends Logging { } } catch { - case t: Throwable => currentEngine match { + case t: Throwable => + currentEngine match { case Some(engine) => engine.stop() val event = HiveEngineEvent(engine) @@ -160,6 +164,7 @@ object HiveSQLEngine extends Logging { case _ => error(s"Failed to start Hive SQL engine: ${t.getMessage}.", t) } + throw t } } } diff --git a/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/deploy/HiveYarnModeSubmitter.scala b/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/deploy/HiveYarnModeSubmitter.scala new file mode 100644 index 00000000000..9d5126ad668 --- /dev/null +++ b/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/deploy/HiveYarnModeSubmitter.scala @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.kyuubi.engine.hive.deploy + +import java.io.File + +import scala.collection.mutable.ListBuffer + +import org.apache.kyuubi.Utils +import org.apache.kyuubi.config.KyuubiConf.ENGINE_HIVE_EXTRA_CLASSPATH +import org.apache.kyuubi.engine.deploy.yarn.EngineYarnModeSubmitter +import org.apache.kyuubi.engine.hive.HiveSQLEngine + +object HiveYarnModeSubmitter extends EngineYarnModeSubmitter { + + def main(args: Array[String]): Unit = { + Utils.fromCommandLineArgs(args, kyuubiConf) + submitApplication() + } + + override var engineType: String = "hive" + + override def engineMainClass(): String = HiveSQLEngine.getClass.getName + + /** + * Jar list for the Hive engine. 
+ */ + override def engineExtraJars(): Seq[File] = { + val hadoopCp = sys.env.get("HIVE_HADOOP_CLASSPATH") + val extraCp = kyuubiConf.get(ENGINE_HIVE_EXTRA_CLASSPATH) + val jars = new ListBuffer[File] + hadoopCp.foreach(cp => parseClasspath(cp, jars)) + extraCp.foreach(cp => parseClasspath(cp, jars)) + jars.toSeq + } + + private[hive] def parseClasspath(classpath: String, jars: ListBuffer[File]): Unit = { + classpath.split(":").filter(_.nonEmpty).foreach { cp => + if (cp.endsWith("/*")) { + val dir = cp.substring(0, cp.length - 2) + new File(dir) match { + case f if f.isDirectory => + f.listFiles().filter(_.getName.endsWith(".jar")).foreach(jars += _) + case _ => + } + } else { + jars += new File(cp) + } + } + } +} diff --git a/externals/kyuubi-hive-sql-engine/src/test/scala/org/apache/kyuubi/engine/hive/deploy/HiveYarnModeSubmitterSuite.scala b/externals/kyuubi-hive-sql-engine/src/test/scala/org/apache/kyuubi/engine/hive/deploy/HiveYarnModeSubmitterSuite.scala new file mode 100644 index 00000000000..9621eb2359b --- /dev/null +++ b/externals/kyuubi-hive-sql-engine/src/test/scala/org/apache/kyuubi/engine/hive/deploy/HiveYarnModeSubmitterSuite.scala @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.kyuubi.engine.hive.deploy + +import java.io.File + +import scala.collection.mutable.ListBuffer + +import org.apache.kyuubi.{KYUUBI_VERSION, KyuubiFunSuite, SCALA_COMPILE_VERSION, Utils} + +class HiveYarnModeSubmitterSuite extends KyuubiFunSuite { + val hiveEngineHome: String = Utils.getCodeSourceLocation(getClass).split("/target")(0) + + test("hadoop class path") { + val jars = new ListBuffer[File] + val classpath = + s"$hiveEngineHome/target/scala-$SCALA_COMPILE_VERSION/jars/*:" + + s"$hiveEngineHome/target/kyuubi-hive-sql-engine-$SCALA_COMPILE_VERSION-$KYUUBI_VERSION.jar" + HiveYarnModeSubmitter.parseClasspath(classpath, jars) + assert(jars.nonEmpty) + assert(jars.exists( + _.getName == s"kyuubi-hive-sql-engine-$SCALA_COMPILE_VERSION-$KYUUBI_VERSION.jar")) + } + +} diff --git a/externals/kyuubi-spark-sql-engine/pom.xml b/externals/kyuubi-spark-sql-engine/pom.xml index 4317b2ede37..e6e678aab7a 100644 --- a/externals/kyuubi-spark-sql-engine/pom.xml +++ b/externals/kyuubi-spark-sql-engine/pom.xml @@ -59,6 +59,11 @@ grpc-protobuf + + io.grpc + grpc-util + + org.apache.spark spark-sql_${scala.binary.version} @@ -232,12 +237,12 @@ com.google.guava:* com.google.j2objc:j2objc-annotations com.google.protobuf:* + dev.failsafe:failsafe io.etcd:* io.grpc:* io.netty:* io.perfmark:perfmark-api io.vertx:* - net.jodah:failsafe org.apache.kyuubi:* org.checkerframework:checker-qual org.codehaus.mojo:animal-sniffer-annotations @@ -263,6 +268,13 @@ + + dev.failsafe + ${kyuubi.shade.packageName}.dev.failsafe + + dev.failsafe.** + + io.etcd ${kyuubi.shade.packageName}.io.etcd @@ -292,13 +304,6 @@ io.vertx.** - - net.jodah - ${kyuubi.shade.packageName}.net.jodah - - net.jodah.** - - android.annotation ${kyuubi.shade.packageName}.android.annotation diff --git a/externals/kyuubi-spark-sql-engine/src/main/resources/META-INF/LICENSE b/externals/kyuubi-spark-sql-engine/src/main/resources/META-INF/LICENSE index fc5e57e6b19..3901f5b7c93 100644 --- a/externals/kyuubi-spark-sql-engine/src/main/resources/META-INF/LICENSE +++ b/externals/kyuubi-spark-sql-engine/src/main/resources/META-INF/LICENSE @@ -221,6 +221,7 @@ com.google.errorprone:error_prone_annotations com.google.guava:guava com.google.guava:failureaccess com.google.j2objc:j2objc-annotations +dev.failsafe:failsafe io.etcd:jetcd-api io.etcd:jetcd-common io.etcd:jetcd-core @@ -233,6 +234,7 @@ io.grpc:grpc-netty io.grpc:grpc-protobuf io.grpc:grpc-protobuf-lite io.grpc:grpc-stub +io.grpc:grpc-util io.netty:netty-buffer io.netty:netty-codec io.netty:netty-codec-dns @@ -251,7 +253,6 @@ io.netty:netty-transport-native-unix-common io.perfmark:perfmark-api io.vertx:vertx-grpc io.vertx:vertx-core -net.jodah:failsafe BSD License -------------------------- diff --git a/externals/kyuubi-spark-sql-engine/src/main/resources/META-INF/NOTICE b/externals/kyuubi-spark-sql-engine/src/main/resources/META-INF/NOTICE index bfb7e983431..fc896341f13 100644 --- a/externals/kyuubi-spark-sql-engine/src/main/resources/META-INF/NOTICE +++ b/externals/kyuubi-spark-sql-engine/src/main/resources/META-INF/NOTICE @@ -1,9 +1,35 @@ Apache Kyuubi -Copyright 2021-2023 The Apache Software Foundation. +Copyright 2021-2024 The Apache Software Foundation. This product includes software developed at The Apache Software Foundation (https://www.apache.org/). +Apache Iceberg +Copyright 2017-2022 The Apache Software Foundation + +Apache Parquet MR +Copyright 2014-2024 The Apache Software Foundation + +This project includes code from Kite, developed at Cloudera, Inc. 
with +the following copyright notice: + +| Copyright 2013 Cloudera Inc. +| +| Licensed under the Apache License, Version 2.0 (the "License"); +| you may not use this file except in compliance with the License. +| You may obtain a copy of the License at +| +| http://www.apache.org/licenses/LICENSE-2.0 +| +| Unless required by applicable law or agreed to in writing, software +| distributed under the License is distributed on an "AS IS" BASIS, +| WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +| See the License for the specific language governing permissions and +| limitations under the License. + +Apache Spark +Copyright 2014 and onwards The Apache Software Foundation. + -------------------------------------------------------------------------------- This binary artifact contains diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/ui/EnginePage.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/ui/EnginePage.scala index 7188ac62f62..cae0c03bf4d 100644 --- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/ui/EnginePage.scala +++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/ui/EnginePage.scala @@ -23,6 +23,7 @@ import java.util.Date import javax.servlet.http.HttpServletRequest import scala.collection.JavaConverters.mapAsScalaMapConverter +import scala.collection.mutable import scala.xml.{Node, Unparsed} import org.apache.commons.text.StringEscapeUtils @@ -36,18 +37,46 @@ case class EnginePage(parent: EngineTab) extends WebUIPage("") { private val store = parent.store override def render(request: HttpServletRequest): Seq[Node] = { + val onlineSession = new mutable.ArrayBuffer[SessionEvent]() + val closedSession = new mutable.ArrayBuffer[SessionEvent]() + + val runningSqlStat = new mutable.ArrayBuffer[SparkOperationEvent]() + val completedSqlStat = new mutable.ArrayBuffer[SparkOperationEvent]() + val failedSqlStat = new mutable.ArrayBuffer[SparkOperationEvent]() + + store.getSessionList.foreach { s => + if (s.endTime <= 0L) { + onlineSession += s + } else { + closedSession += s + } + } + + store.getStatementList.foreach { op => + if (op.completeTime <= 0L) { + runningSqlStat += op + } else if (op.exception.isDefined) { + failedSqlStat += op + } else { + completedSqlStat += op + } + } + val content = generateBasicStats() ++
++ stop(request) ++
++

- {store.getSessionCount} session(s) are online, - running {store.getStatementCount} - operations + {onlineSession.size} session(s) are online, + running {runningSqlStat.size} operation(s)

++ - generateSessionStatsTable(request) ++ - generateStatementStatsTable(request) + generateSessionStatsTable(request, onlineSession.toSeq, closedSession.toSeq) ++ + generateStatementStatsTable( + request, + runningSqlStat.toSeq, + completedSqlStat.toSeq, + failedSqlStat.toSeq) UIUtils.headerSparkPage(request, parent.name, content, parent) } @@ -129,102 +158,199 @@ case class EnginePage(parent: EngineTab) extends WebUIPage("") { } } - /** Generate stats of statements for the engine */ - private def generateStatementStatsTable(request: HttpServletRequest): Seq[Node] = { - - val numStatement = store.getStatementList.size - - val table = - if (numStatement > 0) { + /** Generate stats of running statements for the engine */ + private def generateStatementStatsTable( + request: HttpServletRequest, + running: Seq[SparkOperationEvent], + completed: Seq[SparkOperationEvent], + failed: Seq[SparkOperationEvent]): Seq[Node] = { + + val content = mutable.ListBuffer[Node]() + if (running.nonEmpty) { + val sqlTableTag = "running-sqlstat" + val table = + statementStatsTable(request, sqlTableTag, parent, running) + content ++= + +

+ + Running Statement Statistics ( + {running.size} + ) +

+
++ +
+ {table} +
+ } - val sqlTableTag = "sqlstat" + if (completed.nonEmpty) { + val table = { + val sqlTableTag = "completed-sqlstat" + statementStatsTable( + request, + sqlTableTag, + parent, + completed) + } - val sqlTablePage = - Option(request.getParameter(s"$sqlTableTag.page")).map(_.toInt).getOrElse(1) + content ++= + +

+ + Completed Statement Statistics ( + {completed.size} + ) +

+
++ +
+ {table} +
+ } - try { - Some(new StatementStatsPagedTable( - request, - parent, - store.getStatementList, - "kyuubi", - UIUtils.prependBaseUri(request, parent.basePath), - sqlTableTag).table(sqlTablePage)) - } catch { - case e @ (_: IllegalArgumentException | _: IndexOutOfBoundsException) => - Some(
-

Error while rendering job table:

-
-              {Utils.stringifyException(e)}
-            
-
) - } - } else { - None + if (failed.nonEmpty) { + val table = { + val sqlTableTag = "failed-sqlstat" + statementStatsTable( + request, + sqlTableTag, + parent, + failed) } - val content = - -

- - Statement Statistics ({numStatement}) -

-
++ -
- {table.getOrElse("No statistics have been generated yet.")} + + content ++= + +

+ + Failed Statement Statistics ( + {failed.size} + ) +

+
++ +
+ {table}
+ } content } - /** Generate stats of sessions for the engine */ - private def generateSessionStatsTable(request: HttpServletRequest): Seq[Node] = { - val numSessions = store.getSessionList.size - val table = - if (numSessions > 0) { + private def statementStatsTable( + request: HttpServletRequest, + sqlTableTag: String, + parent: EngineTab, + data: Seq[SparkOperationEvent]): Seq[Node] = { + + val sqlTablePage = + Option(request.getParameter(s"$sqlTableTag.page")).map(_.toInt).getOrElse(1) - val sessionTableTag = "sessionstat" + try { + new StatementStatsPagedTable( + request, + parent, + data, + "kyuubi", + UIUtils.prependBaseUri(request, parent.basePath), + s"${sqlTableTag}-table").table(sqlTablePage) + } catch { + case e @ (_: IllegalArgumentException | _: IndexOutOfBoundsException) => +
+

Error while rendering job table:

+
+                {Utils.stringifyException(e)}
+              
+
+ } + } - val sessionTablePage = - Option(request.getParameter(s"$sessionTableTag.page")).map(_.toInt).getOrElse(1) + /** Generate stats of online sessions for the engine */ + private def generateSessionStatsTable( + request: HttpServletRequest, + online: Seq[SessionEvent], + closed: Seq[SessionEvent]): Seq[Node] = { + val content = mutable.ListBuffer[Node]() + if (online.nonEmpty) { + val sessionTableTag = "online-sessionstat" + val table = sessionTable( + request, + sessionTableTag, + parent, + online) + content ++= + +

+ + Online Session Statistics ( + {online.size} + ) +

+
++ +
+ {table} +
+ } - try { - Some(new SessionStatsPagedTable( - request, - parent, - store.getSessionList, - "kyuubi", - UIUtils.prependBaseUri(request, parent.basePath), - sessionTableTag).table(sessionTablePage)) - } catch { - case e @ (_: IllegalArgumentException | _: IndexOutOfBoundsException) => - Some(
-

Error while rendering job table:

-
-              {Utils.stringifyException(e)}
-            
-
) - } - } else { - None + if (closed.nonEmpty) { + val table = { + val sessionTableTag = "closed-sessionstat" + sessionTable( + request, + sessionTableTag, + parent, + closed) } - val content = - -

- - Session Statistics ({numSessions}) -

-
++ -
- {table.getOrElse("No statistics have been generated yet.")} + content ++= + +

+ + Closed Session Statistics ( + {closed.size} + ) +

+
++ +
+ {table}
- + } content } + private def sessionTable( + request: HttpServletRequest, + sessionTage: String, + parent: EngineTab, + data: Seq[SessionEvent]): Seq[Node] = { + val sessionPage = + Option(request.getParameter(s"$sessionTage.page")).map(_.toInt).getOrElse(1) + try { + new SessionStatsPagedTable( + request, + parent, + data, + "kyuubi", + UIUtils.prependBaseUri(request, parent.basePath), + s"${sessionTage}-table").table(sessionPage) + } catch { + case e @ (_: IllegalArgumentException | _: IndexOutOfBoundsException) => +
+

Error while rendering job table:

+
+            {Utils.stringifyException(e)}
+          
+
+ } + } + private class SessionStatsPagedTable( request: HttpServletRequest, parent: EngineTab, diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/ui/EngineSessionPage.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/ui/EngineSessionPage.scala index cdfc6d31355..46011ceae9a 100644 --- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/ui/EngineSessionPage.scala +++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/ui/EngineSessionPage.scala @@ -20,6 +20,7 @@ package org.apache.spark.ui import java.util.Date import javax.servlet.http.HttpServletRequest +import scala.collection.mutable import scala.xml.Node import org.apache.spark.internal.Logging @@ -27,6 +28,8 @@ import org.apache.spark.internal.config.SECRET_REDACTION_PATTERN import org.apache.spark.ui.UIUtils._ import org.apache.spark.util.Utils +import org.apache.kyuubi.engine.spark.events.SparkOperationEvent + /** Page for Spark Web UI that shows statistics of jobs running in the engine server */ case class EngineSessionPage(parent: EngineTab) extends WebUIPage("session") with Logging { @@ -126,50 +129,118 @@ case class EngineSessionPage(parent: EngineTab) /** Generate stats of batch statements of the engine server */ private def generateSQLStatsTable(request: HttpServletRequest, sessionID: String): Seq[Node] = { - val executionList = store.getStatementList + val running = new mutable.ArrayBuffer[SparkOperationEvent]() + val completed = new mutable.ArrayBuffer[SparkOperationEvent]() + val failed = new mutable.ArrayBuffer[SparkOperationEvent]() + + store.getStatementList .filter(_.sessionId == sessionID) - val numStatement = executionList.size - val table = - if (numStatement > 0) { - - val sqlTableTag = "sqlsessionstat" - - val sqlTablePage = - Option(request.getParameter(s"$sqlTableTag.page")).map(_.toInt).getOrElse(1) - - try { - Some(new StatementStatsPagedTable( - request, - parent, - executionList, - "kyuubi/session", - UIUtils.prependBaseUri(request, parent.basePath), - sqlTableTag).table(sqlTablePage)) - } catch { - case e @ (_: IllegalArgumentException | _: IndexOutOfBoundsException) => - Some(
-

Error while rendering job table:

-
-              {Utils.exceptionString(e)}
-            
-
) + .foreach { op => + if (op.completeTime <= 0L) { + running += op + } else if (op.exception.isDefined) { + failed += op + } else { + completed += op } - } else { - None } - val content = - -

- - Statement Statistics -

-
++ -
- {table.getOrElse("No statistics have been generated yet.")} -
+ val content = mutable.ListBuffer[Node]() + if (running.nonEmpty) { + val sqlTableTag = "running-sqlstat" + val table = statementStatsTable(request, sqlTableTag, parent, running.toSeq) + content ++= + +

+ + Running Statement Statistics +

+
++ +
+ {table} +
+ } + + if (completed.nonEmpty) { + val table = { + val sqlTableTag = "completed-sqlstat" + statementStatsTable( + request, + sqlTableTag, + parent, + completed.toSeq) + } + + content ++= + +

+ + Completed Statement Statistics ( + {completed.size} + ) +

+
++ +
+ {table} +
+ } + + if (failed.nonEmpty) { + val table = { + val sqlTableTag = "failed-sqlstat" + statementStatsTable( + request, + sqlTableTag, + parent, + failed.toSeq) + } + + content ++= + +

+ + Failed Statement Statistics ( + {failed.size} + ) +

+
++ +
+ {table} +
+ } content } + + private def statementStatsTable( + request: HttpServletRequest, + sqlTableTag: String, + parent: EngineTab, + data: Seq[SparkOperationEvent]): Seq[Node] = { + val sqlTablePage = + Option(request.getParameter(s"$sqlTableTag.page")).map(_.toInt).getOrElse(1) + + try { + new StatementStatsPagedTable( + request, + parent, + data, + "kyuubi/session", + UIUtils.prependBaseUri(request, parent.basePath), + s"${sqlTableTag}").table(sqlTablePage) + } catch { + case e @ (_: IllegalArgumentException | _: IndexOutOfBoundsException) => +
+

Error while rendering job table:

+
+            {Utils.exceptionString(e)}
+          
+
+ } + } } diff --git a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/spark/ui/EngineTabSuite.scala b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/spark/ui/EngineTabSuite.scala index 260dbf87e17..ad056a0643d 100644 --- a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/spark/ui/EngineTabSuite.scala +++ b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/spark/ui/EngineTabSuite.scala @@ -96,10 +96,10 @@ class EngineTabSuite extends WithSparkSQLEngine with HiveJDBCTestHelper { val resp = EntityUtils.toString(response.getEntity) // check session section - assert(resp.contains("Session Statistics")) + assert(resp.contains("Online Session Statistics")) // check session stats table id - assert(resp.contains("sessionstat")) + assert(resp.contains("onlineSessionstat")) // check session stats table title assert(resp.contains("Total Statements")) @@ -110,10 +110,11 @@ class EngineTabSuite extends WithSparkSQLEngine with HiveJDBCTestHelper { assert(spark.sparkContext.ui.nonEmpty) val client = HttpClients.createDefault() val req = new HttpGet(spark.sparkContext.uiWebUrl.get + "/kyuubi/") - val response = client.execute(req) + var response = client.execute(req) assert(response.getStatusLine.getStatusCode === 200) - val resp = EntityUtils.toString(response.getEntity) + var resp = EntityUtils.toString(response.getEntity) assert(resp.contains("0 session(s) are online,")) + assert(!resp.contains("Statement Statistics")) withJdbcStatement() { statement => statement.execute( """ @@ -133,13 +134,23 @@ class EngineTabSuite extends WithSparkSQLEngine with HiveJDBCTestHelper { // check session section assert(resp.contains("Statement Statistics")) + assert(!resp.contains("Failed Statement Statistics")) // check sql stats table id - assert(resp.contains("sqlstat")) + assert(resp.contains("runningSqlstat") || resp.contains("completedSqlstat")) + + assert(resp.contains("1 session(s) are online,")) // check sql stats table title assert(resp.contains("Query Details")) } + response = client.execute(req) + assert(response.getStatusLine.getStatusCode === 200) + resp = EntityUtils.toString(response.getEntity) + assert(resp.contains("0 session(s) are online,")) + assert(resp.contains("running 0 operation(s)")) + assert(resp.contains("completedSqlstat")) + assert(resp.contains("Completed Statement Statistics")) } test("statement redact for engine tab") { diff --git a/externals/kyuubi-trino-engine/src/main/scala/org/apache/kyuubi/engine/trino/TrinoStatement.scala b/externals/kyuubi-trino-engine/src/main/scala/org/apache/kyuubi/engine/trino/TrinoStatement.scala index 2508f326e3d..48a6d625987 100644 --- a/externals/kyuubi-trino-engine/src/main/scala/org/apache/kyuubi/engine/trino/TrinoStatement.scala +++ b/externals/kyuubi-trino-engine/src/main/scala/org/apache/kyuubi/engine/trino/TrinoStatement.scala @@ -25,6 +25,7 @@ import scala.concurrent.ExecutionContext import com.google.common.base.Verify import io.trino.client.ClientSession +import io.trino.client.ClientTypeSignature import io.trino.client.Column import io.trino.client.StatementClient import io.trino.client.StatementClientFactory @@ -46,6 +47,9 @@ class TrinoStatement( sql: String, operationLog: Option[OperationLog]) extends Logging { + private val defaultSchema: List[Column] = + List(new Column("Result", "VARCHAR", new ClientTypeSignature("VARCHAR"))) + private lazy val trino = StatementClientFactory .newStatementClient(trinoContext.httpClient, trinoContext.clientSession.get, sql) @@ -68,6 +72,9 @@ class TrinoStatement( 
val columns = results.getColumns() if (columns != null) { info(s"Execute with Trino query id: ${results.getId}") + if (columns.isEmpty) { + return defaultSchema + } return columns.asScala.toList } trino.advance() @@ -121,13 +128,13 @@ class TrinoStatement( // update catalog and schema if (trino.getSetCatalog.isPresent || trino.getSetSchema.isPresent) { builder = builder - .withCatalog(trino.getSetCatalog.orElse(session.getCatalog)) - .withSchema(trino.getSetSchema.orElse(session.getSchema)) + .catalog(trino.getSetCatalog.orElse(session.getCatalog)) + .schema(trino.getSetSchema.orElse(session.getSchema)) } // update path if present if (trino.getSetPath.isPresent) { - builder = builder.withPath(trino.getSetPath.get) + builder = builder.path(trino.getSetPath.get) } // update session properties if present @@ -135,7 +142,7 @@ class TrinoStatement( val properties = session.getProperties.asScala.clone() properties ++= trino.getSetSessionProperties.asScala properties --= trino.getResetSessionProperties.asScala - builder = builder.withProperties(properties.asJava) + builder = builder.properties(properties.asJava) } trinoContext.clientSession.set(builder.build()) diff --git a/externals/kyuubi-trino-engine/src/main/scala/org/apache/kyuubi/engine/trino/operation/SetCurrentCatalog.scala b/externals/kyuubi-trino-engine/src/main/scala/org/apache/kyuubi/engine/trino/operation/SetCurrentCatalog.scala index 16836b0a97d..02fc917fbe3 100644 --- a/externals/kyuubi-trino-engine/src/main/scala/org/apache/kyuubi/engine/trino/operation/SetCurrentCatalog.scala +++ b/externals/kyuubi-trino-engine/src/main/scala/org/apache/kyuubi/engine/trino/operation/SetCurrentCatalog.scala @@ -33,7 +33,7 @@ class SetCurrentCatalog(session: Session, catalog: String) try { val session = trinoContext.clientSession.get var builder = ClientSession.builder(session) - builder = builder.withCatalog(catalog) + builder = builder.catalog(catalog) trinoContext.clientSession.set(builder.build()) setHasResultSet(false) } catch onError() diff --git a/externals/kyuubi-trino-engine/src/main/scala/org/apache/kyuubi/engine/trino/operation/SetCurrentDatabase.scala b/externals/kyuubi-trino-engine/src/main/scala/org/apache/kyuubi/engine/trino/operation/SetCurrentDatabase.scala index aa4697f5f0e..b190251aa8b 100644 --- a/externals/kyuubi-trino-engine/src/main/scala/org/apache/kyuubi/engine/trino/operation/SetCurrentDatabase.scala +++ b/externals/kyuubi-trino-engine/src/main/scala/org/apache/kyuubi/engine/trino/operation/SetCurrentDatabase.scala @@ -33,7 +33,7 @@ class SetCurrentDatabase(session: Session, database: String) try { val session = trinoContext.clientSession.get var builder = ClientSession.builder(session) - builder = builder.withSchema(database) + builder = builder.schema(database) trinoContext.clientSession.set(builder.build()) setHasResultSet(false) } catch onError() diff --git a/externals/kyuubi-trino-engine/src/main/scala/org/apache/kyuubi/engine/trino/session/TrinoSessionImpl.scala b/externals/kyuubi-trino-engine/src/main/scala/org/apache/kyuubi/engine/trino/session/TrinoSessionImpl.scala index 950a0814b5d..674a67d0e38 100644 --- a/externals/kyuubi-trino-engine/src/main/scala/org/apache/kyuubi/engine/trino/session/TrinoSessionImpl.scala +++ b/externals/kyuubi-trino-engine/src/main/scala/org/apache/kyuubi/engine/trino/session/TrinoSessionImpl.scala @@ -19,7 +19,7 @@ package org.apache.kyuubi.engine.trino.session import java.net.URI import java.time.ZoneId -import java.util.{Collections, Locale, Optional} +import java.util.{Locale, 
Optional} import java.util.concurrent.TimeUnit import scala.collection.JavaConverters._ @@ -54,7 +54,7 @@ class TrinoSessionImpl( override val handle: SessionHandle = conf.get(KYUUBI_SESSION_HANDLE_KEY).map(SessionHandle.fromUUID).getOrElse(SessionHandle()) - private val username: String = sessionConf + private val sessionUser: String = sessionConf .getOption(KyuubiReservedKeys.KYUUBI_SESSION_USER_KEY).getOrElse(currentUser) var trinoContext: TrinoContext = _ @@ -93,27 +93,18 @@ class TrinoSessionImpl( val properties = getTrinoSessionConf(sessionConf).asJava - new ClientSession( - URI.create(connectionUrl), - username, - Optional.empty(), - "kyuubi", - Optional.empty(), - Collections.emptySet(), - null, - catalogName, - databaseName, - null, - ZoneId.systemDefault(), - Locale.getDefault, - Collections.emptyMap(), - Collections.emptyMap(), - properties, - Collections.emptyMap(), - Collections.emptyMap(), - null, - new Duration(clientRequestTimeout, TimeUnit.MILLISECONDS), - true) + ClientSession.builder() + .server(URI.create(connectionUrl)) + .principal(Optional.of(sessionUser)) + .source("kyuubi") + .catalog(catalogName) + .schema(databaseName) + .timeZone(ZoneId.systemDefault()) + .locale(Locale.getDefault) + .properties(properties) + .clientRequestTimeout(new Duration(clientRequestTimeout, TimeUnit.MILLISECONDS)) + .compressionDisabled(true) + .build() } private def createHttpClient(): OkHttpClient = { @@ -135,13 +126,16 @@ class TrinoSessionImpl( Optional.ofNullable(keystoreType.orNull), Optional.ofNullable(truststorePath.orNull), Optional.ofNullable(truststorePassword.orNull), - Optional.ofNullable(truststoreType.orNull)) + Optional.ofNullable(truststoreType.orNull), + true) sessionConf.get(KyuubiConf.ENGINE_TRINO_CONNECTION_PASSWORD).foreach { password => require( serverScheme.equalsIgnoreCase("https"), "Trino engine using username/password requires HTTPS to be enabled") - builder.addInterceptor(OkHttpUtil.basicAuth(username, password)) + val user: String = sessionConf + .get(KyuubiConf.ENGINE_TRINO_CONNECTION_USER).getOrElse(sessionUser) + builder.addInterceptor(OkHttpUtil.basicAuth(user, password)) } builder.build() diff --git a/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/WithTrinoContainerServer.scala b/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/WithTrinoContainerServer.scala index f72164b427a..f7c7b253a0f 100644 --- a/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/WithTrinoContainerServer.scala +++ b/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/WithTrinoContainerServer.scala @@ -23,12 +23,9 @@ import java.util.Locale import java.util.Optional import java.util.concurrent.TimeUnit -import scala.collection.JavaConverters._ - import com.dimafeng.testcontainers.TrinoContainer import com.dimafeng.testcontainers.scalatest.TestContainerForAll import io.airlift.units.Duration -import io.trino.client.ClientSelectedRole import io.trino.client.ClientSession import okhttp3.OkHttpClient import org.testcontainers.utility.DockerImageName @@ -38,8 +35,8 @@ import org.apache.kyuubi.config.KyuubiConf trait WithTrinoContainerServer extends KyuubiFunSuite with TestContainerForAll { - final val IMAGE_VERSION = 363 - final val DOCKER_IMAGE_NAME = s"trinodb/trino:${IMAGE_VERSION}" + final val IMAGE_VERSION = 411 + final val DOCKER_IMAGE_NAME = s"trinodb/trino:$IMAGE_VERSION" override val containerDef = TrinoContainer.Def(DockerImageName.parse(DOCKER_IMAGE_NAME)) @@ -56,27 +53,17 
@@ trait WithTrinoContainerServer extends KyuubiFunSuite with TestContainerForAll { } } - protected def session(connectionUrl: String): ClientSession = new ClientSession( - URI.create(connectionUrl), - "kyuubi_test", - Optional.empty(), - "kyuubi", - Optional.empty(), - Set[String]().asJava, - null, - catalog, - schema, - null, - ZoneId.systemDefault(), - Locale.getDefault, - Map[String, String]().asJava, - Map[String, String]().asJava, - Map[String, String]().asJava, - Map[String, ClientSelectedRole]().asJava, - Map[String, String]().asJava, - null, - new Duration(2, TimeUnit.MINUTES), - true) + protected def session(connectionUrl: String): ClientSession = ClientSession.builder() + .server(URI.create(connectionUrl)) + .principal(Optional.of("kyuubi_test")) + .source("kyuubi") + .catalog(catalog) + .schema(schema) + .timeZone(ZoneId.systemDefault()) + .locale(Locale.getDefault) + .clientRequestTimeout(new Duration(2, TimeUnit.MINUTES)) + .compressionDisabled(true) + .build() lazy val httpClient = new OkHttpClient.Builder().build() } diff --git a/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/operation/TrinoOperationSuite.scala b/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/operation/TrinoOperationSuite.scala index c49c4965bfc..c3a81a9e43e 100644 --- a/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/operation/TrinoOperationSuite.scala +++ b/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/operation/TrinoOperationSuite.scala @@ -89,7 +89,9 @@ class TrinoOperationSuite extends WithTrinoEngine with TrinoQueryTests { "tdigest", "LikePattern", "function", - "Classifier") + "Classifier", + "json2016", + "JsonPath2016") val typeInfos: Set[String] = Set() while (typeInfo.next()) { assert(expectedTypes.contains(typeInfo.getString(TYPE_NAME))) diff --git a/integration-tests/kyuubi-gluten-it/pom.xml b/integration-tests/kyuubi-gluten-it/pom.xml index ac49c286ade..fe34bc2e93a 100644 --- a/integration-tests/kyuubi-gluten-it/pom.xml +++ b/integration-tests/kyuubi-gluten-it/pom.xml @@ -30,7 +30,7 @@ https://kyuubi.apache.org/ - 1.1.0-SNAPSHOT + 1.2.0-SNAPSHOT 3.4.2 3.4 diff --git a/integration-tests/kyuubi-hive-it/pom.xml b/integration-tests/kyuubi-hive-it/pom.xml index c4e9f320c95..cdd9fa4d99b 100644 --- a/integration-tests/kyuubi-hive-it/pom.xml +++ b/integration-tests/kyuubi-hive-it/pom.xml @@ -68,6 +68,37 @@ test-jar test + + + + org.apache.hadoop + hadoop-client-minicluster + test + + + + org.bouncycastle + bcprov-jdk15on + test + + + + org.bouncycastle + bcpkix-jdk15on + test + + + + jakarta.activation + jakarta.activation-api + test + + + + jakarta.xml.bind + jakarta.xml.bind-api + test + diff --git a/integration-tests/kyuubi-hive-it/src/test/scala/org/apache/kyuubi/it/hive/operation/KyuubiOperationHiveEngineYarnModeSuite.scala b/integration-tests/kyuubi-hive-it/src/test/scala/org/apache/kyuubi/it/hive/operation/KyuubiOperationHiveEngineYarnModeSuite.scala new file mode 100644 index 00000000000..55943094f21 --- /dev/null +++ b/integration-tests/kyuubi-hive-it/src/test/scala/org/apache/kyuubi/it/hive/operation/KyuubiOperationHiveEngineYarnModeSuite.scala @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.kyuubi.it.hive.operation + +import org.apache.kyuubi.{HiveEngineTests, Utils, WithKyuubiServerAndHadoopMiniCluster} +import org.apache.kyuubi.config.KyuubiConf +import org.apache.kyuubi.config.KyuubiConf.{ENGINE_IDLE_TIMEOUT, ENGINE_TYPE, KYUUBI_ENGINE_ENV_PREFIX, KYUUBI_HOME} +import org.apache.kyuubi.engine.deploy.DeployMode + +class KyuubiOperationHiveEngineYarnModeSuite extends HiveEngineTests + with WithKyuubiServerAndHadoopMiniCluster { + + override protected val conf: KyuubiConf = { + val metastore = Utils.createTempDir(prefix = getClass.getSimpleName) + metastore.toFile.delete() + KyuubiConf() + .set(s"$KYUUBI_ENGINE_ENV_PREFIX.$KYUUBI_HOME", kyuubiHome) + .set(ENGINE_TYPE, "HIVE_SQL") + .set(KyuubiConf.ENGINE_HIVE_DEPLOY_MODE, DeployMode.YARN.toString) + // increase this to 30s as hive session state and metastore client is slow initializing + .setIfMissing(ENGINE_IDLE_TIMEOUT, 30000L) + .set("javax.jdo.option.ConnectionURL", s"jdbc:derby:;databaseName=$metastore;create=true") + } + + override def beforeAll(): Unit = { + super.beforeAll() + conf + .set(KyuubiConf.ENGINE_DEPLOY_YARN_MODE_MEMORY, Math.min(getYarnMaximumAllocationMb, 1024)) + .set(KyuubiConf.ENGINE_DEPLOY_YARN_MODE_CORES, 1) + } + + override protected def jdbcUrl: String = getJdbcUrl +} diff --git a/integration-tests/kyuubi-trino-it/src/test/scala/org/apache/kyuubi/it/trino/WithKyuubiServerAndTrinoContainer.scala b/integration-tests/kyuubi-trino-it/src/test/scala/org/apache/kyuubi/it/trino/WithKyuubiServerAndTrinoContainer.scala index 3b0548bde34..8e0d3f095b5 100644 --- a/integration-tests/kyuubi-trino-it/src/test/scala/org/apache/kyuubi/it/trino/WithKyuubiServerAndTrinoContainer.scala +++ b/integration-tests/kyuubi-trino-it/src/test/scala/org/apache/kyuubi/it/trino/WithKyuubiServerAndTrinoContainer.scala @@ -28,8 +28,8 @@ trait WithKyuubiServerAndTrinoContainer extends WithKyuubiServer with TestContai val kyuubiHome: String = Utils.getCodeSourceLocation(getClass).split("integration-tests").head - final val IMAGE_VERSION = 363 - final val DOCKER_IMAGE_NAME = s"trinodb/trino:${IMAGE_VERSION}" + final val IMAGE_VERSION = 411 + final val DOCKER_IMAGE_NAME = s"trinodb/trino:$IMAGE_VERSION" override val containerDef: TrinoContainer.Def = TrinoContainer.Def(DOCKER_IMAGE_NAME) diff --git a/integration-tests/kyuubi-trino-it/src/test/scala/org/apache/kyuubi/it/trino/server/TrinoFrontendSuite.scala b/integration-tests/kyuubi-trino-it/src/test/scala/org/apache/kyuubi/it/trino/server/TrinoFrontendSuite.scala index 7575bf8a9b4..20865243693 100644 --- a/integration-tests/kyuubi-trino-it/src/test/scala/org/apache/kyuubi/it/trino/server/TrinoFrontendSuite.scala +++ b/integration-tests/kyuubi-trino-it/src/test/scala/org/apache/kyuubi/it/trino/server/TrinoFrontendSuite.scala @@ -19,8 +19,9 @@ package org.apache.kyuubi.it.trino.server import scala.util.control.NonFatal -import org.apache.kyuubi.WithKyuubiServer +import 
org.apache.kyuubi.{Utils, WithKyuubiServer} import org.apache.kyuubi.config.KyuubiConf +import org.apache.kyuubi.config.KyuubiConf.{KYUUBI_ENGINE_ENV_PREFIX, KYUUBI_HOME} import org.apache.kyuubi.operation.SparkMetadataTests /** @@ -53,8 +54,12 @@ class TrinoFrontendSuite extends WithKyuubiServer with SparkMetadataTests { } } + val kyuubiHome: String = Utils.getCodeSourceLocation(getClass).split("integration-tests").head + override protected val conf: KyuubiConf = { - KyuubiConf().set(KyuubiConf.FRONTEND_PROTOCOLS, Seq("TRINO")) + KyuubiConf() + .set(KyuubiConf.FRONTEND_PROTOCOLS, Seq("TRINO")) + .set(s"$KYUUBI_ENGINE_ENV_PREFIX.$KYUUBI_HOME", kyuubiHome) } override protected def jdbcUrl: String = { diff --git a/kyuubi-common/src/main/scala/org/apache/kyuubi/Utils.scala b/kyuubi-common/src/main/scala/org/apache/kyuubi/Utils.scala index 896ed9df29d..961a69ad005 100644 --- a/kyuubi-common/src/main/scala/org/apache/kyuubi/Utils.scala +++ b/kyuubi-common/src/main/scala/org/apache/kyuubi/Utils.scala @@ -159,6 +159,17 @@ object Utils extends Logging { dir } + /** + * List the files recursively in a directory. + */ + def listFilesRecursively(file: File): Seq[File] = { + if (!file.isDirectory) { + file :: Nil + } else { + file.listFiles().flatMap(listFilesRecursively) + } + } + /** * Copies bytes from an InputStream source to a newly created temporary file * created in the directory destination. The temporary file will be created diff --git a/kyuubi-common/src/main/scala/org/apache/kyuubi/config/KyuubiConf.scala b/kyuubi-common/src/main/scala/org/apache/kyuubi/config/KyuubiConf.scala index 784b82cddeb..3eedfdded91 100644 --- a/kyuubi-common/src/main/scala/org/apache/kyuubi/config/KyuubiConf.scala +++ b/kyuubi-common/src/main/scala/org/apache/kyuubi/config/KyuubiConf.scala @@ -30,6 +30,7 @@ import scala.util.matching.Regex import org.apache.kyuubi.{Logging, Utils} import org.apache.kyuubi.config.KyuubiConf._ import org.apache.kyuubi.engine.{EngineType, ShareLevel} +import org.apache.kyuubi.engine.deploy.DeployMode import org.apache.kyuubi.operation.{NoneMode, PlainStyle} import org.apache.kyuubi.service.authentication.{AuthTypes, SaslQOP} @@ -231,6 +232,7 @@ object KyuubiConf { final val KYUUBI_CONF_FILE_NAME = "kyuubi-defaults.conf" final val KYUUBI_HOME = "KYUUBI_HOME" final val KYUUBI_ENGINE_ENV_PREFIX = "kyuubi.engineEnv" + final val KYUUBI_ENGINE_YARN_MODE_ENV_PREFIX = "kyuubi.engine.yarn.AMEnv" final val KYUUBI_BATCH_CONF_PREFIX = "kyuubi.batchConf" final val KYUUBI_KUBERNETES_CONF_PREFIX = "kyuubi.kubernetes" final val USER_DEFAULTS_CONF_QUOTE = "___" @@ -743,14 +745,6 @@ object KyuubiConf { .toSequence() .createWithDefault(Nil) - val FRONTEND_THRIFT_HTTP_ALLOW_USER_SUBSTITUTION: ConfigEntry[Boolean] = - buildConf("kyuubi.frontend.thrift.http.allow.user.substitution") - .doc("Allow alternate user to be specified as part of open connection" + - " request when using HTTP transport mode.") - .version("1.6.0") - .booleanConf - .createWithDefault(true) - val FRONTEND_PROXY_HTTP_CLIENT_IP_HEADER: ConfigEntry[String] = buildConf("kyuubi.frontend.proxy.http.client.ip.header") .doc("The HTTP header to record the real client IP address. If your server is behind a load" + @@ -763,7 +757,7 @@ object KyuubiConf { .stringConf .createWithDefault("X-Real-IP") - val AUTHENTICATION_METHOD: ConfigEntry[Set[String]] = buildConf("kyuubi.authentication") + val AUTHENTICATION_METHOD: ConfigEntry[Seq[String]] = buildConf("kyuubi.authentication") .doc("A comma-separated list of client authentication types." + "
    " + "
      " <li>NOSASL: raw transport.</li>" +
  • " + @@ -799,9 +793,9 @@ object KyuubiConf { .serverOnly .stringConf .transformToUpperCase - .toSet() + .toSequence() .checkValues(AuthTypes) - .createWithDefault(Set(AuthTypes.NONE.toString)) + .createWithDefault(Seq(AuthTypes.NONE.toString)) val AUTHENTICATION_CUSTOM_CLASS: OptionalConfigEntry[String] = buildConf("kyuubi.authentication.custom.class") @@ -1394,6 +1388,13 @@ object KyuubiConf { .stringConf .createOptional + val ENGINE_TRINO_CONNECTION_USER: OptionalConfigEntry[String] = + buildConf("kyuubi.engine.trino.connection.user") + .doc("The user used for connecting to trino cluster") + .version("1.9.0") + .stringConf + .createOptional + val ENGINE_TRINO_CONNECTION_PASSWORD: OptionalConfigEntry[String] = buildConf("kyuubi.engine.trino.connection.password") .doc("The password used for connecting to trino cluster") @@ -2693,6 +2694,77 @@ object KyuubiConf { .stringConf .createOptional + val ENGINE_HIVE_DEPLOY_MODE: ConfigEntry[String] = + buildConf("kyuubi.engine.hive.deploy.mode") + .doc("Configures the hive engine deploy mode, The value can be 'local', 'yarn'. " + + "In local mode, the engine operates on the same node as the KyuubiServer. " + + "In YARN mode, the engine runs within the Application Master (AM) container of YARN. ") + .version("1.9.0") + .stringConf + .transformToUpperCase + .checkValue( + mode => Set("LOCAL", "YARN").contains(mode), + "Invalid value for 'kyuubi.engine.hive.deploy.mode'. Valid values are 'local', 'yarn'.") + .createWithDefault(DeployMode.LOCAL.toString) + + val ENGINE_DEPLOY_YARN_MODE_STAGING_DIR: OptionalConfigEntry[String] = + buildConf("kyuubi.engine.yarn.stagingDir") + .doc("Staging directory used while submitting kyuubi engine to YARN, " + + "It should be a absolute path in HDFS.") + .version("1.9.0") + .stringConf + .createOptional + + val ENGINE_DEPLOY_YARN_MODE_TAGS: OptionalConfigEntry[Seq[String]] = + buildConf("kyuubi.engine.yarn.tags") + .doc(s"kyuubi engine yarn tags when the engine deploy mode is YARN.") + .version("1.9.0") + .stringConf + .toSequence() + .createOptional + + val ENGINE_DEPLOY_YARN_MODE_QUEUE: ConfigEntry[String] = + buildConf("kyuubi.engine.yarn.queue") + .doc(s"kyuubi engine yarn queue when the engine deploy mode is YARN.") + .version("1.9.0") + .stringConf + .createWithDefault("default") + + val ENGINE_DEPLOY_YARN_MODE_PRIORITY: OptionalConfigEntry[Int] = + buildConf("kyuubi.engine.yarn.priority") + .doc(s"kyuubi engine yarn priority when the engine deploy mode is YARN.") + .version("1.9.0") + .intConf + .createOptional + + val ENGINE_DEPLOY_YARN_MODE_APP_NAME: OptionalConfigEntry[String] = + buildConf("kyuubi.engine.yarn.app.name") + .doc(s"The YARN app name when the engine deploy mode is YARN.") + .version("1.9.0") + .stringConf + .createOptional + + val ENGINE_DEPLOY_YARN_MODE_MEMORY: ConfigEntry[Int] = + buildConf("kyuubi.engine.yarn.memory") + .doc(s"kyuubi engine container memory in mb when the engine deploy mode is YARN.") + .version("1.9.0") + .intConf + .createWithDefault(1024) + + val ENGINE_DEPLOY_YARN_MODE_CORES: ConfigEntry[Int] = + buildConf("kyuubi.engine.yarn.cores") + .doc(s"kyuubi engine container core number when the engine deploy mode is YARN.") + .version("1.9.0") + .intConf + .createWithDefault(1) + + val ENGINE_DEPLOY_YARN_MODE_JAVA_OPTIONS: OptionalConfigEntry[String] = + buildConf("kyuubi.engine.yarn.java.options") + .doc(s"The extra Java options for the AM when the engine deploy mode is YARN.") + .version("1.9.0") + .stringConf + .createOptional + val ENGINE_FLINK_MEMORY: 
ConfigEntry[String] = buildConf("kyuubi.engine.flink.memory") .doc("The heap memory for the Flink SQL engine. Only effective in yarn session mode.") diff --git a/kyuubi-common/src/main/scala/org/apache/kyuubi/engine/deploy/DeployMode.scala b/kyuubi-common/src/main/scala/org/apache/kyuubi/engine/deploy/DeployMode.scala new file mode 100644 index 00000000000..50aa3e4d6e7 --- /dev/null +++ b/kyuubi-common/src/main/scala/org/apache/kyuubi/engine/deploy/DeployMode.scala @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.kyuubi.engine.deploy + +object DeployMode extends Enumeration { + type DeployMode = Value + val + /** + * In this mode, the engine will be launched locally. + */ + LOCAL, + /** + * In this mode, the engine will be launched on YARN. + */ + YARN, + /** + * In this mode, the engine will be launched on Kubernetes. + */ + KUBERNETES = Value +} diff --git a/kyuubi-common/src/main/scala/org/apache/kyuubi/engine/deploy/yarn/ApplicationMaster.scala b/kyuubi-common/src/main/scala/org/apache/kyuubi/engine/deploy/yarn/ApplicationMaster.scala new file mode 100644 index 00000000000..3e396beb070 --- /dev/null +++ b/kyuubi-common/src/main/scala/org/apache/kyuubi/engine/deploy/yarn/ApplicationMaster.scala @@ -0,0 +1,163 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
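The new kyuubi.engine.hive.deploy.mode entry plus the kyuubi.engine.yarn.* resources above are the whole switch for running the Hive engine inside a YARN AM. A minimal sketch in the spirit of KyuubiOperationHiveEngineYarnModeSuite shown earlier; the queue name and resource sizes are illustrative values, not recommended defaults.

import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.engine.deploy.DeployMode

// Switch the Hive engine from LOCAL to YARN deploy mode and size its AM container.
val conf = KyuubiConf()
  .set(KyuubiConf.ENGINE_TYPE, "HIVE_SQL")
  .set(KyuubiConf.ENGINE_HIVE_DEPLOY_MODE, DeployMode.YARN.toString)
  .set(KyuubiConf.ENGINE_DEPLOY_YARN_MODE_QUEUE, "root.engines") // target YARN queue
  .set(KyuubiConf.ENGINE_DEPLOY_YARN_MODE_MEMORY, 2048)          // AM container memory in MB
  .set(KyuubiConf.ENGINE_DEPLOY_YARN_MODE_CORES, 2)              // AM container vcores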
+ */ +package org.apache.kyuubi.engine.deploy.yarn + +import java.io.{File, IOException} + +import scala.collection.mutable.ArrayBuffer + +import org.apache.hadoop.fs.Path +import org.apache.hadoop.yarn.api.records.FinalApplicationStatus +import org.apache.hadoop.yarn.client.api.AMRMClient +import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest +import org.apache.hadoop.yarn.conf.YarnConfiguration + +import org.apache.kyuubi.{Logging, Utils} +import org.apache.kyuubi.config.KyuubiConf +import org.apache.kyuubi.service.Serverable +import org.apache.kyuubi.util.KyuubiHadoopUtils +import org.apache.kyuubi.util.command.CommandLineUtils.confKeyValues +import org.apache.kyuubi.util.reflect.{DynFields, DynMethods} + +object ApplicationMaster extends Logging { + + private var amClient: AMRMClient[ContainerRequest] = _ + private var yarnConf: YarnConfiguration = _ + + private val kyuubiConf = new KyuubiConf() + + private var currentEngineMainClass: String = _ + + private var currentEngine: Serverable = _ + + private var finalMsg: String = _ + + @volatile private var registered: Boolean = false + @volatile private var unregistered: Boolean = false + @volatile private var finalStatus = FinalApplicationStatus.UNDEFINED + + def main(args: Array[String]): Unit = { + try { + val amArgs = new ApplicationMasterArguments(args) + Utils.getPropertiesFromFile(Some(new File(amArgs.propertiesFile))).foreach { case (k, v) => + kyuubiConf.set(k, v) + } + currentEngineMainClass = amArgs.engineMainClass + yarnConf = KyuubiHadoopUtils.newYarnConfiguration(kyuubiConf) + Utils.addShutdownHook(() => { + if (!unregistered) { + if (currentEngine != null && currentEngine.selfExited) { + finalMsg = "Kyuubi Application Master is shutting down." + finalStatus = FinalApplicationStatus.SUCCEEDED + } else { + finalMsg = "Kyuubi Application Master is shutting down with error." 
+ finalStatus = FinalApplicationStatus.FAILED + } + cleanupStagingDir() + unregister(finalStatus, finalMsg) + } + }) + runApplicationMaster() + } catch { + case t: Throwable => + error("Error running ApplicationMaster", t) + finalStatus = FinalApplicationStatus.FAILED + finalMsg = t.getMessage + cleanupStagingDir() + unregister(finalStatus, finalMsg) + if (currentEngine != null) { + currentEngine.stop() + } + } + } + + def runApplicationMaster(): Unit = { + initAmClient() + + runEngine() + + registerAM() + } + + def runEngine(): Unit = { + val buffer = new ArrayBuffer[String]() + buffer ++= confKeyValues(kyuubiConf.getAll) + + val instance = DynFields.builder() + .impl(currentEngineMainClass, "MODULE$") + .build[Object].get(null) + DynMethods.builder("main") + .hiddenImpl(currentEngineMainClass, classOf[Array[String]]) + .buildChecked() + .invoke(instance, buffer.toArray) + + currentEngine = DynFields.builder() + .hiddenImpl(currentEngineMainClass, "currentEngine") + .buildChecked[Option[Serverable]]() + .get(instance) + .get + } + + def initAmClient(): Unit = { + amClient = AMRMClient.createAMRMClient() + amClient.init(yarnConf) + amClient.start() + } + + def registerAM(): Unit = { + val frontendService = currentEngine.frontendServices.head + val trackingUrl = frontendService.connectionUrl + val (host, port) = resolveHostAndPort(trackingUrl) + info("Registering the HiveSQLEngine ApplicationMaster with tracking url " + + s"$trackingUrl, host = $host, port = $port") + synchronized { + amClient.registerApplicationMaster(host, port, trackingUrl) + registered = true + } + } + + def unregister(status: FinalApplicationStatus, diagnostics: String): Unit = { + synchronized { + if (registered && !unregistered) { + info(s"Unregistering ApplicationMaster with $status" + + Option(diagnostics).map(msg => s" (diagnostics message: $msg)").getOrElse("")) + unregistered = true + amClient.unregisterApplicationMaster(status, diagnostics, "") + if (amClient != null) { + amClient.stop() + } + } + } + } + + private def resolveHostAndPort(connectionUrl: String): (String, Int) = { + val strings = connectionUrl.split(":") + (strings(0), strings(1).toInt) + } + + private def cleanupStagingDir(): Unit = { + val stagingDirPath = new Path(System.getenv("KYUUBI_ENGINE_YARN_MODE_STAGING_DIR")) + try { + val fs = stagingDirPath.getFileSystem(yarnConf) + info("Deleting staging directory " + stagingDirPath) + fs.delete(stagingDirPath, true) + } catch { + case ioe: IOException => + error("Failed to cleanup staging dir " + stagingDirPath, ioe) + } + } +} diff --git a/kyuubi-common/src/main/scala/org/apache/kyuubi/engine/deploy/yarn/ApplicationMasterArguments.scala b/kyuubi-common/src/main/scala/org/apache/kyuubi/engine/deploy/yarn/ApplicationMasterArguments.scala new file mode 100644 index 00000000000..d4be6afbf50 --- /dev/null +++ b/kyuubi-common/src/main/scala/org/apache/kyuubi/engine/deploy/yarn/ApplicationMasterArguments.scala @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.kyuubi.engine.deploy.yarn + +import org.apache.kyuubi.Logging + +class ApplicationMasterArguments(val args: Array[String]) extends Logging { + var engineMainClass: String = null + var propertiesFile: String = null + + parseArgs(args.toList) + + private def parseArgs(inputArgs: List[String]): Unit = { + var args = inputArgs + + while (args.nonEmpty) { + args match { + case ("--class") :: value :: tail => + engineMainClass = value + args = tail + + case ("--properties-file") :: value :: tail => + propertiesFile = value + args = tail + + case other => + throw new IllegalArgumentException(s"Unrecognized option $other.") + } + } + validateRequiredArguments() + } + + private def validateRequiredArguments(): Unit = { + if (engineMainClass == null) { + throw new IllegalArgumentException("No engine main class provided.") + } + + if (propertiesFile == null) { + throw new IllegalArgumentException("No properties file provided.") + } + } +} diff --git a/kyuubi-common/src/main/scala/org/apache/kyuubi/engine/deploy/yarn/EngineYarnModeSubmitter.scala b/kyuubi-common/src/main/scala/org/apache/kyuubi/engine/deploy/yarn/EngineYarnModeSubmitter.scala new file mode 100644 index 00000000000..552a3158f3d --- /dev/null +++ b/kyuubi-common/src/main/scala/org/apache/kyuubi/engine/deploy/yarn/EngineYarnModeSubmitter.scala @@ -0,0 +1,435 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
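The AM entry point receives exactly two options, later assembled into the container command by createContainerLaunchContext below: --class naming the engine's main object and --properties-file pointing at the localized Kyuubi conf. A small parsing sketch; the engine class name and the relative path are placeholders for illustration.

import org.apache.kyuubi.engine.deploy.yarn.ApplicationMasterArguments

// Any unrecognized option, or a missing required one, raises IllegalArgumentException.
val amArgs = new ApplicationMasterArguments(Array(
  "--class", "org.apache.kyuubi.engine.hive.HiveSQLEngine",
  "--properties-file", "__kyuubi_engine_conf__/__kyuubi_conf__.properties"))
assert(amArgs.engineMainClass == "org.apache.kyuubi.engine.hive.HiveSQLEngine")
assert(amArgs.propertiesFile.endsWith("__kyuubi_conf__.properties"))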
+ */ +package org.apache.kyuubi.engine.deploy.yarn + +import java.io._ +import java.nio.charset.StandardCharsets +import java.nio.file.Files +import java.util +import java.util.{Locale, Properties} +import java.util.zip.{ZipEntry, ZipOutputStream} + +import scala.collection.JavaConverters._ +import scala.collection.mutable +import scala.collection.mutable.ListBuffer + +import org.apache.hadoop.conf.Configuration +import org.apache.hadoop.fs.{FileSystem, Path} +import org.apache.hadoop.fs.permission.FsPermission +import org.apache.hadoop.security.UserGroupInformation +import org.apache.hadoop.yarn.api.ApplicationConstants +import org.apache.hadoop.yarn.api.ApplicationConstants.Environment +import org.apache.hadoop.yarn.api.records._ +import org.apache.hadoop.yarn.client.api.{YarnClient, YarnClientApplication} +import org.apache.hadoop.yarn.util.Records + +import org.apache.kyuubi.{KyuubiException, Logging, Utils} +import org.apache.kyuubi.config.KyuubiConf +import org.apache.kyuubi.config.KyuubiConf._ +import org.apache.kyuubi.engine.deploy.yarn.EngineYarnModeSubmitter._ +import org.apache.kyuubi.util.KyuubiHadoopUtils + +abstract class EngineYarnModeSubmitter extends Logging { + + val KYUUBI_ENGINE_STAGING: String = ".kyuubiEngineStaging" + + /* + * The following variables are used to describe the contents of the + * ApplicationMaster's working directory. The directory structure is as follows: + * + * ApplicationMasterWorkDir/ + * |-- __kyuubi_engine_conf__ + * | |-- __hadoop_conf__ + * | | |-- hadoop conf file1 + * | | |-- hadoop conf file2 + * | | `-- ... + * | `-- __kyuubi_conf__.properties + * `-- __kyuubi_engine_libs__ + * |-- kyuubi_engine.jar + * `-- ... + */ + val LOCALIZED_LIB_DIR = "__kyuubi_engine_libs__" + val LOCALIZED_CONF_DIR = "__kyuubi_engine_conf__" + val HADOOP_CONF_DIR = "__hadoop_conf__" + val KYUUBI_CONF_FILE = "__kyuubi_conf__.properties" + + val STAGING_DIR_PERMISSION: FsPermission = + FsPermission.createImmutable(Integer.parseInt("700", 8).toShort) + + private val applicationMaster = ApplicationMaster.getClass.getName.dropRight(1) + + @volatile private var yarnClient: YarnClient = _ + private var appId: ApplicationId = _ + + private[engine] var stagingDirPath: Path = _ + + val kyuubiConf = new KyuubiConf() + + var yarnConf: Configuration = _ + var hadoopConf: Configuration = _ + + var engineType: String + + def engineMainClass(): String + + /** + * The extra jars that will be added to the classpath of the engine. + */ + def engineExtraJars(): Seq[File] = Seq.empty + + protected def submitApplication(): Unit = { + yarnConf = KyuubiHadoopUtils.newYarnConfiguration(kyuubiConf) + hadoopConf = KyuubiHadoopUtils.newHadoopConf(kyuubiConf) + try { + yarnClient = YarnClient.createYarnClient() + yarnClient.init(yarnConf) + yarnClient.start() + + debug("Requesting a new application from cluster with %d NodeManagers" + .format(yarnClient.getYarnClusterMetrics.getNumNodeManagers)) + + val newApp = yarnClient.createApplication() + val newAppResponse = newApp.getNewApplicationResponse + appId = newAppResponse.getApplicationId + + // The app staging dir based on the STAGING_DIR configuration if configured + // otherwise based on the users home directory. 
+ val appStagingBaseDir = kyuubiConf.get(ENGINE_DEPLOY_YARN_MODE_STAGING_DIR) + .map { new Path(_, UserGroupInformation.getCurrentUser.getShortUserName) } + .getOrElse(FileSystem.get(hadoopConf).getHomeDirectory()) + stagingDirPath = new Path(appStagingBaseDir, buildPath(KYUUBI_ENGINE_STAGING, appId.toString)) + + // Set up the appropriate contexts to launch AM + val containerContext = createContainerLaunchContext() + val appContext = createApplicationSubmissionContext(newApp, containerContext) + + // Finally, submit and monitor the application + info(s"Submitting application $appId to ResourceManager") + yarnClient.submitApplication(appContext) + monitorApplication(appId) + } catch { + case e: Throwable => + if (stagingDirPath != null) { + cleanupStagingDir() + } + throw new KyuubiException("Failed to submit application to YARN", e) + } finally { + if (yarnClient != null) { + yarnClient.stop() + } + } + } + + private def createContainerLaunchContext(): ContainerLaunchContext = { + info("Setting up container launch context for engine AM") + val env = setupLaunchEnv(kyuubiConf) + val localResources = prepareLocalResources(stagingDirPath, env) + + val amContainer = Records.newRecord(classOf[ContainerLaunchContext]) + amContainer.setLocalResources(localResources.asJava) + amContainer.setEnvironment(env.asJava) + + val javaOpts = ListBuffer[String]() + + val javaOptions = kyuubiConf.get(ENGINE_DEPLOY_YARN_MODE_JAVA_OPTIONS) + if (javaOptions.isDefined) { + javaOpts += javaOptions.get + } + + val am = Seq(applicationMaster) + + val engineClass = Seq("--class", engineMainClass()) + + val kyuubiConfProperties = Seq( + "--properties-file", + buildPath(Environment.PWD.$$(), LOCALIZED_CONF_DIR, KYUUBI_CONF_FILE)) + + val commands = + Seq(Environment.JAVA_HOME.$$() + "/bin/java", "-server") ++ + javaOpts ++ am ++ engineClass ++ kyuubiConfProperties ++ + Seq( + "1>", + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stdout", + "2>", + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stderr") + + val printableCommands = commands.map(s => if (s == null) "null" else s).toList + amContainer.setCommands(printableCommands.asJava) + info(s"Commands: ${printableCommands.mkString(" ")}") + + amContainer + } + + private def prepareLocalResources( + destDir: Path, + env: mutable.HashMap[String, String]): mutable.HashMap[String, LocalResource] = { + info("Preparing resources for engine AM container") + // Upload kyuubi engine and the extra JAR to the remote file system if necessary, + // and add them as local resources to the application master. 
+ val fs = destDir.getFileSystem(hadoopConf) + + val localResources = mutable.HashMap[String, LocalResource]() + FileSystem.mkdirs(fs, destDir, new FsPermission(STAGING_DIR_PERMISSION)) + + distributeJars(localResources, env) + distributeConf(localResources, env) + localResources + } + + private def distributeJars( + localResources: mutable.HashMap[String, LocalResource], + env: mutable.HashMap[String, String]): Unit = { + val jarsArchive = File.createTempFile(LOCALIZED_LIB_DIR, ".zip", Utils.createTempDir().toFile) + val jarsStream = new ZipOutputStream(new FileOutputStream(jarsArchive)) + try { + jarsStream.setLevel(0) + val jars = kyuubiConf.getOption(KYUUBI_ENGINE_DEPLOY_YARN_MODE_JARS_KEY) + val putedEntry = new ListBuffer[String] + jars.get.split(KYUUBI_ENGINE_DEPLOY_YARN_MODE_ARCHIVE_SEPARATOR).foreach { path => + val jars = Utils.listFilesRecursively(new File(path)) ++ engineExtraJars() + jars.foreach { f => + if (!putedEntry.contains(f.getName) && f.isFile && + f.getName.toLowerCase(Locale.ROOT).endsWith(".jar") && f.canRead) { + jarsStream.putNextEntry(new ZipEntry(f.getName)) + Files.copy(f.toPath, jarsStream) + jarsStream.closeEntry() + putedEntry += f.getName + addClasspathEntry(buildPath(Environment.PWD.$$(), LOCALIZED_LIB_DIR, f.getName), env) + } + } + } + putedEntry.clear() + } finally { + jarsStream.close() + } + + distribute( + new Path(jarsArchive.getAbsolutePath), + resType = LocalResourceType.ARCHIVE, + destName = LOCALIZED_LIB_DIR, + localResources) + } + + private def distributeConf( + localResources: mutable.HashMap[String, LocalResource], + env: mutable.HashMap[String, String]): Unit = { + val confArchive = File.createTempFile(LOCALIZED_CONF_DIR, ".zip", Utils.createTempDir().toFile) + val confStream = new ZipOutputStream(new FileOutputStream(confArchive)) + try { + confStream.setLevel(0) + val putedEntry = new ListBuffer[String] + def putEntry(f: File): Unit = { + if (!putedEntry.contains(f.getName) && f.isFile && f.canRead) { + confStream.putNextEntry(new ZipEntry(s"$HADOOP_CONF_DIR/${f.getName}")) + Files.copy(f.toPath, confStream) + confStream.closeEntry() + putedEntry += f.getName + addClasspathEntry( + buildPath(Environment.PWD.$$(), LOCALIZED_CONF_DIR, HADOOP_CONF_DIR, f.getName), + env) + } + } + // respect the following priority loading configuration, and distinct files + // hive configuration -> hadoop configuration -> yarn configuration + val hiveConf = kyuubiConf.getOption(KYUUBI_ENGINE_DEPLOY_YARN_MODE_HIVE_CONF_KEY) + listDistinctFiles(hiveConf.get).foreach(putEntry) + val hadoopConf = kyuubiConf.getOption(KYUUBI_ENGINE_DEPLOY_YARN_MODE_HADOOP_CONF_KEY) + listDistinctFiles(hadoopConf.get).foreach(putEntry) + val yarnConf = kyuubiConf.getOption(KYUUBI_ENGINE_DEPLOY_YARN_MODE_YARN_CONF_KEY) + listDistinctFiles(yarnConf.get).foreach(putEntry) + + val properties = confToProperties(kyuubiConf) + writePropertiesToArchive(properties, KYUUBI_CONF_FILE, confStream) + } finally { + confStream.close() + } + + distribute( + new Path(confArchive.getAbsolutePath), + resType = LocalResourceType.ARCHIVE, + destName = LOCALIZED_CONF_DIR, + localResources) + } + + def listDistinctFiles(archive: String): Seq[File] = { + val distinctFiles = new mutable.LinkedHashSet[File] + archive.split(KYUUBI_ENGINE_DEPLOY_YARN_MODE_ARCHIVE_SEPARATOR).foreach { path => + val file = new File(path) + val files = Utils.listFilesRecursively(file) + files.foreach { f => + if (f.isFile && f.canRead) { + distinctFiles += f + } + } + } + distinctFiles.groupBy(_.getName).map { + case (_, 
items) => items.head + }.toSeq + } + + private def distribute( + srcPath: Path, + resType: LocalResourceType, + destName: String, + localResources: mutable.HashMap[String, LocalResource]): Unit = { + val fs = stagingDirPath.getFileSystem(hadoopConf) + val destPath = new Path(stagingDirPath, srcPath.getName) + info(s"Copying $srcPath to $destPath") + fs.copyFromLocalFile(srcPath, destPath) + fs.setPermission(destPath, new FsPermission(STAGING_DIR_PERMISSION)) + + val destFs = FileSystem.get(destPath.toUri, hadoopConf) + val destStatus = destFs.getFileStatus(destPath) + + val destResource = Records.newRecord(classOf[LocalResource]) + destResource.setType(resType) + destResource.setVisibility(LocalResourceVisibility.APPLICATION) + destResource.setResource(URL.fromPath(destPath)) + destResource.setTimestamp(destStatus.getModificationTime) + destResource.setSize(destStatus.getLen) + localResources(destName) = destResource + } + + private[kyuubi] def setupLaunchEnv(kyuubiConf: KyuubiConf): mutable.HashMap[String, String] = { + info("Setting up the launch environment for engine AM container") + val env = new mutable.HashMap[String, String]() + + kyuubiConf.getAll + .filter { case (k, _) => k.startsWith(KyuubiConf.KYUUBI_ENGINE_YARN_MODE_ENV_PREFIX) } + .map { case (k, v) => + (k.substring(KyuubiConf.KYUUBI_ENGINE_YARN_MODE_ENV_PREFIX.length + 1), v) + } + .foreach { case (k, v) => KyuubiHadoopUtils.addPathToEnvironment(env, k, v) } + + addClasspathEntry(buildPath(Environment.PWD.$$(), LOCALIZED_CONF_DIR), env) + env.put( + Environment.HADOOP_CONF_DIR.name(), + buildPath(Environment.PWD.$$(), LOCALIZED_CONF_DIR, HADOOP_CONF_DIR)) + addClasspathEntry(buildPath(Environment.PWD.$$(), LOCALIZED_CONF_DIR, HADOOP_CONF_DIR), env) + env.put("KYUUBI_ENGINE_YARN_MODE_STAGING_DIR", stagingDirPath.toString) + env + } + + private def createApplicationSubmissionContext( + newApp: YarnClientApplication, + containerContext: ContainerLaunchContext): ApplicationSubmissionContext = { + + val appContext = newApp.getApplicationSubmissionContext + appContext.setApplicationName(kyuubiConf.get(ENGINE_DEPLOY_YARN_MODE_APP_NAME) + .getOrElse(s"Apache Kyuubi $engineType Engine")) + appContext.setQueue(kyuubiConf.get(ENGINE_DEPLOY_YARN_MODE_QUEUE)) + appContext.setAMContainerSpec(containerContext) + kyuubiConf.get(ENGINE_DEPLOY_YARN_MODE_PRIORITY).foreach { appPriority => + appContext.setPriority(Priority.newInstance(appPriority)) + } + appContext.setApplicationType(engineType.toUpperCase(Locale.ROOT)) + + val allTags = new util.HashSet[String] + kyuubiConf.get(ENGINE_DEPLOY_YARN_MODE_TAGS).foreach { tags => + allTags.addAll(tags.asJava) + } + appContext.setApplicationTags(allTags) + appContext.setMaxAppAttempts(1) + + val capability = Records.newRecord(classOf[Resource]) + capability.setMemorySize(kyuubiConf.get(ENGINE_DEPLOY_YARN_MODE_MEMORY)) + capability.setVirtualCores(kyuubiConf.get(ENGINE_DEPLOY_YARN_MODE_CORES)) + debug(s"Created resource capability for AM request: $capability") + appContext.setResource(capability) + + appContext + } + + private def monitorApplication(appId: ApplicationId): Unit = { + val report = yarnClient.getApplicationReport(appId) + val state = report.getYarnApplicationState + info(s"Application report for $appId (state: $state)") + if (state == YarnApplicationState.FAILED || state == YarnApplicationState.KILLED) { + throw new KyuubiException(s"Application $appId finished with status: $state") + } + } + + private def cleanupStagingDir(): Unit = { + try { + val fs = 
stagingDirPath.getFileSystem(hadoopConf) + if (fs.delete(stagingDirPath, true)) { + info(s"Deleted staging directory $stagingDirPath") + } + } catch { + case ioe: IOException => + warn("Failed to cleanup staging dir " + stagingDirPath, ioe) + } + } + + /** + * Joins all the path components using Path.SEPARATOR. + */ + private def buildPath(components: String*): String = { + components.mkString(Path.SEPARATOR) + } + + /** + * Add the given path to the classpath entry of the given environment map. + * If the classpath is already set, this appends the new path to the existing classpath. + */ + private def addClasspathEntry(path: String, env: mutable.HashMap[String, String]): Unit = + KyuubiHadoopUtils.addPathToEnvironment(env, Environment.CLASSPATH.name, path) + + private def confToProperties(conf: KyuubiConf): Properties = { + val props = new Properties() + conf.getAll.foreach { case (k, v) => + props.setProperty(k, v) + } + props + } + + def writePropertiesToArchive(props: Properties, name: String, out: ZipOutputStream): Unit = { + out.putNextEntry(new ZipEntry(name)) + val writer = new OutputStreamWriter(out, StandardCharsets.UTF_8) + props.store(writer, "Kyuubi configuration.") + writer.flush() + out.closeEntry() + } + + def writeConfigurationToArchive( + conf: Configuration, + name: String, + out: ZipOutputStream): Unit = { + out.putNextEntry(new ZipEntry(name)) + val writer = new OutputStreamWriter(out, StandardCharsets.UTF_8) + conf.writeXml(writer) + writer.flush() + out.closeEntry() + } +} + +object EngineYarnModeSubmitter { + final val KYUUBI_ENGINE_DEPLOY_YARN_MODE_JARS_KEY = "kyuubi.engine.deploy.yarn.mode.jars" + final val KYUUBI_ENGINE_DEPLOY_YARN_MODE_HIVE_CONF_KEY = + "kyuubi.engine.deploy.yarn.mode.hiveConf" + final val KYUUBI_ENGINE_DEPLOY_YARN_MODE_HADOOP_CONF_KEY = + "kyuubi.engine.deploy.yarn.mode.hadoopConf" + final val KYUUBI_ENGINE_DEPLOY_YARN_MODE_YARN_CONF_KEY = + "kyuubi.engine.deploy.yarn.mode.yarnConf" + + final val KYUUBI_ENGINE_DEPLOY_YARN_MODE_ARCHIVE_SEPARATOR = "," +} + +case class YarnAppReport( + appState: YarnApplicationState, + finalState: FinalApplicationStatus, + diagnostics: Option[String]) diff --git a/kyuubi-common/src/main/scala/org/apache/kyuubi/package.scala b/kyuubi-common/src/main/scala/org/apache/kyuubi/package.scala index e05ad9fbe73..1ddefed0bbe 100644 --- a/kyuubi-common/src/main/scala/org/apache/kyuubi/package.scala +++ b/kyuubi-common/src/main/scala/org/apache/kyuubi/package.scala @@ -43,20 +43,24 @@ package object kyuubi { Try(buildFileStream.close()) } - val version: String = props.getProperty("kyuubi_version", unknown) - val java_version: String = props.getProperty("kyuubi_java_version", unknown) - val scala_version: String = props.getProperty("kyuubi_scala_version", unknown) - val spark_version: String = props.getProperty("kyuubi_spark_version", unknown) - val hive_version: String = props.getProperty("kyuubi_hive_version", unknown) - val hadoop_version: String = props.getProperty("kyuubi_hadoop_version", unknown) - val flink_version: String = props.getProperty("kyuubi_flink_version", unknown) - val trino_version: String = props.getProperty("kyuubi_trino_version", unknown) - val branch: String = props.getProperty("branch", unknown) - val revision: String = props.getProperty("revision", unknown) - val revisionTime: String = props.getProperty("revision_time", unknown) - val user: String = props.getProperty("user", unknown) - val repoUrl: String = props.getProperty("url", unknown) - val buildDate: String = props.getProperty("date", 
unknown) + private def getProperty(key: String, defaultValue: String = unknown): String = { + Option(props.getProperty(key, defaultValue)).filterNot(_.isEmpty).getOrElse(defaultValue) + } + + val version: String = getProperty("kyuubi_version") + val java_version: String = getProperty("kyuubi_java_version") + val scala_version: String = getProperty("kyuubi_scala_version") + val spark_version: String = getProperty("kyuubi_spark_version") + val hive_version: String = getProperty("kyuubi_hive_version") + val hadoop_version: String = getProperty("kyuubi_hadoop_version") + val flink_version: String = getProperty("kyuubi_flink_version") + val trino_version: String = getProperty("kyuubi_trino_version") + val branch: String = getProperty("branch") + val revision: String = getProperty("revision") + val revisionTime: String = getProperty("revision_time") + val user: String = getProperty("user") + val repoUrl: String = getProperty("url") + val buildDate: String = getProperty("date") } val KYUUBI_VERSION: String = BuildInfo.version diff --git a/kyuubi-common/src/main/scala/org/apache/kyuubi/service/Serverable.scala b/kyuubi-common/src/main/scala/org/apache/kyuubi/service/Serverable.scala index 05ed3644ca5..2d75e7fc296 100644 --- a/kyuubi-common/src/main/scala/org/apache/kyuubi/service/Serverable.scala +++ b/kyuubi-common/src/main/scala/org/apache/kyuubi/service/Serverable.scala @@ -35,6 +35,8 @@ abstract class Serverable(name: String) extends CompositeService(name) { private val started = new AtomicBoolean(false) + var selfExited = false + val backendService: AbstractBackendService val frontendServices: Seq[AbstractFrontendService] diff --git a/kyuubi-common/src/main/scala/org/apache/kyuubi/service/TFrontendService.scala b/kyuubi-common/src/main/scala/org/apache/kyuubi/service/TFrontendService.scala index a742993c5ad..9aefe63c8b6 100644 --- a/kyuubi-common/src/main/scala/org/apache/kyuubi/service/TFrontendService.scala +++ b/kyuubi-common/src/main/scala/org/apache/kyuubi/service/TFrontendService.scala @@ -30,7 +30,7 @@ import org.apache.kyuubi.Utils.stringifyException import org.apache.kyuubi.config.KyuubiConf.{FRONTEND_ADVERTISED_HOST, FRONTEND_CONNECTION_URL_USE_HOSTNAME, PROXY_USER, SESSION_CLOSE_ON_DISCONNECT} import org.apache.kyuubi.config.KyuubiReservedKeys._ import org.apache.kyuubi.operation.{FetchOrientation, OperationHandle} -import org.apache.kyuubi.service.authentication.KyuubiAuthenticationFactory +import org.apache.kyuubi.service.authentication.{AuthUtils, KyuubiAuthenticationFactory} import org.apache.kyuubi.session.SessionHandle import org.apache.kyuubi.shaded.hive.service.rpc.thrift._ import org.apache.kyuubi.shaded.thrift.protocol.TProtocol @@ -128,11 +128,11 @@ abstract class TFrontendService(name: String) ipAddress: String, realUser: String): String = { val proxyUser = Option(sessionConf.get(PROXY_USER.key)) - .getOrElse(sessionConf.get(KyuubiAuthenticationFactory.HS2_PROXY_USER)) + .getOrElse(sessionConf.get(AuthUtils.HS2_PROXY_USER)) if (proxyUser == null) { realUser } else { - KyuubiAuthenticationFactory.verifyProxyAccess(realUser, proxyUser, ipAddress, hadoopConf) + AuthUtils.verifyProxyAccess(realUser, proxyUser, ipAddress, hadoopConf) proxyUser } } diff --git a/kyuubi-common/src/main/scala/org/apache/kyuubi/service/authentication/AuthUtils.scala b/kyuubi-common/src/main/scala/org/apache/kyuubi/service/authentication/AuthUtils.scala new file mode 100644 index 00000000000..d3191ae23ef --- /dev/null +++ 
b/kyuubi-common/src/main/scala/org/apache/kyuubi/service/authentication/AuthUtils.scala @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.kyuubi.service.authentication + +import java.io.IOException + +import org.apache.hadoop.conf.Configuration +import org.apache.hadoop.security.UserGroupInformation +import org.apache.hadoop.security.authentication.util.KerberosName +import org.apache.hadoop.security.authorize.ProxyUsers + +import org.apache.kyuubi.{KyuubiSQLException, Logging} +import org.apache.kyuubi.service.authentication.AuthTypes.{AuthType, KERBEROS, NOSASL} + +object AuthUtils extends Logging { + val HS2_PROXY_USER = "hive.server2.proxy.user" + + @throws[KyuubiSQLException] + def verifyProxyAccess( + realUser: String, + proxyUser: String, + ipAddress: String, + hadoopConf: Configuration): Unit = { + try { + val sessionUgi = { + if (UserGroupInformation.isSecurityEnabled) { + val kerbName = new KerberosName(realUser) + UserGroupInformation.createProxyUser( + kerbName.getServiceName, + UserGroupInformation.getLoginUser) + } else { + UserGroupInformation.createRemoteUser(realUser) + } + } + + if (!proxyUser.equalsIgnoreCase(realUser)) { + ProxyUsers.refreshSuperUserGroupsConfiguration(hadoopConf) + ProxyUsers.authorize(UserGroupInformation.createProxyUser(proxyUser, sessionUgi), ipAddress) + } + } catch { + case e: IOException => + throw KyuubiSQLException( + "Failed to validate proxy privilege of " + realUser + " for " + proxyUser, + e) + } + } + + def saslDisabled(authTypes: Seq[AuthType]): Boolean = authTypes == Seq(NOSASL) + + def kerberosEnabled(authTypes: Seq[AuthType]): Boolean = authTypes.contains(KERBEROS) + + // take the first declared SASL/PLAIN auth type + def effectivePlainAuthType(authTypes: Seq[AuthType]): Option[AuthType] = authTypes.find { + case NOSASL | KERBEROS => false + case _ => true + } +} diff --git a/kyuubi-common/src/main/scala/org/apache/kyuubi/service/authentication/KyuubiAuthenticationFactory.scala b/kyuubi-common/src/main/scala/org/apache/kyuubi/service/authentication/KyuubiAuthenticationFactory.scala index 736f8e1e15e..978527b8818 100644 --- a/kyuubi-common/src/main/scala/org/apache/kyuubi/service/authentication/KyuubiAuthenticationFactory.scala +++ b/kyuubi-common/src/main/scala/org/apache/kyuubi/service/authentication/KyuubiAuthenticationFactory.scala @@ -21,15 +21,9 @@ import java.io.IOException import javax.security.auth.login.LoginException import javax.security.sasl.Sasl -import org.apache.hadoop.conf.Configuration -import org.apache.hadoop.security.UserGroupInformation -import org.apache.hadoop.security.authentication.util.KerberosName -import org.apache.hadoop.security.authorize.ProxyUsers - -import org.apache.kyuubi.{KyuubiSQLException, Logging} +import 
org.apache.kyuubi.Logging import org.apache.kyuubi.config.KyuubiConf import org.apache.kyuubi.config.KyuubiConf._ -import org.apache.kyuubi.service.authentication.AuthMethods.AuthMethod import org.apache.kyuubi.service.authentication.AuthTypes._ import org.apache.kyuubi.shaded.hive.service.rpc.thrift.TCLIService.Iface import org.apache.kyuubi.shaded.thrift.TProcessorFactory @@ -37,11 +31,10 @@ import org.apache.kyuubi.shaded.thrift.transport.{TSaslServerTransport, TTranspo class KyuubiAuthenticationFactory(conf: KyuubiConf, isServer: Boolean = true) extends Logging { - val authTypes: Set[AuthType] = conf.get(AUTHENTICATION_METHOD).map(AuthTypes.withName) - val noSaslEnabled: Boolean = authTypes == Set(NOSASL) - val kerberosEnabled: Boolean = authTypes.contains(KERBEROS) - private val plainAuthTypeOpt = authTypes.filterNot(_.equals(KERBEROS)) - .filterNot(_.equals(NOSASL)).headOption + val authTypes: Seq[AuthType] = conf.get(AUTHENTICATION_METHOD).map(AuthTypes.withName) + val saslDisabled: Boolean = AuthUtils.saslDisabled(authTypes) + val kerberosEnabled: Boolean = AuthUtils.kerberosEnabled(authTypes) + val effectivePlainAuthType: Option[AuthType] = AuthUtils.effectivePlainAuthType(authTypes) private val hadoopAuthServer: Option[HadoopThriftAuthBridgeServer] = { if (kerberosEnabled) { @@ -70,7 +63,7 @@ class KyuubiAuthenticationFactory(conf: KyuubiConf, isServer: Boolean = true) ex } def getTTransportFactory: TTransportFactory = { - if (noSaslEnabled) { + if (saslDisabled) { new TTransportFactory() } else { var transportFactory: TSaslServerTransport.Factory = null @@ -87,7 +80,7 @@ class KyuubiAuthenticationFactory(conf: KyuubiConf, isServer: Boolean = true) ex case _ => } - plainAuthTypeOpt match { + effectivePlainAuthType match { case Some(plainAuthType) => transportFactory = PlainSASLHelper.getTransportFactory( plainAuthType.toString, @@ -119,45 +112,3 @@ class KyuubiAuthenticationFactory(conf: KyuubiConf, isServer: Boolean = true) ex .orElse(Option(TSetIpAddressProcessor.getUserIpAddress)) } } -object KyuubiAuthenticationFactory extends Logging { - val HS2_PROXY_USER = "hive.server2.proxy.user" - - @throws[KyuubiSQLException] - def verifyProxyAccess( - realUser: String, - proxyUser: String, - ipAddress: String, - hadoopConf: Configuration): Unit = { - try { - val sessionUgi = { - if (UserGroupInformation.isSecurityEnabled) { - val kerbName = new KerberosName(realUser) - UserGroupInformation.createProxyUser( - kerbName.getServiceName, - UserGroupInformation.getLoginUser) - } else { - UserGroupInformation.createRemoteUser(realUser) - } - } - - if (!proxyUser.equalsIgnoreCase(realUser)) { - ProxyUsers.refreshSuperUserGroupsConfiguration(hadoopConf) - ProxyUsers.authorize(UserGroupInformation.createProxyUser(proxyUser, sessionUgi), ipAddress) - } - } catch { - case e: IOException => - throw KyuubiSQLException( - "Failed to validate proxy privilege of " + realUser + " for " + proxyUser, - e) - } - } - - def getValidPasswordAuthMethod(authTypes: Set[AuthType]): AuthMethod = { - if (authTypes == Set(NOSASL)) AuthMethods.NONE - else if (authTypes.contains(NONE)) AuthMethods.NONE - else if (authTypes.contains(LDAP)) AuthMethods.LDAP - else if (authTypes.contains(JDBC)) AuthMethods.JDBC - else if (authTypes.contains(CUSTOM)) AuthMethods.CUSTOM - else throw new IllegalArgumentException("No valid Password Auth detected") - } -} diff --git a/kyuubi-common/src/main/scala/org/apache/kyuubi/util/KyuubiHadoopUtils.scala b/kyuubi-common/src/main/scala/org/apache/kyuubi/util/KyuubiHadoopUtils.scala index 
4959c845d49..2d9ea4a8ad5 100644 --- a/kyuubi-common/src/main/scala/org/apache/kyuubi/util/KyuubiHadoopUtils.scala +++ b/kyuubi-common/src/main/scala/org/apache/kyuubi/util/KyuubiHadoopUtils.scala @@ -21,6 +21,7 @@ import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, Da import java.util.{Base64, Map => JMap} import scala.collection.JavaConverters._ +import scala.collection.mutable.HashMap import scala.util.{Failure, Success, Try} import org.apache.hadoop.conf.Configuration @@ -29,6 +30,7 @@ import org.apache.hadoop.io.Text import org.apache.hadoop.security.{Credentials, SecurityUtil} import org.apache.hadoop.security.token.{Token, TokenIdentifier} import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier +import org.apache.hadoop.yarn.api.ApplicationConstants import org.apache.hadoop.yarn.conf.YarnConfiguration import org.apache.kyuubi.Logging @@ -98,4 +100,18 @@ object KyuubiHadoopUtils extends Logging { None } } + + /** + * Add a path variable to the given environment map. + * If the map already contains this key, append the value to the existing value instead. + */ + def addPathToEnvironment(env: HashMap[String, String], key: String, value: String): Unit = { + val newValue = + if (env.contains(key)) { + env(key) + ApplicationConstants.CLASS_PATH_SEPARATOR + value + } else { + value + } + env.put(key, newValue) + } } diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/GlutenSuiteMixin.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/GlutenSuiteMixin.scala index 6095e163017..c9ee6ab9625 100644 --- a/kyuubi-common/src/test/scala/org/apache/kyuubi/GlutenSuiteMixin.scala +++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/GlutenSuiteMixin.scala @@ -29,5 +29,6 @@ trait GlutenSuiteMixin { "spark.memory.offHeap.size" -> "4g", "spark.memory.offHeap.enabled" -> "true", "spark.shuffle.manager" -> "org.apache.spark.shuffle.sort.ColumnarShuffleManager", + "spark.gluten.ui.enabled" -> "false", "spark.jars" -> extraJars) } diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/engine/deploy/yarn/EngineYarnModeSubmitterSuite.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/engine/deploy/yarn/EngineYarnModeSubmitterSuite.scala new file mode 100644 index 00000000000..349c194e6fc --- /dev/null +++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/engine/deploy/yarn/EngineYarnModeSubmitterSuite.scala @@ -0,0 +1,76 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
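EngineYarnModeSubmitter locates the engine jars and the Hive/Hadoop/YARN config directories through the internal kyuubi.engine.deploy.yarn.mode.* keys defined on its companion object above; distributeJars and distributeConf then zip and upload whatever those paths contain. A sketch under the assumption that the caller fills these keys in before submitApplication(); the paths are placeholders, and multiple directories may be joined with the ',' separator.

import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.engine.deploy.yarn.EngineYarnModeSubmitter._

// Point the submitter at local jar and conf locations to be localized into the AM.
val conf = new KyuubiConf()
  .set(KYUUBI_ENGINE_DEPLOY_YARN_MODE_JARS_KEY, "/opt/kyuubi/externals/engines/hive")
  .set(KYUUBI_ENGINE_DEPLOY_YARN_MODE_HIVE_CONF_KEY, "/etc/hive/conf")
  .set(KYUUBI_ENGINE_DEPLOY_YARN_MODE_HADOOP_CONF_KEY, "/etc/hadoop/conf")
  .set(KYUUBI_ENGINE_DEPLOY_YARN_MODE_YARN_CONF_KEY, "/etc/hadoop/conf")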
+ */ +package org.apache.kyuubi.engine.deploy.yarn + +import java.io.File + +import org.apache.hadoop.fs.Path +import org.apache.hadoop.yarn.api.ApplicationConstants.Environment +import org.scalatest.matchers.must.Matchers +import org.scalatest.matchers.should.Matchers.convertToAnyShouldWrapper + +import org.apache.kyuubi.{KyuubiFunSuite, Utils} +import org.apache.kyuubi.config.KyuubiConf +import org.apache.kyuubi.engine.deploy.yarn.EngineYarnModeSubmitter.KYUUBI_ENGINE_DEPLOY_YARN_MODE_JARS_KEY + +class EngineYarnModeSubmitterSuite extends KyuubiFunSuite with Matchers { + + val kyuubiHome: String = Utils.getCodeSourceLocation(getClass).split("kyuubi-common").head + + test("Classpath should contain engine jars dir and conf dir") { + val kyuubiConf = new KyuubiConf() + .set(KYUUBI_ENGINE_DEPLOY_YARN_MODE_JARS_KEY, "mock.jar") + + val env = MockEngineYarnModeSubmitter.setupLaunchEnv(kyuubiConf) + assert(env.contains(Environment.HADOOP_CONF_DIR.name())) + + val cp = env("CLASSPATH").split(":|;|") + + assert(cp.length == 2) + cp should contain("{{PWD}}/__kyuubi_engine_conf__") + cp should contain("{{PWD}}/__kyuubi_engine_conf__/__hadoop_conf__") + } + + test("container env should contain engine env") { + val kyuubiConf = new KyuubiConf() + .set(s"${KyuubiConf.KYUUBI_ENGINE_YARN_MODE_ENV_PREFIX}.KYUUBI_HOME", kyuubiHome) + + val env = MockEngineYarnModeSubmitter.setupLaunchEnv(kyuubiConf) + assert(env.nonEmpty) + assert(env.contains("KYUUBI_HOME")) + assert(env("KYUUBI_HOME") == kyuubiHome) + } + + test("distinct archive files") { + val targetJars: String = s"${Utils.getCodeSourceLocation(getClass)}" + // double the jars to make sure the distinct works + val archives = s"$targetJars,$targetJars" + val files = MockEngineYarnModeSubmitter.listDistinctFiles(archives) + val targetFiles = Utils.listFilesRecursively(new File(targetJars)) + assert(targetFiles != null) + assert(targetFiles.length == files.length) + } + +} + +object MockEngineYarnModeSubmitter extends EngineYarnModeSubmitter { + override var engineType: String = "mock" + + stagingDirPath = new Path("target/test-staging-dir") + + override def engineMainClass(): String = "org.apache.kyuubi.engine.deploy.Mock" +} diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/KyuubiAuthenticationFactorySuite.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/KyuubiAuthenticationFactorySuite.scala index 607c397d81f..18520b39b64 100644 --- a/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/KyuubiAuthenticationFactorySuite.scala +++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/KyuubiAuthenticationFactorySuite.scala @@ -28,21 +28,20 @@ import org.apache.kyuubi.util.AssertionUtils._ import org.apache.kyuubi.util.KyuubiHadoopUtils class KyuubiAuthenticationFactorySuite extends KyuubiFunSuite { - import KyuubiAuthenticationFactory._ test("verify proxy access") { val kyuubiConf = KyuubiConf() val hadoopConf = KyuubiHadoopUtils.newHadoopConf(kyuubiConf) val e1 = intercept[KyuubiSQLException] { - verifyProxyAccess("kent", "yao", "localhost", hadoopConf) + AuthUtils.verifyProxyAccess("kent", "yao", "localhost", hadoopConf) } assert(e1.getMessage === "Failed to validate proxy privilege of kent for yao") kyuubiConf.set("hadoop.proxyuser.kent.groups", "*") kyuubiConf.set("hadoop.proxyuser.kent.hosts", "*") val hadoopConf2 = KyuubiHadoopUtils.newHadoopConf(kyuubiConf) - verifyProxyAccess("kent", "yao", "localhost", hadoopConf2) + 
AuthUtils.verifyProxyAccess("kent", "yao", "localhost", hadoopConf2) } test("AuthType NONE") { @@ -56,21 +55,21 @@ class KyuubiAuthenticationFactorySuite extends KyuubiFunSuite { } test("AuthType Other") { - val conf = KyuubiConf().set(KyuubiConf.AUTHENTICATION_METHOD, Set("INVALID")) + val conf = KyuubiConf().set(KyuubiConf.AUTHENTICATION_METHOD, Seq("INVALID")) interceptEquals[IllegalArgumentException] { new KyuubiAuthenticationFactory(conf) }( "The value of kyuubi.authentication should be one of" + " NOSASL, NONE, LDAP, JDBC, KERBEROS, CUSTOM, but was INVALID") } test("AuthType LDAP") { - val conf = KyuubiConf().set(KyuubiConf.AUTHENTICATION_METHOD, Set("LDAP")) + val conf = KyuubiConf().set(KyuubiConf.AUTHENTICATION_METHOD, Seq("LDAP")) val authFactory = new KyuubiAuthenticationFactory(conf) authFactory.getTTransportFactory assert(Security.getProviders.exists(_.isInstanceOf[SaslPlainProvider])) } test("AuthType KERBEROS w/o keytab/principal") { - val conf = KyuubiConf().set(KyuubiConf.AUTHENTICATION_METHOD, Set("KERBEROS")) + val conf = KyuubiConf().set(KyuubiConf.AUTHENTICATION_METHOD, Seq("KERBEROS")) val factory = new KyuubiAuthenticationFactory(conf) val e = intercept[LoginException](factory.getTTransportFactory) @@ -78,11 +77,11 @@ class KyuubiAuthenticationFactorySuite extends KyuubiFunSuite { } test("AuthType is NOSASL if only NOSASL is specified") { - val conf = KyuubiConf().set(KyuubiConf.AUTHENTICATION_METHOD, Set("NOSASL")) + val conf = KyuubiConf().set(KyuubiConf.AUTHENTICATION_METHOD, Seq("NOSASL")) var factory = new KyuubiAuthenticationFactory(conf) !factory.getTTransportFactory.isInstanceOf[TSaslServerTransport.Factory] - conf.set(KyuubiConf.AUTHENTICATION_METHOD, Set("NOSASL", "NONE")) + conf.set(KyuubiConf.AUTHENTICATION_METHOD, Seq("NOSASL", "NONE")) factory = new KyuubiAuthenticationFactory(conf) factory.getTTransportFactory.isInstanceOf[TSaslServerTransport.Factory] } diff --git a/kyuubi-ha/pom.xml b/kyuubi-ha/pom.xml index f007f2c7064..6a4461d19ff 100644 --- a/kyuubi-ha/pom.xml +++ b/kyuubi-ha/pom.xml @@ -67,6 +67,11 @@ grpc-netty + + io.grpc + grpc-util + + io.netty netty-transport-native-epoll diff --git a/kyuubi-hive-beeline/src/main/java/org/apache/hive/beeline/KyuubiBeeLine.java b/kyuubi-hive-beeline/src/main/java/org/apache/hive/beeline/KyuubiBeeLine.java index c786da35f24..5aad29fd789 100644 --- a/kyuubi-hive-beeline/src/main/java/org/apache/hive/beeline/KyuubiBeeLine.java +++ b/kyuubi-hive-beeline/src/main/java/org/apache/hive/beeline/KyuubiBeeLine.java @@ -20,11 +20,14 @@ import java.io.IOException; import java.io.InputStream; import java.sql.Driver; +import java.sql.SQLException; +import java.sql.SQLWarning; import java.util.*; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.hive.common.util.HiveStringUtils; +import org.apache.kyuubi.shaded.thrift.transport.TTransportException; import org.apache.kyuubi.util.reflect.DynConstructors; import org.apache.kyuubi.util.reflect.DynFields; import org.apache.kyuubi.util.reflect.DynMethods; @@ -299,4 +302,56 @@ int runInit() { boolean dispatch(String line) { return super.dispatch(isPythonMode() ? 
line : HiveStringUtils.removeComments(line)); } + + @Override + void handleSQLException(SQLException e) { + if (e instanceof SQLWarning && !(getOpts().getShowWarnings())) { + return; + } + + if (e.getCause() instanceof TTransportException) { + switch (((TTransportException) e.getCause()).getType()) { + case TTransportException.ALREADY_OPEN: + error(loc("hs2-connection-already-open")); + break; + case TTransportException.END_OF_FILE: + error(loc("hs2-unexpected-end-of-file")); + break; + case TTransportException.NOT_OPEN: + error(loc("hs2-could-not-open-connection")); + break; + case TTransportException.TIMED_OUT: + error(loc("hs2-connection-timed-out")); + break; + case TTransportException.UNKNOWN: + error(loc("hs2-unknown-connection-problem")); + break; + default: + error(loc("hs2-unexpected-error")); + } + } + + error( + loc( + e instanceof SQLWarning ? "Warning" : "Error", + new Object[] { + e.getMessage() == null ? "" : e.getMessage().trim(), + e.getSQLState() == null ? "" : e.getSQLState().trim(), + new Integer(e.getErrorCode()) + })); + + if (getOpts().getVerbose()) { + e.printStackTrace(getErrorStream()); + } + + if (!getOpts().getShowNestedErrs()) { + return; + } + + for (SQLException nested = e.getNextException(); + nested != null && nested != e; + nested = nested.getNextException()) { + handleSQLException(nested); + } + } } diff --git a/kyuubi-hive-jdbc-shaded/src/main/resources/META-INF/NOTICE b/kyuubi-hive-jdbc-shaded/src/main/resources/META-INF/NOTICE index 01ce2db0d7b..cf2047d5ee2 100644 --- a/kyuubi-hive-jdbc-shaded/src/main/resources/META-INF/NOTICE +++ b/kyuubi-hive-jdbc-shaded/src/main/resources/META-INF/NOTICE @@ -4,6 +4,32 @@ Copyright 2021-2022 The Apache Software Foundation. This product includes software developed at The Apache Software Foundation (https://www.apache.org/). +Apache Iceberg +Copyright 2017-2022 The Apache Software Foundation + +Apache Parquet MR +Copyright 2014-2024 The Apache Software Foundation + +This project includes code from Kite, developed at Cloudera, Inc. with +the following copyright notice: + +| Copyright 2013 Cloudera Inc. +| +| Licensed under the Apache License, Version 2.0 (the "License"); +| you may not use this file except in compliance with the License. +| You may obtain a copy of the License at +| +| http://www.apache.org/licenses/LICENSE-2.0 +| +| Unless required by applicable law or agreed to in writing, software +| distributed under the License is distributed on an "AS IS" BASIS, +| WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +| See the License for the specific language governing permissions and +| limitations under the License. + +Apache Spark +Copyright 2014 and onwards The Apache Software Foundation. 
+ -------------------------------------------------------------------------------- This binary artifact contains Apache Commons Codec diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/JdbcConnectionParams.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/JdbcConnectionParams.java index c60f3489958..9aba2a813fa 100644 --- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/JdbcConnectionParams.java +++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/JdbcConnectionParams.java @@ -52,7 +52,9 @@ public class JdbcConnectionParams { public static final String AUTH_KYUUBI_CLIENT_TICKET_CACHE = "kyuubiClientTicketCache"; public static final String AUTH_PASSWD = "password"; public static final String AUTH_KERBEROS_AUTH_TYPE = "kerberosAuthType"; + public static final String AUTH_KERBEROS_AUTH_TYPE_FROM_KEYTAB = "fromKeytab"; public static final String AUTH_KERBEROS_AUTH_TYPE_FROM_SUBJECT = "fromSubject"; + public static final String AUTH_KERBEROS_AUTH_TYPE_FROM_TICKET_CACHE = "fromTicketCache"; public static final String ANONYMOUS_USER = "anonymous"; public static final String ANONYMOUS_PASSWD = "anonymous"; public static final String USE_SSL = "ssl"; diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/KyuubiConnection.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/KyuubiConnection.java index 47de5f7480b..7cc150ac90c 100644 --- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/KyuubiConnection.java +++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/KyuubiConnection.java @@ -847,28 +847,68 @@ private boolean isHadoopUserGroupInformationDoAs() { } } + private boolean isForciblyFromKeytabAuthMode() { + return AUTH_KERBEROS_AUTH_TYPE_FROM_KEYTAB.equalsIgnoreCase( + sessConfMap.get(AUTH_KERBEROS_AUTH_TYPE)); + } + + private boolean isForciblyFromSubjectAuthMode() { + return AUTH_KERBEROS_AUTH_TYPE_FROM_SUBJECT.equalsIgnoreCase( + sessConfMap.get(AUTH_KERBEROS_AUTH_TYPE)); + } + + private boolean isForciblyTgtCacheAuthMode() { + return AUTH_KERBEROS_AUTH_TYPE_FROM_TICKET_CACHE.equalsIgnoreCase( + sessConfMap.get(AUTH_KERBEROS_AUTH_TYPE)); + } + private boolean isKeytabAuthMode() { - return isSaslAuthMode() - && hasSessionValue(AUTH_PRINCIPAL) + // handle explicit cases first + if (isForciblyFromSubjectAuthMode() || isForciblyTgtCacheAuthMode()) { + return false; + } + if (isKerberosAuthMode() && isForciblyFromKeytabAuthMode()) { + return true; + } + if (isKerberosAuthMode() + && hasSessionValue(AUTH_KYUUBI_CLIENT_KEYTAB) + && !hasSessionValue(AUTH_KYUUBI_CLIENT_PRINCIPAL)) { + throw new IllegalArgumentException( + AUTH_KYUUBI_CLIENT_KEYTAB + + " is set but " + + AUTH_KYUUBI_CLIENT_PRINCIPAL + + " is not set"); + } + // handle implicit cases then + return isKerberosAuthMode() && hasSessionValue(AUTH_KYUUBI_CLIENT_PRINCIPAL) && hasSessionValue(AUTH_KYUUBI_CLIENT_KEYTAB); } private boolean isFromSubjectAuthMode() { - return isSaslAuthMode() - && hasSessionValue(AUTH_PRINCIPAL) - && !hasSessionValue(AUTH_KYUUBI_CLIENT_PRINCIPAL) + // handle explicit cases first + if (isForciblyFromKeytabAuthMode() || isForciblyTgtCacheAuthMode()) { + return false; + } + if (isKerberosAuthMode() && isForciblyFromSubjectAuthMode()) { + return true; + } + // handle implicit cases then + return isKerberosAuthMode() && !hasSessionValue(AUTH_KYUUBI_CLIENT_KEYTAB) - && (AUTH_KERBEROS_AUTH_TYPE_FROM_SUBJECT.equalsIgnoreCase( - sessConfMap.get(AUTH_KERBEROS_AUTH_TYPE)) - || isHadoopUserGroupInformationDoAs()); + 
&& isHadoopUserGroupInformationDoAs(); } private boolean isTgtCacheAuthMode() { - return isSaslAuthMode() - && hasSessionValue(AUTH_PRINCIPAL) - && !hasSessionValue(AUTH_KYUUBI_CLIENT_PRINCIPAL) - && !hasSessionValue(AUTH_KYUUBI_CLIENT_KEYTAB); + // handle explicit cases first + if (isForciblyFromKeytabAuthMode() || isForciblyFromSubjectAuthMode()) { + return false; + } + if (isKerberosAuthMode() && isForciblyTgtCacheAuthMode()) { + return true; + } + // handle implicit cases then + return isKerberosAuthMode() && !hasSessionValue(AUTH_KYUUBI_CLIENT_KEYTAB); } private boolean isPlainSaslAuthMode() { diff --git a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/auth/KerberosAuthentication.java b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/auth/KerberosAuthentication.java index a137fbb9946..51999bed234 100644 --- a/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/auth/KerberosAuthentication.java +++ b/kyuubi-hive-jdbc/src/main/java/org/apache/kyuubi/jdbc/hive/auth/KerberosAuthentication.java @@ -28,6 +28,7 @@ import java.io.IOException; import java.io.UncheckedIOException; import java.net.InetAddress; +import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Map; @@ -107,6 +108,9 @@ private static Configuration createLoginFromTgtCacheConfiguration(String ticketC ticketCache = System.getenv("KRB5CCNAME"); } if (StringUtils.isNotBlank(ticketCache)) { + if (!Files.exists(Paths.get(ticketCache))) { + LOG.warn("TicketCache {} does not exist", ticketCache); + } optionsBuilder.put("ticketCache", ticketCache); } return createConfiguration(optionsBuilder); diff --git a/kyuubi-server/pom.xml b/kyuubi-server/pom.xml index 17fd851d2ec..09b89bb0e7a 100644 --- a/kyuubi-server/pom.xml +++ b/kyuubi-server/pom.xml @@ -337,16 +337,6 @@ kyuubi-spark-sql-engine_${scala.binary.version} ${project.version} test - - - io.grpc - grpc-core - - - io.grpc - grpc-protobuf - - diff --git a/kyuubi-server/src/main/antlr4/org/apache/kyuubi/sql/KyuubiSqlBaseLexer.g4 b/kyuubi-server/src/main/antlr4/org/apache/kyuubi/sql/KyuubiSqlBaseLexer.g4 index 7e7dee0e371..c6fd6676b86 100644 --- a/kyuubi-server/src/main/antlr4/org/apache/kyuubi/sql/KyuubiSqlBaseLexer.g4 +++ b/kyuubi-server/src/main/antlr4/org/apache/kyuubi/sql/KyuubiSqlBaseLexer.g4 @@ -51,6 +51,8 @@ KYUUBIADMIN: 'KYUUBIADMIN'; SESSION: 'SESSION'; +ENGINE: 'ENGINE'; + BACKQUOTED_IDENTIFIER : '`' ( ~'`' | '``' )* '`' ; diff --git a/kyuubi-server/src/main/antlr4/org/apache/kyuubi/sql/KyuubiSqlBaseParser.g4 b/kyuubi-server/src/main/antlr4/org/apache/kyuubi/sql/KyuubiSqlBaseParser.g4 index 7360c8410ae..67bb7e35a7f 100644 --- a/kyuubi-server/src/main/antlr4/org/apache/kyuubi/sql/KyuubiSqlBaseParser.g4 +++ b/kyuubi-server/src/main/antlr4/org/apache/kyuubi/sql/KyuubiSqlBaseParser.g4 @@ -30,4 +30,5 @@ statement runnableCommand : (DESC | DESCRIBE) SESSION #describeSession + | (DESC | DESCRIBE) ENGINE #describeEngine ; diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/client/KyuubiSyncThriftClient.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/client/KyuubiSyncThriftClient.scala index 0dc6692da43..d24387341e6 100644 --- a/kyuubi-server/src/main/scala/org/apache/kyuubi/client/KyuubiSyncThriftClient.scala +++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/client/KyuubiSyncThriftClient.scala @@ -42,6 +42,7 @@ import org.apache.kyuubi.util.{ThreadUtils, ThriftUtils} import org.apache.kyuubi.util.ThreadUtils.scheduleTolerableRunnableWithFixedDelay class KyuubiSyncThriftClient private ( + val 
hostPort: (String, Int), protocol: TProtocol, engineAliveProbeProtocol: Option[TProtocol], engineAliveProbeInterval: Long, @@ -483,6 +484,7 @@ private[kyuubi] object KyuubiSyncThriftClient extends Logging { None } new KyuubiSyncThriftClient( + (host, port), tProtocol, aliveProbeProtocol, aliveProbeInterval, diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/EngineRef.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/EngineRef.scala index 2bd8554036e..eb9c7ab47c9 100644 --- a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/EngineRef.scala +++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/EngineRef.scala @@ -38,7 +38,7 @@ import org.apache.kyuubi.engine.jdbc.JdbcProcessBuilder import org.apache.kyuubi.engine.spark.SparkProcessBuilder import org.apache.kyuubi.engine.trino.TrinoProcessBuilder import org.apache.kyuubi.ha.HighAvailabilityConf.{HA_ENGINE_REF_ID, HA_NAMESPACE} -import org.apache.kyuubi.ha.client.{DiscoveryClient, DiscoveryClientProvider, DiscoveryPaths} +import org.apache.kyuubi.ha.client.{DiscoveryClient, DiscoveryClientProvider, DiscoveryPaths, ServiceNodeInfo} import org.apache.kyuubi.metrics.MetricsConstants.{ENGINE_FAIL, ENGINE_TIMEOUT, ENGINE_TOTAL} import org.apache.kyuubi.metrics.MetricsSystem import org.apache.kyuubi.operation.log.OperationLog @@ -196,7 +196,7 @@ private[kyuubi] class EngineRef( new TrinoProcessBuilder(appUser, conf, engineRefId, extraEngineLog) case HIVE_SQL => conf.setIfMissing(HiveProcessBuilder.HIVE_ENGINE_NAME, defaultEngineName) - new HiveProcessBuilder(appUser, conf, engineRefId, extraEngineLog) + HiveProcessBuilder(appUser, conf, engineRefId, extraEngineLog, defaultEngineName) case JDBC => new JdbcProcessBuilder(appUser, conf, engineRefId, extraEngineLog) case CHAT => @@ -297,6 +297,7 @@ private[kyuubi] class EngineRef( * * @param discoveryClient the zookeeper client to get or create engine instance * @param extraEngineLog the launch engine operation log, used to inject engine log into it + * @return engine host and port */ def getOrCreate( discoveryClient: DiscoveryClient, @@ -312,14 +313,37 @@ private[kyuubi] class EngineRef( * * @param discoveryClient the zookeeper client to get or create engine instance * @param hostPort the existing engine host and port + * @return deregister result and message */ - def deregister(discoveryClient: DiscoveryClient, hostPort: (String, Int)): Unit = + def deregister(discoveryClient: DiscoveryClient, hostPort: (String, Int)): (Boolean, String) = tryWithLock(discoveryClient) { - if (discoveryClient.getServerHost(engineSpace) == Option(hostPort)) { - discoveryClient.delete(engineSpace) + // refer the DiscoveryClient::getServerHost implementation + discoveryClient.getServiceNodesInfo(engineSpace, Some(1), silent = true) match { + case Seq(sn) => + if ((sn.host, sn.port) == hostPort) { + val msg = s"Deleting engine node:$sn" + info(msg) + discoveryClient.delete(s"$engineSpace/${sn.nodeName}") + (true, msg) + } else { + val msg = s"Engine node:$sn is not matched with host&port[$hostPort]" + warn(msg) + (false, msg) + } + case _ => + val msg = s"No engine node found in $engineSpace" + warn(msg) + (false, msg) } } + def getServiceNode( + discoveryClient: DiscoveryClient, + hostPort: (String, Int)): Option[ServiceNodeInfo] = { + val serviceNodes = discoveryClient.getServiceNodesInfo(engineSpace) + serviceNodes.filter { sn => (sn.host, sn.port) == hostPort }.headOption + } + def close(): Unit = { if (shareLevel == CONNECTION && builder != null) { try { diff --git 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/KyuubiApplicationManager.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/KyuubiApplicationManager.scala index f8b64005359..1afdcc3cf7c 100644 --- a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/KyuubiApplicationManager.scala +++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/KyuubiApplicationManager.scala @@ -103,6 +103,13 @@ object KyuubiApplicationManager { conf.set(SparkProcessBuilder.TAG_KEY, newTag) } + private def setupEngineYarnModeTag(tag: String, conf: KyuubiConf): Unit = { + val originalTag = + conf.getOption(KyuubiConf.ENGINE_DEPLOY_YARN_MODE_TAGS.key).map(_ + ",").getOrElse("") + val newTag = s"${originalTag}KYUUBI" + Some(tag).filterNot(_.isEmpty).map("," + _).getOrElse("") + conf.set(KyuubiConf.ENGINE_DEPLOY_YARN_MODE_TAGS.key, newTag) + } + private def setupSparkK8sTag(tag: String, conf: KyuubiConf): Unit = { conf.set("spark.kubernetes.driver.label." + LABEL_KYUUBI_UNIQUE_KEY, tag) } @@ -182,6 +189,8 @@ object KyuubiApplicationManager { // running flink on other platforms is not yet supported setupFlinkYarnTag(applicationTag, conf) // other engine types are running locally yet + case ("HIVE", Some("YARN")) => + setupEngineYarnModeTag(applicationTag, conf) case _ => } } diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/YarnApplicationOperation.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/YarnApplicationOperation.scala index 1f672ad701e..18d0006a592 100644 --- a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/YarnApplicationOperation.scala +++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/YarnApplicationOperation.scala @@ -184,7 +184,8 @@ object YarnApplicationOperation extends Logging { ApplicationState.RUNNING case (YarnApplicationState.FINISHED, FinalApplicationStatus.SUCCEEDED) => ApplicationState.FINISHED - case (YarnApplicationState.FAILED, FinalApplicationStatus.FAILED) => + case (YarnApplicationState.FINISHED, FinalApplicationStatus.FAILED) | + (YarnApplicationState.FAILED, FinalApplicationStatus.FAILED) => ApplicationState.FAILED case (YarnApplicationState.KILLED, FinalApplicationStatus.KILLED) => ApplicationState.KILLED diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/hive/HiveProcessBuilder.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/hive/HiveProcessBuilder.scala index d8e4454b610..2d4145ff522 100644 --- a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/hive/HiveProcessBuilder.scala +++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/hive/HiveProcessBuilder.scala @@ -26,10 +26,12 @@ import com.google.common.annotations.VisibleForTesting import org.apache.kyuubi._ import org.apache.kyuubi.config.KyuubiConf -import org.apache.kyuubi.config.KyuubiConf.{ENGINE_HIVE_EXTRA_CLASSPATH, ENGINE_HIVE_JAVA_OPTIONS, ENGINE_HIVE_MEMORY} +import org.apache.kyuubi.config.KyuubiConf.{ENGINE_DEPLOY_YARN_MODE_APP_NAME, ENGINE_HIVE_DEPLOY_MODE, ENGINE_HIVE_EXTRA_CLASSPATH, ENGINE_HIVE_JAVA_OPTIONS, ENGINE_HIVE_MEMORY} import org.apache.kyuubi.config.KyuubiReservedKeys.{KYUUBI_ENGINE_ID, KYUUBI_SESSION_USER_KEY} import org.apache.kyuubi.engine.{KyuubiApplicationManager, ProcBuilder} -import org.apache.kyuubi.engine.hive.HiveProcessBuilder._ +import org.apache.kyuubi.engine.deploy.DeployMode +import org.apache.kyuubi.engine.deploy.DeployMode.{LOCAL, YARN} +import org.apache.kyuubi.engine.hive.HiveProcessBuilder.HIVE_HADOOP_CLASSPATH_KEY import org.apache.kyuubi.operation.log.OperationLog import 
org.apache.kyuubi.util.command.CommandLineUtils._ @@ -45,7 +47,7 @@ class HiveProcessBuilder( this(proxyUser, conf, "") } - private val hiveHome: String = getEngineHome(shortName) + protected val hiveHome: String = getEngineHome(shortName) override protected def module: String = "kyuubi-hive-sql-engine" @@ -113,7 +115,23 @@ class HiveProcessBuilder( override def shortName: String = "hive" } -object HiveProcessBuilder { +object HiveProcessBuilder extends Logging { final val HIVE_HADOOP_CLASSPATH_KEY = "HIVE_HADOOP_CLASSPATH" final val HIVE_ENGINE_NAME = "hive.engine.name" + + def apply( + appUser: String, + conf: KyuubiConf, + engineRefId: String, + extraEngineLog: Option[OperationLog], + defaultEngineName: String): HiveProcessBuilder = { + DeployMode.withName(conf.get(ENGINE_HIVE_DEPLOY_MODE)) match { + case LOCAL => new HiveProcessBuilder(appUser, conf, engineRefId, extraEngineLog) + case YARN => + warn(s"Hive on YARN mode is experimental.") + conf.setIfMissing(ENGINE_DEPLOY_YARN_MODE_APP_NAME, Some(defaultEngineName)) + new HiveYarnModeProcessBuilder(appUser, conf, engineRefId, extraEngineLog) + case other => throw new KyuubiException(s"Unsupported deploy mode: $other") + } + } } diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/hive/HiveYarnModeProcessBuilder.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/hive/HiveYarnModeProcessBuilder.scala new file mode 100644 index 00000000000..ba842cbd4d5 --- /dev/null +++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/hive/HiveYarnModeProcessBuilder.scala @@ -0,0 +1,161 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.kyuubi.engine.hive + +import java.io.File +import java.nio.file.{Files, Paths} +import java.util + +import scala.collection.JavaConverters._ +import scala.collection.mutable.ArrayBuffer + +import org.apache.kyuubi.{KyuubiException, Logging, SCALA_COMPILE_VERSION} +import org.apache.kyuubi.config.KyuubiConf +import org.apache.kyuubi.config.KyuubiConf.{ENGINE_HIVE_EXTRA_CLASSPATH, ENGINE_HIVE_MEMORY} +import org.apache.kyuubi.config.KyuubiReservedKeys.{KYUUBI_ENGINE_ID, KYUUBI_SESSION_USER_KEY} +import org.apache.kyuubi.engine.{ApplicationManagerInfo, KyuubiApplicationManager} +import org.apache.kyuubi.engine.deploy.yarn.EngineYarnModeSubmitter._ +import org.apache.kyuubi.engine.hive.HiveProcessBuilder.HIVE_HADOOP_CLASSPATH_KEY +import org.apache.kyuubi.operation.log.OperationLog +import org.apache.kyuubi.util.command.CommandLineUtils.{confKeyValue, confKeyValues} + +/** + * A process builder for Hive on Yarn. + * + * It launches a new process on the Kyuubi server side to submit the Hive engine to YARN.
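+ *
+ * For illustration only (values in angle brackets are placeholders, not the literal runtime
+ * output), the assembled command looks roughly like:
+ * {{{
+ *   <JAVA_HOME>/bin/java -Xmx<engine memory> \
+ *     -cp <hive conf dir>:<hadoop conf dir>:<yarn conf dir>:<engine jar dirs>/* \
+ *     org.apache.kyuubi.engine.hive.deploy.HiveYarnModeSubmitter \
+ *     --conf kyuubi.session.user=<proxy user> --conf <other kyuubi confs>...
+ * }}}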
+ */ +class HiveYarnModeProcessBuilder( + override val proxyUser: String, + override val conf: KyuubiConf, + override val engineRefId: String, + override val extraEngineLog: Option[OperationLog] = None) + extends HiveProcessBuilder(proxyUser, conf, engineRefId, extraEngineLog) with Logging { + + override protected def mainClass: String = + "org.apache.kyuubi.engine.hive.deploy.HiveYarnModeSubmitter" + + override def isClusterMode(): Boolean = true + + override def clusterManager(): Option[String] = Some("yarn") + + override def appMgrInfo(): ApplicationManagerInfo = ApplicationManagerInfo(clusterManager()) + + override protected val commands: Iterable[String] = { + KyuubiApplicationManager.tagApplication(engineRefId, shortName, clusterManager(), conf) + val buffer = new ArrayBuffer[String]() + buffer += executable + + val memory = conf.get(ENGINE_HIVE_MEMORY) + buffer += s"-Xmx$memory" + buffer += "-cp" + + val classpathEntries = new util.LinkedHashSet[String] + classpathEntries.addAll(hiveConfFiles()) + classpathEntries.addAll(hadoopConfFiles()) + classpathEntries.addAll(yarnConfFiles()) + classpathEntries.addAll(jarFiles(true)) + + buffer += classpathEntries.asScala.mkString(File.pathSeparator) + buffer += mainClass + + buffer ++= confKeyValue(KYUUBI_SESSION_USER_KEY, proxyUser) + buffer ++= confKeyValue(KYUUBI_ENGINE_ID, engineRefId) + + buffer ++= confKeyValue( + KYUUBI_ENGINE_DEPLOY_YARN_MODE_JARS_KEY, + jarFiles(false).asScala.mkString(KYUUBI_ENGINE_DEPLOY_YARN_MODE_ARCHIVE_SEPARATOR)) + + buffer ++= confKeyValue( + KYUUBI_ENGINE_DEPLOY_YARN_MODE_HIVE_CONF_KEY, + hiveConfFiles().asScala.mkString(KYUUBI_ENGINE_DEPLOY_YARN_MODE_ARCHIVE_SEPARATOR)) + buffer ++= confKeyValue( + KYUUBI_ENGINE_DEPLOY_YARN_MODE_HADOOP_CONF_KEY, + hadoopConfFiles().asScala.mkString(KYUUBI_ENGINE_DEPLOY_YARN_MODE_ARCHIVE_SEPARATOR)) + buffer ++= confKeyValue( + KYUUBI_ENGINE_DEPLOY_YARN_MODE_YARN_CONF_KEY, + yarnConfFiles().asScala.mkString(KYUUBI_ENGINE_DEPLOY_YARN_MODE_ARCHIVE_SEPARATOR)) + + buffer ++= confKeyValues(conf.getAll) + + buffer + } + + private def jarFiles(isClasspath: Boolean): util.LinkedHashSet[String] = { + val jarEntries = new util.LinkedHashSet[String] + + mainResource.foreach(jarEntries.add) + + jarEntries.add(s"$hiveHome${File.separator}lib${appendClasspathSuffix(isClasspath)}") + + val hadoopCp = env.get(HIVE_HADOOP_CLASSPATH_KEY) + val extraCp = conf.get(ENGINE_HIVE_EXTRA_CLASSPATH) + // the classpath of the ApplicationMaster is resolved when submitting the hive engine to YARN. + if (isClasspath) { + extraCp.foreach(jarEntries.add) + hadoopCp.foreach(jarEntries.add) + } + if (hadoopCp.isEmpty && extraCp.isEmpty) { + warn(s"The conf of ${HIVE_HADOOP_CLASSPATH_KEY} and ${ENGINE_HIVE_EXTRA_CLASSPATH.key}" + + s" are empty.") + debug("Detected development environment") + mainResource.foreach { path => + val devHadoopJars = Paths.get(path).getParent + .resolve(s"scala-$SCALA_COMPILE_VERSION") + .resolve("jars") + if (!Files.exists(devHadoopJars)) { + throw new KyuubiException(s"The path $devHadoopJars does not exist. 
" + + s"Please set ${HIVE_HADOOP_CLASSPATH_KEY} or ${ENGINE_HIVE_EXTRA_CLASSPATH.key} for " + + s"configuring location of hadoop client jars, etc") + } + jarEntries.add(s"$devHadoopJars${appendClasspathSuffix(isClasspath)}") + } + } + + jarEntries + } + + private def hiveConfFiles(): util.LinkedHashSet[String] = { + val confEntries = new util.LinkedHashSet[String] + confEntries.add(env.getOrElse( + "HIVE_CONF_DIR", + s"$hiveHome${File.separator}conf")) + + confEntries + } + + private def hadoopConfFiles(): util.LinkedHashSet[String] = { + val confEntries = new util.LinkedHashSet[String] + env.get("HADOOP_CONF_DIR").foreach(confEntries.add) + + confEntries + } + + private def yarnConfFiles(): util.LinkedHashSet[String] = { + val confEntries = new util.LinkedHashSet[String] + env.get("YARN_CONF_DIR").foreach(confEntries.add) + + confEntries + } + + private def appendClasspathSuffix(isClasspath: Boolean): String = { + if (isClasspath) { + s"${File.separator}*" + } else { + "" + } + } +} diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/KyuubiRestFrontendService.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/KyuubiRestFrontendService.scala index d738995130b..83aee66fef0 100644 --- a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/KyuubiRestFrontendService.scala +++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/KyuubiRestFrontendService.scala @@ -35,7 +35,7 @@ import org.apache.kyuubi.server.api.v1.ApiRootResource import org.apache.kyuubi.server.http.authentication.{AuthenticationFilter, KyuubiHttpAuthenticationFactory} import org.apache.kyuubi.server.ui.{JettyServer, JettyUtils} import org.apache.kyuubi.service.{AbstractFrontendService, Serverable, Service, ServiceUtils} -import org.apache.kyuubi.service.authentication.{AuthMethods, AuthTypes, KyuubiAuthenticationFactory} +import org.apache.kyuubi.service.authentication.{AuthTypes, AuthUtils} import org.apache.kyuubi.session.{KyuubiSessionManager, SessionHandle} import org.apache.kyuubi.util.ThreadUtils import org.apache.kyuubi.util.ThreadUtils.scheduleTolerableRunnableWithFixedDelay @@ -71,9 +71,10 @@ class KyuubiRestFrontendService(override val serverable: Serverable) private lazy val port: Int = conf.get(FRONTEND_REST_BIND_PORT) - private lazy val securityEnabled = { + private[kyuubi] lazy val securityEnabled = { val authTypes = conf.get(AUTHENTICATION_METHOD).map(AuthTypes.withName) - KyuubiAuthenticationFactory.getValidPasswordAuthMethod(authTypes) != AuthMethods.NONE + AuthUtils.kerberosEnabled(authTypes) || + !AuthUtils.effectivePlainAuthType(authTypes).contains(AuthTypes.NONE) } private lazy val administrators: Set[String] = @@ -259,9 +260,9 @@ class KyuubiRestFrontendService(override val serverable: Serverable) } else { val proxyUser = sessionConf.getOrElse( PROXY_USER.key, - sessionConf.getOrElse(KyuubiAuthenticationFactory.HS2_PROXY_USER, realUser)) + sessionConf.getOrElse(AuthUtils.HS2_PROXY_USER, realUser)) if (!proxyUser.equals(realUser) && !isAdministrator(realUser)) { - KyuubiAuthenticationFactory.verifyProxyAccess(realUser, proxyUser, ipAddress, hadoopConf) + AuthUtils.verifyProxyAccess(realUser, proxyUser, ipAddress, hadoopConf) } proxyUser } diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/KyuubiTHttpFrontendService.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/KyuubiTHttpFrontendService.scala index ca8939d69a3..2763e9481f9 100644 --- a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/KyuubiTHttpFrontendService.scala +++ 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/KyuubiTHttpFrontendService.scala @@ -41,7 +41,6 @@ import org.apache.kyuubi.metrics.MetricsSystem import org.apache.kyuubi.server.http.ThriftHttpServlet import org.apache.kyuubi.server.http.util.SessionManager import org.apache.kyuubi.service.{Serverable, Service, ServiceUtils, TFrontendService} -import org.apache.kyuubi.service.authentication.KyuubiAuthenticationFactory import org.apache.kyuubi.shaded.hive.service.rpc.thrift.{TCLIService, TOpenSessionReq} import org.apache.kyuubi.shaded.thrift.protocol.TBinaryProtocol import org.apache.kyuubi.util.NamedThreadFactory @@ -75,13 +74,8 @@ final class KyuubiTHttpFrontendService( */ override def initialize(conf: KyuubiConf): Unit = synchronized { this.conf = conf - if (authFactory.kerberosEnabled) { - try { - KyuubiAuthenticationFactory.getValidPasswordAuthMethod(authFactory.authTypes) - } catch { - case _: IllegalArgumentException => - throw new AuthenticationException("Kerberos is not supported for thrift http mode") - } + if (authFactory.kerberosEnabled && authFactory.effectivePlainAuthType.isEmpty) { + throw new AuthenticationException("Kerberos is not supported for Thrift HTTP mode") } try { diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/http/ThriftHttpServlet.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/http/ThriftHttpServlet.scala index eb8fb2caa69..980f35d70b5 100644 --- a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/http/ThriftHttpServlet.scala +++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/http/ThriftHttpServlet.scala @@ -136,7 +136,7 @@ class ThriftHttpServlet( } else SessionManager.setForwardedAddresses(List.empty[String]) // Generate new cookie and add it to the response - if (requireNewCookie && !authFactory.noSaslEnabled) { + if (requireNewCookie && !authFactory.saslDisabled) { val cookieToken = HttpAuthUtils.createCookieToken(clientUserName) val hs2Cookie = createCookie(signer.signCookie(cookieToken)) if (isHttpOnlyCookie) response.setHeader("SET-COOKIE", getHttpOnlyCookieHeader(hs2Cookie)) diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/session/FileSessionConfAdvisor.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/session/FileSessionConfAdvisor.scala index 96569bc389d..d480520c49f 100644 --- a/kyuubi-server/src/main/scala/org/apache/kyuubi/session/FileSessionConfAdvisor.scala +++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/session/FileSessionConfAdvisor.scala @@ -43,7 +43,8 @@ class FileSessionConfAdvisor extends SessionConfAdvisor { } object FileSessionConfAdvisor extends Logging { - private val reloadInterval: Long = KyuubiConf().get(KyuubiConf.SESSION_CONF_FILE_RELOAD_INTERVAL) + private val reloadInterval: Long = + KyuubiConf().loadFileDefaults().get(KyuubiConf.SESSION_CONF_FILE_RELOAD_INTERVAL) private lazy val sessionConfCache: LoadingCache[String, JMap[String, String]] = CacheBuilder.newBuilder() .expireAfterWrite( diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/session/KyuubiSessionImpl.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/session/KyuubiSessionImpl.scala index a5d160e0714..e34f7b2a06d 100644 --- a/kyuubi-server/src/main/scala/org/apache/kyuubi/session/KyuubiSessionImpl.scala +++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/session/KyuubiSessionImpl.scala @@ -30,6 +30,7 @@ import org.apache.kyuubi.config.KyuubiReservedKeys.{KYUUBI_ENGINE_CREDENTIALS_KE import org.apache.kyuubi.engine.{EngineRef, KyuubiApplicationManager} import 
org.apache.kyuubi.events.{EventBus, KyuubiSessionEvent} import org.apache.kyuubi.ha.client.DiscoveryClientProvider._ +import org.apache.kyuubi.ha.client.ServiceNodeInfo import org.apache.kyuubi.operation.{Operation, OperationHandle} import org.apache.kyuubi.operation.log.OperationLog import org.apache.kyuubi.service.authentication.InternalSecurityAccessor @@ -119,6 +120,12 @@ class KyuubiSessionImpl( engineLastAlive = System.currentTimeMillis() } + def getEngineNode: Option[ServiceNodeInfo] = { + withDiscoveryClient(sessionConf) { discoveryClient => + engine.getServiceNode(discoveryClient, _client.hostPort) + } + } + private[kyuubi] def openEngineSession(extraEngineLog: Option[OperationLog] = None): Unit = handleSessionException { withDiscoveryClient(sessionConf) { discoveryClient => diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/sql/parser/server/KyuubiAstBuilder.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/sql/parser/server/KyuubiAstBuilder.scala index 6c5f6a395f3..b29b57cbd2d 100644 --- a/kyuubi-server/src/main/scala/org/apache/kyuubi/sql/parser/server/KyuubiAstBuilder.scala +++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/sql/parser/server/KyuubiAstBuilder.scala @@ -20,7 +20,7 @@ package org.apache.kyuubi.sql.parser.server import org.apache.kyuubi.sql.{KyuubiSqlBaseParser, KyuubiSqlBaseParserBaseVisitor} import org.apache.kyuubi.sql.KyuubiSqlBaseParser.SingleStatementContext import org.apache.kyuubi.sql.plan.{KyuubiTreeNode, PassThroughNode} -import org.apache.kyuubi.sql.plan.command.{DescribeSession, RunnableCommand} +import org.apache.kyuubi.sql.plan.command.{DescribeEngine, DescribeSession, RunnableCommand} class KyuubiAstBuilder extends KyuubiSqlBaseParserBaseVisitor[AnyRef] { @@ -44,4 +44,9 @@ class KyuubiAstBuilder extends KyuubiSqlBaseParserBaseVisitor[AnyRef] { : RunnableCommand = { DescribeSession() } + + override def visitDescribeEngine(ctx: KyuubiSqlBaseParser.DescribeEngineContext) + : RunnableCommand = { + DescribeEngine() + } } diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/sql/plan/command/DescribeEngine.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/sql/plan/command/DescribeEngine.scala new file mode 100644 index 00000000000..0c9a0bfa567 --- /dev/null +++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/sql/plan/command/DescribeEngine.scala @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.kyuubi.sql.plan.command + +import scala.collection.mutable.ListBuffer + +import org.apache.kyuubi.operation.IterableFetchIterator +import org.apache.kyuubi.session.{KyuubiSession, KyuubiSessionImpl} +import org.apache.kyuubi.shaded.hive.service.rpc.thrift.TTypeId +import org.apache.kyuubi.sql.schema.{Column, Row, Schema} + +/** + * A runnable node for describing the current session's engine. + * + * The syntax of using this command in SQL is: + * {{{ + * [DESC|DESCRIBE] ENGINE; + * }}} + */ +case class DescribeEngine() extends RunnableCommand { + + override def run(kyuubiSession: KyuubiSession): Unit = { + val rows = Seq(kyuubiSession.asInstanceOf[KyuubiSessionImpl]).map { session => + lazy val client = session.client + val values = new ListBuffer[String]() + values += client.engineId.getOrElse("") + values += client.engineName.getOrElse("") + values += client.engineUrl.getOrElse("") + session.getEngineNode match { + case Some(nodeInfo) => + values += s"${nodeInfo.host}:${nodeInfo.port}" + values += nodeInfo.version.getOrElse("") + values += nodeInfo.attributes.mkString(",") + case None => + values += ("", "", "") + } + Row(values.toList) + } + iter = new IterableFetchIterator(rows) + } + + override def resultSchema: Schema = { + Schema(DescribeEngine.outputCols().toList) + } + + override def name(): String = "Describe Engine Node" +} + +object DescribeEngine { + + def outputCols(): Seq[Column] = { + Seq( + Column("ENGINE_ID", TTypeId.STRING_TYPE, Some("Kyuubi engine identifier")), + Column("ENGINE_NAME", TTypeId.STRING_TYPE, Some("Kyuubi engine name")), + Column("ENGINE_URL", TTypeId.STRING_TYPE, Some("Kyuubi engine url")), + Column("ENGINE_INSTANCE", TTypeId.STRING_TYPE, Some("Kyuubi engine instance host and port")), + Column("ENGINE_VERSION", TTypeId.STRING_TYPE, Some("Kyuubi engine version")), + Column("ENGINE_ATTRIBUTES", TTypeId.STRING_TYPE, Some("Kyuubi engine attributes"))) + } +} diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/sql/plan/command/DescribeSession.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/sql/plan/command/DescribeSession.scala index e1d77f296e7..8abe93e17d0 100644 --- a/kyuubi-server/src/main/scala/org/apache/kyuubi/sql/plan/command/DescribeSession.scala +++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/sql/plan/command/DescribeSession.scala @@ -56,8 +56,8 @@ object DescribeSession { def outputCols(): Seq[Column] = { Seq( - Column("id", TTypeId.STRING_TYPE, Some("Kyuubi session identify")), - Column("user", TTypeId.STRING_TYPE, Some("Kyuubi session user")), - Column("type", TTypeId.STRING_TYPE, Some("Kyuubi session type"))) + Column("SESSION_ID", TTypeId.STRING_TYPE, Some("Kyuubi session identify")), + Column("SESSION_USER", TTypeId.STRING_TYPE, Some("Kyuubi session user")), + Column("SESSION_TYPE", TTypeId.STRING_TYPE, Some("Kyuubi session type"))) } } diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/RestClientTestHelper.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/RestClientTestHelper.scala index 1c78b9fa612..8344cdef01d 100644 --- a/kyuubi-server/src/test/scala/org/apache/kyuubi/RestClientTestHelper.scala +++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/RestClientTestHelper.scala @@ -48,7 +48,7 @@ trait RestClientTestHelper extends RestFrontendTestHelper with KerberizedTestHel UserGroupInformation.setConfiguration(config) assert(UserGroupInformation.isSecurityEnabled) - val conf = KyuubiConf().set(KyuubiConf.AUTHENTICATION_METHOD, Set("KERBEROS", "LDAP", "CUSTOM")) + val conf = 
KyuubiConf().set(KyuubiConf.AUTHENTICATION_METHOD, Seq("KERBEROS", "LDAP", "CUSTOM")) .set(KyuubiConf.SERVER_KEYTAB.key, testKeytab) .set(KyuubiConf.SERVER_PRINCIPAL, testPrincipal) .set(KyuubiConf.SERVER_SPNEGO_KEYTAB, testKeytab) diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/WithKyuubiServerAndHadoopMiniCluster.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/WithKyuubiServerAndHadoopMiniCluster.scala new file mode 100644 index 00000000000..bd11de08d43 --- /dev/null +++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/WithKyuubiServerAndHadoopMiniCluster.scala @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.kyuubi + +import java.io.File + +import org.apache.hadoop.yarn.conf.YarnConfiguration + +import org.apache.kyuubi.config.KyuubiConf +import org.apache.kyuubi.config.KyuubiConf.KYUUBI_ENGINE_ENV_PREFIX +import org.apache.kyuubi.server.{MiniDFSService, MiniYarnService} + +trait WithKyuubiServerAndHadoopMiniCluster extends KyuubiFunSuite with WithKyuubiServer { + + val kyuubiHome: String = Utils.getCodeSourceLocation(getClass).split("integration-tests").head + + override protected val conf: KyuubiConf = new KyuubiConf(false) + + private val hadoopConfDir: File = Utils.createTempDir().toFile + + protected var miniHdfsService: MiniDFSService = _ + + protected var miniYarnService: MiniYarnService = _ + + override def beforeAll(): Unit = { + miniHdfsService = new MiniDFSService() + miniHdfsService.initialize(conf) + miniHdfsService.start() + + miniYarnService = new MiniYarnService() + miniYarnService.initialize(conf) + miniYarnService.start() + + miniHdfsService.saveHadoopConf(hadoopConfDir) + miniYarnService.saveYarnConf(hadoopConfDir) + + conf.set(s"$KYUUBI_ENGINE_ENV_PREFIX.KYUUBI_HOME", kyuubiHome) + conf.set(s"$KYUUBI_ENGINE_ENV_PREFIX.HADOOP_CONF_DIR", hadoopConfDir.getAbsolutePath) + conf.set(s"$KYUUBI_ENGINE_ENV_PREFIX.YARN_CONF_DIR", hadoopConfDir.getAbsolutePath) + + super.beforeAll() + } + + override def afterAll(): Unit = { + super.afterAll() + if (miniYarnService != null) { + miniYarnService.stop() + miniYarnService = null + } + if (miniHdfsService != null) { + miniHdfsService.stop() + miniHdfsService = null + } + } + + def getYarnMaximumAllocationMb: Int = { + require(miniYarnService != null, "MiniYarnService is not initialized") + miniYarnService.getYarnConf.getInt(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB, 1024) + } +} diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/WithKyuubiServerOnYarn.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/WithKyuubiServerOnYarn.scala index 5a674d98fd0..8d3f7b17d8b 100644 --- a/kyuubi-server/src/test/scala/org/apache/kyuubi/WithKyuubiServerOnYarn.scala +++ 
b/kyuubi-server/src/test/scala/org/apache/kyuubi/WithKyuubiServerOnYarn.scala @@ -54,7 +54,7 @@ sealed trait WithKyuubiServerOnYarn extends WithKyuubiServer { miniYarnService = new MiniYarnService() miniYarnService.initialize(conf) miniYarnService.start() - conf.set(s"$KYUUBI_ENGINE_ENV_PREFIX.HADOOP_CONF_DIR", miniYarnService.getHadoopConfDir) + conf.set(s"$KYUUBI_ENGINE_ENV_PREFIX.HADOOP_CONF_DIR", miniYarnService.getYarnConfDir) super.beforeAll() } diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/EngineRefTests.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/EngineRefTests.scala index 08b36b84a73..4341c541584 100644 --- a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/EngineRefTests.scala +++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/EngineRefTests.scala @@ -22,6 +22,7 @@ import java.util.concurrent.Executors import scala.collection.JavaConverters._ +import org.scalatest.concurrent.PatienceConfiguration.Timeout import org.scalatest.time.SpanSugar.convertIntToGrainOfTime import org.apache.kyuubi.{KYUUBI_VERSION, Utils} @@ -341,4 +342,27 @@ trait EngineRefTests extends KyuubiFunSuite { val engine4 = new EngineRef(conf, user, PluginLoader.loadGroupProvider(conf), id, null) assert(engine4.subdomain.startsWith("engine-pool-")) } + + test("deregister engine with existing host port") { + val id = UUID.randomUUID().toString + conf.set(KyuubiConf.ENGINE_SHARE_LEVEL, USER.toString) + conf.set(KyuubiConf.ENGINE_TYPE, SPARK_SQL.toString) + conf.set(KyuubiConf.FRONTEND_THRIFT_BINARY_BIND_PORT, 0) + conf.set(HighAvailabilityConf.HA_NAMESPACE, "engine_test") + conf.set(HighAvailabilityConf.HA_ADDRESSES, getConnectString()) + conf.set(KyuubiConf.GROUP_PROVIDER, "hadoop") + + val engine = new EngineRef(conf, user, PluginLoader.loadGroupProvider(conf), id, null) + + DiscoveryClientProvider.withDiscoveryClient(conf) { client => + val hp = engine.getOrCreate(client) + assert(client.getServerHost(engine.engineSpace) == Option(hp)) + assert(!engine.deregister(client, ("non_existing_host", 0))._1) + assert(client.getServerHost(engine.engineSpace) == Option(hp)) + assert(engine.deregister(client, hp)._1) + eventually(Timeout(10.seconds)) { + assert(client.getServerHost(engine.engineSpace).isEmpty) + } + } + } } diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/hive/HiveYarnModeProcessBuilderSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/hive/HiveYarnModeProcessBuilderSuite.scala new file mode 100644 index 00000000000..7c896309c5a --- /dev/null +++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/hive/HiveYarnModeProcessBuilderSuite.scala @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.kyuubi.engine.hive + +import org.apache.kyuubi.KyuubiFunSuite +import org.apache.kyuubi.config.KyuubiConf +import org.apache.kyuubi.engine.deploy.yarn.EngineYarnModeSubmitter.{KYUUBI_ENGINE_DEPLOY_YARN_MODE_HADOOP_CONF_KEY, KYUUBI_ENGINE_DEPLOY_YARN_MODE_HIVE_CONF_KEY, KYUUBI_ENGINE_DEPLOY_YARN_MODE_YARN_CONF_KEY} +import org.apache.kyuubi.engine.hive.HiveProcessBuilder.HIVE_HADOOP_CLASSPATH_KEY + +class HiveYarnModeProcessBuilderSuite extends KyuubiFunSuite { + + test("hive yarn mode process builder") { + val conf = KyuubiConf().set("kyuubi.on", "off") + val builder = new HiveYarnModeProcessBuilder("kyuubi", conf, "") { + override def env: Map[String, String] = + super.env + ("HIVE_CONF_DIR" -> "/etc/hive/conf") + (HIVE_HADOOP_CLASSPATH_KEY -> "/hadoop") + } + val commands = builder.toString.split('\n') + assert(commands.head.contains("bin/java"), "wrong exec") + assert(builder.toString.contains("--conf kyuubi.session.user=kyuubi")) + assert(commands.exists(ss => ss.contains("kyuubi-hive-sql-engine")), "wrong classpath") + assert(builder.toString.contains("--conf kyuubi.on=off")) + assert(builder.toString.contains( + s"--conf $KYUUBI_ENGINE_DEPLOY_YARN_MODE_HIVE_CONF_KEY=/etc/hive/conf")) + } + + test("hadoop conf dir") { + val conf = KyuubiConf().set("kyuubi.on", "off") + val builder = new HiveYarnModeProcessBuilder("kyuubi", conf, "") { + override def env: Map[String, String] = + super.env + ("HADOOP_CONF_DIR" -> "/etc/hadoop/conf") + + (HIVE_HADOOP_CLASSPATH_KEY -> "/hadoop") + } + assert(builder.toString.contains( + s"--conf $KYUUBI_ENGINE_DEPLOY_YARN_MODE_HADOOP_CONF_KEY=/etc/hadoop/conf")) + } + + test("yarn conf dir") { + val conf = KyuubiConf().set("kyuubi.on", "off") + val builder = new HiveYarnModeProcessBuilder("kyuubi", conf, "") { + override def env: Map[String, String] = + super.env + ("YARN_CONF_DIR" -> "/etc/hadoop/yarn/conf") + + (HIVE_HADOOP_CLASSPATH_KEY -> "/hadoop") + } + assert(builder.toString.contains( + s"--conf $KYUUBI_ENGINE_DEPLOY_YARN_MODE_YARN_CONF_KEY=/etc/hadoop/yarn/conf")) + } +} diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/KyuubiOperationKerberosAndPlainAuthSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/KyuubiOperationKerberosAndPlainAuthSuite.scala index 1791b492e25..31cde639734 100644 --- a/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/KyuubiOperationKerberosAndPlainAuthSuite.scala +++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/KyuubiOperationKerberosAndPlainAuthSuite.scala @@ -64,7 +64,7 @@ class KyuubiOperationKerberosAndPlainAuthSuite extends WithKyuubiServer with Ker assert(UserGroupInformation.isSecurityEnabled) KyuubiConf() - .set(KyuubiConf.AUTHENTICATION_METHOD, Set("KERBEROS", "LDAP", "CUSTOM")) + .set(KyuubiConf.AUTHENTICATION_METHOD, Seq("KERBEROS", "LDAP", "CUSTOM")) .set(KyuubiConf.SERVER_KEYTAB, testKeytab) .set(KyuubiConf.SERVER_PRINCIPAL, testPrincipal) .set(KyuubiConf.AUTHENTICATION_LDAP_URL, ldapUrl) diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/parser/DescribeEngineSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/parser/DescribeEngineSuite.scala new file mode 100644 index 00000000000..1b11fb827ef --- /dev/null +++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/parser/DescribeEngineSuite.scala @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.kyuubi.operation.parser + +class DescribeEngineSuite extends ExecutedCommandExecSuite { + + test("desc/describe kyuubi engine") { + Seq("DESC", "DESCRIBE").foreach { desc => + withJdbcStatement() { statement => + val resultSet = statement.executeQuery(s"KYUUBI $desc ENGINE") + assert(resultSet.next()) + + assert(resultSet.getMetaData.getColumnCount == 6) + assert(resultSet.getMetaData.getColumnName(1) == "ENGINE_ID") + assert(resultSet.getMetaData.getColumnName(2) == "ENGINE_NAME") + assert(resultSet.getMetaData.getColumnName(3) == "ENGINE_URL") + assert(resultSet.getMetaData.getColumnName(4) == "ENGINE_INSTANCE") + assert(resultSet.getMetaData.getColumnName(5) == "ENGINE_VERSION") + assert(resultSet.getMetaData.getColumnName(6) == "ENGINE_ATTRIBUTES") + } + } + } +} diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/parser/DescribeSessionSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/parser/DescribeSessionSuite.scala index e6f42ed6231..b4eb0893d85 100644 --- a/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/parser/DescribeSessionSuite.scala +++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/parser/DescribeSessionSuite.scala @@ -19,15 +19,17 @@ package org.apache.kyuubi.operation.parser class DescribeSessionSuite extends ExecutedCommandExecSuite { - test("desc kyuubi session") { - withJdbcStatement() { statement => - val resultSet = statement.executeQuery("KYUUBI DESC SESSION") - assert(resultSet.next()) + test("desc/describe kyuubi session") { + Seq("DESC", "DESCRIBE").foreach { desc => + withJdbcStatement() { statement => + val resultSet = statement.executeQuery(s"KYUUBI $desc SESSION") + assert(resultSet.next()) - assert(resultSet.getMetaData.getColumnCount == 3) - assert(resultSet.getMetaData.getColumnName(1) == "id") - assert(resultSet.getMetaData.getColumnName(2) == "user") - assert(resultSet.getMetaData.getColumnName(3) == "type") + assert(resultSet.getMetaData.getColumnCount == 3) + assert(resultSet.getMetaData.getColumnName(1) == "SESSION_ID") + assert(resultSet.getMetaData.getColumnName(2) == "SESSION_USER") + assert(resultSet.getMetaData.getColumnName(3) == "SESSION_TYPE") + } } } } diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/thrift/http/KyuubiOperationThriftHttpKerberosAndPlainAuthSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/thrift/http/KyuubiOperationThriftHttpKerberosAndPlainAuthSuite.scala index cee43bf5cf1..941e121a6cd 100644 --- a/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/thrift/http/KyuubiOperationThriftHttpKerberosAndPlainAuthSuite.scala +++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/thrift/http/KyuubiOperationThriftHttpKerberosAndPlainAuthSuite.scala @@ -49,7 +49,7 @@ class 
KyuubiOperationThriftHttpKerberosAndPlainAuthSuite UserGroupInformation.setConfiguration(config) assert(UserGroupInformation.isSecurityEnabled) - KyuubiConf().set(KyuubiConf.AUTHENTICATION_METHOD, Set("KERBEROS", "LDAP", "CUSTOM")) + KyuubiConf().set(KyuubiConf.AUTHENTICATION_METHOD, Seq("KERBEROS", "LDAP", "CUSTOM")) .set(KyuubiConf.SERVER_KEYTAB, testKeytab) .set(KyuubiConf.SERVER_PRINCIPAL, testPrincipal) .set(KyuubiConf.AUTHENTICATION_LDAP_URL, ldapUrl) diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/parser/KyuubiParserSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/parser/KyuubiParserSuite.scala index 6858ea0c041..b2ddfdfcdfa 100644 --- a/kyuubi-server/src/test/scala/org/apache/kyuubi/parser/KyuubiParserSuite.scala +++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/parser/KyuubiParserSuite.scala @@ -20,7 +20,7 @@ package org.apache.kyuubi.parser import org.apache.kyuubi.KyuubiFunSuite import org.apache.kyuubi.sql.parser.server.KyuubiParser import org.apache.kyuubi.sql.plan.PassThroughNode -import org.apache.kyuubi.sql.plan.command.DescribeSession +import org.apache.kyuubi.sql.plan.command.{DescribeEngine, DescribeSession} class KyuubiParserSuite extends KyuubiFunSuite { @@ -51,4 +51,11 @@ class KyuubiParserSuite extends KyuubiFunSuite { assert(node.isInstanceOf[DescribeSession]) assert(node.name() == "Describe Session Node") } + + test("Describe session engine") { + val node = parser.parsePlan("KYUUBI DESC ENGINE") + + assert(node.isInstanceOf[DescribeEngine]) + assert(node.name() == "Describe Engine Node") + } } diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/KyuubiRestFrontendServiceSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/KyuubiRestFrontendServiceSuite.scala index 20dd863f9cd..b60517a06d8 100644 --- a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/KyuubiRestFrontendServiceSuite.scala +++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/KyuubiRestFrontendServiceSuite.scala @@ -19,9 +19,15 @@ package org.apache.kyuubi.server import org.apache.kyuubi.{KYUUBI_VERSION, RestFrontendTestHelper} import org.apache.kyuubi.client.api.v1.dto.VersionInfo +import org.apache.kyuubi.config.KyuubiConf +import org.apache.kyuubi.config.KyuubiConf._ +import org.apache.kyuubi.service.authentication.AnonymousAuthenticationProviderImpl class KyuubiRestFrontendServiceSuite extends RestFrontendTestHelper { + override protected lazy val conf: KyuubiConf = KyuubiConf() + .set(AUTHENTICATION_METHOD, Seq("NONE")) + test("version") { val resp = v1Call("version") assert(resp.readEntity(classOf[VersionInfo]).getVersion === KYUUBI_VERSION) @@ -51,3 +57,36 @@ class KyuubiRestFrontendServiceSuite extends RestFrontendTestHelper { assert(resp.getStatus === 200) } } + +class KerberosKyuubiRestFrontendServiceSuite extends RestFrontendTestHelper { + + override protected lazy val conf: KyuubiConf = KyuubiConf() + .set(AUTHENTICATION_METHOD, Seq("KERBEROS")) + .set(AUTHENTICATION_CUSTOM_CLASS, classOf[AnonymousAuthenticationProviderImpl].getName) + + test("security enabled - KERBEROS") { + assert(fe.asInstanceOf[KyuubiRestFrontendService].securityEnabled === true) + } +} + +class NoneKyuubiRestFrontendServiceSuite extends RestFrontendTestHelper { + + override protected lazy val conf: KyuubiConf = KyuubiConf() + .set(AUTHENTICATION_METHOD, Seq("NONE")) + .set(AUTHENTICATION_CUSTOM_CLASS, classOf[AnonymousAuthenticationProviderImpl].getName) + + test("security enabled - NONE") { + 
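+    // With only NONE configured, kerberosEnabled is false and the effective plain auth type is
+    // NONE, so the new securityEnabled check is expected to report security as disabled here.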
assert(fe.asInstanceOf[KyuubiRestFrontendService].securityEnabled === false) + } +} + +class KerberosAndCustomKyuubiRestFrontendServiceSuite extends RestFrontendTestHelper { + + override protected lazy val conf: KyuubiConf = KyuubiConf() + .set(AUTHENTICATION_METHOD, Seq("KERBEROS,CUSTOM")) + .set(AUTHENTICATION_CUSTOM_CLASS, classOf[AnonymousAuthenticationProviderImpl].getName) + + test("security enabled - KERBEROS,CUSTOM") { + assert(fe.asInstanceOf[KyuubiRestFrontendService].securityEnabled === true) + } +} diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/MiniDFSService.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/MiniDFSService.scala index caacbb6bf39..dbc20be8796 100644 --- a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/MiniDFSService.scala +++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/MiniDFSService.scala @@ -60,7 +60,7 @@ class MiniDFSService(name: String, hdfsConf: Configuration) s"NameNode address in configuration is " + s"${hdfsConf.get(HdfsClientConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY)}") super.start() - saveHadoopConf() + saveHadoopConf(hadoopConfDir) } override def stop(): Unit = { @@ -68,7 +68,7 @@ class MiniDFSService(name: String, hdfsConf: Configuration) super.stop() } - private def saveHadoopConf(): Unit = { + def saveHadoopConf(hadoopConfDir: File): Unit = { val configToWrite = new Configuration(false) val hostName = InetAddress.getLocalHost.getHostName hdfsConf.iterator().asScala.foreach { kv => diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/MiniYarnService.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/MiniYarnService.scala index 68a175efc4e..deaeae3bed1 100644 --- a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/MiniYarnService.scala +++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/MiniYarnService.scala @@ -33,7 +33,7 @@ import org.apache.kyuubi.service.AbstractService class MiniYarnService extends AbstractService("TestMiniYarnService") { - private val hadoopConfDir: File = Utils.createTempDir().toFile + private val yarnConfDir: File = Utils.createTempDir().toFile private var yarnConf: YarnConfiguration = { val yarnConfig = new YarnConfiguration() // Disable the disk utilization check to avoid the test hanging when people's disks are @@ -82,7 +82,7 @@ class MiniYarnService extends AbstractService("TestMiniYarnService") { override def start(): Unit = { yarnCluster.start() - saveHadoopConf() + saveYarnConf(yarnConfDir) super.start() } @@ -91,7 +91,7 @@ class MiniYarnService extends AbstractService("TestMiniYarnService") { super.stop() } - private def saveHadoopConf(): Unit = { + def saveYarnConf(yarnConfDir: File): Unit = { val configToWrite = new Configuration(false) val hostName = InetAddress.getLocalHost.getHostName yarnCluster.getConfig.iterator().asScala.foreach { kv => @@ -100,10 +100,12 @@ class MiniYarnService extends AbstractService("TestMiniYarnService") { configToWrite.set(key, value) getConf.set(key, value) } - val writer = new FileWriter(new File(hadoopConfDir, "yarn-site.xml")) + val writer = new FileWriter(new File(yarnConfDir, "yarn-site.xml")) configToWrite.writeXml(writer) writer.close() } - def getHadoopConfDir: String = hadoopConfDir.getAbsolutePath + def getYarnConfDir: String = yarnConfDir.getAbsolutePath + + def getYarnConf: YarnConfiguration = yarnConf } diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/api/v1/AdminResourceSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/api/v1/AdminResourceSuite.scala 
index 0951d82727c..c69a97cefd6 100644 --- a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/api/v1/AdminResourceSuite.scala +++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/api/v1/AdminResourceSuite.scala @@ -51,7 +51,7 @@ class AdminResourceSuite extends KyuubiFunSuite with RestFrontendTestHelper { private val engineMgr = new KyuubiApplicationManager() override protected lazy val conf: KyuubiConf = KyuubiConf() - .set(AUTHENTICATION_METHOD, Set("CUSTOM")) + .set(AUTHENTICATION_METHOD, Seq("CUSTOM")) .set(AUTHENTICATION_CUSTOM_CLASS, classOf[AnonymousAuthenticationProviderImpl].getName) .set(SERVER_ADMINISTRATORS, Set("admin001")) .set(ENGINE_IDLE_TIMEOUT, Duration.ofMinutes(3).toMillis) diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/api/v1/BatchesResourceSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/api/v1/BatchesResourceSuite.scala index 6ae2bd04063..f1ee71bec9f 100644 --- a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/api/v1/BatchesResourceSuite.scala +++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/api/v1/BatchesResourceSuite.scala @@ -45,7 +45,7 @@ import org.apache.kyuubi.operation.OperationState.OperationState import org.apache.kyuubi.server.{KyuubiBatchService, KyuubiRestFrontendService} import org.apache.kyuubi.server.http.util.HttpAuthUtils.{basicAuthorizationHeader, AUTHORIZATION_HEADER} import org.apache.kyuubi.server.metadata.api.{Metadata, MetadataFilter} -import org.apache.kyuubi.service.authentication.{AnonymousAuthenticationProviderImpl, KyuubiAuthenticationFactory} +import org.apache.kyuubi.service.authentication.{AnonymousAuthenticationProviderImpl, AuthUtils} import org.apache.kyuubi.session.{KyuubiBatchSession, KyuubiSessionManager, SessionHandle, SessionType} import org.apache.kyuubi.shaded.hive.service.rpc.thrift.TProtocolVersion @@ -85,7 +85,7 @@ abstract class BatchesResourceSuiteBase extends KyuubiFunSuite override protected lazy val conf: KyuubiConf = { val testResourceDir = Paths.get(sparkBatchTestResource.get).getParent val kyuubiConf = KyuubiConf() - .set(AUTHENTICATION_METHOD, Set("CUSTOM")) + .set(AUTHENTICATION_METHOD, Seq("CUSTOM")) .set(AUTHENTICATION_CUSTOM_CLASS, classOf[AnonymousAuthenticationProviderImpl].getName) .set(SERVER_ADMINISTRATORS, Set("admin")) .set(BATCH_IMPL_VERSION, batchVersion) @@ -130,7 +130,7 @@ abstract class BatchesResourceSuiteBase extends KyuubiFunSuite assert(batch.getEndTime === 0) requestObj.setConf((requestObj.getConf.asScala ++ - Map(KyuubiAuthenticationFactory.HS2_PROXY_USER -> "root")).asJava) + Map(AuthUtils.HS2_PROXY_USER -> "root")).asJava) val proxyUserRequest = requestObj val proxyUserResponse = webTarget.path("api/v1/batches") .request(MediaType.APPLICATION_JSON_TYPE) @@ -856,7 +856,7 @@ abstract class BatchesResourceSuiteBase extends KyuubiFunSuite conf += (PROXY_USER.key -> username) } hs2ProxyUser.map { username => - conf += (KyuubiAuthenticationFactory.HS2_PROXY_USER -> username) + conf += (AuthUtils.HS2_PROXY_USER -> username) } val proxyUserRequest = newSparkBatchRequest(conf.toMap) diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/AdminCtlSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/AdminCtlSuite.scala index 32bb6fbb152..986b171c142 100644 --- a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/AdminCtlSuite.scala +++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/AdminCtlSuite.scala @@ -56,7 +56,7 @@ class AdminCtlSuite extends 
RestClientTestHelper with TestPrematureExit { val id = UUID.randomUUID().toString conf.set(HighAvailabilityConf.HA_NAMESPACE, "kyuubi_test") conf.set(KyuubiConf.ENGINE_IDLE_TIMEOUT, 180000L) - conf.set(KyuubiConf.AUTHENTICATION_METHOD, Set("LDAP", "CUSTOM")) + conf.set(KyuubiConf.AUTHENTICATION_METHOD, Seq("LDAP", "CUSTOM")) conf.set(KyuubiConf.GROUP_PROVIDER, "hadoop") val user = ldapUser diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/AdminRestApiSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/AdminRestApiSuite.scala index c8f1d68e67e..4a05dc079c0 100644 --- a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/AdminRestApiSuite.scala +++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/AdminRestApiSuite.scala @@ -50,7 +50,7 @@ class AdminRestApiSuite extends RestClientTestHelper { val id = UUID.randomUUID().toString conf.set(HighAvailabilityConf.HA_NAMESPACE, "kyuubi_test") conf.set(KyuubiConf.ENGINE_IDLE_TIMEOUT, 180000L) - conf.set(KyuubiConf.AUTHENTICATION_METHOD, Set("LDAP", "CUSTOM")) + conf.set(KyuubiConf.AUTHENTICATION_METHOD, Seq("LDAP", "CUSTOM")) conf.set(KyuubiConf.GROUP_PROVIDER, "hadoop") val user = ldapUser val engine = new EngineRef(conf.clone, user, PluginLoader.loadGroupProvider(conf), id, null) diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/trino/api/TrinoClientApiSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/trino/api/TrinoClientApiSuite.scala index 478bf917463..1e9170f58bf 100644 --- a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/trino/api/TrinoClientApiSuite.scala +++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/trino/api/TrinoClientApiSuite.scala @@ -19,7 +19,7 @@ package org.apache.kyuubi.server.trino.api import java.net.URI import java.time.ZoneId -import java.util.{Collections, Locale, Optional} +import java.util.{Locale, Optional} import java.util.concurrent.TimeUnit import java.util.concurrent.atomic.AtomicReference @@ -74,13 +74,13 @@ class TrinoClientApiSuite extends KyuubiFunSuite with TrinoRestFrontendTestHelpe // update catalog and schema if (trino.getSetCatalog.isPresent || trino.getSetSchema.isPresent) { builder = builder - .withCatalog(trino.getSetCatalog.orElse(session.getCatalog)) - .withSchema(trino.getSetSchema.orElse(session.getSchema)) + .catalog(trino.getSetCatalog.orElse(session.getCatalog)) + .schema(trino.getSetSchema.orElse(session.getSchema)) } // update path if present if (trino.getSetPath.isPresent) { - builder = builder.withPath(trino.getSetPath.get) + builder = builder.path(trino.getSetPath.get) } // update session properties if present @@ -88,7 +88,7 @@ class TrinoClientApiSuite extends KyuubiFunSuite with TrinoRestFrontendTestHelpe val properties = session.getProperties.asScala.clone() properties ++= trino.getSetSessionProperties.asScala properties --= trino.getResetSessionProperties.asScala - builder = builder.withProperties(properties.asJava) + builder = builder.properties(properties.asJava) } clientSession.set(builder.build()) } @@ -123,32 +123,26 @@ class TrinoClientApiSuite extends KyuubiFunSuite with TrinoRestFrontendTestHelpe } private def createTestClientSession(connectUrl: URI): ClientSession = { - new ClientSession( - connectUrl, - "kyuubi_test", - Optional.of("test_user"), - "kyuubi", - Optional.of("test_token_tracing"), - Set[String]().asJava, - "test_client_info", - "test_catalog", - "test_schema", - null, - ZoneId.systemDefault(), - Locale.getDefault, - 
Collections.emptyMap(), - Map[String, String]( + ClientSession.builder() + .server(connectUrl) + .principal(Optional.of("kyuubi_test")) + .user(Optional.of("test_user")) + .source("kyuubi") + .traceToken(Optional.of("test_token_tracing")) + .clientInfo("test_client_info") + .catalog("test_catalog") + .schema("test_schema") + .timeZone(ZoneId.systemDefault()) + .locale(Locale.getDefault) + .properties(Map[String, String]( "test_property_key0" -> "test_property_value0", - "test_property_key1" -> "test_propert_value1").asJava, - Map[String, String]( + "test_property_key1" -> "test_propert_value1").asJava) + .preparedStatements(Map[String, String]( "test_statement_key0" -> "select 1", - "test_statement_key1" -> "select 2").asJava, - Collections.emptyMap(), - Collections.emptyMap(), - null, - new Duration(2, TimeUnit.MINUTES), - true) - + "test_statement_key1" -> "select 2").asJava) + .clientRequestTimeout(new Duration(2, TimeUnit.MINUTES)) + .compressionDisabled(true) + .build() } } diff --git a/kyuubi-server/web-ui/package-lock.json b/kyuubi-server/web-ui/package-lock.json index 3dab868017f..71b4e574a20 100644 --- a/kyuubi-server/web-ui/package-lock.json +++ b/kyuubi-server/web-ui/package-lock.json @@ -39,7 +39,7 @@ "prettier": "^2.7.1", "sass": "^1.54.4", "typescript": "^4.6.4", - "vite": "^4.2.3", + "vite": "^4.5.2", "vitest": "^0.32.0", "vue-tsc": "^0.38.4" } @@ -102,9 +102,9 @@ } }, "node_modules/@esbuild/android-arm": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.17.19.tgz", - "integrity": "sha512-rIKddzqhmav7MSmoFCmDIb6e2W57geRsM94gV2l38fzhXMwq7hZoClug9USI2pFRGL06f4IOPHHpFNOkWieR8A==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.18.20.tgz", + "integrity": "sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw==", "cpu": [ "arm" ], @@ -118,9 +118,9 @@ } }, "node_modules/@esbuild/android-arm64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.17.19.tgz", - "integrity": "sha512-KBMWvEZooR7+kzY0BtbTQn0OAYY7CsiydT63pVEaPtVYF0hXbUaOyZog37DKxK7NF3XacBJOpYT4adIJh+avxA==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.18.20.tgz", + "integrity": "sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ==", "cpu": [ "arm64" ], @@ -134,9 +134,9 @@ } }, "node_modules/@esbuild/android-x64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.17.19.tgz", - "integrity": "sha512-uUTTc4xGNDT7YSArp/zbtmbhO0uEEK9/ETW29Wk1thYUJBz3IVnvgEiEwEa9IeLyvnpKrWK64Utw2bgUmDveww==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.18.20.tgz", + "integrity": "sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg==", "cpu": [ "x64" ], @@ -150,9 +150,9 @@ } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.17.19.tgz", - "integrity": "sha512-80wEoCfF/hFKM6WE1FyBHc9SfUblloAWx6FJkFWTWiCoht9Mc0ARGEM47e67W9rI09YoUxJL68WHfDRYEAvOhg==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.18.20.tgz", + "integrity": "sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA==", "cpu": [ 
"arm64" ], @@ -166,9 +166,9 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.17.19.tgz", - "integrity": "sha512-IJM4JJsLhRYr9xdtLytPLSH9k/oxR3boaUIYiHkAawtwNOXKE8KoU8tMvryogdcT8AU+Bflmh81Xn6Q0vTZbQw==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.18.20.tgz", + "integrity": "sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ==", "cpu": [ "x64" ], @@ -182,9 +182,9 @@ } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.17.19.tgz", - "integrity": "sha512-pBwbc7DufluUeGdjSU5Si+P3SoMF5DQ/F/UmTSb8HXO80ZEAJmrykPyzo1IfNbAoaqw48YRpv8shwd1NoI0jcQ==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.18.20.tgz", + "integrity": "sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw==", "cpu": [ "arm64" ], @@ -198,9 +198,9 @@ } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.17.19.tgz", - "integrity": "sha512-4lu+n8Wk0XlajEhbEffdy2xy53dpR06SlzvhGByyg36qJw6Kpfk7cp45DR/62aPH9mtJRmIyrXAS5UWBrJT6TQ==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.18.20.tgz", + "integrity": "sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ==", "cpu": [ "x64" ], @@ -214,9 +214,9 @@ } }, "node_modules/@esbuild/linux-arm": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.17.19.tgz", - "integrity": "sha512-cdmT3KxjlOQ/gZ2cjfrQOtmhG4HJs6hhvm3mWSRDPtZ/lP5oe8FWceS10JaSJC13GBd4eH/haHnqf7hhGNLerA==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.18.20.tgz", + "integrity": "sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg==", "cpu": [ "arm" ], @@ -230,9 +230,9 @@ } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.17.19.tgz", - "integrity": "sha512-ct1Tg3WGwd3P+oZYqic+YZF4snNl2bsnMKRkb3ozHmnM0dGWuxcPTTntAF6bOP0Sp4x0PjSF+4uHQ1xvxfRKqg==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.18.20.tgz", + "integrity": "sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA==", "cpu": [ "arm64" ], @@ -246,9 +246,9 @@ } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.17.19.tgz", - "integrity": "sha512-w4IRhSy1VbsNxHRQpeGCHEmibqdTUx61Vc38APcsRbuVgK0OPEnQ0YD39Brymn96mOx48Y2laBQGqgZ0j9w6SQ==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.18.20.tgz", + "integrity": "sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA==", "cpu": [ "ia32" ], @@ -262,9 +262,9 @@ } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.17.19.tgz", - "integrity": "sha512-2iAngUbBPMq439a+z//gE+9WBldoMp1s5GWsUSgqHLzLJ9WoZLZhpwWuym0u0u/4XmZ3gpHmzV84PonE+9IIdQ==", + 
"version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.18.20.tgz", + "integrity": "sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg==", "cpu": [ "loong64" ], @@ -278,9 +278,9 @@ } }, "node_modules/@esbuild/linux-mips64el": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.17.19.tgz", - "integrity": "sha512-LKJltc4LVdMKHsrFe4MGNPp0hqDFA1Wpt3jE1gEyM3nKUvOiO//9PheZZHfYRfYl6AwdTH4aTcXSqBerX0ml4A==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.18.20.tgz", + "integrity": "sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ==", "cpu": [ "mips64el" ], @@ -294,9 +294,9 @@ } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.17.19.tgz", - "integrity": "sha512-/c/DGybs95WXNS8y3Ti/ytqETiW7EU44MEKuCAcpPto3YjQbyK3IQVKfF6nbghD7EcLUGl0NbiL5Rt5DMhn5tg==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.18.20.tgz", + "integrity": "sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA==", "cpu": [ "ppc64" ], @@ -310,9 +310,9 @@ } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.17.19.tgz", - "integrity": "sha512-FC3nUAWhvFoutlhAkgHf8f5HwFWUL6bYdvLc/TTuxKlvLi3+pPzdZiFKSWz/PF30TB1K19SuCxDTI5KcqASJqA==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.18.20.tgz", + "integrity": "sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A==", "cpu": [ "riscv64" ], @@ -326,9 +326,9 @@ } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.17.19.tgz", - "integrity": "sha512-IbFsFbxMWLuKEbH+7sTkKzL6NJmG2vRyy6K7JJo55w+8xDk7RElYn6xvXtDW8HCfoKBFK69f3pgBJSUSQPr+4Q==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.18.20.tgz", + "integrity": "sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ==", "cpu": [ "s390x" ], @@ -342,9 +342,9 @@ } }, "node_modules/@esbuild/linux-x64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.17.19.tgz", - "integrity": "sha512-68ngA9lg2H6zkZcyp22tsVt38mlhWde8l3eJLWkyLrp4HwMUr3c1s/M2t7+kHIhvMjglIBrFpncX1SzMckomGw==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.18.20.tgz", + "integrity": "sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w==", "cpu": [ "x64" ], @@ -358,9 +358,9 @@ } }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.17.19.tgz", - "integrity": "sha512-CwFq42rXCR8TYIjIfpXCbRX0rp1jo6cPIUPSaWwzbVI4aOfX96OXY8M6KNmtPcg7QjYeDmN+DD0Wp3LaBOLf4Q==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.18.20.tgz", + "integrity": "sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A==", "cpu": [ "x64" ], @@ -374,9 +374,9 @@ } }, 
"node_modules/@esbuild/openbsd-x64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.17.19.tgz", - "integrity": "sha512-cnq5brJYrSZ2CF6c35eCmviIN3k3RczmHz8eYaVlNasVqsNY+JKohZU5MKmaOI+KkllCdzOKKdPs762VCPC20g==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.18.20.tgz", + "integrity": "sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg==", "cpu": [ "x64" ], @@ -390,9 +390,9 @@ } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.17.19.tgz", - "integrity": "sha512-vCRT7yP3zX+bKWFeP/zdS6SqdWB8OIpaRq/mbXQxTGHnIxspRtigpkUcDMlSCOejlHowLqII7K2JKevwyRP2rg==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.18.20.tgz", + "integrity": "sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ==", "cpu": [ "x64" ], @@ -406,9 +406,9 @@ } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.17.19.tgz", - "integrity": "sha512-yYx+8jwowUstVdorcMdNlzklLYhPxjniHWFKgRqH7IFlUEa0Umu3KuYplf1HUZZ422e3NU9F4LGb+4O0Kdcaag==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.18.20.tgz", + "integrity": "sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg==", "cpu": [ "arm64" ], @@ -422,9 +422,9 @@ } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.17.19.tgz", - "integrity": "sha512-eggDKanJszUtCdlVs0RB+h35wNlb5v4TWEkq4vZcmVt5u/HiDZrTXe2bWFQUez3RgNHwx/x4sk5++4NSSicKkw==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.18.20.tgz", + "integrity": "sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g==", "cpu": [ "ia32" ], @@ -438,9 +438,9 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.17.19.tgz", - "integrity": "sha512-lAhycmKnVOuRYNtRtatQR1LPQf2oYCkRGkSFnseDAKPl8lu5SOsK/e1sXe5a0Pc5kHIHe6P2I/ilntNv2xf3cA==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.18.20.tgz", + "integrity": "sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ==", "cpu": [ "x64" ], @@ -1985,9 +1985,9 @@ } }, "node_modules/esbuild": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.17.19.tgz", - "integrity": "sha512-XQ0jAPFkK/u3LcVRcvVHQcTIqD6E2H1fvZMA5dQPSOWb3suUbWbfbRf94pjc0bNzRYLfIrDRQXr7X+LHIm5oHw==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.18.20.tgz", + "integrity": "sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA==", "dev": true, "hasInstallScript": true, "bin": { @@ -1997,28 +1997,28 @@ "node": ">=12" }, "optionalDependencies": { - "@esbuild/android-arm": "0.17.19", - "@esbuild/android-arm64": "0.17.19", - "@esbuild/android-x64": "0.17.19", - "@esbuild/darwin-arm64": "0.17.19", - "@esbuild/darwin-x64": "0.17.19", - "@esbuild/freebsd-arm64": "0.17.19", - "@esbuild/freebsd-x64": "0.17.19", - "@esbuild/linux-arm": "0.17.19", - 
"@esbuild/linux-arm64": "0.17.19", - "@esbuild/linux-ia32": "0.17.19", - "@esbuild/linux-loong64": "0.17.19", - "@esbuild/linux-mips64el": "0.17.19", - "@esbuild/linux-ppc64": "0.17.19", - "@esbuild/linux-riscv64": "0.17.19", - "@esbuild/linux-s390x": "0.17.19", - "@esbuild/linux-x64": "0.17.19", - "@esbuild/netbsd-x64": "0.17.19", - "@esbuild/openbsd-x64": "0.17.19", - "@esbuild/sunos-x64": "0.17.19", - "@esbuild/win32-arm64": "0.17.19", - "@esbuild/win32-ia32": "0.17.19", - "@esbuild/win32-x64": "0.17.19" + "@esbuild/android-arm": "0.18.20", + "@esbuild/android-arm64": "0.18.20", + "@esbuild/android-x64": "0.18.20", + "@esbuild/darwin-arm64": "0.18.20", + "@esbuild/darwin-x64": "0.18.20", + "@esbuild/freebsd-arm64": "0.18.20", + "@esbuild/freebsd-x64": "0.18.20", + "@esbuild/linux-arm": "0.18.20", + "@esbuild/linux-arm64": "0.18.20", + "@esbuild/linux-ia32": "0.18.20", + "@esbuild/linux-loong64": "0.18.20", + "@esbuild/linux-mips64el": "0.18.20", + "@esbuild/linux-ppc64": "0.18.20", + "@esbuild/linux-riscv64": "0.18.20", + "@esbuild/linux-s390x": "0.18.20", + "@esbuild/linux-x64": "0.18.20", + "@esbuild/netbsd-x64": "0.18.20", + "@esbuild/openbsd-x64": "0.18.20", + "@esbuild/sunos-x64": "0.18.20", + "@esbuild/win32-arm64": "0.18.20", + "@esbuild/win32-ia32": "0.18.20", + "@esbuild/win32-x64": "0.18.20" } }, "node_modules/escape-html": { @@ -2517,9 +2517,9 @@ "dev": true }, "node_modules/follow-redirects": { - "version": "1.15.1", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.1.tgz", - "integrity": "sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA==", + "version": "1.15.4", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz", + "integrity": "sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==", "funding": [ { "type": "individual", @@ -3746,9 +3746,9 @@ } }, "node_modules/rollup": { - "version": "3.24.0", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.24.0.tgz", - "integrity": "sha512-OgraHOIg2YpHQTjl0/ymWfFNBEyPucB7lmhXrQUh38qNOegxLapSPFs9sNr0qKR75awW41D93XafoR2QfhBdUQ==", + "version": "3.29.4", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.29.4.tgz", + "integrity": "sha512-oWzmBZwvYrU0iJHtDmhsm662rC15FRXmcjCk1xD771dFDx5jJ02ufAQQTn0etB2emNk4J9EZg/yWKpsn9BWGRw==", "dev": true, "bin": { "rollup": "dist/bin/rollup" @@ -4201,14 +4201,14 @@ } }, "node_modules/vite": { - "version": "4.3.9", - "resolved": "https://registry.npmjs.org/vite/-/vite-4.3.9.tgz", - "integrity": "sha512-qsTNZjO9NoJNW7KnOrgYwczm0WctJ8m/yqYAMAK9Lxt4SoySUfS5S8ia9K7JHpa3KEeMfyF8LoJ3c5NeBJy6pg==", + "version": "4.5.2", + "resolved": "https://registry.npmjs.org/vite/-/vite-4.5.2.tgz", + "integrity": "sha512-tBCZBNSBbHQkaGyhGCDUGqeo2ph8Fstyp6FMSvTtsXeZSPpSMGlviAOav2hxVTqFcx8Hj/twtWKsMJXNY0xI8w==", "dev": true, "dependencies": { - "esbuild": "^0.17.5", - "postcss": "^8.4.23", - "rollup": "^3.21.0" + "esbuild": "^0.18.10", + "postcss": "^8.4.27", + "rollup": "^3.27.1" }, "bin": { "vite": "bin/vite.js" @@ -4216,12 +4216,16 @@ "engines": { "node": "^14.18.0 || >=16.0.0" }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, "optionalDependencies": { "fsevents": "~2.3.2" }, "peerDependencies": { "@types/node": ">= 14", "less": "*", + "lightningcss": "^1.21.0", "sass": "*", "stylus": "*", "sugarss": "*", @@ -4234,6 +4238,9 @@ "less": { "optional": true }, + "lightningcss": { + "optional": true + }, "sass": { "optional": true 
}, @@ -4678,156 +4685,156 @@ "requires": {} }, "@esbuild/android-arm": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.17.19.tgz", - "integrity": "sha512-rIKddzqhmav7MSmoFCmDIb6e2W57geRsM94gV2l38fzhXMwq7hZoClug9USI2pFRGL06f4IOPHHpFNOkWieR8A==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.18.20.tgz", + "integrity": "sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw==", "dev": true, "optional": true }, "@esbuild/android-arm64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.17.19.tgz", - "integrity": "sha512-KBMWvEZooR7+kzY0BtbTQn0OAYY7CsiydT63pVEaPtVYF0hXbUaOyZog37DKxK7NF3XacBJOpYT4adIJh+avxA==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.18.20.tgz", + "integrity": "sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ==", "dev": true, "optional": true }, "@esbuild/android-x64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.17.19.tgz", - "integrity": "sha512-uUTTc4xGNDT7YSArp/zbtmbhO0uEEK9/ETW29Wk1thYUJBz3IVnvgEiEwEa9IeLyvnpKrWK64Utw2bgUmDveww==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.18.20.tgz", + "integrity": "sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg==", "dev": true, "optional": true }, "@esbuild/darwin-arm64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.17.19.tgz", - "integrity": "sha512-80wEoCfF/hFKM6WE1FyBHc9SfUblloAWx6FJkFWTWiCoht9Mc0ARGEM47e67W9rI09YoUxJL68WHfDRYEAvOhg==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.18.20.tgz", + "integrity": "sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA==", "dev": true, "optional": true }, "@esbuild/darwin-x64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.17.19.tgz", - "integrity": "sha512-IJM4JJsLhRYr9xdtLytPLSH9k/oxR3boaUIYiHkAawtwNOXKE8KoU8tMvryogdcT8AU+Bflmh81Xn6Q0vTZbQw==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.18.20.tgz", + "integrity": "sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ==", "dev": true, "optional": true }, "@esbuild/freebsd-arm64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.17.19.tgz", - "integrity": "sha512-pBwbc7DufluUeGdjSU5Si+P3SoMF5DQ/F/UmTSb8HXO80ZEAJmrykPyzo1IfNbAoaqw48YRpv8shwd1NoI0jcQ==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.18.20.tgz", + "integrity": "sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw==", "dev": true, "optional": true }, "@esbuild/freebsd-x64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.17.19.tgz", - "integrity": "sha512-4lu+n8Wk0XlajEhbEffdy2xy53dpR06SlzvhGByyg36qJw6Kpfk7cp45DR/62aPH9mtJRmIyrXAS5UWBrJT6TQ==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.18.20.tgz", + "integrity": 
"sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ==", "dev": true, "optional": true }, "@esbuild/linux-arm": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.17.19.tgz", - "integrity": "sha512-cdmT3KxjlOQ/gZ2cjfrQOtmhG4HJs6hhvm3mWSRDPtZ/lP5oe8FWceS10JaSJC13GBd4eH/haHnqf7hhGNLerA==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.18.20.tgz", + "integrity": "sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg==", "dev": true, "optional": true }, "@esbuild/linux-arm64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.17.19.tgz", - "integrity": "sha512-ct1Tg3WGwd3P+oZYqic+YZF4snNl2bsnMKRkb3ozHmnM0dGWuxcPTTntAF6bOP0Sp4x0PjSF+4uHQ1xvxfRKqg==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.18.20.tgz", + "integrity": "sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA==", "dev": true, "optional": true }, "@esbuild/linux-ia32": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.17.19.tgz", - "integrity": "sha512-w4IRhSy1VbsNxHRQpeGCHEmibqdTUx61Vc38APcsRbuVgK0OPEnQ0YD39Brymn96mOx48Y2laBQGqgZ0j9w6SQ==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.18.20.tgz", + "integrity": "sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA==", "dev": true, "optional": true }, "@esbuild/linux-loong64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.17.19.tgz", - "integrity": "sha512-2iAngUbBPMq439a+z//gE+9WBldoMp1s5GWsUSgqHLzLJ9WoZLZhpwWuym0u0u/4XmZ3gpHmzV84PonE+9IIdQ==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.18.20.tgz", + "integrity": "sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg==", "dev": true, "optional": true }, "@esbuild/linux-mips64el": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.17.19.tgz", - "integrity": "sha512-LKJltc4LVdMKHsrFe4MGNPp0hqDFA1Wpt3jE1gEyM3nKUvOiO//9PheZZHfYRfYl6AwdTH4aTcXSqBerX0ml4A==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.18.20.tgz", + "integrity": "sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ==", "dev": true, "optional": true }, "@esbuild/linux-ppc64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.17.19.tgz", - "integrity": "sha512-/c/DGybs95WXNS8y3Ti/ytqETiW7EU44MEKuCAcpPto3YjQbyK3IQVKfF6nbghD7EcLUGl0NbiL5Rt5DMhn5tg==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.18.20.tgz", + "integrity": "sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA==", "dev": true, "optional": true }, "@esbuild/linux-riscv64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.17.19.tgz", - "integrity": "sha512-FC3nUAWhvFoutlhAkgHf8f5HwFWUL6bYdvLc/TTuxKlvLi3+pPzdZiFKSWz/PF30TB1K19SuCxDTI5KcqASJqA==", + "version": "0.18.20", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.18.20.tgz", + "integrity": "sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A==", "dev": true, "optional": true }, "@esbuild/linux-s390x": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.17.19.tgz", - "integrity": "sha512-IbFsFbxMWLuKEbH+7sTkKzL6NJmG2vRyy6K7JJo55w+8xDk7RElYn6xvXtDW8HCfoKBFK69f3pgBJSUSQPr+4Q==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.18.20.tgz", + "integrity": "sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ==", "dev": true, "optional": true }, "@esbuild/linux-x64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.17.19.tgz", - "integrity": "sha512-68ngA9lg2H6zkZcyp22tsVt38mlhWde8l3eJLWkyLrp4HwMUr3c1s/M2t7+kHIhvMjglIBrFpncX1SzMckomGw==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.18.20.tgz", + "integrity": "sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w==", "dev": true, "optional": true }, "@esbuild/netbsd-x64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.17.19.tgz", - "integrity": "sha512-CwFq42rXCR8TYIjIfpXCbRX0rp1jo6cPIUPSaWwzbVI4aOfX96OXY8M6KNmtPcg7QjYeDmN+DD0Wp3LaBOLf4Q==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.18.20.tgz", + "integrity": "sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A==", "dev": true, "optional": true }, "@esbuild/openbsd-x64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.17.19.tgz", - "integrity": "sha512-cnq5brJYrSZ2CF6c35eCmviIN3k3RczmHz8eYaVlNasVqsNY+JKohZU5MKmaOI+KkllCdzOKKdPs762VCPC20g==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.18.20.tgz", + "integrity": "sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg==", "dev": true, "optional": true }, "@esbuild/sunos-x64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.17.19.tgz", - "integrity": "sha512-vCRT7yP3zX+bKWFeP/zdS6SqdWB8OIpaRq/mbXQxTGHnIxspRtigpkUcDMlSCOejlHowLqII7K2JKevwyRP2rg==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.18.20.tgz", + "integrity": "sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ==", "dev": true, "optional": true }, "@esbuild/win32-arm64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.17.19.tgz", - "integrity": "sha512-yYx+8jwowUstVdorcMdNlzklLYhPxjniHWFKgRqH7IFlUEa0Umu3KuYplf1HUZZ422e3NU9F4LGb+4O0Kdcaag==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.18.20.tgz", + "integrity": "sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg==", "dev": true, "optional": true }, "@esbuild/win32-ia32": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.17.19.tgz", - "integrity": 
"sha512-eggDKanJszUtCdlVs0RB+h35wNlb5v4TWEkq4vZcmVt5u/HiDZrTXe2bWFQUez3RgNHwx/x4sk5++4NSSicKkw==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.18.20.tgz", + "integrity": "sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g==", "dev": true, "optional": true }, "@esbuild/win32-x64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.17.19.tgz", - "integrity": "sha512-lAhycmKnVOuRYNtRtatQR1LPQf2oYCkRGkSFnseDAKPl8lu5SOsK/e1sXe5a0Pc5kHIHe6P2I/ilntNv2xf3cA==", + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.18.20.tgz", + "integrity": "sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ==", "dev": true, "optional": true }, @@ -5975,33 +5982,33 @@ "dev": true }, "esbuild": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.17.19.tgz", - "integrity": "sha512-XQ0jAPFkK/u3LcVRcvVHQcTIqD6E2H1fvZMA5dQPSOWb3suUbWbfbRf94pjc0bNzRYLfIrDRQXr7X+LHIm5oHw==", - "dev": true, - "requires": { - "@esbuild/android-arm": "0.17.19", - "@esbuild/android-arm64": "0.17.19", - "@esbuild/android-x64": "0.17.19", - "@esbuild/darwin-arm64": "0.17.19", - "@esbuild/darwin-x64": "0.17.19", - "@esbuild/freebsd-arm64": "0.17.19", - "@esbuild/freebsd-x64": "0.17.19", - "@esbuild/linux-arm": "0.17.19", - "@esbuild/linux-arm64": "0.17.19", - "@esbuild/linux-ia32": "0.17.19", - "@esbuild/linux-loong64": "0.17.19", - "@esbuild/linux-mips64el": "0.17.19", - "@esbuild/linux-ppc64": "0.17.19", - "@esbuild/linux-riscv64": "0.17.19", - "@esbuild/linux-s390x": "0.17.19", - "@esbuild/linux-x64": "0.17.19", - "@esbuild/netbsd-x64": "0.17.19", - "@esbuild/openbsd-x64": "0.17.19", - "@esbuild/sunos-x64": "0.17.19", - "@esbuild/win32-arm64": "0.17.19", - "@esbuild/win32-ia32": "0.17.19", - "@esbuild/win32-x64": "0.17.19" + "version": "0.18.20", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.18.20.tgz", + "integrity": "sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA==", + "dev": true, + "requires": { + "@esbuild/android-arm": "0.18.20", + "@esbuild/android-arm64": "0.18.20", + "@esbuild/android-x64": "0.18.20", + "@esbuild/darwin-arm64": "0.18.20", + "@esbuild/darwin-x64": "0.18.20", + "@esbuild/freebsd-arm64": "0.18.20", + "@esbuild/freebsd-x64": "0.18.20", + "@esbuild/linux-arm": "0.18.20", + "@esbuild/linux-arm64": "0.18.20", + "@esbuild/linux-ia32": "0.18.20", + "@esbuild/linux-loong64": "0.18.20", + "@esbuild/linux-mips64el": "0.18.20", + "@esbuild/linux-ppc64": "0.18.20", + "@esbuild/linux-riscv64": "0.18.20", + "@esbuild/linux-s390x": "0.18.20", + "@esbuild/linux-x64": "0.18.20", + "@esbuild/netbsd-x64": "0.18.20", + "@esbuild/openbsd-x64": "0.18.20", + "@esbuild/sunos-x64": "0.18.20", + "@esbuild/win32-arm64": "0.18.20", + "@esbuild/win32-ia32": "0.18.20", + "@esbuild/win32-x64": "0.18.20" } }, "escape-html": { @@ -6374,9 +6381,9 @@ "dev": true }, "follow-redirects": { - "version": "1.15.1", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.1.tgz", - "integrity": "sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA==" + "version": "1.15.4", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz", + "integrity": 
"sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==" }, "form-data": { "version": "4.0.0", @@ -7234,9 +7241,9 @@ } }, "rollup": { - "version": "3.24.0", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.24.0.tgz", - "integrity": "sha512-OgraHOIg2YpHQTjl0/ymWfFNBEyPucB7lmhXrQUh38qNOegxLapSPFs9sNr0qKR75awW41D93XafoR2QfhBdUQ==", + "version": "3.29.4", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.29.4.tgz", + "integrity": "sha512-oWzmBZwvYrU0iJHtDmhsm662rC15FRXmcjCk1xD771dFDx5jJ02ufAQQTn0etB2emNk4J9EZg/yWKpsn9BWGRw==", "dev": true, "requires": { "fsevents": "~2.3.2" @@ -7568,15 +7575,15 @@ } }, "vite": { - "version": "4.3.9", - "resolved": "https://registry.npmjs.org/vite/-/vite-4.3.9.tgz", - "integrity": "sha512-qsTNZjO9NoJNW7KnOrgYwczm0WctJ8m/yqYAMAK9Lxt4SoySUfS5S8ia9K7JHpa3KEeMfyF8LoJ3c5NeBJy6pg==", + "version": "4.5.2", + "resolved": "https://registry.npmjs.org/vite/-/vite-4.5.2.tgz", + "integrity": "sha512-tBCZBNSBbHQkaGyhGCDUGqeo2ph8Fstyp6FMSvTtsXeZSPpSMGlviAOav2hxVTqFcx8Hj/twtWKsMJXNY0xI8w==", "dev": true, "requires": { - "esbuild": "^0.17.5", + "esbuild": "^0.18.10", "fsevents": "~2.3.2", - "postcss": "^8.4.23", - "rollup": "^3.21.0" + "postcss": "^8.4.27", + "rollup": "^3.27.1" } }, "vite-node": { diff --git a/kyuubi-server/web-ui/package.json b/kyuubi-server/web-ui/package.json index 607fa4f3cd5..2674bd0c8ca 100644 --- a/kyuubi-server/web-ui/package.json +++ b/kyuubi-server/web-ui/package.json @@ -46,7 +46,7 @@ "prettier": "^2.7.1", "sass": "^1.54.4", "typescript": "^4.6.4", - "vite": "^4.2.3", + "vite": "^4.5.2", "vitest": "^0.32.0", "vue-tsc": "^0.38.4" } diff --git a/kyuubi-server/web-ui/pnpm-lock.yaml b/kyuubi-server/web-ui/pnpm-lock.yaml index 14f50016028..0c5ffbe2f57 100644 --- a/kyuubi-server/web-ui/pnpm-lock.yaml +++ b/kyuubi-server/web-ui/pnpm-lock.yaml @@ -56,7 +56,7 @@ devDependencies: version: 5.33.1(eslint@8.22.0)(typescript@4.7.4) '@vitejs/plugin-vue': specifier: ^4.2.3 - version: 4.2.3(vite@4.2.3)(vue@3.2.37) + version: 4.2.3(vite@4.5.2)(vue@3.2.37) '@vitest/coverage-v8': specifier: ^0.32.0 version: 0.32.0(vitest@0.32.0) @@ -91,8 +91,8 @@ devDependencies: specifier: ^4.6.4 version: 4.7.4 vite: - specifier: ^4.2.3 - version: 4.2.3(@types/node@18.7.6)(sass@1.54.4) + specifier: ^4.5.2 + version: 4.5.2(@types/node@18.7.6)(sass@1.54.4) vitest: specifier: ^0.32.0 version: 0.32.0(jsdom@20.0.0)(sass@1.54.4) @@ -150,8 +150,8 @@ packages: vue: 3.2.37 dev: false - /@esbuild/android-arm64@0.17.19: - resolution: {integrity: sha512-KBMWvEZooR7+kzY0BtbTQn0OAYY7CsiydT63pVEaPtVYF0hXbUaOyZog37DKxK7NF3XacBJOpYT4adIJh+avxA==} + /@esbuild/android-arm64@0.18.20: + resolution: {integrity: sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ==} engines: {node: '>=12'} cpu: [arm64] os: [android] @@ -159,8 +159,8 @@ packages: dev: true optional: true - /@esbuild/android-arm@0.17.19: - resolution: {integrity: sha512-rIKddzqhmav7MSmoFCmDIb6e2W57geRsM94gV2l38fzhXMwq7hZoClug9USI2pFRGL06f4IOPHHpFNOkWieR8A==} + /@esbuild/android-arm@0.18.20: + resolution: {integrity: sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw==} engines: {node: '>=12'} cpu: [arm] os: [android] @@ -168,8 +168,8 @@ packages: dev: true optional: true - /@esbuild/android-x64@0.17.19: - resolution: {integrity: sha512-uUTTc4xGNDT7YSArp/zbtmbhO0uEEK9/ETW29Wk1thYUJBz3IVnvgEiEwEa9IeLyvnpKrWK64Utw2bgUmDveww==} + /@esbuild/android-x64@0.18.20: + resolution: {integrity: 
sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg==} engines: {node: '>=12'} cpu: [x64] os: [android] @@ -177,8 +177,8 @@ packages: dev: true optional: true - /@esbuild/darwin-arm64@0.17.19: - resolution: {integrity: sha512-80wEoCfF/hFKM6WE1FyBHc9SfUblloAWx6FJkFWTWiCoht9Mc0ARGEM47e67W9rI09YoUxJL68WHfDRYEAvOhg==} + /@esbuild/darwin-arm64@0.18.20: + resolution: {integrity: sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA==} engines: {node: '>=12'} cpu: [arm64] os: [darwin] @@ -186,8 +186,8 @@ packages: dev: true optional: true - /@esbuild/darwin-x64@0.17.19: - resolution: {integrity: sha512-IJM4JJsLhRYr9xdtLytPLSH9k/oxR3boaUIYiHkAawtwNOXKE8KoU8tMvryogdcT8AU+Bflmh81Xn6Q0vTZbQw==} + /@esbuild/darwin-x64@0.18.20: + resolution: {integrity: sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ==} engines: {node: '>=12'} cpu: [x64] os: [darwin] @@ -195,8 +195,8 @@ packages: dev: true optional: true - /@esbuild/freebsd-arm64@0.17.19: - resolution: {integrity: sha512-pBwbc7DufluUeGdjSU5Si+P3SoMF5DQ/F/UmTSb8HXO80ZEAJmrykPyzo1IfNbAoaqw48YRpv8shwd1NoI0jcQ==} + /@esbuild/freebsd-arm64@0.18.20: + resolution: {integrity: sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw==} engines: {node: '>=12'} cpu: [arm64] os: [freebsd] @@ -204,8 +204,8 @@ packages: dev: true optional: true - /@esbuild/freebsd-x64@0.17.19: - resolution: {integrity: sha512-4lu+n8Wk0XlajEhbEffdy2xy53dpR06SlzvhGByyg36qJw6Kpfk7cp45DR/62aPH9mtJRmIyrXAS5UWBrJT6TQ==} + /@esbuild/freebsd-x64@0.18.20: + resolution: {integrity: sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ==} engines: {node: '>=12'} cpu: [x64] os: [freebsd] @@ -213,8 +213,8 @@ packages: dev: true optional: true - /@esbuild/linux-arm64@0.17.19: - resolution: {integrity: sha512-ct1Tg3WGwd3P+oZYqic+YZF4snNl2bsnMKRkb3ozHmnM0dGWuxcPTTntAF6bOP0Sp4x0PjSF+4uHQ1xvxfRKqg==} + /@esbuild/linux-arm64@0.18.20: + resolution: {integrity: sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA==} engines: {node: '>=12'} cpu: [arm64] os: [linux] @@ -222,8 +222,8 @@ packages: dev: true optional: true - /@esbuild/linux-arm@0.17.19: - resolution: {integrity: sha512-cdmT3KxjlOQ/gZ2cjfrQOtmhG4HJs6hhvm3mWSRDPtZ/lP5oe8FWceS10JaSJC13GBd4eH/haHnqf7hhGNLerA==} + /@esbuild/linux-arm@0.18.20: + resolution: {integrity: sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg==} engines: {node: '>=12'} cpu: [arm] os: [linux] @@ -231,8 +231,8 @@ packages: dev: true optional: true - /@esbuild/linux-ia32@0.17.19: - resolution: {integrity: sha512-w4IRhSy1VbsNxHRQpeGCHEmibqdTUx61Vc38APcsRbuVgK0OPEnQ0YD39Brymn96mOx48Y2laBQGqgZ0j9w6SQ==} + /@esbuild/linux-ia32@0.18.20: + resolution: {integrity: sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA==} engines: {node: '>=12'} cpu: [ia32] os: [linux] @@ -240,8 +240,8 @@ packages: dev: true optional: true - /@esbuild/linux-loong64@0.17.19: - resolution: {integrity: sha512-2iAngUbBPMq439a+z//gE+9WBldoMp1s5GWsUSgqHLzLJ9WoZLZhpwWuym0u0u/4XmZ3gpHmzV84PonE+9IIdQ==} + /@esbuild/linux-loong64@0.18.20: + resolution: {integrity: sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg==} engines: {node: '>=12'} cpu: [loong64] os: [linux] @@ -249,8 +249,8 @@ packages: dev: true optional: true - 
/@esbuild/linux-mips64el@0.17.19: - resolution: {integrity: sha512-LKJltc4LVdMKHsrFe4MGNPp0hqDFA1Wpt3jE1gEyM3nKUvOiO//9PheZZHfYRfYl6AwdTH4aTcXSqBerX0ml4A==} + /@esbuild/linux-mips64el@0.18.20: + resolution: {integrity: sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ==} engines: {node: '>=12'} cpu: [mips64el] os: [linux] @@ -258,8 +258,8 @@ packages: dev: true optional: true - /@esbuild/linux-ppc64@0.17.19: - resolution: {integrity: sha512-/c/DGybs95WXNS8y3Ti/ytqETiW7EU44MEKuCAcpPto3YjQbyK3IQVKfF6nbghD7EcLUGl0NbiL5Rt5DMhn5tg==} + /@esbuild/linux-ppc64@0.18.20: + resolution: {integrity: sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA==} engines: {node: '>=12'} cpu: [ppc64] os: [linux] @@ -267,8 +267,8 @@ packages: dev: true optional: true - /@esbuild/linux-riscv64@0.17.19: - resolution: {integrity: sha512-FC3nUAWhvFoutlhAkgHf8f5HwFWUL6bYdvLc/TTuxKlvLi3+pPzdZiFKSWz/PF30TB1K19SuCxDTI5KcqASJqA==} + /@esbuild/linux-riscv64@0.18.20: + resolution: {integrity: sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A==} engines: {node: '>=12'} cpu: [riscv64] os: [linux] @@ -276,8 +276,8 @@ packages: dev: true optional: true - /@esbuild/linux-s390x@0.17.19: - resolution: {integrity: sha512-IbFsFbxMWLuKEbH+7sTkKzL6NJmG2vRyy6K7JJo55w+8xDk7RElYn6xvXtDW8HCfoKBFK69f3pgBJSUSQPr+4Q==} + /@esbuild/linux-s390x@0.18.20: + resolution: {integrity: sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ==} engines: {node: '>=12'} cpu: [s390x] os: [linux] @@ -285,8 +285,8 @@ packages: dev: true optional: true - /@esbuild/linux-x64@0.17.19: - resolution: {integrity: sha512-68ngA9lg2H6zkZcyp22tsVt38mlhWde8l3eJLWkyLrp4HwMUr3c1s/M2t7+kHIhvMjglIBrFpncX1SzMckomGw==} + /@esbuild/linux-x64@0.18.20: + resolution: {integrity: sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w==} engines: {node: '>=12'} cpu: [x64] os: [linux] @@ -294,8 +294,8 @@ packages: dev: true optional: true - /@esbuild/netbsd-x64@0.17.19: - resolution: {integrity: sha512-CwFq42rXCR8TYIjIfpXCbRX0rp1jo6cPIUPSaWwzbVI4aOfX96OXY8M6KNmtPcg7QjYeDmN+DD0Wp3LaBOLf4Q==} + /@esbuild/netbsd-x64@0.18.20: + resolution: {integrity: sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A==} engines: {node: '>=12'} cpu: [x64] os: [netbsd] @@ -303,8 +303,8 @@ packages: dev: true optional: true - /@esbuild/openbsd-x64@0.17.19: - resolution: {integrity: sha512-cnq5brJYrSZ2CF6c35eCmviIN3k3RczmHz8eYaVlNasVqsNY+JKohZU5MKmaOI+KkllCdzOKKdPs762VCPC20g==} + /@esbuild/openbsd-x64@0.18.20: + resolution: {integrity: sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg==} engines: {node: '>=12'} cpu: [x64] os: [openbsd] @@ -312,8 +312,8 @@ packages: dev: true optional: true - /@esbuild/sunos-x64@0.17.19: - resolution: {integrity: sha512-vCRT7yP3zX+bKWFeP/zdS6SqdWB8OIpaRq/mbXQxTGHnIxspRtigpkUcDMlSCOejlHowLqII7K2JKevwyRP2rg==} + /@esbuild/sunos-x64@0.18.20: + resolution: {integrity: sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ==} engines: {node: '>=12'} cpu: [x64] os: [sunos] @@ -321,8 +321,8 @@ packages: dev: true optional: true - /@esbuild/win32-arm64@0.17.19: - resolution: {integrity: sha512-yYx+8jwowUstVdorcMdNlzklLYhPxjniHWFKgRqH7IFlUEa0Umu3KuYplf1HUZZ422e3NU9F4LGb+4O0Kdcaag==} + /@esbuild/win32-arm64@0.18.20: + resolution: {integrity: 
sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg==} engines: {node: '>=12'} cpu: [arm64] os: [win32] @@ -330,8 +330,8 @@ packages: dev: true optional: true - /@esbuild/win32-ia32@0.17.19: - resolution: {integrity: sha512-eggDKanJszUtCdlVs0RB+h35wNlb5v4TWEkq4vZcmVt5u/HiDZrTXe2bWFQUez3RgNHwx/x4sk5++4NSSicKkw==} + /@esbuild/win32-ia32@0.18.20: + resolution: {integrity: sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g==} engines: {node: '>=12'} cpu: [ia32] os: [win32] @@ -339,8 +339,8 @@ packages: dev: true optional: true - /@esbuild/win32-x64@0.17.19: - resolution: {integrity: sha512-lAhycmKnVOuRYNtRtatQR1LPQf2oYCkRGkSFnseDAKPl8lu5SOsK/e1sXe5a0Pc5kHIHe6P2I/ilntNv2xf3cA==} + /@esbuild/win32-x64@0.18.20: + resolution: {integrity: sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ==} engines: {node: '>=12'} cpu: [x64] os: [win32] @@ -673,14 +673,14 @@ packages: eslint-visitor-keys: 3.3.0 dev: true - /@vitejs/plugin-vue@4.2.3(vite@4.2.3)(vue@3.2.37): + /@vitejs/plugin-vue@4.2.3(vite@4.5.2)(vue@3.2.37): resolution: {integrity: sha512-R6JDUfiZbJA9cMiguQ7jxALsgiprjBeHL5ikpXfJCH62pPHtI+JdJ5xWj6Ev73yXSlYl86+blXn1kZHQ7uElxw==} engines: {node: ^14.18.0 || >=16.0.0} peerDependencies: vite: ^4.0.0 vue: ^3.2.25 dependencies: - vite: 4.2.3(@types/node@18.7.6)(sass@1.54.4) + vite: 4.5.2(@types/node@18.7.6)(sass@1.54.4) vue: 3.2.37 dev: true @@ -1041,7 +1041,7 @@ packages: /axios@1.6.0: resolution: {integrity: sha512-EZ1DYihju9pwVB+jg67ogm+Tmqc6JmhamRN6I4Zt8DfZu5lbcQGw3ozH9lFejSJgs/ibaef3A9PMXPLeefFGJg==} dependencies: - follow-redirects: 1.15.1 + follow-redirects: 1.15.4 form-data: 4.0.0 proxy-from-env: 1.1.0 transitivePeerDependencies: @@ -1322,34 +1322,34 @@ packages: engines: {node: '>=0.12'} dev: true - /esbuild@0.17.19: - resolution: {integrity: sha512-XQ0jAPFkK/u3LcVRcvVHQcTIqD6E2H1fvZMA5dQPSOWb3suUbWbfbRf94pjc0bNzRYLfIrDRQXr7X+LHIm5oHw==} + /esbuild@0.18.20: + resolution: {integrity: sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA==} engines: {node: '>=12'} hasBin: true requiresBuild: true optionalDependencies: - '@esbuild/android-arm': 0.17.19 - '@esbuild/android-arm64': 0.17.19 - '@esbuild/android-x64': 0.17.19 - '@esbuild/darwin-arm64': 0.17.19 - '@esbuild/darwin-x64': 0.17.19 - '@esbuild/freebsd-arm64': 0.17.19 - '@esbuild/freebsd-x64': 0.17.19 - '@esbuild/linux-arm': 0.17.19 - '@esbuild/linux-arm64': 0.17.19 - '@esbuild/linux-ia32': 0.17.19 - '@esbuild/linux-loong64': 0.17.19 - '@esbuild/linux-mips64el': 0.17.19 - '@esbuild/linux-ppc64': 0.17.19 - '@esbuild/linux-riscv64': 0.17.19 - '@esbuild/linux-s390x': 0.17.19 - '@esbuild/linux-x64': 0.17.19 - '@esbuild/netbsd-x64': 0.17.19 - '@esbuild/openbsd-x64': 0.17.19 - '@esbuild/sunos-x64': 0.17.19 - '@esbuild/win32-arm64': 0.17.19 - '@esbuild/win32-ia32': 0.17.19 - '@esbuild/win32-x64': 0.17.19 + '@esbuild/android-arm': 0.18.20 + '@esbuild/android-arm64': 0.18.20 + '@esbuild/android-x64': 0.18.20 + '@esbuild/darwin-arm64': 0.18.20 + '@esbuild/darwin-x64': 0.18.20 + '@esbuild/freebsd-arm64': 0.18.20 + '@esbuild/freebsd-x64': 0.18.20 + '@esbuild/linux-arm': 0.18.20 + '@esbuild/linux-arm64': 0.18.20 + '@esbuild/linux-ia32': 0.18.20 + '@esbuild/linux-loong64': 0.18.20 + '@esbuild/linux-mips64el': 0.18.20 + '@esbuild/linux-ppc64': 0.18.20 + '@esbuild/linux-riscv64': 0.18.20 + '@esbuild/linux-s390x': 0.18.20 + '@esbuild/linux-x64': 0.18.20 + '@esbuild/netbsd-x64': 0.18.20 + 
'@esbuild/openbsd-x64': 0.18.20 + '@esbuild/sunos-x64': 0.18.20 + '@esbuild/win32-arm64': 0.18.20 + '@esbuild/win32-ia32': 0.18.20 + '@esbuild/win32-x64': 0.18.20 dev: true /escape-html@1.0.3: @@ -1616,8 +1616,8 @@ packages: resolution: {integrity: sha512-0sQoMh9s0BYsm+12Huy/rkKxVu4R1+r96YX5cG44rHV0pQ6iC3Q+mkoMFaGWObMFYQxCVT+ssG1ksneA2MI9KQ==} dev: true - /follow-redirects@1.15.1: - resolution: {integrity: sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA==} + /follow-redirects@1.15.4: + resolution: {integrity: sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==} engines: {node: '>=4.0'} peerDependencies: debug: '*' @@ -1646,10 +1646,6 @@ packages: dev: true optional: true - /function-bind@1.1.1: - resolution: {integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==} - dev: true - /functional-red-black-tree@1.0.1: resolution: {integrity: sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==} dev: true @@ -1716,13 +1712,6 @@ packages: engines: {node: '>=8'} dev: true - /has@1.0.3: - resolution: {integrity: sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==} - engines: {node: '>= 0.4.0'} - dependencies: - function-bind: 1.1.1 - dev: true - /html-encoding-sniffer@3.0.0: resolution: {integrity: sha512-oWv4T4yJ52iKrufjnyZPkrN0CH3QnrUqdB6In1g5Fe1mia8GmF36gnfNySxoZtxD5+NmYw1EElVXiBk93UeskA==} engines: {node: '>=12'} @@ -1802,12 +1791,6 @@ packages: binary-extensions: 2.2.0 dev: true - /is-core-module@2.10.0: - resolution: {integrity: sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg==} - dependencies: - has: 1.0.3 - dev: true - /is-extglob@2.1.1: resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} engines: {node: '>=0.10.0'} @@ -2199,10 +2182,6 @@ packages: engines: {node: '>=8'} dev: true - /path-parse@1.0.7: - resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} - dev: true - /path-type@4.0.0: resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} engines: {node: '>=8'} @@ -2367,15 +2346,6 @@ packages: engines: {node: '>=4'} dev: true - /resolve@1.22.1: - resolution: {integrity: sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==} - hasBin: true - dependencies: - is-core-module: 2.10.0 - path-parse: 1.0.7 - supports-preserve-symlinks-flag: 1.0.0 - dev: true - /ret@0.1.15: resolution: {integrity: sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==} engines: {node: '>=0.12'} @@ -2393,8 +2363,8 @@ packages: glob: 7.2.3 dev: true - /rollup@3.24.0: - resolution: {integrity: sha512-OgraHOIg2YpHQTjl0/ymWfFNBEyPucB7lmhXrQUh38qNOegxLapSPFs9sNr0qKR75awW41D93XafoR2QfhBdUQ==} + /rollup@3.29.4: + resolution: {integrity: sha512-oWzmBZwvYrU0iJHtDmhsm662rC15FRXmcjCk1xD771dFDx5jJ02ufAQQTn0etB2emNk4J9EZg/yWKpsn9BWGRw==} engines: {node: '>=14.18.0', npm: '>=8.0.0'} hasBin: true optionalDependencies: @@ -2519,11 +2489,6 @@ packages: has-flag: 4.0.0 dev: true - /supports-preserve-symlinks-flag@1.0.0: - resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} - engines: {node: '>= 0.4'} - dev: true - 
/swagger-ui-dist@4.19.1: resolution: {integrity: sha512-n/gFn+R7G/BXWwl5UZLw6F1YgWOlf3zkwGlsPhTMhNtAAolBGKg0JS5b2RKt5NI6/hSopVaSrki2wTIMUDDy2w==} dev: false @@ -2684,10 +2649,11 @@ packages: mlly: 1.3.0 pathe: 1.1.1 picocolors: 1.0.0 - vite: 4.2.3(@types/node@18.7.6)(sass@1.54.4) + vite: 4.5.2(@types/node@18.7.6)(sass@1.54.4) transitivePeerDependencies: - '@types/node' - less + - lightningcss - sass - stylus - sugarss @@ -2695,13 +2661,14 @@ packages: - terser dev: true - /vite@4.2.3(@types/node@18.7.6)(sass@1.54.4): - resolution: {integrity: sha512-kLU+m2q0Y434Y1kCy3TchefAdtFso0ILi0dLyFV8Us3InXTU11H/B5ZTqCKIQHzSKNxVG/yEx813EA9f1imQ9A==} + /vite@4.5.2(@types/node@18.7.6)(sass@1.54.4): + resolution: {integrity: sha512-tBCZBNSBbHQkaGyhGCDUGqeo2ph8Fstyp6FMSvTtsXeZSPpSMGlviAOav2hxVTqFcx8Hj/twtWKsMJXNY0xI8w==} engines: {node: ^14.18.0 || >=16.0.0} hasBin: true peerDependencies: '@types/node': '>= 14' less: '*' + lightningcss: ^1.21.0 sass: '*' stylus: '*' sugarss: '*' @@ -2711,6 +2678,8 @@ packages: optional: true less: optional: true + lightningcss: + optional: true sass: optional: true stylus: @@ -2721,10 +2690,9 @@ packages: optional: true dependencies: '@types/node': 18.7.6 - esbuild: 0.17.19 + esbuild: 0.18.20 postcss: 8.4.31 - resolve: 1.22.1 - rollup: 3.24.0 + rollup: 3.29.4 sass: 1.54.4 optionalDependencies: fsevents: 2.3.2 @@ -2784,11 +2752,12 @@ packages: strip-literal: 1.0.1 tinybench: 2.5.0 tinypool: 0.5.0 - vite: 4.2.3(@types/node@18.7.6)(sass@1.54.4) + vite: 4.5.2(@types/node@18.7.6)(sass@1.54.4) vite-node: 0.32.0(@types/node@18.7.6)(sass@1.54.4) why-is-node-running: 2.2.2 transitivePeerDependencies: - less + - lightningcss - sass - stylus - sugarss diff --git a/pom.xml b/pom.xml index f3b3d57644c..a04c34ff3a9 100644 --- a/pom.xml +++ b/pom.xml @@ -123,7 +123,7 @@ 4.9.3 4.3.4 - https://archive.apache.org/dist + http://www.apache.org/dyn/closer.lua 2.3.0 1.67 4.2.23 @@ -133,18 +133,17 @@ 2.11.0 2.6 3.13.0 - 0.7.3 delta-core 2.4.0 - 2.4.4 + 3.3.2 0.9.3 0.62.2 - 1.17.1 + 1.17.2 flink-${flink.version}-bin-scala_2.12.tgz ${apache.archive.dist}/flink/flink-${flink.version} false 3.0.2 - 1.53.0 + 1.60.1 32.0.1-jre 1.0.1 3.3.6 @@ -168,6 +167,7 @@ 2.3.2 1.2.2 2.39.1 + 0.7.7 9.4.52.v20230823 0.9.94 4.13.2 @@ -212,7 +212,8 @@ 1.7.0 0.9.3 - 363 + + 411 1.4 1.1 @@ -1105,7 +1106,7 @@ io.etcd jetcd-core - ${etcd.version} + ${jetcd.version} javax.annotation @@ -1117,7 +1118,7 @@ io.etcd jetcd-launcher - ${etcd.version} + ${jetcd.version} org.testcontainers @@ -1156,6 +1157,12 @@ ${grpc.version} + + io.grpc + grpc-util + ${grpc.version} + + com.google.protobuf protobuf-java @@ -1169,7 +1176,7 @@ - net.jodah + dev.failsafe failsafe ${failsafe.verion} @@ -2337,7 +2344,7 @@ flink-1.17 - 1.17.1 + 1.17.2 @@ -2376,13 +2383,6 @@ - - spark-block-cleaner - - tools/spark-block-cleaner - - - spotless-python diff --git a/tools/spark-block-cleaner/kubernetes/spark-block-cleaner.yml b/tools/spark-block-cleaner/kubernetes/spark-block-cleaner.yml deleted file mode 100644 index 408ee18aa00..00000000000 --- a/tools/spark-block-cleaner/kubernetes/spark-block-cleaner.yml +++ /dev/null @@ -1,75 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -apiVersion: apps/v1 -# A DaemonSet ensures that all (or some) Nodes run a copy of a Pod. -kind: DaemonSet -metadata: - name: kyuubi-kubernetes-spark-block-cleaner - # NameSpace help assigned daemonSet to the designated cluster resource - namespace: default -spec: - selector: - matchLabels: - name: block-cleaner - template: - metadata: - labels: - name: block-cleaner - spec: - containers: - # Container image which build by Dockerfile - # TODO official Image - - image: - name: cleaner - volumeMounts: - - name: block-files-dir-1 - mountPath: /data/data1 - - name: block-files-dir-2 - mountPath: /data/data2 - - name: cleaner-log - mountPath: /log/cleanerLog - env: - # Set env to manager cleaner running - # the target dirs which in container - - name: CACHE_DIRS - value: /data/data1,/data/data2 - # Cleaner will clean More distant block files, seconds - - name: FILE_EXPIRED_TIME - value: 604800 - # Deep clean fileExpiredTime, seconds - - name: DEEP_CLEAN_FILE_EXPIRED_TIME - value: 432000 - # After first clean, if free Space low than threshold - # trigger deep clean - - name: FREE_SPACE_THRESHOLD - value: 60 - # Cleaner clean sleep times after cleaning, seconds - - name: SCHEDULE_INTERVAL - value: 3600 - volumes: - # Directory on the host which store block dirs - - name: block-files-dir-1 - hostPath: - path: /blockFilesDirs/data1 - - name: block-files-dir-2 - hostPath: - path: /blockFilesDirs/data2 - # Directory on the host which you want to store clean log - - name: cleaner-log - hostPath: - path: /logDir diff --git a/tools/spark-block-cleaner/pom.xml b/tools/spark-block-cleaner/pom.xml deleted file mode 100644 index 9c777f12177..00000000000 --- a/tools/spark-block-cleaner/pom.xml +++ /dev/null @@ -1,53 +0,0 @@ - - - - 4.0.0 - - org.apache.kyuubi - kyuubi-parent - 1.9.0-SNAPSHOT - ../../pom.xml - - - spark-block-cleaner_${scala.binary.version} - jar - Kyuubi Project Spark Block Cleaner - https://kyuubi.apache.org/ - - - - org.apache.kyuubi - kyuubi-common_${scala.binary.version} - ${project.version} - - - - org.apache.kyuubi - kyuubi-common_${scala.binary.version} - ${project.version} - test-jar - test - - - - - target/scala-${scala.binary.version}/classes - target/scala-${scala.binary.version}/test-classes - - diff --git a/tools/spark-block-cleaner/src/main/resources/log4j-block-cleaner.properties b/tools/spark-block-cleaner/src/main/resources/log4j-block-cleaner.properties deleted file mode 100644 index 2649bc49b95..00000000000 --- a/tools/spark-block-cleaner/src/main/resources/log4j-block-cleaner.properties +++ /dev/null @@ -1,33 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Set everything to be logged to the console -log4j.rootCategory=INFO, console, logFile - -### console -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.out -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss.SSS} %d{yyyy} %p %c{2}: %m%n - -### logFile -log4j.appender.logFile=org.apache.log4j.RollingFileAppender -log4j.appender.logFile.File=/logs/spark-block-cleaner-log/cleaner-log.out -log4j.appender.logFile.MaxFileSize=20MB -log4j.appender.logFile.MaxBackupIndex=5 -log4j.appender.logFile.layout=org.apache.log4j.PatternLayout -log4j.appender.logFile.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss.SSS} %p %c{2}: %m%n diff --git a/tools/spark-block-cleaner/src/main/scala/org/apache/kyuubi/tools/KubernetesSparkBlockCleaner.scala b/tools/spark-block-cleaner/src/main/scala/org/apache/kyuubi/tools/KubernetesSparkBlockCleaner.scala deleted file mode 100644 index 6d4c1050b43..00000000000 --- a/tools/spark-block-cleaner/src/main/scala/org/apache/kyuubi/tools/KubernetesSparkBlockCleaner.scala +++ /dev/null @@ -1,223 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.kyuubi.tools - -import java.io.File -import java.nio.file.{Files, Paths} -import java.util.concurrent.{CountDownLatch, Executors} - -import scala.util.control.NonFatal - -import org.apache.log4j.PropertyConfigurator - -import org.apache.kyuubi.Logging - -/* - * Spark storage shuffle data as the following structure. - * - * local-dir1/ - * blockmgr-uuid/ - * hash-sub-dir/ - * shuffle-data - * shuffle-index - * - * local-dir2/ - * blockmgr-uuid/ - * hash-sub-dir/ - * shuffle-data - * shuffle-index - * - * ... 
- */ -object KubernetesSparkBlockCleaner extends Logging { - import KubernetesSparkBlockCleanerConstants._ - - private val envMap = System.getenv() - - PropertyConfigurator.configure( - Thread.currentThread().getContextClassLoader.getResource("log4j-block-cleaner.properties")) - - private val freeSpaceThreshold = envMap.getOrDefault(FREE_SPACE_THRESHOLD_KEY, "60").toInt - private val fileExpiredTime = envMap.getOrDefault(FILE_EXPIRED_TIME_KEY, "604800").toLong * 1000 - private val scheduleInterval = envMap.getOrDefault(SCHEDULE_INTERVAL, "3600").toLong * 1000 - private val deepCleanFileExpiredTime = - envMap.getOrDefault(DEEP_CLEAN_FILE_EXPIRED_TIME_KEY, "432000").toLong * 1000 - private val cacheDirs = - if (envMap.containsKey(CACHE_DIRS_KEY)) { - envMap.get(CACHE_DIRS_KEY).split(",").filter(!_.equals("")) - } else { - throw new IllegalArgumentException(s"the env $CACHE_DIRS_KEY must be set") - } - private val isTesting = envMap.getOrDefault("kyuubi.testing", "false").toBoolean - checkConfiguration() - - /** - * one thread clean one dir - */ - private val threadPool = Executors.newFixedThreadPool(cacheDirs.length) - - private def checkConfiguration(): Unit = { - require(fileExpiredTime > 0, s"the env $FILE_EXPIRED_TIME_KEY should be greater than 0") - require( - deepCleanFileExpiredTime > 0, - s"the env $DEEP_CLEAN_FILE_EXPIRED_TIME_KEY should be greater than 0") - require(scheduleInterval > 0, s"the env $SCHEDULE_INTERVAL should be greater than 0") - require( - freeSpaceThreshold > 0 && freeSpaceThreshold < 100, - s"the env $FREE_SPACE_THRESHOLD_KEY should between 0 and 100") - require(cacheDirs.nonEmpty, s"the env $CACHE_DIRS_KEY must be set") - cacheDirs.foreach { dir => - val path = Paths.get(dir) - require(Files.exists(path), s"the input cache dir: $dir does not exists") - require(Files.isDirectory(path), s"the input cache dir: $dir should be a directory") - } - - info(s"finish initializing configuration, " + - s"use $CACHE_DIRS_KEY: ${cacheDirs.mkString(",")}, " + - s"$FILE_EXPIRED_TIME_KEY: $fileExpiredTime, " + - s"$FREE_SPACE_THRESHOLD_KEY: $freeSpaceThreshold, " + - s"$SCHEDULE_INTERVAL: $scheduleInterval, " + - s"$DEEP_CLEAN_FILE_EXPIRED_TIME_KEY: $deepCleanFileExpiredTime") - } - - private def doClean(dir: File, time: Long) { - // clean blockManager shuffle file - dir.listFiles.filter(_.isDirectory).filter(_.getName.startsWith("blockmgr")) - .foreach { blockManagerDir => - info(s"start check blockManager dir ${blockManagerDir.getCanonicalPath}") - // check blockManager directory - val released = blockManagerDir.listFiles.filter(_.isDirectory).map { subDir => - debug(s"start check sub dir ${subDir.getCanonicalPath}") - // check sub directory - subDir.listFiles.map(file => checkAndDeleteFile(file, time)).sum - } - // delete empty blockManager directory and all empty sub directory - if (blockManagerDir.listFiles().forall(subDir => - subDir.isDirectory && subDir.listFiles().isEmpty)) { - blockManagerDir.listFiles().foreach(checkAndDeleteFile(_, time, true)) - checkAndDeleteFile(blockManagerDir, time, true) - } - info(s"finished clean blockManager dir ${blockManagerDir.getCanonicalPath}, " + - s"released space: ${released.sum / 1024 / 1024} MB") - } - - // clean spark cache file - dir.listFiles.filter(_.isDirectory).filter(_.getName.startsWith("spark")) - .foreach { cacheDir => - info(s"start check cache dir ${cacheDir.getCanonicalPath}") - val released = cacheDir.listFiles.map(file => checkAndDeleteFile(file, time)) - // delete empty spark cache file - checkAndDeleteFile(cacheDir, 
time, true) - info(s"finished clean cache dir ${cacheDir.getCanonicalPath}, " + - s"released space: ${released.sum / 1024 / 1024} MB") - } - } - - private def checkAndDeleteFile(file: File, time: Long, isDir: Boolean = false): Long = { - debug(s"check file ${file.getName}") - val shouldDeleteFile = - if (isDir) { - file.listFiles.isEmpty && (System.currentTimeMillis() - file.lastModified() > time) - } else { - System.currentTimeMillis() - file.lastModified() > time - } - val length = if (isDir) 0 else file.length() - if (shouldDeleteFile) { - if (file.delete()) { - debug(s"delete file ${file.getAbsolutePath} success") - return length - } else { - warn(s"delete file ${file.getAbsolutePath} fail") - } - } - 0L - } - - import scala.sys.process._ - - private def needToDeepClean(dir: String): Boolean = { - try { - val used = (s"df $dir" #| s"grep $dir").!! - .split(" ").filter(_.endsWith("%")) { - 0 - }.replace("%", "") - info(s"$dir now used $used% space") - - used.toInt > (100 - freeSpaceThreshold) - } catch { - case NonFatal(e) => - error(s"An error occurs when querying the disk $dir capacity, " + - s"return true to make sure the disk space will not overruns: ${e.getMessage}") - true - } - } - - private def doCleanJob(dir: String): Unit = { - val startTime = System.currentTimeMillis() - val path = Paths.get(dir) - info(s"start clean job for $dir") - doClean(path.toFile, fileExpiredTime) - // re check if the disk has enough space - if (needToDeepClean(dir)) { - info(s"start deep clean job for $dir") - doClean(path.toFile, deepCleanFileExpiredTime) - if (needToDeepClean(dir)) { - warn(s"after deep clean $dir, used space still higher than $freeSpaceThreshold") - } - } - val finishedTime = System.currentTimeMillis() - info(s"clean job $dir finished, elapsed time: ${(finishedTime - startTime) / 1000} s") - } - - def main(args: Array[String]): Unit = { - do { - info(s"start all clean job") - val startTime = System.currentTimeMillis() - val hasFinished = new CountDownLatch(cacheDirs.length) - cacheDirs.foreach { dir => - threadPool.execute(() => { - try { - doCleanJob(dir) - } catch { - case NonFatal(e) => - error(s"failed to clean dir: $dir", e) - } finally { - hasFinished.countDown() - } - }) - } - hasFinished.await() - - val usedTime = System.currentTimeMillis() - startTime - info(s"finished to clean all dir, elapsed time ${usedTime / 1000} s") - if (usedTime > scheduleInterval) { - warn(s"clean job elapsed time $usedTime which is greater than $scheduleInterval") - } else { - Thread.sleep(scheduleInterval - usedTime) - } - } while (!isTesting) - } -} - -object KubernetesSparkBlockCleanerConstants { - val CACHE_DIRS_KEY = "CACHE_DIRS" - val FILE_EXPIRED_TIME_KEY = "FILE_EXPIRED_TIME" - val FREE_SPACE_THRESHOLD_KEY = "FREE_SPACE_THRESHOLD" - val SCHEDULE_INTERVAL = "SCHEDULE_INTERVAL" - val DEEP_CLEAN_FILE_EXPIRED_TIME_KEY = "DEEP_CLEAN_FILE_EXPIRED_TIME" -} diff --git a/tools/spark-block-cleaner/src/test/scala/org.apache.kyuubi.tools/KubernetesSparkBlockCleanerSuite.scala b/tools/spark-block-cleaner/src/test/scala/org.apache.kyuubi.tools/KubernetesSparkBlockCleanerSuite.scala deleted file mode 100644 index ae4651fe28f..00000000000 --- a/tools/spark-block-cleaner/src/test/scala/org.apache.kyuubi.tools/KubernetesSparkBlockCleanerSuite.scala +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.kyuubi.tools - -import java.io.File -import java.nio.file.Files -import java.util.{Map => JMap} -import java.util.UUID - -import org.apache.kyuubi.{KyuubiFunSuite, Utils} -import org.apache.kyuubi.util.reflect.ReflectUtils._ - -class KubernetesSparkBlockCleanerSuite extends KyuubiFunSuite { - import KubernetesSparkBlockCleanerConstants._ - - private val rootDir = Utils.createTempDir() - private val cacheDir = Seq("1", "2").map(rootDir.resolve) - private val block1 = new File(cacheDir.head.toFile, s"blockmgr-${UUID.randomUUID.toString}") - private val block2 = new File(cacheDir.head.toFile, s"blockmgr-${UUID.randomUUID.toString}") - - // do not remove - private val subDir1 = new File(block1, "01") - // do not remove - private val data11 = new File(subDir1, "shuffle_0_0_0") - // remove - private val data12 = new File(subDir1, "shuffle_0_0_1") - - // remove - private val subDir2 = new File(block2, "02") - // remove - private val data21 = new File(subDir1, "shuffle_0_1_0") - - private def deleteRecursive(path: File): Unit = { - path.listFiles.foreach { f => - if (f.isDirectory) { - deleteRecursive(f) - } else { - f.delete() - } - } - path.delete() - } - - override def beforeAll(): Unit = { - super.beforeAll() - cacheDir.foreach(Files.createDirectories(_)) - - // create some dir - Files.createDirectories(block1.toPath) - // hash sub dir - Files.createDirectory(subDir1.toPath) - data11.createNewFile() - data11.setLastModified(System.currentTimeMillis() - 10) - data12.createNewFile() - Files.write(data12.toPath, "111".getBytes()) - data12.setLastModified(System.currentTimeMillis() - 10000000) - - Files.createDirectories(block2.toPath) - Files.createDirectory(subDir2.toPath) - subDir2.setLastModified(System.currentTimeMillis() - 10000000) - data21.createNewFile() - data21.setLastModified(System.currentTimeMillis() - 10000000) - } - - override def afterAll(): Unit = { - deleteRecursive(block1) - deleteRecursive(block2) - - super.afterAll() - } - - private def updateEnv(name: String, value: String): Unit = { - getField[JMap[String, String]](System.getenv, "m").put(name, value) - } - - test("test clean") { - updateEnv(CACHE_DIRS_KEY, cacheDir.mkString(",")) - updateEnv(FILE_EXPIRED_TIME_KEY, "600") - updateEnv(SCHEDULE_INTERVAL, "1") - updateEnv("kyuubi.testing", "true") - - KubernetesSparkBlockCleaner.main(Array.empty) - - assert(block1.exists()) - assert(subDir1.exists()) - assert(data11.exists()) - assert(!data12.exists()) - - assert(block2.exists()) - assert(!subDir2.exists()) - assert(!data21.exists()) - } -}