[Improve] Format the code for the entire project #133

Open · wants to merge 1 commit into base: master
38 changes: 38 additions & 0 deletions .github/workflows/checkstyle.yaml
@@ -0,0 +1,38 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
---
name: Code Style Checker

on:
  pull_request:

jobs:
  java-checkstyle:
    name: "CheckStyle"
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          persist-credentials: false
          submodules: recursive

      - name: Run java checkstyle
        run:
          cd spark-doris-connector && mvn clean checkstyle:check
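For context, a minimal sketch of the kind of issue this new CI check flags, assuming the (unrendered) checkstyle.xml enables the standard ModifierOrder rule; it mirrors the `final static` to `static final` fix applied to BackendClient later in this diff, and the class name is invented for illustration:

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ModifierOrderExample {

    // Checkstyle's ModifierOrder rule expects 'static' before 'final', so a
    // declaration written like the commented-out line would fail `mvn checkstyle:check`:
    // private final static Logger BAD_ORDER = LoggerFactory.getLogger(ModifierOrderExample.class);

    // Compliant ordering, matching the change made to BackendClient in this PR.
    private static final Logger logger = LoggerFactory.getLogger(ModifierOrderExample.class);

    public static void main(String[] args) {
        logger.info("checkstyle-clean example");
    }
}
```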

@@ -0,0 +1,16 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
429 changes: 429 additions & 0 deletions spark-doris-connector/check/checkstyle/checkstyle.xml

Large diffs are not rendered by default.

43 changes: 43 additions & 0 deletions spark-doris-connector/check/checkstyle/import-control.xml
@@ -0,0 +1,43 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->

<!DOCTYPE import-control PUBLIC
"-//Checkstyle//DTD ImportControl Configuration 1.4//EN"
"https://checkstyle.org/dtds/import_control_1_4.dtd">

<import-control pkg="org.apache.doris" strategyOnMismatch="allowed">
    <disallow pkg="com.clearspring.analytics.util" />
    <disallow pkg="com.alibaba.google" />
    <disallow pkg="org.spark_project.guava" />
    <disallow pkg="org.glassfish.jersey.internal.guava" />
    <disallow pkg="io.fabric8.zjsonpatch.internal.guava" />
    <disallow pkg="org.checkerframework.com.google" />
    <disallow pkg="org.apache.iceberg.relocated" />
    <disallow pkg="com.alibaba.fastjson2" />
    <disallow pkg="org.apache.log4j.LogManager" />
    <disallow pkg="org.apache.log4j.Logger" />
    <disallow pkg="java.text.SimpleDateFormat"/>
    <disallow pkg="org.apache.parquet.Strings"/>
    <subpackage name="nereids">
        <allow pkg="org.junit.jupiter"/>
        <disallow pkg="org.junit"/>
    </subpackage>
</import-control>
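To illustrate what the rules above enforce, here is a hypothetical class (not part of this PR; package and class names invented) that would pass the import-control check, with imports the configuration rejects shown as comments:

```java
package org.apache.doris.spark.example; // hypothetical subpackage of org.apache.doris

// Allowed: the SLF4J facade.
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Rejected by the <disallow> rules above if uncommented:
// import org.apache.log4j.Logger;      // org.apache.log4j.Logger is disallowed
// import java.text.SimpleDateFormat;   // java.text.SimpleDateFormat is disallowed

public class ImportControlExample {

    private static final Logger logger = LoggerFactory.getLogger(ImportControlExample.class);

    public void run() {
        logger.info("Only imports permitted by import-control.xml are used here.");
    }
}
```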

63 changes: 63 additions & 0 deletions spark-doris-connector/check/checkstyle/suppressions.xml
@@ -0,0 +1,63 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->

<!DOCTYPE suppressions PUBLIC
"-//Checkstyle//DTD SuppressionFilter Configuration 1.2//EN"
"https://checkstyle.org/dtds/suppressions_1_2.dtd">

<suppressions>
<!-- Excludes test files from having Javadocs for classes and methods -->
<suppress files="[\\/]jmockit[\\/]" checks=".*" />
<suppress files="[\\/]test[\\/]" checks="MissingJavadocMethod" />
<suppress files="[\\/]test[\\/]" checks="MissingJavadocType" />
<suppress files="[\\/]test[\\/]" checks="LineLength" />

<!-- Suppress some rules for doris-spark-connector sources -->
<!-- Java doc -->
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[/].*" checks="AtclauseOrder" />
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[/].*" checks="JavadocMethod" />
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[/].*" checks="JavadocParagraph" />
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[/].*" checks="JavadocStyle" />
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[/].*" checks="JavadocTagContinuationIndentation" />
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[/].*" checks="InvalidJavadocPosition" />
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[/].*" checks="MissingJavadocMethod" />
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[/].*" checks="MissingJavadocType" />
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[/].*" checks="NonEmptyAtclauseDescription" />
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[/].*" checks="RequireEmptyLineBeforeBlockTagGroup" />
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[/].*" checks="SummaryJavadoc" />
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[/].*" checks="SingleLineJavadoc" />

<!-- exclude rules for special files -->
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[\\/]rest[\\/]models[\\/]Schema\.java" checks="ParameterName" />
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[\\/]rest[\\/]models[\\/]RespContent\.java" checks="MemberName" />
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[\\/]rest[\\/]models[\\/]QueryPlan\.java" checks="ParameterName" />
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[\\/]rest[\\/]models[\\/]QueryPlan\.java" checks="MemberName" />
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[\\/]rest[\\/]models[\\/]Field\.java" checks="ParameterName" />
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[\\/]rest[\\/]models[\\/]Field\.java" checks="MemberName" />
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[\\/]rest[\\/]models[\\/]BackendRow\.java" checks="MemberName" />
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[\\/]rest[\\/]models[\\/]BackendRow\.java" checks="ParameterName" />
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[\\/]load[\\/]DorisStreamLoad\.java" checks="MemberName" />
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[\\/]load[\\/]DorisStreamLoad\.java" checks="AbbreviationAsWordInName" />

<!-- exclude rules for special files -->
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[\\/]util[\\/]ResponseUtil\.java" checks="NewlineAtEndOfFile" />
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[\\/]rest[\\/]models[\\/]BackendRow\.java" checks="NewlineAtEndOfFile" />
<suppress files="org[\\/]apache[\\/]doris[\\/]spark[\\/]util[\\/]DataUtilTest\.java" checks="NewlineAtEndOfFile" />
</suppressions>
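For context, the MemberName and ParameterName suppressions above are presumably needed because the REST model classes keep member names aligned with the JSON payloads returned by Doris rather than using lowerCamelCase; the sketch below is hypothetical (class and field names invented, Jackson binding assumed), not the actual RespContent source:

```java
import com.fasterxml.jackson.annotation.JsonProperty;

// Hypothetical model class: the member name mirrors a capitalized JSON key,
// which Checkstyle's MemberName rule would flag without a suppression entry.
public class RespContentSketch {

    @JsonProperty("TxnId")
    private long TxnId; // intentionally matches the JSON key, not lowerCamelCase

    public long getTxnId() {
        return TxnId;
    }
}
```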
37 changes: 36 additions & 1 deletion spark-doris-connector/pom.xml
@@ -318,6 +318,42 @@
</execution>
</executions>
</plugin>

<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>3.1.2</version>
<dependencies>
<dependency>
<groupId>com.puppycrawl.tools</groupId>
<artifactId>checkstyle</artifactId>
<version>9.3</version>
</dependency>
</dependencies>
<configuration>
<configLocation>check/checkstyle/checkstyle.xml</configLocation>
<suppressionsLocation>check/checkstyle/suppressions.xml</suppressionsLocation>
<encoding>UTF-8</encoding>
<consoleOutput>true</consoleOutput>
<failsOnError>true</failsOnError>
<linkXRef>false</linkXRef>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
</configuration>
<executions>
<execution>
<id>validate</id>
<phase>validate</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.sonarsource.scanner.maven</groupId>
<artifactId>sonar-maven-plugin</artifactId>
<version>3.9.1.2184</version>
</plugin>
</plugins>
</pluginManagement>

@@ -349,7 +385,6 @@
</plugins>
</build>


<profiles>
<!-- for custom internal repository -->
<profile>
@@ -26,12 +26,13 @@
import org.apache.doris.sdk.thrift.TScanOpenResult;
import org.apache.doris.sdk.thrift.TStatusCode;
import org.apache.doris.spark.cfg.ConfigurationOptions;
import org.apache.doris.spark.cfg.Settings;
import org.apache.doris.spark.exception.ConnectedFailedException;
import org.apache.doris.spark.exception.DorisException;
import org.apache.doris.spark.exception.DorisInternalException;
import org.apache.doris.spark.util.ErrorMessages;
import org.apache.doris.spark.cfg.Settings;
import org.apache.doris.spark.serialization.Routing;
import org.apache.doris.spark.util.ErrorMessages;

import org.apache.thrift.TConfiguration;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
@@ -46,7 +47,7 @@
* Client to request Doris BE
*/
public class BackendClient {
private final static Logger logger = LoggerFactory.getLogger(BackendClient.class);
private static final Logger logger = LoggerFactory.getLogger(BackendClient.class);

private Routing routing;

@@ -78,7 +79,8 @@ private void open() throws ConnectedFailedException {
logger.debug("Attempt {} to connect {}.", attempt, routing);
try {
TBinaryProtocol.Factory factory = new TBinaryProtocol.Factory();
transport = new TSocket(new TConfiguration(), routing.getHost(), routing.getPort(), socketTimeout, connectTimeout);
transport = new TSocket(new TConfiguration(), routing.getHost(), routing.getPort(), socketTimeout,
connectTimeout);
TProtocol protocol = factory.getProtocol(transport);
client = new TDorisExternalService.Client(protocol);
logger.trace("Connect status before open transport to {} is '{}'.", routing, isConnected);
@@ -115,6 +117,7 @@ private void close() {

/**
* Open a scanner for reading Doris data.
*
* @param openParams thrift struct to required by request
* @return scan open result
* @throws ConnectedFailedException throw if cannot connect to Doris BE
@@ -154,6 +157,7 @@ public TScanOpenResult openScanner(TScanOpenParams openParams) throws ConnectedF

/**
* get next row batch from Doris BE
*
* @param nextBatchParams thrift struct to required by request
* @return scan batch result
* @throws ConnectedFailedException throw if cannot connect to Doris BE
@@ -168,7 +172,7 @@ public TScanBatchResult getNext(TScanNextBatchParams nextBatchParams) throws Dor
for (int attempt = 0; attempt < retries; ++attempt) {
logger.debug("Attempt {} to getNext {}.", attempt, routing);
try {
result = client.getNext(nextBatchParams);
result = client.getNext(nextBatchParams);
if (result == null) {
logger.warn("GetNext result from {} is null.", routing);
continue;
@@ -196,6 +200,7 @@ public TScanBatchResult getNext(TScanNextBatchParams nextBatchParams) throws Dor

/**
* close an scanner.
*
* @param closeParams thrift struct to required by request
*/
public void closeScanner(TScanCloseParams closeParams) {
@@ -79,8 +79,11 @@ public interface ConfigurationOptions {
String DORIS_SINK_TASK_PARTITION_SIZE = "doris.sink.task.partition.size";

/**
* Set doris sink task partition size. If you set a small coalesce size and you don't have the action operations, this may result in the same parallelism in your computation.
* To avoid this, you can use repartition operations. This will add a shuffle step, but means the current upstream partitions will be executed in parallel.
* Set doris sink task partition size.
* If you set a small coalesce size and you don't have the action operations, this may result in the same
* parallelism in your computation.
* To avoid this, you can use repartition operations. This will add a shuffle step, but means the current
* upstream partitions will be executed in parallel.
*/
String DORIS_SINK_TASK_USE_REPARTITION = "doris.sink.task.use.repartition";

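As a usage sketch of the repartition behavior documented in the Javadoc above (not part of this PR): the table identifier, FE address, and credentials below are placeholders, and the surrounding option keys follow the connector's standard write options; only doris.sink.task.use.repartition comes from the constant shown here.

```java
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class DorisSinkRepartitionExample {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("doris-sink-repartition-example")
                .getOrCreate();

        Dataset<Row> df = spark.read().json("/tmp/input.json"); // any source data

        df.write()
                .format("doris")
                .mode("append")
                .option("doris.fenodes", "fe_host:8030")                     // placeholder FE address
                .option("doris.table.identifier", "example_db.example_tbl")  // placeholder table
                .option("user", "root")                                      // placeholder credentials
                .option("password", "")
                // Repartition (adds a shuffle) instead of coalesce so upstream
                // partitions keep their parallelism, as the Javadoc describes.
                .option("doris.sink.task.use.repartition", "true")
                .save();

        spark.stop();
    }
}
```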
@@ -17,10 +17,10 @@

package org.apache.doris.spark.cfg;

import java.util.Properties;

import com.google.common.base.Preconditions;

import java.util.Properties;

public class PropertiesSettings extends Settings {

protected final Properties props;
@@ -17,28 +17,23 @@

package org.apache.doris.spark.cfg;

import java.util.Enumeration;
import java.util.Map;
import java.util.Properties;

import org.apache.commons.lang3.StringUtils;
import org.apache.doris.spark.exception.IllegalArgumentException;
import org.apache.doris.spark.util.ErrorMessages;
import org.apache.doris.spark.util.IOUtils;

import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Enumeration;
import java.util.Map;
import java.util.Properties;

public abstract class Settings {
private final static Logger logger = LoggerFactory.getLogger(Settings.class);
private static final Logger logger = LoggerFactory.getLogger(Settings.class);

public abstract String getProperty(String name);

public abstract void setProperty(String name, String value);

public abstract Properties asProperties();

public abstract Settings copy();

public String getProperty(String name, String defaultValue) {
String value = getProperty(name);
if (StringUtils.isEmpty(value)) {
@@ -47,14 +42,20 @@ public String getProperty(String name, String defaultValue) {
return value;
}

public abstract void setProperty(String name, String value);

public abstract Properties asProperties();

public abstract Settings copy();

public Integer getIntegerProperty(String name) {
return getIntegerProperty(name, null);
}

public Integer getIntegerProperty(String name, Integer defaultValue) {
try {
if (getProperty(name) != null) {
return Integer.parseInt(getProperty(name));
return Integer.parseInt(getProperty(name));
}
} catch (NumberFormatException e) {
logger.warn(ErrorMessages.PARSE_NUMBER_FAILED_MESSAGE, name, getProperty(name));
@@ -80,7 +81,7 @@ public Settings merge(Properties properties) {

Enumeration<?> propertyNames = properties.propertyNames();

for (; propertyNames.hasMoreElements();) {
for (; propertyNames.hasMoreElements(); ) {
Object prop = propertyNames.nextElement();
if (prop instanceof String) {
Object value = properties.get(prop);
@@ -17,16 +17,14 @@

package org.apache.doris.spark.cfg;

import java.util.Properties;

import org.apache.spark.SparkConf;

import com.google.common.base.Preconditions;

import org.apache.spark.SparkConf;
import scala.Option;
import scala.Serializable;
import scala.Tuple2;

import java.util.Properties;

public class SparkSettings extends Settings implements Serializable {

private final SparkConf cfg;