release one tispark jar instead of two (pingcap#933)
marsishandsome authored Jul 19, 2019
1 parent c65b9b7 commit 4888fa8
Showing 25 changed files with 603 additions and 190 deletions.
5 changes: 3 additions & 2 deletions .ci/integration_test.groovy
@@ -33,7 +33,7 @@ def call(ghprbActualCommit, ghprbCommentBody, ghprbPullId, ghprbPullTitle, ghprb
// parse mvn profile
def m4 = ghprbCommentBody =~ /profile\s*=\s*([^\s\\]+)(\s|\\|$)/
if (m4) {
- MVN_PROFILE = "-P ${m4[0][1]}"
+ MVN_PROFILE = "-P${m4[0][1]}"
}

def readfile = { filename ->
@@ -130,6 +130,7 @@ def call(ghprbActualCommit, ghprbCommentBody, ghprbPullId, ghprbPullTitle, ghprb
sh """
cp .ci/log4j-ci.properties core/src/test/resources/log4j.properties
export MAVEN_OPTS="-Xmx6G -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=51M"
+ mvn compile ${MVN_PROFILE} -DskipCloneProtoFiles=true
mvn test ${MVN_PROFILE} -Dtest=moo ${mvnStr} -DskipCloneProtoFiles=true
"""
}
@@ -143,7 +144,7 @@ def call(ghprbActualCommit, ghprbCommentBody, ghprbPullId, ghprbPullTitle, ghprb
"""
sh """
export MAVEN_OPTS="-Xmx6G -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=512M"
- mvn test -am -pl tikv-client -DskipCloneProtoFiles=true
+ mvn test ${MVN_PROFILE} -am -pl tikv-client -DskipCloneProtoFiles=true
"""
unstash "CODECOV_TOKEN"
sh 'curl -s https://codecov.io/bash | bash -s - -t @CODECOV_TOKEN'
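A note on the profile parsing above: the regular expression pulls a Maven profile name out of the triggering PR comment, and the changed line now hands it to Maven as a single -Pprofile token (the space after -P is dropped). The Scala sketch below only illustrates how the capture behaves; the object name and the comment text are made up for the example, and only the regular expression itself comes from integration_test.groovy.

// Illustration of the profile= capture used in integration_test.groovy.
// The comment body is hypothetical; only the regex is taken from the script.
object ProfileParseSketch {
  def main(args: Array[String]): Unit = {
    val commentBody = "/run-all-tests profile=spark-2.4"
    val pattern = """profile\s*=\s*([^\s\\]+)(\s|\\|$)""".r
    val mvnProfile = pattern.findFirstMatchIn(commentBody)
      .map(m => s"-P${m.group(1)}")
      .getOrElse("")
    println(mvnProfile) // prints: -Pspark-2.4
  }
}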
59 changes: 59 additions & 0 deletions assembly/pom.xml
@@ -0,0 +1,59 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.pingcap.tispark</groupId>
<artifactId>tispark-parent</artifactId>
<version>2.2.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

<artifactId>tispark-assembly</artifactId>
<packaging>pom</packaging>
<name>TiSpark Project Assembly</name>
<url>http://github.com/pingcap/tispark</url>

<dependencies>
<dependency>
<groupId>com.pingcap.tispark</groupId>
<artifactId>tispark-core</artifactId>
<version>${project.parent.version}</version>
</dependency>
<dependency>
<groupId>com.pingcap.tikv</groupId>
<artifactId>tikv-client</artifactId>
<version>${project.parent.version}</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>${mysql.connector.version}</version>
</dependency>
</dependencies>

<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>3.1.0</version>
<executions>
<execution>
<id>jar</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
<configuration>
<descriptors>
<descriptor>src/main/assembly/assembly.xml</descriptor>
</descriptors>
<appendAssemblyId>false</appendAssemblyId>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
41 changes: 41 additions & 0 deletions assembly/src/main/assembly/assembly.xml
@@ -0,0 +1,41 @@
<assembly>
<id>jar</id>

<formats>
<format>jar</format>
</formats>

<includeBaseDirectory>false</includeBaseDirectory>

<dependencySets>
<dependencySet>
<includes>
<include>com.pingcap.tispark:tispark-core:jar</include>
<include>com.pingcap.tikv:tikv-client:jar</include>
<include>mysql:mysql-connector-java:jar</include>
</includes>
<unpack>true</unpack>
</dependencySet>
</dependencySets>

<fileSets>
<fileSet>
<directory>
${project.parent.basedir}/spark-wrapper/spark-2.3/target/classes/
</directory>
<outputDirectory>resources/spark-wrapper-spark-2.3</outputDirectory>
<includes>
<include>**/*</include>
</includes>
</fileSet>
<fileSet>
<directory>
${project.parent.basedir}/spark-wrapper/spark-2.4/target/classes/
</directory>
<outputDirectory>resources/spark-wrapper-spark-2.4</outputDirectory>
<includes>
<include>**/*</include>
</includes>
</fileSet>
</fileSets>
</assembly>
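The descriptor above is what makes the single-jar release possible: tispark-core, tikv-client and the MySQL connector are unpacked into one artifact, and the compiled classes of both spark-wrapper modules are carried along under resources/spark-wrapper-spark-2.3 and resources/spark-wrapper-spark-2.4 so the wrapper matching the running Spark version can be picked at runtime. The sketch below shows one way such a selection could be wired up with a class loader rooted at a jar: URL; it is an illustration only — the object and method names are invented, only the resource paths come from this descriptor, and TiSpark's actual ReflectionUtil may do this differently.

// Illustration only: select a spark-wrapper class tree inside the assembly jar
// based on the running Spark version. Not TiSpark's actual ReflectionUtil code.
import java.net.{URL, URLClassLoader}

object WrapperLoaderSketch {
  def wrapperClassLoader(assemblyJarPath: String, sparkVersion: String): ClassLoader = {
    val wrapperDir =
      if (sparkVersion.startsWith("2.3")) "resources/spark-wrapper-spark-2.3/"
      else "resources/spark-wrapper-spark-2.4/"
    // A jar: URL pointing at a directory entry inside the assembly jar
    val url = new URL(s"jar:file:${assemblyJarPath}!/${wrapperDir}")
    new URLClassLoader(Array(url), getClass.getClassLoader)
  }
}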
73 changes: 1 addition & 72 deletions core/pom.xml
@@ -16,8 +16,6 @@

<properties>
<scalaj.version>2.3.0</scalaj.version>
- <mysql.connector.version>5.1.44</mysql.connector.version>
- <play.version>2.6.8</play.version>
</properties>

<dependencies>
@@ -41,37 +39,31 @@
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<version>2.8.1</version>
- <scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>2.8.1</version>
- <scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
- <scope>${spark.scope}</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-catalyst_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
- <scope>${spark.scope}</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
- <scope>${spark.scope}</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
- <scope>${spark.scope}</scope>
<exclusions>
<exclusion>
<groupId>org.apache.hadoop</groupId>
@@ -83,19 +75,16 @@
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive-thriftserver_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
- <scope>${spark.scope}</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-unsafe_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
- <scope>${spark.scope}</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>2.7.2</version>
- <scope>${spark.scope}</scope>
</dependency>
<!-- For tests -->
<dependency>
@@ -213,9 +202,6 @@
<encoding>UTF-8</encoding>
<showWarnings>true</showWarnings>
<showDeprecation>true</showDeprecation>
- <includes>
- <include>src/main/${profile.source.directory}</include>
- </includes>
</configuration>
</plugin>
<!-- Jar Plug-in -->
@@ -269,44 +255,6 @@
</execution>
</executions>
</plugin>
- <!--GPG Signed Components-->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-gpg-plugin</artifactId>
- <version>1.5</version>
- <configuration>
- <keyname>Yifei Wu</keyname>
- <skip>${gpg.skip}</skip>
- </configuration>
- <executions>
- <execution>
- <id>sign-artifacts</id>
- <goals>
- <goal>sign</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <!-- Assembly Plug-in -->
- <plugin>
- <artifactId>maven-assembly-plugin</artifactId>
- <executions>
- <execution>
- <id>assemble-all</id>
- <phase>package</phase>
- <goals>
- <goal>single</goal>
- </goals>
- </execution>
- </executions>
- <configuration>
- <descriptorRefs>
- <descriptorRef>jar-with-dependencies</descriptorRef>
- </descriptorRefs>
- <!-- Rename Jar to contain Spark Version -->
- <!--<finalName>${artifactId}-${version}-spark-${spark.version}-jar-with-dependencies</finalName>-->
- </configuration>
- </plugin>
<!-- Scala Format Plug-in -->
<plugin>
<groupId>org.antipathy</groupId>
@@ -315,12 +263,11 @@
<configuration>
<sourceDirectories> <!-- (Optional) Paths to source-directories. Overrides ${project.build.sourceDirectory} -->
<sourceDirectory>${project.basedir}/src/main/scala</sourceDirectory>
- <sourceDirectory>${project.basedir}/src/main/${profile.source.directory}</sourceDirectory>
</sourceDirectories>
<testSourceDirectories> <!-- (Optional) Paths to test-source-directories. Overrides ${project.build.testSourceDirectory} -->
<param>${project.basedir}/src/test/scala</param>
</testSourceDirectories>
- <configLocation>${project.basedir}/scalafmt.conf</configLocation>
+ <configLocation>${project.parent.basedir}/scalafmt.conf</configLocation>
</configuration>
<executions>
<execution>
@@ -357,24 +304,6 @@
</execution>
</executions>
</plugin>
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>build-helper-maven-plugin</artifactId>
- <version>3.0.0</version>
- <executions>
- <execution>
- <phase>generate-sources</phase>
- <goals>
- <goal>add-source</goal>
- </goals>
- <configuration>
- <sources>
- <source>src/main/${profile.source.directory}</source>
- </sources>
- </configuration>
- </execution>
- </executions>
- </plugin>
</plugins>
</build>
</project>
2 changes: 1 addition & 1 deletion core/scripts/version.sh
@@ -34,4 +34,4 @@ object TiSparkVersion {
if (confStream != null) {
prop.load(confStream)
}
- val version: String = "Release Version: '${TiSparkReleaseVersion}'\\nSupported Spark Version: " + prop.getProperty("spark.supported_version", "spark-2.3") + "\\nGit Commit Hash: '${TiSparkGitHash}'\\nGit Branch: '${TiSparkGitBranch}'\\nUTC Build Time: '${TiSparkBuildTS}'" }' > core/src/main/scala/com/pingcap/tispark/TiSparkVersion.scala
+ val version: String = "Release Version: '${TiSparkReleaseVersion}'\\nGit Commit Hash: '${TiSparkGitHash}'\\nGit Branch: '${TiSparkGitBranch}'\\nUTC Build Time: '${TiSparkBuildTS}'" }' > core/src/main/scala/com/pingcap/tispark/TiSparkVersion.scala
1 change: 0 additions & 1 deletion core/src/main/resources/config.properties

This file was deleted.

30 changes: 30 additions & 0 deletions core/src/main/scala/com/pingcap/tispark/examples/OneJarTest.scala
@@ -0,0 +1,30 @@
/*
* Copyright 2019 PingCAP, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.pingcap.tispark.examples

import com.pingcap.tispark.utils.ReflectionUtil

object OneJarTest {

def main(args: Array[String]): Unit = {
val clazz =
ReflectionUtil.classLoader.loadClass("com.pingcap.tispark.SparkWrapper")
val sparkWrapperVersion =
clazz.getDeclaredMethod("getVersion").invoke(null).asInstanceOf[String]
println(s"sparkWrapperVersion=$sparkWrapperVersion")
}

}
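OneJarTest above is a quick smoke test that the wrapper selection works from the single jar. Beyond that, the assembly is consumed like any other TiSpark release: put the tispark-assembly jar on the Spark classpath and enable TiSpark through its SQL extensions. The snippet below is a hedged usage sketch — the PD address is a placeholder and the two configuration keys are the ones commonly documented for TiSpark 2.x, not something introduced by this commit.

// Hedged usage sketch: the PD address is a placeholder; the configuration keys
// are the commonly documented TiSpark 2.x settings, not defined by this commit.
import org.apache.spark.sql.SparkSession

object AssemblyUsageSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("tispark-one-jar")
      .config("spark.sql.extensions", "org.apache.spark.sql.TiExtensions")
      .config("spark.tispark.pd.addresses", "127.0.0.1:2379")
      .getOrCreate()

    spark.sql("show databases").show()
    spark.stop()
  }
}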