Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Support merge into operation #433

Open
wants to merge 4 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 29 additions & 0 deletions .github/workflows/snyk-issue.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
# Scheduled workflow that turns Snyk-detected vulnerabilities into Jira issues
# via the shared snowflakedb/whitesource-actions tooling.
name: Snyk Issue

on:
  schedule:
    # Run once every 12 hours (at minute 0 of hours 0 and 12).
    # The previous expression '* */12 * * *' matched EVERY minute of each
    # matching hour, triggering the job up to 60 times per run window.
    - cron: '0 */12 * * *'

# Prevent overlapping runs of this workflow.
concurrency: snyk-issue

jobs:
  whitesource:
    runs-on: ubuntu-latest
    steps:
      # Check out the shared action repository (not this repo) so the local
      # composite action below can be referenced by path.
      - name: checkout action
        uses: actions/checkout@v2
        with:
          repository: snowflakedb/whitesource-actions
          token: ${{ secrets.WHITESOURCE_ACTION_TOKEN }}
          path: whitesource-actions
      # Expose the bare repository name (without owner) to later steps.
      - name: set-env
        run: echo "REPO=$(basename $GITHUB_REPOSITORY)" >> $GITHUB_ENV
      - name: Jira Creation
        uses: ./whitesource-actions/snyk-issue
        with:
          snyk_org: ${{ secrets.SNYK_ORG_ID }}
          snyk_token: ${{ secrets.SNYK_GITHUB_INTEGRATION_TOKEN }}
          jira_token: ${{ secrets.JIRA_TOKEN }}
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

31 changes: 31 additions & 0 deletions .github/workflows/snyk-pr.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
# Workflow that post-processes Snyk-generated pull requests (Jira linkage)
# using the shared snowflakedb/whitesource-actions tooling.
name: snyk-pr
on:
  pull_request:
    branches:
      - master
jobs:
  whitesource:
    runs-on: ubuntu-latest
    # Only react to PRs opened by the Snyk service account.
    if: ${{ github.event.pull_request.user.login == 'sfc-gh-snyk-sca-sa' }}
    steps:
      # Check out the PR head branch with full history so it can be amended
      # or inspected by the action below.
      - name: checkout
        uses: actions/checkout@v2
        with:
          ref: ${{ github.event.pull_request.head.ref }}
          fetch-depth: 0

      # Check out the shared action repository so the local composite action
      # below can be referenced by path.
      - name: checkout action
        uses: actions/checkout@v2
        with:
          repository: snowflakedb/whitesource-actions
          token: ${{ secrets.WHITESOURCE_ACTION_TOKEN }}
          path: whitesource-actions

      - name: PR
        uses: ./whitesource-actions/snyk-pr
        env:
          PR_TITLE: ${{ github.event.pull_request.title }}
        with:
          jira_token: ${{ secrets.JIRA_TOKEN }}
          gh_token: ${{ secrets.GITHUB_TOKEN }}
          amend: false # true if you want the commit to be amended with the JIRA number
12 changes: 11 additions & 1 deletion src/main/scala/net/snowflake/spark/snowflake/Parameters.scala
Original file line number Diff line number Diff line change
Expand Up @@ -190,6 +190,13 @@ object Parameters {
"internal_use_aws_region_url"
)

// Comma-separated column names that uniquely identify a row in the target
// table; used as the join condition of a MERGE INTO write.
// NOTE(review): other parameter names in this object use underscores
// (e.g. "internal_use_aws_region_url"); "unique.keys"/"inserted.columns"
// use dots — confirm the naming is intentional before release.
val PARAM_UNIQUE_KEYS: String = knownParam(
  "unique.keys"
)
// Comma-separated column names written on both the UPDATE and INSERT
// branches of a MERGE INTO write.
val PARAM_INSERTED_COLUMNS: String = knownParam(
  "inserted.columns"
)

val DEFAULT_S3_MAX_FILE_SIZE: String = (10 * 1000 * 1000).toString
val MIN_S3_MAX_FILE_SIZE = 1000000

Expand Down Expand Up @@ -577,6 +584,10 @@ object Parameters {
parameters.get(PARAM_SF_ACCOUNT)
}

/** Comma-separated unique-key columns for MERGE writes; "" when unset. */
def uniqueKeys: String =
  parameters.get(PARAM_UNIQUE_KEYS).getOrElse("")

/** Comma-separated columns inserted/updated by MERGE writes; "" when unset. */
def insertedColumns: String =
  parameters.get(PARAM_INSERTED_COLUMNS).getOrElse("")

/**
* Snowflake SSL on/off - "on" by default
*/
Expand Down Expand Up @@ -625,7 +636,6 @@ object Parameters {
def nonProxyHosts: Option[String] = {
parameters.get(PARAM_NON_PROXY_HOSTS)
}

/**
* Mapping OAuth and authenticator values
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -518,6 +518,22 @@ private[snowflake] object DefaultJDBCWrapper extends JDBCWrapper {
.execute(bindVariableEnabled)(connection)
}

/**
 * Upserts rows from a staging table into the target table with a single
 * MERGE INTO statement: rows whose key columns match an existing row are
 * updated, all other rows are inserted.
 *
 * @param tempTable           staging table holding the newly loaded rows
 * @param targetTable         table to merge into
 * @param keys                columns forming the merge join condition;
 *                            must be non-empty
 * @param insertedColumns     columns written on both the UPDATE and the
 *                            INSERT branch; must be non-empty
 * @param bindVariableEnabled whether to execute using bind variables
 */
def mergeIntoTable(tempTable: String,
                   targetTable: String,
                   keys: Array[String],
                   insertedColumns: Array[String],
                   bindVariableEnabled: Boolean = true): Unit = {
  // Guard early: empty arrays would silently build malformed SQL
  // (empty ON condition / SET list / column list) that only fails server-side.
  require(keys.nonEmpty, "mergeIntoTable: at least one key column is required")
  require(insertedColumns.nonEmpty,
    "mergeIntoTable: at least one inserted column is required")
  // NOTE(review): key/column names are interpolated verbatim into the SQL
  // text below. They originate from user-supplied options, so they must be
  // validated or quoted upstream if they can contain anything other than
  // plain identifiers.
  val joinExpr = keys
    .map(key => s"$targetTable.$key=$tempTable.$key")
    .mkString(" and ")
  val updateExpr = insertedColumns
    .map(c => s"$targetTable.$c=$tempTable.$c")
    .mkString(",")
  val matchExpr = s"when matched then update set $updateExpr"
  val insertedColumnStr = insertedColumns.mkString(",")
  val insertExpr = insertedColumns.map(c => s"$tempTable.$c").mkString(",")
  val notMatchExpr =
    s"when not matched then insert($insertedColumnStr)values($insertExpr)"
  (ConstantString("merge into") + Identifier(targetTable) + "using" +
    Identifier(tempTable) + "on" + joinExpr + matchExpr + notMatchExpr)
    .execute(bindVariableEnabled)(connection)
}

def truncateTable(table: String,
bindVariableEnabled: Boolean = true): Unit =
(ConstantString("truncate") + table)
Expand Down
50 changes: 38 additions & 12 deletions src/main/scala/net/snowflake/spark/snowflake/io/StageWriter.scala
Original file line number Diff line number Diff line change
Expand Up @@ -418,18 +418,44 @@ private[io] object StageWriter {
)

// Execute COPY INTO TABLE to load data
StageWriter.executeCopyIntoTable(
sqlContext,
conn,
schema,
saveMode,
params,
targetTable,
file,
tempStage,
format,
fileUploadResults)

// update data by merging syntax
val keyStr: String = sqlContext.getConf("spark.sql.unique.keys", params.uniqueKeys)
val insertedColumnStr: String = sqlContext.getConf("spark.sql.inserted.columns", params.insertedColumns)
if(!tempTable.equals(targetTable) && !keyStr.isEmpty && !insertedColumnStr.isEmpty){
conn.createTable(tempTable.name, schema, params,
overwrite = true, temporary = true)
StageWriter.executeCopyIntoTable(
sqlContext,
conn,
schema,
saveMode,
params,
tempTable,
file,
tempStage,
format,
fileUploadResults
)
val keys = keyStr.split(',')
val insertedColumns = insertedColumnStr.split(',')
conn.mergeIntoTable(tempTable.name,
targetTable.name,
keys,
insertedColumns)
} else {
StageWriter.executeCopyIntoTable(
sqlContext,
conn,
schema,
saveMode,
params,
targetTable,
file,
tempStage,
format,
fileUploadResults
)
}
// post actions
Utils.executePostActions(
DefaultJDBCWrapper,
Expand Down