# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This workflow is meant as an extended CI run that includes certain features that shall be tested
# and JDK versions that are supported but not considered default.
name: "Flink CI (extended)"

on:
  # Nightly-style runs four times a day (times are UTC).
  schedule:
    - cron: '0 2,8,14,20 * * *'
  # Callable from other workflows; the caller must supply the branch and may
  # pass S3 credentials through for artifact upload in the reused templates.
  workflow_call:
    inputs:
      branch:
        description: "The branch the extended CI run should be executed on."
        type: string
        required: true
    secrets:
      s3_bucket:
        required: false
      s3_access_key:
        required: false
      s3_secret_key:
        required: false
  # Manual trigger; defaults to master when no branch is given.
  workflow_dispatch:
    inputs:
      branch:
        description: "The branch the extended workflow should run on."
        default: "master"
        type: string

permissions: read-all
jobs:
  # Static source-code checks (licensing, style, etc.) via reusable workflow.
  basic-qa:
    name: "Basic Source Code Analysis"
    uses: ./.github/workflows/source-code-analysis.yml
    with:
      branch: ${{ inputs.branch }}

  # Default build profile on the oldest supported JDK.
  java8:
    name: "Default (Java 8/Hadoop 2.10.2)"
    uses: ./.github/workflows/flink-ci-template.yml
    with:
      workflow-caller-id: java8
      branch: ${{ inputs.branch }}
      environment: 'PROFILE="-Dflink.hadoop.version=2.10.2 -Dinclude_hadoop_aws"'
      jdk-version: 8
    secrets:
      s3_bucket: ${{ secrets.s3_bucket }}
      s3_access_key: ${{ secrets.s3_access_key }}
      s3_secret_key: ${{ secrets.s3_secret_key }}

  java11:
    name: "Java 11"
    uses: ./.github/workflows/flink-ci-template.yml
    with:
      workflow-caller-id: java11
      branch: ${{ inputs.branch }}
      environment: 'PROFILE="-Dflink.hadoop.version=2.10.2 -Dinclude_hadoop_aws -Djdk11 -Pjava11-target"'
      jdk-version: 11
    secrets:
      s3_bucket: ${{ secrets.s3_bucket }}
      s3_access_key: ${{ secrets.s3_access_key }}
      s3_secret_key: ${{ secrets.s3_secret_key }}

  java17:
    name: "Java 17"
    uses: ./.github/workflows/flink-ci-template.yml
    with:
      workflow-caller-id: java17
      branch: ${{ inputs.branch }}
      environment: 'PROFILE="-Dflink.hadoop.version=2.10.2 -Dinclude_hadoop_aws -Djdk11 -Djdk17 -Pjava17-target"'
      jdk-version: 17
    secrets:
      s3_bucket: ${{ secrets.s3_bucket }}
      s3_access_key: ${{ secrets.s3_access_key }}
      s3_secret_key: ${{ secrets.s3_secret_key }}

  # NOTE(review): job id "hadoop313" does not match the Hadoop version used
  # below (3.2.3) or the display name — looks like a leftover from an earlier
  # Hadoop 3.1.3 setup; confirm and consider renaming the id and/or name.
  hadoop313:
    name: "Hadoop 3.2.3"
    uses: ./.github/workflows/flink-ci-template.yml
    with:
      workflow-caller-id: hadoop313
      branch: ${{ inputs.branch }}
      environment: 'PROFILE="-Dflink.hadoop.version=3.2.3 -Phadoop3-tests,hive3"'
      jdk-version: 8
    secrets:
      s3_bucket: ${{ secrets.s3_bucket }}
      s3_access_key: ${{ secrets.s3_access_key }}
      s3_secret_key: ${{ secrets.s3_secret_key }}

  # Runs the test suite with the adaptive scheduler enabled.
  adaptive-scheduler:
    name: "AdaptiveScheduler"
    uses: ./.github/workflows/flink-ci-template.yml
    with:
      workflow-caller-id: adaptive-scheduler
      branch: ${{ inputs.branch }}
      environment: 'PROFILE="-Dflink.hadoop.version=2.10.2 -Penable-adaptive-scheduler"'
      jdk-version: 8
    secrets:
      s3_bucket: ${{ secrets.s3_bucket }}
      s3_access_key: ${{ secrets.s3_access_key }}
      s3_secret_key: ${{ secrets.s3_secret_key }}