atf: support wildcard schema patches, apply one for salesforce reduction #1316
Workflow file for this run
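# Builds and tests airbyte-to-flow, then builds each Airbyte source connector image
# and publishes it to ghcr.io/estuary.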
name: Connectors

on:
  push:
    branches: [master]
  pull_request:
    branches: [master]

jobs:
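  # Detect whether anything under airbyte-to-flow/ changed, so the airbyte_to_flow job
  # (and the PR-specific image tag used below) only runs when needed.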
  paths-filter:
    runs-on: ubuntu-latest
    outputs:
      atf_modified: ${{ steps.filter.outputs.atf }}
    steps:
      - uses: actions/checkout@v2
      - uses: dorny/paths-filter@v2
        id: filter
        with:
          filters: |
            atf:
              - 'airbyte-to-flow/**'
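
  # Build, test, and publish the airbyte-to-flow image to ghcr.io/estuary/airbyte-to-flow.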
  airbyte_to_flow:
    runs-on: ubuntu-20.04
    permissions:
      contents: read
      packages: write
    strategy:
      fail-fast: false
    needs: paths-filter
    steps:
      - uses: actions/checkout@v2
        if: needs.paths-filter.outputs.atf_modified == 'true'
        with:
          fetch-depth: 0
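      # Expose the 7-character commit SHA so later steps can use it as the image tag.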
      - name: Prepare
        id: prep
        if: needs.paths-filter.outputs.atf_modified == 'true'
        run: |
          TAG=$(echo $GITHUB_SHA | head -c7)
          echo ::set-output name=tag::${TAG}
      # Linux builds need the non-default musl target.
      - name: Install Rust
        uses: actions-rs/toolchain@v1
        if: needs.paths-filter.outputs.atf_modified == 'true'
        with:
          toolchain: nightly-2023-04-16
          target: x86_64-unknown-linux-musl
          default: true
          override: true
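      # Cache cargo binaries, registries, and the target directory, keyed on Cargo.lock.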
      - uses: actions/cache@v3
        if: needs.paths-filter.outputs.atf_modified == 'true'
        with:
          path: |
            ~/.cargo/bin/
            ~/.cargo/registry/index/
            ~/.cargo/registry/cache/
            ~/.cargo/git/db/
            target/
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
      - name: Install protobuf compiler (not included in the CI runner)
        if: needs.paths-filter.outputs.atf_modified == 'true'
        run: sudo apt install -y libprotobuf-dev protobuf-compiler
      - name: Build Linux
        if: needs.paths-filter.outputs.atf_modified == 'true'
        run: |-
          sudo apt-get update && \
          sudo apt-get install -y musl-tools && \
          cd airbyte-to-flow && \
          make
      - name: Tests
        if: needs.paths-filter.outputs.atf_modified == 'true'
        run: cd airbyte-to-flow && cargo test --release --target x86_64-unknown-linux-musl
      - name: Set up Docker Buildx
        if: needs.paths-filter.outputs.atf_modified == 'true'
        uses: docker/setup-buildx-action@v2
      - name: Login to GitHub Container Registry
        if: needs.paths-filter.outputs.atf_modified == 'true'
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
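      # On master, publish both the :dev tag and the commit-SHA tag;
      # on other branches, publish only the commit-SHA tag.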
      - name: Build and push
        uses: docker/build-push-action@v3
        if: needs.paths-filter.outputs.atf_modified == 'true' && github.ref == 'refs/heads/master'
        with:
          context: airbyte-to-flow
          platforms: linux/amd64
          push: true
          tags: ghcr.io/estuary/airbyte-to-flow:dev,ghcr.io/estuary/airbyte-to-flow:${{ steps.prep.outputs.tag }}
      - name: Build and push
        uses: docker/build-push-action@v3
        if: needs.paths-filter.outputs.atf_modified == 'true' && github.ref != 'refs/heads/master'
        with:
          context: airbyte-to-flow
          platforms: linux/amd64
          push: true
          tags: ghcr.io/estuary/airbyte-to-flow:${{ steps.prep.outputs.tag }}
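
  # Build every source connector image in the matrix and push it to ghcr.io/estuary/<connector>.
  # An entry's optional alias publishes the same image under a second name.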
  connector:
    needs:
      - paths-filter
      - airbyte_to_flow
    runs-on: ubuntu-20.04
    strategy:
      fail-fast: false
      matrix:
        connector:
          - name: source-facebook-marketing
          - name: source-hubspot
          - name: source-google-sheets
          - name: source-exchange-rates
          - name: source-google-analytics-v4
            alias: source-google-analytics-ua
          - name: source-mailchimp
          - name: source-zendesk-support
          - name: source-stripe
          - name: source-amplitude
          - name: source-intercom
          - name: source-google-ads
          - name: source-salesforce
          - name: source-google-analytics-data-api
          - name: source-linkedin-ads
          - name: source-bing-ads
          - name: source-github
          - name: source-amazon-ads
          - name: source-notion
          - name: source-tiktok-marketing
          - name: source-google-search-console
          - name: source-surveymonkey
          - name: source-slack
          - name: source-airtable
          - name: source-freshdesk
          - name: source-marketo
          - name: source-chargebee
          - name: source-klaviyo
          - name: source-snapchat-marketing
          - name: source-recharge
          - name: source-mixpanel
          - name: source-harvest
          - name: source-zendesk-talk
          - name: source-zendesk-chat
          - name: source-twilio
          - name: source-paypal-transaction
          - name: source-greenhouse
          - name: source-iterable
          - name: source-sendgrid
          - name: source-sentry
          - name: source-youtube-analytics
          - name: source-instagram
          - name: source-braintree
          - name: source-braze
          - name: source-netsuite
          - name: source-jira
          - name: source-dynamodb
          - name: source-bigquery
          - name: source-woocommerce
          - name: source-amazon-sqs
          - name: source-redshift
          - name: source-oracle
          - name: source-postgres-heroku
          - name: source-pinterest
          - name: source-aircall
    steps:
      - uses: actions/checkout@v2
        with:
          fetch-depth: 0
      - name: Prepare
        id: prep
        run: |
          TAG=$(echo $GITHUB_SHA | head -c7)
          echo ::set-output name=tag::${TAG}
      - uses: actions/setup-python@v3
        with:
          python-version: "3.9"
      - uses: actions/setup-java@v1
        with:
          java-version: "17"
      - name: Login to GitHub package docker registry
        run: |
          echo "${{ secrets.GITHUB_TOKEN }}" | \
          docker login --username ${{ github.actor }} --password-stdin ghcr.io
      - name: Install virtualenv
        run: python3 -m pip install virtualenv==20.4.2 --user
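      # Connectors with a requirements.txt build through Gradle; all others build directly
      # from the connector's Dockerfile, passing the airbyte-to-flow image tag as a build arg.
      # On PRs that modify airbyte-to-flow, the freshly built commit-SHA tag is used instead of :dev.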
      - name: Build ${{ matrix.connector.name }}
        id: build
        run: |
          AIRBYTE_TO_FLOW_TAG=dev
          if [[ "$GITHUB_EVENT_NAME" == "pull_request" && '${{ needs.paths-filter.outputs.atf_modified }}' == 'true' ]]; then
            AIRBYTE_TO_FLOW_TAG=${{ steps.prep.outputs.tag }}
          fi
          if [ -f "airbyte-integrations/connectors/${{ matrix.connector.name }}/requirements.txt" ]; then
            ./gradlew :airbyte-integrations:connectors:${{ matrix.connector.name }}:build
            docker tag docker.io/airbyte/${{ matrix.connector.name }}:dev ghcr.io/estuary/${{ matrix.connector.name }}:${{ steps.prep.outputs.tag }}
          else
            docker build "airbyte-integrations/connectors/${{ matrix.connector.name }}" \
              -f "airbyte-integrations/connectors/${{ matrix.connector.name }}/Dockerfile" \
              --build-arg AIRBYTE_TO_FLOW_TAG=$AIRBYTE_TO_FLOW_TAG \
              -t ghcr.io/estuary/${{ matrix.connector.name }}:${{ steps.prep.outputs.tag }}
          fi
          docker push ghcr.io/estuary/${{ matrix.connector.name }}:${{ steps.prep.outputs.tag }}
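      # On pushes to master, retag the commit-SHA image as :dev and as the version declared
      # in the connector's Dockerfile, and publish both.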
      - name: Push ${{ matrix.connector.name }} image with 'dev' and version tags
        if: ${{ github.event_name == 'push' }}
        id: push-dev
        run: |
          source tools/lib/lib.sh
          export IMAGE_VERSION=$(_get_docker_image_version airbyte-integrations/connectors/${{ matrix.connector.name }}/Dockerfile)
          docker pull ghcr.io/estuary/${{ matrix.connector.name }}:${{ steps.prep.outputs.tag }}
          echo "Publishing image ghcr.io/estuary/${{ matrix.connector.name }}:dev"
          docker tag ghcr.io/estuary/${{ matrix.connector.name }}:${{ steps.prep.outputs.tag }} ghcr.io/estuary/${{ matrix.connector.name }}:dev
          docker push ghcr.io/estuary/${{ matrix.connector.name }}:dev
          echo "Publishing image ghcr.io/estuary/${{ matrix.connector.name }}:$IMAGE_VERSION"
          docker tag ghcr.io/estuary/${{ matrix.connector.name }}:${{ steps.prep.outputs.tag }} ghcr.io/estuary/${{ matrix.connector.name }}:$IMAGE_VERSION
          docker push ghcr.io/estuary/${{ matrix.connector.name }}:$IMAGE_VERSION
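      # Connectors that declare an alias (e.g. source-google-analytics-v4 -> source-google-analytics-ua)
      # are also published under that second image name.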
      - name: Push ${{ matrix.connector.alias }} image with 'dev' and version tags
        if: github.event_name == 'push' && matrix.connector.alias
        id: push-dev-alias
        run: |
          source tools/lib/lib.sh
          export IMAGE_VERSION=$(_get_docker_image_version airbyte-integrations/connectors/${{ matrix.connector.name }}/Dockerfile)
          docker pull ghcr.io/estuary/${{ matrix.connector.name }}:${{ steps.prep.outputs.tag }}
          echo "Publishing image ghcr.io/estuary/${{ matrix.connector.alias }}:dev"
          docker tag ghcr.io/estuary/${{ matrix.connector.name }}:${{ steps.prep.outputs.tag }} ghcr.io/estuary/${{ matrix.connector.alias }}:dev
          docker push ghcr.io/estuary/${{ matrix.connector.alias }}:dev
          echo "Publishing image ghcr.io/estuary/${{ matrix.connector.alias }}:$IMAGE_VERSION"
          docker tag ghcr.io/estuary/${{ matrix.connector.name }}:${{ steps.prep.outputs.tag }} ghcr.io/estuary/${{ matrix.connector.alias }}:$IMAGE_VERSION
          docker push ghcr.io/estuary/${{ matrix.connector.alias }}:$IMAGE_VERSION
      - name: Install psql
        if: ${{ github.event_name == 'push' }}
        run: |
          sudo apt update
          sudo apt install postgresql
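      # Queue the published :dev and :<version> tags for refresh in the connector_tags table.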
      - name: Refresh connector tags for ${{ matrix.connector.name }}
        if: ${{ github.event_name == 'push' }}
        env:
          PGHOST: ${{ secrets.POSTGRES_CONNECTOR_REFRESH_HOST }}
          PGUSER: ${{ secrets.POSTGRES_CONNECTOR_REFRESH_USER }}
          PGPASSWORD: ${{ secrets.POSTGRES_CONNECTOR_REFRESH_PASSWORD }}
          PGDATABASE: ${{ secrets.POSTGRES_CONNECTOR_REFRESH_DATABASE }}
        run: |
          source tools/lib/lib.sh
          export VERSION=$(_get_docker_image_version airbyte-integrations/connectors/${{ matrix.connector.name }}/Dockerfile)
          echo "UPDATE public.connector_tags SET job_status='{\"type\": \"queued\"}'
            WHERE connector_id IN (
              SELECT id FROM connectors WHERE image_name='ghcr.io/estuary/${{ matrix.connector.name }}'
            )
            AND
            image_tag IN (':$VERSION', ':dev');" | psql
      - name: Refresh connector tags for ${{ matrix.connector.alias }}
        if: github.event_name == 'push' && matrix.connector.alias
        env:
          PGHOST: ${{ secrets.POSTGRES_CONNECTOR_REFRESH_HOST }}
          PGUSER: ${{ secrets.POSTGRES_CONNECTOR_REFRESH_USER }}
          PGPASSWORD: ${{ secrets.POSTGRES_CONNECTOR_REFRESH_PASSWORD }}
          PGDATABASE: ${{ secrets.POSTGRES_CONNECTOR_REFRESH_DATABASE }}
        run: |
          source tools/lib/lib.sh
          export VERSION=$(_get_docker_image_version airbyte-integrations/connectors/${{ matrix.connector.name }}/Dockerfile)
          echo "UPDATE public.connector_tags SET job_status='{\"type\": \"queued\"}'
            WHERE connector_id IN (
              SELECT id FROM connectors WHERE image_name='ghcr.io/estuary/${{ matrix.connector.alias }}'
            )
            AND
            image_tag IN (':$VERSION', ':dev');" | psql