# This code is autogenerated.
# Code is generated by running a custom script: python3 readme.py
# Any manual changes to this file may cause incorrect behavior.
# Any manual changes will be overwritten if the code is regenerated.
name: cli-jobs-spark-attached-spark-pipeline-user-identity
on:
  workflow_dispatch:
  schedule:
    - cron: "15 4/12 * * *"
  pull_request:
    branches:
      - main
    paths:
      - cli/jobs/spark/**
      - infra/bootstrapping/**
      - .github/workflows/cli-jobs-spark-attached-spark-pipeline-user-identity.yml
      - cli/jobs/spark/data/titanic.csv
      - cli/setup.sh
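# The azure/login step below authenticates via OpenID Connect, which requires permission to request an ID token.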
permissions:
  id-token: write
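# Allow only one run per workflow and PR/ref; a newer run cancels any run still in progress.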
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
    - name: check out repo
      uses: actions/checkout@v2
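    # Sign in to Azure with the OIDC federated credentials stored as repository secrets.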
    - name: azure login
      uses: azure/login@v1
      with:
        client-id: ${{ secrets.OIDC_AZURE_CLIENT_ID }}
        tenant-id: ${{ secrets.OIDC_AZURE_TENANT_ID }}
        subscription-id: ${{ secrets.OIDC_AZURE_SUBSCRIPTION_ID }}
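    # Provision the shared test infrastructure via infra/bootstrapping/bootstrap.sh.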
    - name: bootstrap resources
      run: |
        echo '${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}';
        bash bootstrap.sh
      working-directory: infra/bootstrapping
      continue-on-error: false
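    # Prepare the CLI environment using cli/setup.sh and the shared helper scripts.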
    - name: setup-cli
      run: |
        source "${{ github.workspace }}/infra/bootstrapping/sdk_helpers.sh";
        source "${{ github.workspace }}/infra/bootstrapping/init_environment.sh";
        bash setup.sh
      working-directory: cli
      continue-on-error: true
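    # Upload the example data under jobs/spark/ to blob storage.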
    - name: upload data
      run: |
        bash -x upload-data-to-blob.sh jobs/spark/
      working-directory: cli
      continue-on-error: true
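    # Create the attached Spark compute defined in resources/compute/attached-spark-user-identity.yml for this pipeline.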
    - name: setup attached spark
      working-directory: cli
      continue-on-error: true
      run: |
        bash -x jobs/spark/setup-attached-resources.sh resources/compute/attached-spark-user-identity.yml jobs/spark/attached-spark-pipeline-user-identity.yml
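    # Submit the pipeline job defined in attached-spark-pipeline-user-identity.yml via the shared run-job.sh helper.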
    - name: run job
      run: |
        source "${{ github.workspace }}/infra/bootstrapping/sdk_helpers.sh";
        source "${{ github.workspace }}/infra/bootstrapping/init_environment.sh";
        bash -x ../../run-job.sh attached-spark-pipeline-user-identity.yml
      working-directory: cli/jobs/spark
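    # Check that the README under cli/jobs/spark is consistent with the examples (check-readme.py).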
    - name: validate readme
      run: |
        python check-readme.py "${{ github.workspace }}/cli/jobs/spark"
      working-directory: infra/bootstrapping
      continue-on-error: false