Merge branch 'main' into monitoring-cli-samples

ahughes-msft authored Jul 25, 2023
2 parents bb27ad7 + 66cf84f commit 3a801d7
Showing 22 changed files with 3,618 additions and 27 deletions.
75 changes: 75 additions & 0 deletions .github/workflows/sdk-jobs-pipelines-1k_demand_forecasting_with_pipeline_components-automl-forecasting-demand-hierarchical-timeseries-in-pipeline-automl-forecasting-demand-hierarchical-timeseries-in-pipeline.yml
@@ -0,0 +1,75 @@
# This code is autogenerated.
# Code is generated by running custom script: python3 readme.py
# Any manual changes to this file may cause incorrect behavior.
# Any manual changes will be overwritten if the code is regenerated.

name: sdk-jobs-pipelines-1k_demand_forecasting_with_pipeline_components-automl-forecasting-demand-hierarchical-timeseries-in-pipeline-automl-forecasting-demand-hierarchical-timeseries-in-pipeline
# This file is created by sdk/python/readme.py.
# Please do not edit directly.
on:
  workflow_dispatch:
  schedule:
    - cron: "45 5/12 * * *"
  pull_request:
    branches:
      - main
    paths:
      - sdk/python/jobs/pipelines/1k_demand_forecasting_with_pipeline_components/automl-forecasting-demand-hierarchical-timeseries-in-pipeline/**
      - .github/workflows/sdk-jobs-pipelines-1k_demand_forecasting_with_pipeline_components-automl-forecasting-demand-hierarchical-timeseries-in-pipeline-automl-forecasting-demand-hierarchical-timeseries-in-pipeline.yml
      - sdk/python/dev-requirements.txt
      - infra/bootstrapping/**
      - sdk/python/setup.sh
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
    - name: check out repo
      uses: actions/checkout@v2
    - name: setup python
      uses: actions/setup-python@v2
      with:
        python-version: "3.8"
    - name: pip install notebook reqs
      run: pip install -r sdk/python/dev-requirements.txt
    - name: azure login
      uses: azure/login@v1
      with:
        creds: ${{secrets.AZUREML_CREDENTIALS}}
    - name: bootstrap resources
      run: |
          echo '${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}';
          bash bootstrap.sh
      working-directory: infra/bootstrapping
      continue-on-error: false
    - name: setup SDK
      run: |
          source "${{ github.workspace }}/infra/bootstrapping/sdk_helpers.sh";
          source "${{ github.workspace }}/infra/bootstrapping/init_environment.sh";
          bash setup.sh
      working-directory: sdk/python
      continue-on-error: true
    - name: setup-cli
      run: |
          source "${{ github.workspace }}/infra/bootstrapping/sdk_helpers.sh";
          source "${{ github.workspace }}/infra/bootstrapping/init_environment.sh";
          bash setup.sh
      working-directory: cli
      continue-on-error: true
    - name: run jobs/pipelines/1k_demand_forecasting_with_pipeline_components/automl-forecasting-demand-hierarchical-timeseries-in-pipeline/automl-forecasting-demand-hierarchical-timeseries-in-pipeline.ipynb
      run: |
          source "${{ github.workspace }}/infra/bootstrapping/sdk_helpers.sh";
          source "${{ github.workspace }}/infra/bootstrapping/init_environment.sh";
          bash "${{ github.workspace }}/infra/bootstrapping/sdk_helpers.sh" generate_workspace_config "../../.azureml/config.json";
          bash "${{ github.workspace }}/infra/bootstrapping/sdk_helpers.sh" replace_template_values "automl-forecasting-demand-hierarchical-timeseries-in-pipeline.ipynb";
          [ -f "../../.azureml/config" ] && cat "../../.azureml/config";
          papermill -k python -p compute_name automl-cpu-cluster automl-forecasting-demand-hierarchical-timeseries-in-pipeline.ipynb automl-forecasting-demand-hierarchical-timeseries-in-pipeline.output.ipynb
      working-directory: sdk/python/jobs/pipelines/1k_demand_forecasting_with_pipeline_components/automl-forecasting-demand-hierarchical-timeseries-in-pipeline
    - name: upload notebook's working folder as an artifact
      if: ${{ always() }}
      uses: actions/upload-artifact@v2
      with:
        name: automl-forecasting-demand-hierarchical-timeseries-in-pipeline
        path: sdk/python/jobs/pipelines/1k_demand_forecasting_with_pipeline_components/automl-forecasting-demand-hierarchical-timeseries-in-pipeline
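For reference, the papermill step in this workflow can also be reproduced outside CI. The following is a minimal sketch using papermill's Python API, assuming the notebook and an Azure ML workspace config are already in place; the compute cluster name is simply the value the workflow passes with -p:

import papermill as pm

# Mirrors the CI invocation:
# papermill -k python -p compute_name automl-cpu-cluster <input>.ipynb <output>.ipynb
pm.execute_notebook(
    "automl-forecasting-demand-hierarchical-timeseries-in-pipeline.ipynb",
    "automl-forecasting-demand-hierarchical-timeseries-in-pipeline.output.ipynb",
    kernel_name="python",
    parameters={"compute_name": "automl-cpu-cluster"},
)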
@@ -3,19 +3,19 @@
# Any manual changes to this file may cause incorrect behavior.
# Any manual changes will be overwritten if the code is regenerated.

name: sdk-jobs-single-step-pytorch-distributed-training-yolov5-yolov5-tutorial
name: sdk-jobs-pipelines-1k_demand_forecasting_with_pipeline_components-automl-forecasting-demand-many-models-in-pipeline-automl-forecasting-demand-many-models-in-pipeline
# This file is created by sdk/python/readme.py.
# Please do not edit directly.
on:
  workflow_dispatch:
  schedule:
    - cron: "10 5/12 * * *"
    - cron: "59 7/12 * * *"
  pull_request:
    branches:
      - main
    paths:
      - sdk/python/jobs/single-step/pytorch/distributed-training-yolov5/yolov5/**
      - .github/workflows/sdk-jobs-single-step-pytorch-distributed-training-yolov5-yolov5-tutorial.yml
      - sdk/python/jobs/pipelines/1k_demand_forecasting_with_pipeline_components/automl-forecasting-demand-many-models-in-pipeline/**
      - .github/workflows/sdk-jobs-pipelines-1k_demand_forecasting_with_pipeline_components-automl-forecasting-demand-many-models-in-pipeline-automl-forecasting-demand-many-models-in-pipeline.yml
      - sdk/python/dev-requirements.txt
      - infra/bootstrapping/**
      - sdk/python/setup.sh
@@ -58,18 +58,18 @@ jobs:
          bash setup.sh
      working-directory: cli
      continue-on-error: true
    - name: run jobs/single-step/pytorch/distributed-training-yolov5/yolov5/tutorial.ipynb
    - name: run jobs/pipelines/1k_demand_forecasting_with_pipeline_components/automl-forecasting-demand-many-models-in-pipeline/automl-forecasting-demand-many-models-in-pipeline.ipynb
      run: |
          source "${{ github.workspace }}/infra/bootstrapping/sdk_helpers.sh";
          source "${{ github.workspace }}/infra/bootstrapping/init_environment.sh";
          bash "${{ github.workspace }}/infra/bootstrapping/sdk_helpers.sh" generate_workspace_config "../../.azureml/config.json";
          bash "${{ github.workspace }}/infra/bootstrapping/sdk_helpers.sh" replace_template_values "tutorial.ipynb";
          bash "${{ github.workspace }}/infra/bootstrapping/sdk_helpers.sh" replace_template_values "automl-forecasting-demand-many-models-in-pipeline.ipynb";
          [ -f "../../.azureml/config" ] && cat "../../.azureml/config";
          papermill -k python tutorial.ipynb tutorial.output.ipynb
      working-directory: sdk/python/jobs/single-step/pytorch/distributed-training-yolov5/yolov5
          papermill -k python -p compute_name automl-cpu-cluster automl-forecasting-demand-many-models-in-pipeline.ipynb automl-forecasting-demand-many-models-in-pipeline.output.ipynb
      working-directory: sdk/python/jobs/pipelines/1k_demand_forecasting_with_pipeline_components/automl-forecasting-demand-many-models-in-pipeline
    - name: upload notebook's working folder as an artifact
      if: ${{ always() }}
      uses: actions/upload-artifact@v2
      with:
        name: tutorial
        path: sdk/python/jobs/single-step/pytorch/distributed-training-yolov5/yolov5
        name: automl-forecasting-demand-many-models-in-pipeline
        path: sdk/python/jobs/pipelines/1k_demand_forecasting_with_pipeline_components/automl-forecasting-demand-many-models-in-pipeline
3 changes: 3 additions & 0 deletions sdk/python/README.md
@@ -1,3 +1,4 @@

---
page_type: sample
languages:
@@ -122,6 +123,8 @@ Test Status is for branch - **_main_**
|jobs|pipelines|[pipeline_with_spark_nodes](jobs/pipelines/1i_pipeline_with_spark_nodes/pipeline_with_spark_nodes.ipynb)|Create pipeline with spark node - _This sample is excluded from automated tests_|[![pipeline_with_spark_nodes](https://github.com/Azure/azureml-examples/actions/workflows/sdk-jobs-pipelines-1i_pipeline_with_spark_nodes-pipeline_with_spark_nodes.yml/badge.svg?branch=main)](https://github.com/Azure/azureml-examples/actions/workflows/sdk-jobs-pipelines-1i_pipeline_with_spark_nodes-pipeline_with_spark_nodes.yml)|
|jobs|pipelines|[nyc_taxi_data_regression_with_pipeline_component](jobs/pipelines/1j_pipeline_with_pipeline_component/nyc_taxi_data_regression_with_pipeline_component/nyc_taxi_data_regression_with_pipeline_component.ipynb)|Create pipeline with CommandComponents from local YAML file|[![nyc_taxi_data_regression_with_pipeline_component](https://github.com/Azure/azureml-examples/actions/workflows/sdk-jobs-pipelines-1j_pipeline_with_pipeline_component-nyc_taxi_data_regression_with_pipeline_component-nyc_taxi_data_regression_with_pipeline_component.yml/badge.svg?branch=main)](https://github.com/Azure/azureml-examples/actions/workflows/sdk-jobs-pipelines-1j_pipeline_with_pipeline_component-nyc_taxi_data_regression_with_pipeline_component-nyc_taxi_data_regression_with_pipeline_component.yml)|
|jobs|pipelines|[pipeline_with_train_eval_pipeline_component](jobs/pipelines/1j_pipeline_with_pipeline_component/pipeline_with_train_eval_pipeline_component/pipeline_with_train_eval_pipeline_component.ipynb)|Create pipeline with CommandComponents from local YAML file|[![pipeline_with_train_eval_pipeline_component](https://github.com/Azure/azureml-examples/actions/workflows/sdk-jobs-pipelines-1j_pipeline_with_pipeline_component-pipeline_with_train_eval_pipeline_component-pipeline_with_train_eval_pipeline_component.yml/badge.svg?branch=main)](https://github.com/Azure/azureml-examples/actions/workflows/sdk-jobs-pipelines-1j_pipeline_with_pipeline_component-pipeline_with_train_eval_pipeline_component-pipeline_with_train_eval_pipeline_component.yml)|
|jobs|pipelines|[automl-forecasting-demand-hierarchical-timeseries-in-pipeline](jobs/pipelines/1k_demand_forecasting_with_pipeline_components/automl-forecasting-demand-hierarchical-timeseries-in-pipeline/automl-forecasting-demand-hierarchical-timeseries-in-pipeline.ipynb)|*no description*|[![automl-forecasting-demand-hierarchical-timeseries-in-pipeline](https://github.com/Azure/azureml-examples/actions/workflows/sdk-jobs-pipelines-1k_demand_forecasting_with_pipeline_components-automl-forecasting-demand-hierarchical-timeseries-in-pipeline-automl-forecasting-demand-hierarchical-timeseries-in-pipeline.yml/badge.svg?branch=main)](https://github.com/Azure/azureml-examples/actions/workflows/sdk-jobs-pipelines-1k_demand_forecasting_with_pipeline_components-automl-forecasting-demand-hierarchical-timeseries-in-pipeline-automl-forecasting-demand-hierarchical-timeseries-in-pipeline.yml)|
|jobs|pipelines|[automl-forecasting-demand-many-models-in-pipeline](jobs/pipelines/1k_demand_forecasting_with_pipeline_components/automl-forecasting-demand-many-models-in-pipeline/automl-forecasting-demand-many-models-in-pipeline.ipynb)|*no description*|[![automl-forecasting-demand-many-models-in-pipeline](https://github.com/Azure/azureml-examples/actions/workflows/sdk-jobs-pipelines-1k_demand_forecasting_with_pipeline_components-automl-forecasting-demand-many-models-in-pipeline-automl-forecasting-demand-many-models-in-pipeline.yml/badge.svg?branch=main)](https://github.com/Azure/azureml-examples/actions/workflows/sdk-jobs-pipelines-1k_demand_forecasting_with_pipeline_components-automl-forecasting-demand-many-models-in-pipeline-automl-forecasting-demand-many-models-in-pipeline.yml)|
|jobs|pipelines|[train_mnist_with_tensorflow](jobs/pipelines/2a_train_mnist_with_tensorflow/train_mnist_with_tensorflow.ipynb)|Create pipeline using components to run a distributed job with tensorflow|[![train_mnist_with_tensorflow](https://github.com/Azure/azureml-examples/actions/workflows/sdk-jobs-pipelines-2a_train_mnist_with_tensorflow-train_mnist_with_tensorflow.yml/badge.svg?branch=main)](https://github.com/Azure/azureml-examples/actions/workflows/sdk-jobs-pipelines-2a_train_mnist_with_tensorflow-train_mnist_with_tensorflow.yml)|
|jobs|pipelines|[train_cifar_10_with_pytorch](jobs/pipelines/2b_train_cifar_10_with_pytorch/train_cifar_10_with_pytorch.ipynb)|Get data, train and evaluate a model in pipeline with Components|[![train_cifar_10_with_pytorch](https://github.com/Azure/azureml-examples/actions/workflows/sdk-jobs-pipelines-2b_train_cifar_10_with_pytorch-train_cifar_10_with_pytorch.yml/badge.svg?branch=main)](https://github.com/Azure/azureml-examples/actions/workflows/sdk-jobs-pipelines-2b_train_cifar_10_with_pytorch-train_cifar_10_with_pytorch.yml)|
|jobs|pipelines|[nyc_taxi_data_regression](jobs/pipelines/2c_nyc_taxi_data_regression/nyc_taxi_data_regression.ipynb)|Build pipeline with components for 5 jobs - prep data, transform data, train model, predict results and evaluate model performance|[![nyc_taxi_data_regression](https://github.com/Azure/azureml-examples/actions/workflows/sdk-jobs-pipelines-2c_nyc_taxi_data_regression-nyc_taxi_data_regression.yml/badge.svg?branch=main)](https://github.com/Azure/azureml-examples/actions/workflows/sdk-jobs-pipelines-2c_nyc_taxi_data_regression-nyc_taxi_data_regression.yml)|
@@ -926,7 +926,7 @@
},
"outputs": [],
"source": [
"# The below code creeates a feature stor\n",
"# The below code creates a feature store\n",
"import yaml\n",
"\n",
"config = {\n",
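The notebook cell above is truncated here. As a rough illustration of the pattern it follows (building a config dictionary and serializing it with PyYAML), a minimal sketch with hypothetical field names, not the notebook's actual cell:

import yaml

# Hypothetical values for illustration only; the notebook defines its own config dict.
config = {
    "name": "my_featurestore",
    "location": "eastus",
}

with open("featurestore_config.yaml", "w") as f:
    yaml.safe_dump(config, f)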
2 changes: 1 addition & 1 deletion sdk/python/featurestore_sample/project/env/conda.yml
@@ -5,7 +5,7 @@ dependencies:
  # Protobuf is needed to avoid conflict with managed spark
  - protobuf==3.19.6
  # Feature store core SDK
  - azureml-featurestore==0.1.0b2
  - azureml-featurestore==0.1.0b3
  # This is needed if you want to execute the Part 2 of the "SDK" track or execute "SDK+CLI" track in the docs tutorial
  - azure-cli
@@ -18,7 +18,7 @@ inputs:
jobs:

  retrieval_step:
    component: azureml://registries/azureml/components/feature_retrieval/versions/0.1.0
    component: azureml://registries/azureml/components/feature_retrieval/versions/0.3.0
    inputs:
      input_model:
        path: ${{parent.inputs.input_model}}
@@ -17,7 +17,7 @@ inputs:

jobs:
  feature_retrieval_step:
    component: azureml://registries/azureml/components/feature_retrieval/versions/0.1.0
    component: azureml://registries/azureml/components/feature_retrieval/versions/0.3.0
    inputs:
      feature_retrieval_spec:
        path: ${{parent.inputs.feature_retrieval_spec}}
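Both pipeline YAMLs above now pin version 0.3.0 of the shared feature_retrieval component from the azureml registry. A minimal sketch of fetching that component with the Azure ML Python SDK, assuming azure-ai-ml is installed and the caller can authenticate against the registry:

from azure.ai.ml import MLClient
from azure.identity import DefaultAzureCredential

# Point MLClient at the shared "azureml" registry rather than a workspace.
registry_client = MLClient(credential=DefaultAzureCredential(), registry_name="azureml")

# Retrieve the component version referenced by the pipeline YAMLs above.
feature_retrieval = registry_client.components.get(name="feature_retrieval", version="0.3.0")
print(feature_retrieval.name, feature_retrieval.version)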
@@ -355,10 +355,37 @@ def get_parameter_type(sample_input_ex, sample_output_ex=None):
    model = load_model(model_path)


def get_aacs_access_key():
    key = os.environ.get("CONTENT_SAFETY_KEY")

    if key:
        return key

    uai_client_id = os.environ.get("UAI_CLIENT_ID")
    if not uai_client_id:
        raise RuntimeError(
            "Cannot get AACS access key, both UAI_CLIENT_ID and CONTENT_SAFETY_KEY are not set, exiting..."
        )

    subscription_id = os.environ.get("SUBSCRIPTION_ID")
    resource_group_name = os.environ.get("RESOURCE_GROUP_NAME")
    aacs_account_name = os.environ.get("CONTENT_SAFETY_ACCOUNT_NAME")
    from azure.mgmt.cognitiveservices import CognitiveServicesManagementClient
    from azure.identity import ManagedIdentityCredential

    credential = ManagedIdentityCredential(client_id=uai_client_id)
    cs_client = CognitiveServicesManagementClient(credential, subscription_id)
    key = cs_client.accounts.list_keys(
        resource_group_name=resource_group_name, account_name=aacs_account_name
    ).key1

    return key


def init():
    global inputs_collector, outputs_collector, aacs_client
    endpoint = os.environ.get("CONTENT_SAFETY_ENDPOINT")
    key = os.environ.get("CONTENT_SAFETY_KEY")
    key = get_aacs_access_key()

    # Create an Content Safety client
    headers_policy = HeadersPolicy()
@@ -433,7 +460,9 @@ def analyze_text_async(text):

def analyze_text(text):
    # Chunk text
    print(f"Analyzing ...")
    print("Analyzing ...")
    if (not text) or (not text.strip()):
        return 0
    chunking_utils = CsChunkingUtils(chunking_n=1000, delimiter=".")
    split_text = chunking_utils.split_by(text)
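To round out the scoring-script changes above: get_aacs_access_key supplies the key, and CsChunkingUtils splits long text before it is sent to Azure AI Content Safety. The repository's own client setup and aggregation live in the elided parts of this file; the sketch below only illustrates that flow. It is written against the GA azure-ai-contentsafety package (the sample may pin an earlier beta with a different response shape) and uses a simple stand-in chunker rather than CsChunkingUtils:

import os

from azure.ai.contentsafety import ContentSafetyClient
from azure.ai.contentsafety.models import AnalyzeTextOptions
from azure.core.credentials import AzureKeyCredential


def chunk_text(text, max_len=1000, delimiter="."):
    # Greedy stand-in for CsChunkingUtils: pack sentences into chunks of at most max_len characters.
    chunks, current = [], ""
    for part in text.split(delimiter):
        piece = part + delimiter
        if current and len(current) + len(piece) > max_len:
            chunks.append(current)
            current = ""
        current += piece
    if current:
        chunks.append(current)
    return chunks


def max_severity(text, client):
    # Analyze each chunk and keep the highest severity seen across chunks and categories.
    worst = 0
    for chunk in chunk_text(text):
        result = client.analyze_text(AnalyzeTextOptions(text=chunk))
        worst = max(worst, max((c.severity or 0 for c in result.categories_analysis), default=0))
    return worst


if __name__ == "__main__":
    endpoint = os.environ["CONTENT_SAFETY_ENDPOINT"]
    client = ContentSafetyClient(endpoint, AzureKeyCredential(os.environ["CONTENT_SAFETY_KEY"]))
    print(max_severity("Some model output to screen before returning it.", client))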