From 085c171b6b23cdfcd9d4453823ffdf2c92c9ad92 Mon Sep 17 00:00:00 2001 From: <> Date: Wed, 8 Nov 2023 15:09:49 +0000 Subject: [PATCH] Deployed 9560b70 with MkDocs version: 1.5.3 --- .nojekyll | 0 404.html | 386 + CLI_REFERENCE/index.html | 666 ++ advanced_user_guide/index.html | 1244 +++ assets/images/favicon.png | Bin 0 -> 1870 bytes assets/javascripts/bundle.efa0ade1.min.js | 29 + assets/javascripts/bundle.efa0ade1.min.js.map | 8 + assets/javascripts/lunr/min/lunr.ar.min.js | 1 + assets/javascripts/lunr/min/lunr.da.min.js | 18 + assets/javascripts/lunr/min/lunr.de.min.js | 18 + assets/javascripts/lunr/min/lunr.du.min.js | 18 + assets/javascripts/lunr/min/lunr.es.min.js | 18 + assets/javascripts/lunr/min/lunr.fi.min.js | 18 + assets/javascripts/lunr/min/lunr.fr.min.js | 18 + assets/javascripts/lunr/min/lunr.hi.min.js | 1 + assets/javascripts/lunr/min/lunr.hu.min.js | 18 + assets/javascripts/lunr/min/lunr.it.min.js | 18 + assets/javascripts/lunr/min/lunr.ja.min.js | 1 + assets/javascripts/lunr/min/lunr.jp.min.js | 1 + assets/javascripts/lunr/min/lunr.ko.min.js | 1 + assets/javascripts/lunr/min/lunr.multi.min.js | 1 + assets/javascripts/lunr/min/lunr.nl.min.js | 18 + assets/javascripts/lunr/min/lunr.no.min.js | 18 + assets/javascripts/lunr/min/lunr.pt.min.js | 18 + assets/javascripts/lunr/min/lunr.ro.min.js | 18 + assets/javascripts/lunr/min/lunr.ru.min.js | 18 + .../lunr/min/lunr.stemmer.support.min.js | 1 + assets/javascripts/lunr/min/lunr.sv.min.js | 18 + assets/javascripts/lunr/min/lunr.ta.min.js | 1 + assets/javascripts/lunr/min/lunr.th.min.js | 1 + assets/javascripts/lunr/min/lunr.tr.min.js | 18 + assets/javascripts/lunr/min/lunr.vi.min.js | 1 + assets/javascripts/lunr/min/lunr.zh.min.js | 1 + assets/javascripts/lunr/tinyseg.js | 206 + assets/javascripts/lunr/wordcut.js | 6708 +++++++++++++++++ .../workers/search.208ed371.min.js | 42 + .../workers/search.208ed371.min.js.map | 8 + assets/stylesheets/main.c4a75a56.min.css | 1 + 
assets/stylesheets/main.c4a75a56.min.css.map | 1 + assets/stylesheets/palette.a0c5b2b5.min.css | 1 + .../stylesheets/palette.a0c5b2b5.min.css.map | 1 + changelog/index.html | 1581 ++++ contributing/index.html | 637 ++ example/index.html | 470 ++ index.html | 511 ++ install/index.html | 573 ++ search/lunr.js | 3475 +++++++++ search/main.js | 109 + search/search_index.json | 1 + search/worker.js | 133 + setup/index.html | 802 ++ sitemap.xml | 3 + sitemap.xml.gz | Bin 0 -> 127 bytes techdocs_metadata.json | 1 + usage/index.html | 682 ++ 55 files changed, 18560 insertions(+) create mode 100644 .nojekyll create mode 100644 404.html create mode 100644 CLI_REFERENCE/index.html create mode 100644 advanced_user_guide/index.html create mode 100644 assets/images/favicon.png create mode 100644 assets/javascripts/bundle.efa0ade1.min.js create mode 100644 assets/javascripts/bundle.efa0ade1.min.js.map create mode 100644 assets/javascripts/lunr/min/lunr.ar.min.js create mode 100644 assets/javascripts/lunr/min/lunr.da.min.js create mode 100644 assets/javascripts/lunr/min/lunr.de.min.js create mode 100644 assets/javascripts/lunr/min/lunr.du.min.js create mode 100644 assets/javascripts/lunr/min/lunr.es.min.js create mode 100644 assets/javascripts/lunr/min/lunr.fi.min.js create mode 100644 assets/javascripts/lunr/min/lunr.fr.min.js create mode 100644 assets/javascripts/lunr/min/lunr.hi.min.js create mode 100644 assets/javascripts/lunr/min/lunr.hu.min.js create mode 100644 assets/javascripts/lunr/min/lunr.it.min.js create mode 100644 assets/javascripts/lunr/min/lunr.ja.min.js create mode 100644 assets/javascripts/lunr/min/lunr.jp.min.js create mode 100644 assets/javascripts/lunr/min/lunr.ko.min.js create mode 100644 assets/javascripts/lunr/min/lunr.multi.min.js create mode 100644 assets/javascripts/lunr/min/lunr.nl.min.js create mode 100644 assets/javascripts/lunr/min/lunr.no.min.js create mode 100644 assets/javascripts/lunr/min/lunr.pt.min.js create mode 100644 
assets/javascripts/lunr/min/lunr.ro.min.js create mode 100644 assets/javascripts/lunr/min/lunr.ru.min.js create mode 100644 assets/javascripts/lunr/min/lunr.stemmer.support.min.js create mode 100644 assets/javascripts/lunr/min/lunr.sv.min.js create mode 100644 assets/javascripts/lunr/min/lunr.ta.min.js create mode 100644 assets/javascripts/lunr/min/lunr.th.min.js create mode 100644 assets/javascripts/lunr/min/lunr.tr.min.js create mode 100644 assets/javascripts/lunr/min/lunr.vi.min.js create mode 100644 assets/javascripts/lunr/min/lunr.zh.min.js create mode 100644 assets/javascripts/lunr/tinyseg.js create mode 100644 assets/javascripts/lunr/wordcut.js create mode 100644 assets/javascripts/workers/search.208ed371.min.js create mode 100644 assets/javascripts/workers/search.208ed371.min.js.map create mode 100644 assets/stylesheets/main.c4a75a56.min.css create mode 100644 assets/stylesheets/main.c4a75a56.min.css.map create mode 100644 assets/stylesheets/palette.a0c5b2b5.min.css create mode 100644 assets/stylesheets/palette.a0c5b2b5.min.css.map create mode 100644 changelog/index.html create mode 100644 contributing/index.html create mode 100644 example/index.html create mode 100644 index.html create mode 100644 install/index.html create mode 100644 search/lunr.js create mode 100644 search/main.js create mode 100644 search/search_index.json create mode 100644 search/worker.js create mode 100644 setup/index.html create mode 100644 sitemap.xml create mode 100644 sitemap.xml.gz create mode 100644 techdocs_metadata.json create mode 100644 usage/index.html diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 0000000..e69de29 diff --git a/404.html b/404.html new file mode 100644 index 0000000..2f7f6ab --- /dev/null +++ b/404.html @@ -0,0 +1,386 @@ + + + +
+ + + + + + + + + + + + + +vertex-deployer
¶Usage:
+1 |
|
Options:
+--log-level, -log [TRACE|DEBUG|INFO|SUCCESS|WARNING|ERROR|CRITICAL]
: Set the logging level. [default: LoguruLevel.INFO]--version, -v / --no-version
: Display the version number and exit. [default: no-version]--install-completion
: Install completion for the current shell.--show-completion
: Show completion for the current shell, to copy it or customize the installation.--help
: Show this message and exit.Commands:
+check
: Check that pipelines are valid.create
: Create files structure for a new pipeline.deploy
: Compile, upload, run and schedule pipelines.list
: List all pipelines.vertex-deployer check
¶Check that pipelines are valid.
+Checking that a pipeline is valid includes:
+pipeline
function decorated with @kfp.dsl.pipeline
.kfp.compiler.Compiler
.{CONFIG_ROOT_PATH}/{pipeline_name}
are corresponding to the
+pipeline parameters definition, using Pydantic.This command can be used to check pipelines in a Continuous Integration workflow.
+Usage:
+1 |
|
Options:
+--pipeline-name []
--all, -a / --no-all
: Whether to check all pipelines. [default: no-all]--config-filepath, -cfp PATH
: Path to the json/py file with parameter values and input artifacts to check. If not specified, all config files in the pipeline dir will be checked.--raise-error, -re / --no-raise-error, -nre
: Whether to raise an error if the pipeline is not valid. [default: no-raise-error]--help
: Show this message and exit.vertex-deployer config
¶Display the configuration from pyproject.toml.
+Usage:
+1 |
|
Options:
+-a, --all
: Whether to display all configuration values.--help
: Show this message and exit.vertex-deployer create
¶Create files structure for a new pipeline.
+Usage:
+1 |
|
Arguments:
+PIPELINE_NAME
: The name of the pipeline to create. [required]Options:
+--config-type, -ct [json|py|toml]
: The type of the config to create. [default: ConfigType.json]--help
: Show this message and exit.vertex-deployer deploy
¶Compile, upload, run and schedule pipelines.
+Usage:
+1 |
|
Arguments:
+PIPELINE_NAME:{}
: The name of the pipeline to run. [required]Options:
+--env-file PATH
: The environment file to use.--compile, -c / --no-compile, -nc
: Whether to compile the pipeline. [default: compile]--upload, -u / --no-upload, -nu
: Whether to upload the pipeline to Google Artifact Registry. [default: no-upload]--run, -r / --no-run, -nr
: Whether to run the pipeline. [default: no-run]--schedule, -s / --no-schedule, -ns
: Whether to create a schedule for the pipeline. [default: no-schedule]--cron TEXT
: Cron expression for scheduling the pipeline. To pass it to the CLI, use hyphens e.g. 0-10-*-*-*
.--delete-last-schedule, -dls / --no-delete-last-schedule
: Whether to delete the previous schedule before creating a new one. [default: no-delete-last-schedule]--tags TEXT
: The tags to use when uploading the pipeline. [default: latest]--config-filepath, -cfp PATH
: Path to the json/py file with parameter values and input artifacts to use when running the pipeline.--config-name, -cn TEXT
: Name of the json/py file with parameter values and input artifacts to use when running the pipeline. It must be in the pipeline config dir. e.g. config_dev.json
for ./vertex/configs/{pipeline-name}/config_dev.json
.--enable-caching, -ec / --no-enable-caching
: Whether to enable caching when running the pipeline. [default: no-enable-caching]--experiment-name, -en TEXT
: The name of the experiment to run the pipeline in. Defaults to '{pipeline_name}-experiment'.--local-package-path, -lpp PATH
: Local dir path where pipelines will be compiled. [default: vertex/pipelines/compiled_pipelines]--help
: Show this message and exit.vertex-deployer list
¶List all pipelines.
+Usage:
+1 |
|
Options:
+--with-configs, -wc / --no-with-configs, -nc
: Whether to list config files. [default: no-with-configs]--help
: Show this message and exit.Tip
+Add code provided in this page is available in the repo example.
+When developing a vertex pipeline locally, you may want to iterate quickly. The process is often the following:
+1. write new code and test its integration in the pipeline workflow code in a notebook
+2. script this new code in vertex/lib/
+3. modify associated component(s) in vertex/components
and pipelines in vertex/pipelines
+4. run the pipeline with dev settings to test the new code on Vertex
The latest may include: +- rebuilding the base image +- compiling the pipeline +- running the pipeline
+You can use this generic Dockerfile:
+1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 +10 +11 |
|
Then build it with docker or Cloud Build. For the latest, here is a sample cloudbuild.yaml:
+1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 |
|
Then you can trigger the build manually using this make command:
+1 +2 |
|
This command includes the following: +
1 +2 +3 +4 |
|
Now that you have a base image, you can compile your pipeline and trigger a run that will use the latest version of your docker base image
+1 |
|
You can check pipelines integrity and config integrity using the following command:
+1 |
|
To check a specific pipeline: +
1 |
|
You can add a github workflow checking your pipelines integrity using the following file: +
1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 |
|
You can add a pre-commit hook checking your pipelines integrity using a local hook: +
1 +2 +3 +4 +5 +6 +7 +8 |
|
Once you have a valid pipeline, you want to deploy it on Vertex. To automate deployment when merging to develop
or main
, you have multiple options.
+- use CloudBuild and CloudBuild triggers)
+- use Github Action to trigger CloudBuild job
+- ๐ง use Github Action only
Note
+To use cloudbuild for CD, please update you Dockerfile with all these arguments.
+This will allow you to use vertex-deployer
from your base image in CloudBuild.
1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 |
|
You can use the following cloudbuild.yaml to trigger a deployment on Vertex when merging to develop
or main
:
1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 |
|
Then, you'll need to link your repo to CloudBuild and create a trigger for each branch you want to deploy on Vertex. +The documentation to link your repo is available here.
+Then, you can create create a triiger using this make command:
+1 +2 |
|
This command includes the following: +
1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 +10 |
|
You can also use Github Action to trigger CloudBuild job. You'll need to setup GCp authentication from your repo using Workload Identity Federation.
+1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +59 +60 +61 +62 +63 +64 +65 +66 +67 +68 +69 +70 +71 +72 +73 +74 +75 +76 +77 |
|
Warning
+This is a work in progress. Please use CloudBuild for CD. +Docker build and push to GCR example is not yet implemented.
+Signed-off-by: dependabot[bot] <support@github.com>
+Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> (cff19af
)
Signed-off-by: dependabot[bot] <support@github.com>
+Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> (5d2dbf9
)
Signed-off-by: dependabot[bot] <support@github.com>
+Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> (9d25ad6
)
Signed-off-by: dependabot[bot] <support@github.com>
+Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> (c97ec85
)
fddf1a9
)01ce99a
)42ccfc9
)6e51b89
)7ba89f3
)19da429
)e9b6b36
)6fb7c14
)56d69f4
)5cd8321
)dd66594
)395b256
)7ea8975
)b3f3b46
)9cc7ad3
)c63a7fb
)65977a0
)e942add
)b997e69
){pipeline_name}
instead of pipeline
(#69) (f79d081
)d8dab84
)e284168
)9c973f0
)f005f44
)b635287
)e7119db
)f1171d2
)6753402
)11347ba
)0226088
)c80aeb1
)e220dc8
)3ebcf4a
)Automatically generated by python-semantic-release (0809df7
)
a3c18df
)a08b581
)f59b795
)4e50c99
)9500a03
)9b973bf
)d01d60c
)4d163bd
)ead427f
)94c8061
)9f41c8e
)92e1acb
)05deb15
)879c14a
)6c65c09
)7194c70
)267d169
)54f59f7
)b736c3a
)f00c231
)3070873
)f154389
)cab9963
)Issues and Pull requests templates are mandatory.
+At least one code review is needed to merge. Please merge your feature branches on develop
.
We try to rebase as much as possible and use squash and merge to keep a linear and condensed git history.
+This project uses Poetry for dependency management. Poetry's doc is really good, so you should check it out if you have any questions.
+To install poetry:
+1 |
|
You can start by creating a virtual environment (conda or other) or use poetry venv(please check the Makefile first if so, as poetry venv is deactivated there). Then, to install the project dependencies, run the following command:
+1 |
|
To develop, you will need dev requirements too. Run: +
1 |
|
About poetry.lock
+poetry.lock
is not committed deliberately, as recommended by Poetry's doc. You can read more about it here.
This projects uses Black, isort, ruff for codestyle. You can run the following command to format your code. It uses Pre-commit hooks to run the formatters and linters.
+1 |
|
This project uses Google docstring convention.
+A full example is available in here.
+This project uses Python Semantic Versioning +and Poetry to create releases and tags.
+The release process is automated through GitHub Actions. Here is the process:
+develop
to main
.The Release GitHub Action does the following:
+The action is triggered by any push to main.
+Tip
+The release action will be triggered by any push to main
only if the 'CI' job in the 'release.yaml' workflow succeeds.
+Python Semantic Release will take care of version number update, tag creation and release creation.
When it's done, rebase develop to keep it up to date with main.
+And you're done ! ๐
+ + + + + + +Deploy Vertex Pipelines within minutes
+This tool is a wrapper around kfp and google-cloud-aiplatform to check, compile, upload, run and schedule Vertex Pipelines in a standardized manner.
+Info
+This project is looking for beta testers and contributors.
+You can contact code owners or submit a new issue if you want to help.
+Three uses cases:
+Four commands:
+check
: check your pipelines (imports, compile, check configs validity against pipeline definition).deploy
: compile, upload to Artifact Registry, run and schedule your pipelines.create
: create a new pipeline and config files.list
: list all pipelines in the vertex/pipelines
folder.Install using pip: +
1 |
|
In your requirements: +
1 +2 |
|
Stable version: +
1 |
|
Develop version: +
1 |
|
If you want to test this package on examples from this repo: +
1 +2 +3 |
|
The package is available on a public Google Artifact Registry repo. You need to specify a +pip extra index url to install it.
+Install latest version: +
1 |
|
List available versions: +
1 |
|
It's better to get the .tar.gz archive from gcs, and version it.
+Then add the following lines to your requirements.in
file:
+
1 +2 +3 |
|
' + escapeHtml(summary) +'
' + noResultsText + '
'); + } +} + +function doSearch () { + var query = document.getElementById('mkdocs-search-query').value; + if (query.length > min_search_length) { + if (!window.Worker) { + displayResults(search(query)); + } else { + searchWorker.postMessage({query: query}); + } + } else { + // Clear results for short queries + displayResults([]); + } +} + +function initSearch () { + var search_input = document.getElementById('mkdocs-search-query'); + if (search_input) { + search_input.addEventListener("keyup", doSearch); + } + var term = getSearchTermFromLocation(); + if (term) { + search_input.value = term; + doSearch(); + } +} + +function onWorkerMessage (e) { + if (e.data.allowSearch) { + initSearch(); + } else if (e.data.results) { + var results = e.data.results; + displayResults(results); + } else if (e.data.config) { + min_search_length = e.data.config.min_search_length-1; + } +} + +if (!window.Worker) { + console.log('Web Worker API not supported'); + // load index in main thread + $.getScript(joinUrl(base_url, "search/worker.js")).done(function () { + console.log('Loaded worker'); + init(); + window.postMessage = function (msg) { + onWorkerMessage({data: msg}); + }; + }).fail(function (jqxhr, settings, exception) { + console.error('Could not load worker.js'); + }); +} else { + // Wrap search in a web worker + var searchWorker = new Worker(joinUrl(base_url, "search/worker.js")); + searchWorker.postMessage({init: true}); + searchWorker.onmessage = onWorkerMessage; +} diff --git a/search/search_index.json b/search/search_index.json new file mode 100644 index 0000000..8ccfacc --- /dev/null +++ b/search/search_index.json @@ -0,0 +1 @@ +{"config":{"indexing":"full","lang":["en"],"min_search_length":3,"prebuild_index":false,"separator":"[\\s\\-]+"},"docs":[{"location":"","text":"Vertex Pipelines Deployer \u00b6 Deploy Vertex Pipelines within minutes This tool is a wrapper around kfp and google-cloud-aiplatform to check, compile, upload, run and schedule Vertex Pipelines in a 
standardized manner. Info This project is looking for beta testers and contributors. You can contact code owners or submit a new issue if you want to help. \u2753 Why this tool? \u00b6 Three uses cases: CI: check pipeline validity. Dev mode: duickly iterate over your pipelines by compiling and running them in multiple environments (test, dev, staging, etc) without duplicating code or looking for the right kfp / aiplatform snippet. CD: deploy your pipelines to Vertex Pipelines in a standardized manner in your CD with Cloud Build or GitHub Actions. Four commands: check : check your pipelines (imports, compile, check configs validity against pipeline definition). deploy : compile, upload to Artifact Registry, run and schedule your pipelines. create : create a new pipeline and config files. list : list all pipelines in the vertex/pipelines folder.","title":"Welcome"},{"location":"#vertex-pipelines-deployer","text":"Deploy Vertex Pipelines within minutes This tool is a wrapper around kfp and google-cloud-aiplatform to check, compile, upload, run and schedule Vertex Pipelines in a standardized manner. Info This project is looking for beta testers and contributors. You can contact code owners or submit a new issue if you want to help.","title":"Vertex Pipelines Deployer"},{"location":"#why-this-tool","text":"Three uses cases: CI: check pipeline validity. Dev mode: duickly iterate over your pipelines by compiling and running them in multiple environments (test, dev, staging, etc) without duplicating code or looking for the right kfp / aiplatform snippet. CD: deploy your pipelines to Vertex Pipelines in a standardized manner in your CD with Cloud Build or GitHub Actions. Four commands: check : check your pipelines (imports, compile, check configs validity against pipeline definition). deploy : compile, upload to Artifact Registry, run and schedule your pipelines. create : create a new pipeline and config files. 
list : list all pipelines in the vertex/pipelines folder.","title":"\u2753 Why this tool?"},{"location":"CLI_REFERENCE/","text":"vertex-deployer \u00b6 Usage : 1 $ vertex-deployer [ OPTIONS ] COMMAND [ ARGS ] ... Options : --log-level, -log [TRACE|DEBUG|INFO|SUCCESS|WARNING|ERROR|CRITICAL] : Set the logging level. [default: LoguruLevel.INFO] --version, -v / --no-version : Display the version number and exit. [default: no-version] --install-completion : Install completion for the current shell. --show-completion : Show completion for the current shell, to copy it or customize the installation. --help : Show this message and exit. Commands : check : Check that pipelines are valid. create : Create files structure for a new pipeline. deploy : Compile, upload, run and schedule pipelines. list : List all pipelines. vertex-deployer check \u00b6 Check that pipelines are valid. Checking that a pipeline is valid includes: Checking that the pipeline can be imported. It must be a valid python module with a pipeline function decorated with @kfp.dsl.pipeline . Checking that the pipeline can be compiled using kfp.compiler.Compiler . Checking that config files in {CONFIG_ROOT_PATH}/{pipeline_name} are corresponding to the pipeline parameters definition, using Pydantic. This command can be used to check pipelines in a Continuous Integration workflow. Usage : 1 $ vertex-deployer check [ OPTIONS ] Options : --pipeline-name [] --all, -a / --no-all : Whether to check all pipelines. [default: no-all] --config-filepath, -cfp PATH : Path to the json/py file with parameter values and input artifacts to check. If not specified, all config files in the pipeline dir will be checked. --raise-error, -re / --no-raise-error, -nre : Whether to raise an error if the pipeline is not valid. [default: no-raise-error] --help : Show this message and exit. vertex-deployer config \u00b6 Display the configuration from pyproject.toml. 
Usage : 1 $ vertex-deployer config [ OPTIONS ] Options : -a, --all : Whether to display all configuration values. --help : Show this message and exit. vertex-deployer create \u00b6 Create files structure for a new pipeline. Usage : 1 $ vertex-deployer create [ OPTIONS ] PIPELINE_NAME Arguments : PIPELINE_NAME : The name of the pipeline to create. [required] Options : --config-type, -ct [json|py|toml] : The type of the config to create. [default: ConfigType.json] --help : Show this message and exit. vertex-deployer deploy \u00b6 Compile, upload, run and schedule pipelines. Usage : 1 $ vertex-deployer deploy [ OPTIONS ] PIPELINE_NAME: {} Arguments : PIPELINE_NAME:{} : The name of the pipeline to run. [required] Options : --env-file PATH : The environment file to use. --compile, -c / --no-compile, -nc : Whether to compile the pipeline. [default: compile] --upload, -u / --no-upload, -nu : Whether to upload the pipeline to Google Artifact Registry. [default: no-upload] --run, -r / --no-run, -nr : Whether to run the pipeline. [default: no-run] --schedule, -s / --no-schedule, -ns : Whether to create a schedule for the pipeline. [default: no-schedule] --cron TEXT : Cron expression for scheduling the pipeline. To pass it to the CLI, use hyphens e.g. 0-10-*-*-* . --delete-last-schedule, -dls / --no-delete-last-schedule : Whether to delete the previous schedule before creating a new one. [default: no-delete-last-schedule] --tags TEXT : The tags to use when uploading the pipeline. [default: latest] --config-filepath, -cfp PATH : Path to the json/py file with parameter values and input artifacts to use when running the pipeline. --config-name, -cn TEXT : Name of the json/py file with parameter values and input artifacts to use when running the pipeline. It must be in the pipeline config dir. e.g. config_dev.json for ./vertex/configs/{pipeline-name}/config_dev.json . --enable-caching, -ec / --no-enable-caching : Whether to enable caching when running the pipeline. 
[default: no-enable-caching] --experiment-name, -en TEXT : The name of the experiment to run the pipeline in. Defaults to '{pipeline_name}-experiment'. --local-package-path, -lpp PATH : Local dir path where pipelines will be compiled. [default: vertex/pipelines/compiled_pipelines] --help : Show this message and exit. vertex-deployer list \u00b6 List all pipelines. Usage : 1 $ vertex-deployer list [ OPTIONS ] Options : --with-configs, -wc / --no-with-configs, -nc : Whether to list config files. [default: no-with-configs] --help : Show this message and exit.","title":"CLI Reference"},{"location":"CLI_REFERENCE/#vertex-deployer","text":"Usage : 1 $ vertex-deployer [ OPTIONS ] COMMAND [ ARGS ] ... Options : --log-level, -log [TRACE|DEBUG|INFO|SUCCESS|WARNING|ERROR|CRITICAL] : Set the logging level. [default: LoguruLevel.INFO] --version, -v / --no-version : Display the version number and exit. [default: no-version] --install-completion : Install completion for the current shell. --show-completion : Show completion for the current shell, to copy it or customize the installation. --help : Show this message and exit. Commands : check : Check that pipelines are valid. create : Create files structure for a new pipeline. deploy : Compile, upload, run and schedule pipelines. list : List all pipelines.","title":"vertex-deployer"},{"location":"CLI_REFERENCE/#vertex-deployer-check","text":"Check that pipelines are valid. Checking that a pipeline is valid includes: Checking that the pipeline can be imported. It must be a valid python module with a pipeline function decorated with @kfp.dsl.pipeline . Checking that the pipeline can be compiled using kfp.compiler.Compiler . Checking that config files in {CONFIG_ROOT_PATH}/{pipeline_name} are corresponding to the pipeline parameters definition, using Pydantic. This command can be used to check pipelines in a Continuous Integration workflow. 
Usage : 1 $ vertex-deployer check [ OPTIONS ] Options : --pipeline-name [] --all, -a / --no-all : Whether to check all pipelines. [default: no-all] --config-filepath, -cfp PATH : Path to the json/py file with parameter values and input artifacts to check. If not specified, all config files in the pipeline dir will be checked. --raise-error, -re / --no-raise-error, -nre : Whether to raise an error if the pipeline is not valid. [default: no-raise-error] --help : Show this message and exit.","title":"vertex-deployer check"},{"location":"CLI_REFERENCE/#vertex-deployer-config","text":"Display the configuration from pyproject.toml. Usage : 1 $ vertex-deployer config [ OPTIONS ] Options : -a, --all : Whether to display all configuration values. --help : Show this message and exit.","title":"vertex-deployer config"},{"location":"CLI_REFERENCE/#vertex-deployer-create","text":"Create files structure for a new pipeline. Usage : 1 $ vertex-deployer create [ OPTIONS ] PIPELINE_NAME Arguments : PIPELINE_NAME : The name of the pipeline to create. [required] Options : --config-type, -ct [json|py|toml] : The type of the config to create. [default: ConfigType.json] --help : Show this message and exit.","title":"vertex-deployer create"},{"location":"CLI_REFERENCE/#vertex-deployer-deploy","text":"Compile, upload, run and schedule pipelines. Usage : 1 $ vertex-deployer deploy [ OPTIONS ] PIPELINE_NAME: {} Arguments : PIPELINE_NAME:{} : The name of the pipeline to run. [required] Options : --env-file PATH : The environment file to use. --compile, -c / --no-compile, -nc : Whether to compile the pipeline. [default: compile] --upload, -u / --no-upload, -nu : Whether to upload the pipeline to Google Artifact Registry. [default: no-upload] --run, -r / --no-run, -nr : Whether to run the pipeline. [default: no-run] --schedule, -s / --no-schedule, -ns : Whether to create a schedule for the pipeline. [default: no-schedule] --cron TEXT : Cron expression for scheduling the pipeline. 
To pass it to the CLI, use hyphens e.g. 0-10-*-*-* . --delete-last-schedule, -dls / --no-delete-last-schedule : Whether to delete the previous schedule before creating a new one. [default: no-delete-last-schedule] --tags TEXT : The tags to use when uploading the pipeline. [default: latest] --config-filepath, -cfp PATH : Path to the json/py file with parameter values and input artifacts to use when running the pipeline. --config-name, -cn TEXT : Name of the json/py file with parameter values and input artifacts to use when running the pipeline. It must be in the pipeline config dir. e.g. config_dev.json for ./vertex/configs/{pipeline-name}/config_dev.json . --enable-caching, -ec / --no-enable-caching : Whether to enable caching when running the pipeline. [default: no-enable-caching] --experiment-name, -en TEXT : The name of the experiment to run the pipeline in. Defaults to '{pipeline_name}-experiment'. --local-package-path, -lpp PATH : Local dir path where pipelines will be compiled. [default: vertex/pipelines/compiled_pipelines] --help : Show this message and exit.","title":"vertex-deployer deploy"},{"location":"CLI_REFERENCE/#vertex-deployer-list","text":"List all pipelines. Usage : 1 $ vertex-deployer list [ OPTIONS ] Options : --with-configs, -wc / --no-with-configs, -nc : Whether to list config files. [default: no-with-configs] --help : Show this message and exit.","title":"vertex-deployer list"},{"location":"advanced_user_guide/","text":"Tip Add code provided in this page is available in the repo example . \ud83d\udcbb Dev: Compile and run to fasten your dev cycle \u00b6 When developing a vertex pipeline locally, you may want to iterate quickly. The process is often the following: 1. write new code and test its integration in the pipeline workflow code in a notebook 2. script this new code in vertex/lib/ 3. modify associated component(s) in vertex/components and pipelines in vertex/pipelines 4. 
run the pipeline with dev settings to test the new code on Vertex The latest may include: - rebuilding the base image - compiling the pipeline - running the pipeline \ud83c\udfd7\ufe0f Build base image \u00b6 You can use this generic Dockerfile: 1 2 3 4 5 6 7 8 9 10 11 FROM python:3.10-slim-buster ARG PROJECT_ID ENV PROJECT_ID = ${ PROJECT_ID } COPY requirements.txt . RUN python3 -m pip install --upgrade pip RUN python3 -m pip install -r requirements.txt COPY vertex . ENV PYTHONPATH \" ${ PYTHONPATH } :.\" Then build it with docker or Cloud Build. For the latest, here is a sample cloudbuild.yaml : 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 # This config file is meant to be used from a local dev machine to submit a vertex base image build to Cloud Build. # This generic image will then be used in all the Vertex components of your pipeline. steps : # Build base image - name : 'gcr.io/cloud-builders/docker' args : [ 'build' , '-t' , '${_GAR_IMAGE_PATH}' , '-f' , 'vertex/deployment/Dockerfile' , '--build-arg' , 'PROJECT_ID=${PROJECT_ID}' , '--build-arg' , 'GCP_REGION=${_GCP_REGION}' , '--build-arg' , 'GAR_LOCATION=${_GAR_LOCATION}' , '--build-arg' , 'GAR_PIPELINES_REPO_ID=${_GAR_PIPELINES_REPO_ID}' , '--build-arg' , 'VERTEX_STAGING_BUCKET_NAME=${_VERTEX_STAGING_BUCKET_NAME}' , '--build-arg' , 'VERTEX_SERVICE_ACCOUNT=${_VERTEX_SERVICE_ACCOUNT}' , '.' 
, ] id : build-base-image substitutions : _GAR_IMAGE_PATH : '${_GAR_LOCATION}-docker.pkg.dev/${PROJECT_ID}/${_GAR_DOCKER_REPO_ID}/${_GAR_VERTEX_BASE_IMAGE_NAME}:${_TAG}' options : logging : CLOUD_LOGGING_ONLY dynamic_substitutions : true images : - '${_GAR_IMAGE_PATH}' tags : - vertex-${_GAR_DOCKER_REPO_ID}-base-image-local-${_TAG} Then you can trigger the build manually using this make command: 1 2 export $( cat .env | xargs ) make build-base-image This command includes the following: 1 2 3 4 .PHONY : build - base - image build-base-image : @gcloud builds submit --config ./vertex/deployment/cloudbuild_local.yaml \\ --substitutions = _GAR_LOCATION = ${ GAR_LOCATION } ,_GAR_DOCKER_REPO_ID = ${ GAR_DOCKER_REPO_ID } ,_GAR_VERTEX_BASE_IMAGE_NAME = ${ GAR_VERTEX_BASE_IMAGE_NAME } ,_TAG = ${ TAG } \ud83d\ude80 Compile and run \u00b6 Now that you have a base image, you can compile your pipeline and trigger a run that will use the latest version of your docker base image 1 vertex-deployer deploy --compile --run --env-file .env --config-name my_config.json \ud83e\uddea CI: Check your pipelines and config integrity \u00b6 \ud83d\udcbb Check your pipelines locally \u00b6 You can check pipelines integrity and config integrity using the following command: 1 vertex-deployer check --all To check a specific pipeline: 1 vertex-deployer check my_pipeline \u2795 Add to CI \u00b6 You can add a github workflow checking your pipelines integrity using the following file: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 name : Check pipelines on : pull_request : branches : - '*' workflow_call : env : PYTHON_VERSION : \"3.10\" jobs : check-pipelines : name : Check Vertex Pipelines runs-on : ubuntu-latest steps : - uses : actions/checkout@v3 - name : Set up Python ${{ env.PYTHON_VERSION }} uses : actions/setup-python@v4 with : python-version : ${{ env.PYTHON_VERSION }} cache : 'pip' - name : Install requirements run : | python3 -m pip install -r 
requirements.txt - name : Check pipelines run : | export PYTHONPATH=. vertex-deployer check --all \u2795 Add to pre-commit hooks \u00b6 You can add a pre-commit hook checking your pipelines integrity using a local hook: 1 2 3 4 5 6 7 8 repos : - repo : local hooks : - id : vertex-deployer-check name : check pipelines entry : vertex-deployer check -a pass_filenames : false language : system \ud83d\ude80 CD: Deploy your pipelines in a standardized manner \u00b6 Once you have a valid pipeline, you want to deploy it on Vertex. To automate deployment when merging to develop or main , you have multiple options. - use CloudBuild and CloudBuild triggers ) - use Github Action to trigger CloudBuild job - \ud83d\udea7 use Github Action only Note To use cloudbuild for CD, please update you Dockerfile with all these arguments. This will allow you to use vertex-deployer from your base image in CloudBuild. 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 FROM python:3.10-slim-buster ARG PROJECT_ID ARG GCP_REGION ARG GAR_LOCATION ARG GAR_PIPELINES_REPO_ID ARG VERTEX_STAGING_BUCKET_NAME ARG VERTEX_SERVICE_ACCOUNT ENV PROJECT_ID = ${ PROJECT_ID } ENV GCP_REGION = ${ GCP_REGION } ENV GAR_LOCATION = ${ GAR_LOCATION } ENV GAR_PIPELINES_REPO_ID = ${ GAR_PIPELINES_REPO_ID } ENV VERTEX_STAGING_BUCKET_NAME = ${ VERTEX_STAGING_BUCKET_NAME } ENV VERTEX_SERVICE_ACCOUNT = ${ VERTEX_SERVICE_ACCOUNT } COPY requirements.txt . RUN python3 -m pip install --upgrade pip RUN python3 -m pip install -r requirements.txt COPY vertex . 
ENV PYTHONPATH \" ${ PYTHONPATH } :.\" \u2601\ufe0f CloudBuild \u00b6 You can use the following cloudbuild.yaml to trigger a deployment on Vertex when merging to develop or main : 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 steps : # Build base image - name : 'gcr.io/cloud-builders/docker' args : [ 'build' , '-t' , '${_GAR_IMAGE_PATH}' , '-f' , 'vertex/deployment/Dockerfile' , '--build-arg' , 'PROJECT_ID=${PROJECT_ID}' , '--build-arg' , 'GCP_REGION=${_GCP_REGION}' , '--build-arg' , 'GAR_LOCATION=${_GAR_LOCATION}' , '--build-arg' , 'GAR_PIPELINES_REPO_ID=${_GAR_PIPELINES_REPO_ID}' , '--build-arg' , 'VERTEX_STAGING_BUCKET_NAME=${_VERTEX_STAGING_BUCKET_NAME}' , '--build-arg' , 'VERTEX_SERVICE_ACCOUNT=${_VERTEX_SERVICE_ACCOUNT}' , '.' , ] id : build-base-image # schedule pipeline: compile, upload, schedule - name : '${_GAR_IMAGE_PATH}' entrypoint : 'bash' args : [ '-c' , 'vertex-deployer -log DEBUG deploy dummy_pipeline --compile --upload --run -ec --tags ${_TAG} --schedule --delete-last-schedule --cron *-*-19-*-* --config-name config_test.json' ] dir : '.' id : schedule-dummy-pipeline waitFor : [ 'build-base-image' ] substitutions : _GAR_IMAGE_PATH : '${_GAR_LOCATION}-docker.pkg.dev/${PROJECT_ID}/${_GAR_DOCKER_REPO_ID}/${_GAR_VERTEX_BASE_IMAGE_NAME}:${_TAG}' options : logging : CLOUD_LOGGING_ONLY dynamic_substitutions : true images : - '${_GAR_IMAGE_PATH}' tags : - vertex-${_GAR_DOCKER_REPO_ID}-deployment-example-${_TAG} \ud83c\udfaf Use CloudBuild trigger [PREFERRED OPTION] \u00b6 Then, you'll need to link your repo to CloudBuild and create a trigger for each branch you want to deploy on Vertex. The documentation to link your repo is available here . 
Then, you can create a trigger using this make command: 1 2 export $( cat .env | xargs ) make create-trigger-cd This command includes the following: 1 2 3 4 5 6 7 8 9 10 .PHONY : create - trigger - cd create-trigger-cd : @gcloud builds triggers create github \\ --repo-owner = \"artefactory\" \\ --repo-name = \"test-vertex-deployer\" \\ --name = \"test-vertex-deployer-trigger\" \\ --branch-pattern = \"main\" \\ --build-config = ./vertex/deployment/cloudbuild_cd.yaml \\ --project = ${ PROJECT_ID } \\ --substitutions = _GCP_REGION = ${ GCP_REGION } ,_GAR_LOCATION = ${ GAR_LOCATION } ,_GAR_DOCKER_REPO_ID = ${ GAR_DOCKER_REPO_ID } ,_GAR_VERTEX_BASE_IMAGE_NAME = ${ GAR_VERTEX_BASE_IMAGE_NAME } ,_TAG = ${ TAG } ,_GAR_PIPELINES_REPO_ID = ${ GAR_PIPELINES_REPO_ID } ,_VERTEX_STAGING_BUCKET_NAME = ${ VERTEX_STAGING_BUCKET_NAME } ,_VERTEX_SERVICE_ACCOUNT = ${ VERTEX_SERVICE_ACCOUNT } \ud83d\udc19 Github Action + CloudBuild \u00b6 You can also use Github Action to trigger CloudBuild job. You'll need to set up GCP authentication from your repo using Workload Identity Federation. 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 # This workflow deploys a pipeline to Vertex AI Pipelines. # # Workflow Steps: # # 1. Check pipelines and config integrity # 2. Authenticate to Google Cloud using Workload Identity Federation (WIF) # 3. 
Submit cloud build job to build docker image and deploy pipeline # # For more details on setting up Workload Identity Federation for GitHub, visit https://github.com/google-github-actions/auth#setting-up-workload-identity-federation # # Your WIF service account must have the following IAM roles: # - roles/artifactregistry.writer # - roles/storage.admin # - roles/cloudbuild.builds.builder name : Deploy pipelines on : push : branches : - main env : PYTHON_VERSION : \"3.10\" PROJECT_ID : \"my-project\" GCP_REGION : \"europe-west1\" TAG : \"latest\" # Google Artifact Registry GAR_LOCATION : \"europe-west1\" GAR_DOCKER_REPO_ID : \"demo-docker\" GAR_PIPELINES_REPO_ID : \"test-pipelines\" GAR_VERTEX_BASE_IMAGE_NAME : \"base-image\" # Vertex AI VERTEX_STAGING_BUCKET_NAME : \"my-project-vertex-staging\" VERTEX_SERVICE_ACCOUNT : \"my-service-account@my-project.iam.gserviceaccount.com\" jobs : check-pipelines : name : Check Pipelines uses : ./.github/workflows/check_pipelines.yaml deploy-pipelines : name : Deploy pipelines needs : check-pipelines runs-on : ubuntu-latest concurrency : deploy-pipelines permissions : id-token : write contents : write steps : - uses : actions/checkout@v3 with : fetch-depth : 0 - name : Set up Python ${{ env.PYTHON_VERSION }} uses : actions/setup-python@v4 with : python-version : ${{ env.PYTHON_VERSION }} cache : 'pip' - name : Install requirements run : | python3 -m pip install --upgrade pip python3 -m pip install -r requirements.txt - name : 'Authenticate to Google Cloud' uses : 'google-github-actions/auth@v1' with : token_format : 'access_token' workload_identity_provider : '${{ secrets.WIF_PROVIDER }}' # e.g. - projects/123456789/locations/global/workloadIdentityPools/my-pool/providers/my-provider service_account : '${{ secrets.WIF_SERVICE_ACCOUNT }}' # e.g. 
- my-service-account@my-project.iam.gserviceaccount.com - name : Trigger Cloud Build run : | export PROJECT_ID=vertex-deployer-sandbox-3a8a make deploy-pipeline \ud83d\udea7 Github Action only \u00b6 Warning This is a work in progress. Please use CloudBuild for CD. Docker build and push to GCR example is not yet implemented.","title":"Advanced User Guide"},{"location":"advanced_user_guide/#dev-compile-and-run-to-fasten-your-dev-cycle","text":"When developing a vertex pipeline locally, you may want to iterate quickly. The process is often the following: 1. write new code and test its integration in the pipeline workflow code in a notebook 2. script this new code in vertex/lib/ 3. modify associated component(s) in vertex/components and pipelines in vertex/pipelines 4. run the pipeline with dev settings to test the new code on Vertex The latest may include: - rebuilding the base image - compiling the pipeline - running the pipeline","title":"\ud83d\udcbb Dev: Compile and run to fasten your dev cycle"},{"location":"advanced_user_guide/#build-base-image","text":"You can use this generic Dockerfile: 1 2 3 4 5 6 7 8 9 10 11 FROM python:3.10-slim-buster ARG PROJECT_ID ENV PROJECT_ID = ${ PROJECT_ID } COPY requirements.txt . RUN python3 -m pip install --upgrade pip RUN python3 -m pip install -r requirements.txt COPY vertex . ENV PYTHONPATH \" ${ PYTHONPATH } :.\" Then build it with docker or Cloud Build. For the latest, here is a sample cloudbuild.yaml : 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 # This config file is meant to be used from a local dev machine to submit a vertex base image build to Cloud Build. # This generic image will then be used in all the Vertex components of your pipeline. 
steps : # Build base image - name : 'gcr.io/cloud-builders/docker' args : [ 'build' , '-t' , '${_GAR_IMAGE_PATH}' , '-f' , 'vertex/deployment/Dockerfile' , '--build-arg' , 'PROJECT_ID=${PROJECT_ID}' , '--build-arg' , 'GCP_REGION=${_GCP_REGION}' , '--build-arg' , 'GAR_LOCATION=${_GAR_LOCATION}' , '--build-arg' , 'GAR_PIPELINES_REPO_ID=${_GAR_PIPELINES_REPO_ID}' , '--build-arg' , 'VERTEX_STAGING_BUCKET_NAME=${_VERTEX_STAGING_BUCKET_NAME}' , '--build-arg' , 'VERTEX_SERVICE_ACCOUNT=${_VERTEX_SERVICE_ACCOUNT}' , '.' , ] id : build-base-image substitutions : _GAR_IMAGE_PATH : '${_GAR_LOCATION}-docker.pkg.dev/${PROJECT_ID}/${_GAR_DOCKER_REPO_ID}/${_GAR_VERTEX_BASE_IMAGE_NAME}:${_TAG}' options : logging : CLOUD_LOGGING_ONLY dynamic_substitutions : true images : - '${_GAR_IMAGE_PATH}' tags : - vertex-${_GAR_DOCKER_REPO_ID}-base-image-local-${_TAG} Then you can trigger the build manually using this make command: 1 2 export $( cat .env | xargs ) make build-base-image This command includes the following: 1 2 3 4 .PHONY : build - base - image build-base-image : @gcloud builds submit --config ./vertex/deployment/cloudbuild_local.yaml \\ --substitutions = _GAR_LOCATION = ${ GAR_LOCATION } ,_GAR_DOCKER_REPO_ID = ${ GAR_DOCKER_REPO_ID } ,_GAR_VERTEX_BASE_IMAGE_NAME = ${ GAR_VERTEX_BASE_IMAGE_NAME } ,_TAG = ${ TAG }","title":"\ud83c\udfd7\ufe0f Build base image"},{"location":"advanced_user_guide/#compile-and-run","text":"Now that you have a base image, you can compile your pipeline and trigger a run that will use the latest version of your docker base image 1 vertex-deployer deploy --compile --run --env-file .env --config-name my_config.json","title":"\ud83d\ude80 Compile and run"},{"location":"advanced_user_guide/#ci-check-your-pipelines-and-config-integrity","text":"","title":"\ud83e\uddea CI: Check your pipelines and config integrity"},{"location":"advanced_user_guide/#check-your-pipelines-locally","text":"You can check pipelines integrity and config integrity using the following 
command: 1 vertex-deployer check --all To check a specific pipeline: 1 vertex-deployer check my_pipeline","title":"\ud83d\udcbb Check your pipelines locally"},{"location":"advanced_user_guide/#add-to-ci","text":"You can add a github workflow checking your pipelines integrity using the following file: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 name : Check pipelines on : pull_request : branches : - '*' workflow_call : env : PYTHON_VERSION : \"3.10\" jobs : check-pipelines : name : Check Vertex Pipelines runs-on : ubuntu-latest steps : - uses : actions/checkout@v3 - name : Set up Python ${{ env.PYTHON_VERSION }} uses : actions/setup-python@v4 with : python-version : ${{ env.PYTHON_VERSION }} cache : 'pip' - name : Install requirements run : | python3 -m pip install -r requirements.txt - name : Check pipelines run : | export PYTHONPATH=. vertex-deployer check --all","title":"\u2795 Add to CI"},{"location":"advanced_user_guide/#add-to-pre-commit-hooks","text":"You can add a pre-commit hook checking your pipelines integrity using a local hook: 1 2 3 4 5 6 7 8 repos : - repo : local hooks : - id : vertex-deployer-check name : check pipelines entry : vertex-deployer check -a pass_filenames : false language : system","title":"\u2795 Add to pre-commit hooks"},{"location":"advanced_user_guide/#cd-deploy-your-pipelines-in-a-standardized-manner","text":"Once you have a valid pipeline, you want to deploy it on Vertex. To automate deployment when merging to develop or main , you have multiple options. - use CloudBuild and CloudBuild triggers ) - use Github Action to trigger CloudBuild job - \ud83d\udea7 use Github Action only Note To use cloudbuild for CD, please update you Dockerfile with all these arguments. This will allow you to use vertex-deployer from your base image in CloudBuild. 
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 FROM python:3.10-slim-buster ARG PROJECT_ID ARG GCP_REGION ARG GAR_LOCATION ARG GAR_PIPELINES_REPO_ID ARG VERTEX_STAGING_BUCKET_NAME ARG VERTEX_SERVICE_ACCOUNT ENV PROJECT_ID = ${ PROJECT_ID } ENV GCP_REGION = ${ GCP_REGION } ENV GAR_LOCATION = ${ GAR_LOCATION } ENV GAR_PIPELINES_REPO_ID = ${ GAR_PIPELINES_REPO_ID } ENV VERTEX_STAGING_BUCKET_NAME = ${ VERTEX_STAGING_BUCKET_NAME } ENV VERTEX_SERVICE_ACCOUNT = ${ VERTEX_SERVICE_ACCOUNT } COPY requirements.txt . RUN python3 -m pip install --upgrade pip RUN python3 -m pip install -r requirements.txt COPY vertex . ENV PYTHONPATH \" ${ PYTHONPATH } :.\"","title":"\ud83d\ude80 CD: Deploy your pipelines in a standardized manner"},{"location":"advanced_user_guide/#cloudbuild","text":"You can use the following cloudbuild.yaml to trigger a deployment on Vertex when merging to develop or main : 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 steps : # Build base image - name : 'gcr.io/cloud-builders/docker' args : [ 'build' , '-t' , '${_GAR_IMAGE_PATH}' , '-f' , 'vertex/deployment/Dockerfile' , '--build-arg' , 'PROJECT_ID=${PROJECT_ID}' , '--build-arg' , 'GCP_REGION=${_GCP_REGION}' , '--build-arg' , 'GAR_LOCATION=${_GAR_LOCATION}' , '--build-arg' , 'GAR_PIPELINES_REPO_ID=${_GAR_PIPELINES_REPO_ID}' , '--build-arg' , 'VERTEX_STAGING_BUCKET_NAME=${_VERTEX_STAGING_BUCKET_NAME}' , '--build-arg' , 'VERTEX_SERVICE_ACCOUNT=${_VERTEX_SERVICE_ACCOUNT}' , '.' , ] id : build-base-image # schedule pipeline: compile, upload, schedule - name : '${_GAR_IMAGE_PATH}' entrypoint : 'bash' args : [ '-c' , 'vertex-deployer -log DEBUG deploy dummy_pipeline --compile --upload --run -ec --tags ${_TAG} --schedule --delete-last-schedule --cron *-*-19-*-* --config-name config_test.json' ] dir : '.' 
id : schedule-dummy-pipeline waitFor : [ 'build-base-image' ] substitutions : _GAR_IMAGE_PATH : '${_GAR_LOCATION}-docker.pkg.dev/${PROJECT_ID}/${_GAR_DOCKER_REPO_ID}/${_GAR_VERTEX_BASE_IMAGE_NAME}:${_TAG}' options : logging : CLOUD_LOGGING_ONLY dynamic_substitutions : true images : - '${_GAR_IMAGE_PATH}' tags : - vertex-${_GAR_DOCKER_REPO_ID}-deployment-example-${_TAG}","title":"\u2601\ufe0f CloudBuild"},{"location":"advanced_user_guide/#use-cloudbuild-trigger-preferred-option","text":"Then, you'll need to link your repo to CloudBuild and create a trigger for each branch you want to deploy on Vertex. The documentation to link your repo is available here . Then, you can create a trigger using this make command: 1 2 export $( cat .env | xargs ) make create-trigger-cd This command includes the following: 1 2 3 4 5 6 7 8 9 10 .PHONY : create - trigger - cd create-trigger-cd : @gcloud builds triggers create github \\ --repo-owner = \"artefactory\" \\ --repo-name = \"test-vertex-deployer\" \\ --name = \"test-vertex-deployer-trigger\" \\ --branch-pattern = \"main\" \\ --build-config = ./vertex/deployment/cloudbuild_cd.yaml \\ --project = ${ PROJECT_ID } \\ --substitutions = _GCP_REGION = ${ GCP_REGION } ,_GAR_LOCATION = ${ GAR_LOCATION } ,_GAR_DOCKER_REPO_ID = ${ GAR_DOCKER_REPO_ID } ,_GAR_VERTEX_BASE_IMAGE_NAME = ${ GAR_VERTEX_BASE_IMAGE_NAME } ,_TAG = ${ TAG } ,_GAR_PIPELINES_REPO_ID = ${ GAR_PIPELINES_REPO_ID } ,_VERTEX_STAGING_BUCKET_NAME = ${ VERTEX_STAGING_BUCKET_NAME } ,_VERTEX_SERVICE_ACCOUNT = ${ VERTEX_SERVICE_ACCOUNT }","title":"\ud83c\udfaf Use CloudBuild trigger [PREFERRED OPTION]"},{"location":"advanced_user_guide/#github-action-cloudbuild","text":"You can also use Github Action to trigger CloudBuild job. You'll need to set up GCP authentication from your repo using Workload Identity Federation. 
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 # This workflow deploys a pipeline to Vertex AI Pipelines. # # Workflow Steps: # # 1. Check pipelines and config integrity # 2. Authenticate to Google Cloud using Workload Identity Federation (WIF) # 3. Submit cloud build job to build docker image and deploy pipeline # # For more details on setting up Workload Identity Federation for GitHub, visit https://github.com/google-github-actions/auth#setting-up-workload-identity-federation # # Your WIF service account must have the following IAM roles: # - roles/artifactregistry.writer # - roles/storage.admin # - roles/cloudbuild.builds.builder name : Deploy pipelines on : push : branches : - main env : PYTHON_VERSION : \"3.10\" PROJECT_ID : \"my-project\" GCP_REGION : \"europe-west1\" TAG : \"latest\" # Google Artifact Registry GAR_LOCATION : \"europe-west1\" GAR_DOCKER_REPO_ID : \"demo-docker\" GAR_PIPELINES_REPO_ID : \"test-pipelines\" GAR_VERTEX_BASE_IMAGE_NAME : \"base-image\" # Vertex AI VERTEX_STAGING_BUCKET_NAME : \"my-project-vertex-staging\" VERTEX_SERVICE_ACCOUNT : \"my-service-account@my-project.iam.gserviceaccount.com\" jobs : check-pipelines : name : Check Pipelines uses : ./.github/workflows/check_pipelines.yaml deploy-pipelines : name : Deploy pipelines needs : check-pipelines runs-on : ubuntu-latest concurrency : deploy-pipelines permissions : id-token : write contents : write steps : - uses : actions/checkout@v3 with : fetch-depth : 0 - name : Set up Python ${{ env.PYTHON_VERSION }} uses : actions/setup-python@v4 with : python-version : ${{ env.PYTHON_VERSION }} cache : 'pip' - name : Install requirements run : | python3 -m pip install --upgrade pip python3 -m pip install -r requirements.txt - name : 'Authenticate to Google Cloud' uses : 'google-github-actions/auth@v1' with : 
token_format : 'access_token' workload_identity_provider : '${{ secrets.WIF_PROVIDER }}' # e.g. - projects/123456789/locations/global/workloadIdentityPools/my-pool/providers/my-provider service_account : '${{ secrets.WIF_SERVICE_ACCOUNT }}' # e.g. - my-service-account@my-project.iam.gserviceaccount.com - name : Trigger Cloud Build run : | export PROJECT_ID=vertex-deployer-sandbox-3a8a make deploy-pipeline","title":"\ud83d\udc19 Github Action + CloudBuild"},{"location":"advanced_user_guide/#github-action-only","text":"Warning This is a work in progress. Please use CloudBuild for CD. Docker build and push to GCR example is not yet implemented.","title":"\ud83d\udea7 Github Action only"},{"location":"changelog/","text":"CHANGELOG \u00b6 0.3.2 (2023-11-08) \u00b6 Build \u00b6 build(deps-dev): update ruff requirement from ^0.0 to ^0.1 (#95) Signed-off-by: dependabot[bot] < support@github.com > Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ( cff19af ) build(deps): bump actions/cache from 3.2.4 to 3.3.2 (#94) Signed-off-by: dependabot[bot] < support@github.com > Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ( 5d2dbf9 ) build(deps): bump actions/setup-python from 2 to 4 (#92) Signed-off-by: dependabot[bot] < support@github.com > Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ( 9d25ad6 ) build(deps): bump actions/checkout from 2 to 4 (#93) Signed-off-by: dependabot[bot] < support@github.com > Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ( c97ec85 ) build: update dependencies specifiers to be more flexible (#91) ( fddf1a9 ) Ci \u00b6 ci: build and deploy docs when a release is published (#90) ( 01ce99a ) Documentation \u00b6 docs: add full example to repo (#72) ( 42ccfc9 ) docs: update installation instructions (#98) ( 6e51b89 ) Fix \u00b6 fix: warn when cannot associate run to experiment (#99) ( 7ba89f3 ) Performance \u00b6 
perf: rationalize kfp imports (#97) ( 19da429 ) 0.3.1 (2023-11-06) \u00b6 Fix \u00b6 fix: build doc after release (#89) ( e9b6b36 ) fix: run option fails when not uploading to gar (#87) ( 6fb7c14 ) 0.3.0 (2023-11-06) \u00b6 Feature \u00b6 feat: configure with pyproject (#82) ( 56d69f4 ) 0.2.2 (2023-11-06) \u00b6 Ci \u00b6 ci: update changelog parameters (#81) ( 5cd8321 ) Documentation \u00b6 docs: add mkdocs documentation (#79) ( dd66594 ) Fix \u00b6 fix: release to artifact registry (#85) ( 395b256 ) fix: rm protected namespaces from pipeline model (#78) ( 7ea8975 ) fix: base unsupported config file error msg on config types enum (#77) ( b3f3b46 ) 0.2.1 (2023-10-13) \u00b6 Documentation \u00b6 docs: fix typos in readme (#74) ( 9cc7ad3 ) Fix \u00b6 fix: simplify check pipelines cmd (#73) ( c63a7fb ) fix: add pipeline root path check in create command (#75) ( 65977a0 ) 0.2.0 (2023-10-06) \u00b6 Documentation \u00b6 docs: update documentation (#68) ( e942add ) Feature \u00b6 feat: add support for toml config files (#70) ( b997e69 ) Fix \u00b6 fix: pipelines objects can be named as {pipeline_name} instead of pipeline (#69) ( f79d081 ) fix: bad config error at pipeline level in checks (#67) ( d8dab84 ) 0.1.1 (2023-10-05) \u00b6 Documentation \u00b6 docs: update installation guidelines (#61) ( e284168 ) Fix \u00b6 fix: make imports in cli commands to reduce overhead (#63) ( 9c973f0 ) fix: checks temp directory removal (#62) ( f005f44 ) 0.1.0 (2023-10-05) \u00b6 Chore \u00b6 chore: update release version tag format and commit message (#50) ( b635287 ) Ci \u00b6 ci: upload release to gcs (#56) ci: update version_variables and changelog patterns for release ci: upload release to gcs bucket ci: update ci actions version ( e7119db ) ci: update ci trigger policy (#45) ( f1171d2 ) Feature \u00b6 feat: add rich display in console (#54) feat: add console status for deploy command and console output for check enh: remove empty columns from rich table fix: rm time.sleep from code 
enh: use dataclass as row for pipeline checks and group errors by config path fix: make rich mandatory as dependency fix: typing error in python 3.8 enh: rename config_path as config_file ( 6753402 ) Fix \u00b6 fix: scheduling tag retrieval (#59) ( 11347ba ) fix: misc code improvements (#58) enh: add version callback for app test: add integration test for root command in CI doc: update README with installation from gcs guidelines doc: fix typos in readme enh: add possibility to use either --config-filepath or --config-name fix: log the right experiment name fix: check that cron arg is not empty string doc: add doc about cron job format in cli feat: add rich display for pipeline list command chore: rename version_callback to display_version_and_exit fix: typer bad parameter raised immediatelty after cli call in deploy command ( 0226088 ) fix: misc typing and logging typos (#52) fix: logging disable in checks fix: VertexPipelineDeployer type hints and paths construction ( c80aeb1 ) fix: rm unused files (#5) ( e220dc8 ) fix: readme typos (#4) ( 3ebcf4a ) Unknown \u00b6 0.0.1 Automatically generated by python-semantic-release ( 0809df7 ) Release v0.1.0 (#48) ( a3c18df ) Ci: Update Continuous Deployment (CD) Trigger Policy and Documentation (#47) ci: update cd trigger policy ci: update cd doc ci: test reusable ci ci: fix reusable ci ref ci: fix reusable ci ci: add need for CI to be completed ci: fix cd on main to be triggered only when pushing to main ci: update doc for cd ( a08b581 ) Chore: prepare for release (#38) chore: add release drafter chore: add release drafter chore: add init with version ci: update release action ci: update release action linting ci: add semantic release configuration doc: update CONTRIBUTING.md for release management ( f59b795 ) Feat: make it python 38 39 compatible (#41) feat: typing back to 3.8 doc: update readme with new python versions fix: update ci with new python versions ( 4e50c99 ) Feat: add cli checks to ci (#40) feat: add cli 
integration tets to ci fix: use poetry to run commands in ci fix: fix paths before launching cli commands ( 9500a03 ) Feat/add create and list commands (#39) feat: add comand list to list pipelines feat: add command create to cli and folder structure reorg enh: renamed pipelines_deployer.py -> pipeline_deployer.py test: update tests doc: update readme enh: factorize get config paths ( 9b973bf ) Test: add unit tests (#31) test: add tests for make_enum_from_python_package test: make them work test: add pytest cov fix: make file command name to run tests tests: add tests create_model_from_pipeline ( d01d60c ) Feat: pass artifacts as inputs (#28) feat: add argument input_artifacts_filepath to cli feat: add possibility to have python or json config files fix: update check command to support python files as config feat: allow to specify config path to check only one config file fix: change artifact type in pipeline dynamic model to allow valiation test: add tests to convert_artifact_type_to_str doc: update readme fix: change config file path option name enh: add and remove temp dir when checking pipelines ( 4d163bd ) Fix/deploy command (#36) fix: iam rights for service account fix: multiple formatting issues when uploading pipeline template fix: typo in readme instruction for gcs bucket iam binding ( ead427f ) Feat/misc code improvements (#32) enh: use urljoin to make urls enh: add TagNotFoundError fix: vertex settings loading and errors enh: use decortor to check garhost in deployer enh: check experiment anme and check gar host feat: add missing gar host error feat: add message in no configs were checked for pipeline fix: path for pipeline should be relative not absolute fix: temp fix for vertex artifacts validation; arbitrary types allowed fix: upload does not work if lpp is not . 
( 94c8061 ) Feat: add command to check pipelines (#19) feat: add comment to check pipelines (import, compile, config files) enh: creation of pipeline model only once feat: use pydantic to validate configs and get all validation errors in one exception feat: add error if no pipelines found in check and log of pipelines / config checked feat: add specific validator for import pipeline computed field (works as a property) doc: update docstring for command doc: update readme and add --all flag doc: update README table of contents links feat: add context manager to disable loguru logger temporarily ( 9f41c8e ) Feat: add pr_agent (#29) feat: add pr_agent feat: update pr agent action name ( 92e1acb ) Fix: multiple issues raised in alpha testing (#27) fix: typos in code to make upload and run work doc: update readme doc: fix ruff and license badge doc: add why this tool in readme doc: add table of content enh: use --parameter-values-filepath instead of --config-name for clarity for user enh: put the vertex repository in example/ doc: fix typo doc: update repo structure doc: update CONTRIBUTE.md ( 05deb15 ) enh: use pydantic settings to get deployment variables from env file instead of os.environ (#24) ( 879c14a ) Feat/switch logging to loguru (#20) enh: use loguru instead of python logging feat: add typer callback to set logging level ( 6c65c09 ) Fix/inconsistencies in pipeline names (#18) fix: use pipelines names with underscore instead of hyphen fix: rename module different from package doc: update readme accordingly ( 7194c70 ) Feat: switch cli to typer (#8) feat: switch cli to typer fix: add options short names + use enum value ( 267d169 ) Feat: add constants file (#7) feat: add constants file fix: package name in pyproject.toml fix: pr template contributing link ( 54f59f7 ) Chore: add issue and pr templates (#6) chore: add pr template chore: add issue templates chore: add CONTRIBUTING.md ( b736c3a ) Feat: vertex deployer (#3) feat/add vertex deployer and cli feat: add 
entrypoint for deployer fix: paths to pipeline folder and root path feat: add vertex foledr with dummy pipelines and example.env doc: update doc with how-to section ( f00c231 ) Chore/update readme and add gitignore (#2) doc: update readme chore: add .gitignore ( 3070873 ) Chore: setup repo (#1) chore: setup repo fix: deployer is not a package error fix: rm pytest from prepush hooks chore: add to do list on the readme fix: add dummy test for the ci to pass ( f154389 ) Initial commit ( cab9963 )","title":"Changelog"},{"location":"changelog/#changelog","text":"","title":"CHANGELOG"},{"location":"changelog/#032-2023-11-08","text":"","title":"0.3.2 (2023-11-08)"},{"location":"changelog/#build","text":"build(deps-dev): update ruff requirement from ^0.0 to ^0.1 (#95) Signed-off-by: dependabot[bot] < support@github.com > Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ( cff19af ) build(deps): bump actions/cache from 3.2.4 to 3.3.2 (#94) Signed-off-by: dependabot[bot] < support@github.com > Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ( 5d2dbf9 ) build(deps): bump actions/setup-python from 2 to 4 (#92) Signed-off-by: dependabot[bot] < support@github.com > Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ( 9d25ad6 ) build(deps): bump actions/checkout from 2 to 4 (#93) Signed-off-by: dependabot[bot] < support@github.com > Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ( c97ec85 ) build: update dependencies specifiers to be more flexible (#91) ( fddf1a9 )","title":"Build"},{"location":"changelog/#ci","text":"ci: build and deploy docs when a release is published (#90) ( 01ce99a )","title":"Ci"},{"location":"changelog/#documentation","text":"docs: add full example to repo (#72) ( 42ccfc9 ) docs: update installation instructions (#98) ( 6e51b89 )","title":"Documentation"},{"location":"changelog/#fix","text":"fix: warn when cannot 
associate run to experiment (#99) ( 7ba89f3 )","title":"Fix"},{"location":"changelog/#performance","text":"perf: rationalize kfp imports (#97) ( 19da429 )","title":"Performance"},{"location":"changelog/#031-2023-11-06","text":"","title":"0.3.1 (2023-11-06)"},{"location":"changelog/#fix_1","text":"fix: build doc after release (#89) ( e9b6b36 ) fix: run option fails when not uploading to gar (#87) ( 6fb7c14 )","title":"Fix"},{"location":"changelog/#030-2023-11-06","text":"","title":"0.3.0 (2023-11-06)"},{"location":"changelog/#feature","text":"feat: configure with pyproject (#82) ( 56d69f4 )","title":"Feature"},{"location":"changelog/#022-2023-11-06","text":"","title":"0.2.2 (2023-11-06)"},{"location":"changelog/#ci_1","text":"ci: update changelog parameters (#81) ( 5cd8321 )","title":"Ci"},{"location":"changelog/#documentation_1","text":"docs: add mkdocs documentation (#79) ( dd66594 )","title":"Documentation"},{"location":"changelog/#fix_2","text":"fix: release to artifact registry (#85) ( 395b256 ) fix: rm protected namespaces from pipeline model (#78) ( 7ea8975 ) fix: base unsupported config file error msg on config types enum (#77) ( b3f3b46 )","title":"Fix"},{"location":"changelog/#021-2023-10-13","text":"","title":"0.2.1 (2023-10-13)"},{"location":"changelog/#documentation_2","text":"docs: fix typos in readme (#74) ( 9cc7ad3 )","title":"Documentation"},{"location":"changelog/#fix_3","text":"fix: simplify check pipelines cmd (#73) ( c63a7fb ) fix: add pipeline root path check in create command (#75) ( 65977a0 )","title":"Fix"},{"location":"changelog/#020-2023-10-06","text":"","title":"0.2.0 (2023-10-06)"},{"location":"changelog/#documentation_3","text":"docs: update documentation (#68) ( e942add )","title":"Documentation"},{"location":"changelog/#feature_1","text":"feat: add support for toml config files (#70) ( b997e69 )","title":"Feature"},{"location":"changelog/#fix_4","text":"fix: pipelines objects can be named as {pipeline_name} instead of pipeline (#69) ( 
f79d081 ) fix: bad config error at pipeline level in checks (#67) ( d8dab84 )","title":"Fix"},{"location":"changelog/#011-2023-10-05","text":"","title":"0.1.1 (2023-10-05)"},{"location":"changelog/#documentation_4","text":"docs: update installation guidelines (#61) ( e284168 )","title":"Documentation"},{"location":"changelog/#fix_5","text":"fix: make imports in cli commands to reduce overhead (#63) ( 9c973f0 ) fix: checks temp directory removal (#62) ( f005f44 )","title":"Fix"},{"location":"changelog/#010-2023-10-05","text":"","title":"0.1.0 (2023-10-05)"},{"location":"changelog/#chore","text":"chore: update release version tag format and commit message (#50) ( b635287 )","title":"Chore"},{"location":"changelog/#ci_2","text":"ci: upload release to gcs (#56) ci: update version_variables and changelog patterns for release ci: upload release to gcs bucket ci: update ci actions version ( e7119db ) ci: update ci trigger policy (#45) ( f1171d2 )","title":"Ci"},{"location":"changelog/#feature_2","text":"feat: add rich display in console (#54) feat: add console status for deploy command and console output for check enh: remove empty columns from rich table fix: rm time.sleep from code enh: use dataclass as row for pipeline checks and group errors by config path fix: make rich mandatory as dependency fix: typing error in python 3.8 enh: rename config_path as config_file ( 6753402 )","title":"Feature"},{"location":"changelog/#fix_6","text":"fix: scheduling tag retrieval (#59) ( 11347ba ) fix: misc code improvements (#58) enh: add version callback for app test: add integration test for root command in CI doc: update README with installation from gcs guidelines doc: fix typos in readme enh: add possibility to use either --config-filepath or --config-name fix: log the right experiment name fix: check that cron arg is not empty string doc: add doc about cron job format in cli feat: add rich display for pipeline list command chore: rename version_callback to 
display_version_and_exit fix: typer bad parameter raised immediatelty after cli call in deploy command ( 0226088 ) fix: misc typing and logging typos (#52) fix: logging disable in checks fix: VertexPipelineDeployer type hints and paths construction ( c80aeb1 ) fix: rm unused files (#5) ( e220dc8 ) fix: readme typos (#4) ( 3ebcf4a )","title":"Fix"},{"location":"changelog/#unknown","text":"0.0.1 Automatically generated by python-semantic-release ( 0809df7 ) Release v0.1.0 (#48) ( a3c18df ) Ci: Update Continuous Deployment (CD) Trigger Policy and Documentation (#47) ci: update cd trigger policy ci: update cd doc ci: test reusable ci ci: fix reusable ci ref ci: fix reusable ci ci: add need for CI to be completed ci: fix cd on main to be triggered only when pushing to main ci: update doc for cd ( a08b581 ) Chore: prepare for release (#38) chore: add release drafter chore: add release drafter chore: add init with version ci: update release action ci: update release action linting ci: add semantic release configuration doc: update CONTRIBUTING.md for release management ( f59b795 ) Feat: make it python 38 39 compatible (#41) feat: typing back to 3.8 doc: update readme with new python versions fix: update ci with new python versions ( 4e50c99 ) Feat: add cli checks to ci (#40) feat: add cli integration tets to ci fix: use poetry to run commands in ci fix: fix paths before launching cli commands ( 9500a03 ) Feat/add create and list commands (#39) feat: add comand list to list pipelines feat: add command create to cli and folder structure reorg enh: renamed pipelines_deployer.py -> pipeline_deployer.py test: update tests doc: update readme enh: factorize get config paths ( 9b973bf ) Test: add unit tests (#31) test: add tests for make_enum_from_python_package test: make them work test: add pytest cov fix: make file command name to run tests tests: add tests create_model_from_pipeline ( d01d60c ) Feat: pass artifacts as inputs (#28) feat: add argument input_artifacts_filepath 
to cli feat: add possibility to have python or json config files fix: update check command to support python files as config feat: allow to specify config path to check only one config file fix: change artifact type in pipeline dynamic model to allow valiation test: add tests to convert_artifact_type_to_str doc: update readme fix: change config file path option name enh: add and remove temp dir when checking pipelines ( 4d163bd ) Fix/deploy command (#36) fix: iam rights for service account fix: multiple formatting issues when uploading pipeline template fix: typo in readme instruction for gcs bucket iam binding ( ead427f ) Feat/misc code improvements (#32) enh: use urljoin to make urls enh: add TagNotFoundError fix: vertex settings loading and errors enh: use decortor to check garhost in deployer enh: check experiment anme and check gar host feat: add missing gar host error feat: add message in no configs were checked for pipeline fix: path for pipeline should be relative not absolute fix: temp fix for vertex artifacts validation; arbitrary types allowed fix: upload does not work if lpp is not . 
( 94c8061 ) Feat: add command to check pipelines (#19) feat: add comment to check pipelines (import, compile, config files) enh: creation of pipeline model only once feat: use pydantic to validate configs and get all validation errors in one exception feat: add error if no pipelines found in check and log of pipelines / config checked feat: add specific validator for import pipeline computed field (works as a property) doc: update docstring for command doc: update readme and add --all flag doc: update README table of contents links feat: add context manager to disable loguru logger temporarily ( 9f41c8e ) Feat: add pr_agent (#29) feat: add pr_agent feat: update pr agent action name ( 92e1acb ) Fix: multiple issues raised in alpha testing (#27) fix: typos in code to make upload and run work doc: update readme doc: fix ruff and license badge doc: add why this tool in readme doc: add table of content enh: use --parameter-values-filepath instead of --config-name for clarity for user enh: put the vertex repository in example/ doc: fix typo doc: update repo structure doc: update CONTRIBUTE.md ( 05deb15 ) enh: use pydantic settings to get deployment variables from env file instead of os.environ (#24) ( 879c14a ) Feat/switch logging to loguru (#20) enh: use loguru instead of python logging feat: add typer callback to set logging level ( 6c65c09 ) Fix/inconsistencies in pipeline names (#18) fix: use pipelines names with underscore instead of hyphen fix: rename module different from package doc: update readme accordingly ( 7194c70 ) Feat: switch cli to typer (#8) feat: switch cli to typer fix: add options short names + use enum value ( 267d169 ) Feat: add constants file (#7) feat: add constants file fix: package name in pyproject.toml fix: pr template contributing link ( 54f59f7 ) Chore: add issue and pr templates (#6) chore: add pr template chore: add issue templates chore: add CONTRIBUTING.md ( b736c3a ) Feat: vertex deployer (#3) feat/add vertex deployer and cli feat: add 
entrypoint for deployer fix: paths to pipeline folder and root path feat: add vertex foledr with dummy pipelines and example.env doc: update doc with how-to section ( f00c231 ) Chore/update readme and add gitignore (#2) doc: update readme chore: add .gitignore ( 3070873 ) Chore: setup repo (#1) chore: setup repo fix: deployer is not a package error fix: rm pytest from prepush hooks chore: add to do list on the readme fix: add dummy test for the ci to pass ( f154389 ) Initial commit ( cab9963 )","title":"Unknown"},{"location":"contributing/","text":"\ud83e\uddd1\u200d\ud83d\udcbb Contributing to Vertex Pipelines Deployer \u00b6 How to contribute \u00b6 Issues, Pull Requests and Code Reviews. \u00b6 Issues and Pull requests templates are mandatory. At least one code review is needed to merge. Please merge your feature branches on develop . We try to rebase as much as possible and use squash and merge to keep a linear and condensed git history. Getting started \u00b6 This project uses Poetry for dependency management. Poetry's doc is really good, so you should check it out if you have any questions. To install poetry: 1 make download-poetry You can start by creating a virtual environment (conda or other) or use poetry venv(please check the Makefile first if so, as poetry venv is deactivated there). Then, to install the project dependencies, run the following command: 1 make install To develop, you will need dev requirements too. Run: 1 make install-dev-requirements About poetry.lock poetry.lock is not committed deliberately, as recommended by Poetry's doc. You can read more about it here . Codestyle \u00b6 This projects uses Black , isort, ruff for codestyle. You can run the following command to format your code. It uses Pre-commit hooks to run the formatters and linters. 1 make format-code Docstring convention \u00b6 This project uses Google docstring convention . A full example is available in here . 
How to release \u00b6 This project uses Python Semantic Versioning and Poetry to create releases and tags. The release process is automated through GitHub Actions. Here is the process: Create a Pull Request from develop to main . Merge the Pull Request. This must create a merge commit. The merge will trigger the Release GitHub Action defined in this workflow . The Release GitHub Action does the following: Checks out the code. Runs the CI GitHub Action, which runs the tests and linters. Runs Python Semantic Release, which takes care of version update, tag creation, and release creation. The action is triggered by any push to main. Tip The release action will be triggered by any push to main only if the 'CI' job in the 'release.yaml' workflow succeeds. Python Semantic Release will take care of version number update, tag creation and release creation. When it's done, rebase develop to keep it up to date with main. And you're done ! \ud83c\udf89","title":"Contributing"},{"location":"contributing/#contributing-to-vertex-pipelines-deployer","text":"","title":"\ud83e\uddd1\u200d\ud83d\udcbb Contributing to Vertex Pipelines Deployer"},{"location":"contributing/#how-to-contribute","text":"","title":"How to contribute"},{"location":"contributing/#issues-pull-requests-and-code-reviews","text":"Issues and Pull requests templates are mandatory. At least one code review is needed to merge. Please merge your feature branches on develop . We try to rebase as much as possible and use squash and merge to keep a linear and condensed git history.","title":"Issues, Pull Requests and Code Reviews."},{"location":"contributing/#getting-started","text":"This project uses Poetry for dependency management. Poetry's doc is really good, so you should check it out if you have any questions. To install poetry: 1 make download-poetry You can start by creating a virtual environment (conda or other) or use poetry venv(please check the Makefile first if so, as poetry venv is deactivated there). 
Then, to install the project dependencies, run the following command: 1 make install To develop, you will need dev requirements too. Run: 1 make install-dev-requirements About poetry.lock poetry.lock is not committed deliberately, as recommended by Poetry's doc. You can read more about it here .","title":"Getting started"},{"location":"contributing/#codestyle","text":"This projects uses Black , isort, ruff for codestyle. You can run the following command to format your code. It uses Pre-commit hooks to run the formatters and linters. 1 make format-code","title":"Codestyle"},{"location":"contributing/#docstring-convention","text":"This project uses Google docstring convention . A full example is available in here .","title":"Docstring convention"},{"location":"contributing/#how-to-release","text":"This project uses Python Semantic Versioning and Poetry to create releases and tags. The release process is automated through GitHub Actions. Here is the process: Create a Pull Request from develop to main . Merge the Pull Request. This must create a merge commit. The merge will trigger the Release GitHub Action defined in this workflow . The Release GitHub Action does the following: Checks out the code. Runs the CI GitHub Action, which runs the tests and linters. Runs Python Semantic Release, which takes care of version update, tag creation, and release creation. The action is triggered by any push to main. Tip The release action will be triggered by any push to main only if the 'CI' job in the 'release.yaml' workflow succeeds. Python Semantic Release will take care of version number update, tag creation and release creation. When it's done, rebase develop to keep it up to date with main. And you're done ! 
\ud83c\udf89","title":"How to release"},{"location":"example/","text":"\ud83d\udea7 Dummy Pipeline \u00b6 \ud83d\udea7 Dev: Compile and run to fasten your dev cycle \u00b6 \ud83d\udea7 CI: Check your pipelines and config integrity \u00b6 \ud83d\udea7 CD: Deploy your pipelines in a standardized manner \u00b6 \ud83d\udea7 Github Action \u00b6 \ud83d\udea7 CloudBuild trigger \u00b6","title":"Example"},{"location":"example/#dummy-pipeline","text":"","title":"\ud83d\udea7 Dummy Pipeline"},{"location":"example/#dev-compile-and-run-to-fasten-your-dev-cycle","text":"","title":"\ud83d\udea7 Dev: Compile and run to fasten your dev cycle"},{"location":"example/#ci-check-your-pipelines-and-config-integrity","text":"","title":"\ud83d\udea7 CI: Check your pipelines and config integrity"},{"location":"example/#cd-deploy-your-pipelines-in-a-standardized-manner","text":"","title":"\ud83d\udea7 CD: Deploy your pipelines in a standardized manner"},{"location":"example/#github-action","text":"","title":"\ud83d\udea7 Github Action"},{"location":"example/#cloudbuild-trigger","text":"","title":"\ud83d\udea7 CloudBuild trigger"},{"location":"install/","text":"TL;DR Install using pip: 1 pip install --extra-index-url https://europe-west1-python.pkg.dev/vertex-deployer-sandbox-3a8a/vertex-deployer/simple vertex-deployer In your requirements: 1 2 --extra-index-url https://europe-west1-python.pkg.dev/vertex-deployer-sandbox-3a8a/vertex-deployer/simple vertex-deployer==0.3.1 From git repo \u00b6 Stable version: 1 pip install git+https://github.com/artefactory/vertex-pipelines-deployer.git@main Develop version: 1 pip install git+https://github.com/artefactory/vertex-pipelines-deployer.git@develop If you want to test this package on examples from this repo: 1 2 3 git clone git@github.com:artefactory/vertex-pipelines-deployer.git poetry install cd example From Artifact Registry (not available in PyPI yet) \u00b6 The package is available on a public Google Artifact Registry repo. 
You need to specify a pip extra index url to install it. Install latest version: 1 pip install --extra-index-url https://europe-west1-python.pkg.dev/vertex-deployer-sandbox-3a8a/vertex-deployer/simple vertex-deployer List available versions: 1 pip index versions --extra-index-url https://europe-west1-python.pkg.dev/vertex-deployer-sandbox-3a8a/vertex-deployer/simple vertex-deployer Add to requirements \u00b6 It's better to get the .tar.gz archive from gcs, and version it. Then add the following lines to your requirements.in file: 1 2 3 --extra-index-url https://europe-west1-python.pkg.dev/vertex-deployer-sandbox-3a8a/vertex-deployer/simple vertex-deployer == 0 .3.1","title":"Installation"},{"location":"install/#from-git-repo","text":"Stable version: 1 pip install git+https://github.com/artefactory/vertex-pipelines-deployer.git@main Develop version: 1 pip install git+https://github.com/artefactory/vertex-pipelines-deployer.git@develop If you want to test this package on examples from this repo: 1 2 3 git clone git@github.com:artefactory/vertex-pipelines-deployer.git poetry install cd example","title":"From git repo"},{"location":"install/#from-artifact-registry-not-available-in-pypi-yet","text":"The package is available on a public Google Artifact Registry repo. You need to specify a pip extra index url to install it. Install latest version: 1 pip install --extra-index-url https://europe-west1-python.pkg.dev/vertex-deployer-sandbox-3a8a/vertex-deployer/simple vertex-deployer List available versions: 1 pip index versions --extra-index-url https://europe-west1-python.pkg.dev/vertex-deployer-sandbox-3a8a/vertex-deployer/simple vertex-deployer","title":"From Artifact Registry (not available in PyPI yet)"},{"location":"install/#add-to-requirements","text":"It's better to get the .tar.gz archive from gcs, and version it. 
Then add the following lines to your requirements.in file: 1 2 3 --extra-index-url https://europe-west1-python.pkg.dev/vertex-deployer-sandbox-3a8a/vertex-deployer/simple vertex-deployer == 0 .3.1","title":"Add to requirements"},{"location":"setup/","text":"TL;DR You need a GCP project ready to use Vertex Pipelines. And the following file structure from Vertex Pipeline Starter Kit : 1 2 3 4 5 6 vertex \u251c\u2500 configs/ \u2502 \u2514\u2500 { pipeline_name } \u2502 \u2514\u2500 { config_name } .json \u2514\u2500 pipelines/ \u2514\u2500 { pipeline_name } .py \ud83d\udccb Prerequisites \u00b6 Unix-like environment (Linux, macOS, WSL, etc...) Python 3.8 to 3.10 Google Cloud SDK A GCP project with Vertex Pipelines enabled \ud83d\udee0\ufe0f Setup \u00b6 Setup your GCP environment: 1 2 3 4 export PROJECT_ID =You need a GCP project ready to use Vertex Pipelines.
+And the following file structure from Vertex Pipeline Starter Kit: +
1 +2 +3 +4 +5 +6 |
|
Setup your GCP environment: +
1 +2 +3 +4 |
|
You need the following APIs to be enabled: +- Cloud Build API +- Artifact Registry API +- Cloud Storage API +- Vertex AI API +
1 +2 +3 +4 +5 |
|
Create an artifact registry repository for your base images (Docker format): +
1 +2 +3 +4 +5 |
|
Build and upload your base images to the repository. To do so, please follow Google Cloud Build documentation.
+Create an artifact registry repository for your pipelines (KFP format): +
1 +2 +3 +4 |
|
Create a GCS bucket for Vertex Pipelines staging: +
1 +2 +3 |
|
Create a service account for Vertex Pipelines: +
1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 +10 +11 +12 +13 +14 +15 +16 +17 |
|
You can use the deployer CLI (see example below) or import VertexPipelineDeployer
in your code (try it yourself).
You must respect the following folder structure. If you already follow the +Vertex Pipelines Starter Kit folder structure, it should be pretty smooth to use this tool:
+1 +2 +3 +4 +5 +6 |
|
About folder structure
+You must have at least these files. If you need to share some config elements between pipelines,
+you can have a shared
folder in configs
and import them in your pipeline configs.
If you're following a different folder structure, you can change the default paths in the pyproject.toml
file.
+See Configuration section for more information.
You file {pipeline_name}.py
must contain a function called {pipeline_name}
decorated using kfp.dsl.pipeline
.
+In previous versions, the functions / object used to be called pipeline
but it was changed to {pipeline_name}
to avoid confusion with the kfp.dsl.pipeline
decorator.
1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 +10 +11 +12 |
|
Config file can be either .py
, .json
or .toml
files.
+They must be located in the config/{pipeline_name}
folder.
Why three formats?
+.py
files are useful to define complex configs (e.g. a list of dicts) while .json
/ .toml
files are useful to define simple configs (e.g. a string).
How to format them?
+.json
and .toml
files must be valid json files containing only one dict of key: value representing parameter values..py
files must be valid python files with two important elements:* parameter_values
to pass arguments to your pipeline
+ * input_artifacts
if you want to retrieve and create input artifacts to your pipeline.
+ See Vertex Documentation for more information.
How to name them?
+{config_name}.py
or {config_name}.json
or {config_name}.toml
. config_name is free but must be unique for a given pipeline.
You will also need the following ENV variables, either exported or in a .env
file (see example in example.env
):
1 +2 +3 +4 +5 +6 +7 +8 |
|
About env files
+We're using env files and dotenv to load the environment variables.
+No default value for --env-file
argument is provided to ensure that you don't accidentally deploy to the wrong project.
+An example.env
file is provided in this repo.
+This also allows you to work with multiple environments thanks to env files (test.env
, dev.env
, prod.env
, etc)
Deploy pipeline: +
1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 +10 |
|
Check pipelines: +
1 |
|
deploy
¶Let's say you defines a pipeline in dummy_pipeline.py
and a config file named config_test.json
. You can deploy your pipeline using the following command:
+
1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 +10 |
|
check
¶To check that your pipelines are valid, you can use the check
command. It uses a pydantic model to:
+- check that your pipeline imports and definition are valid
+- check that your pipeline can be compiled
+- check that all configs related to the pipeline are respecting the pipeline definition (using a Pydantic model based on pipeline signature)
To validate one specific pipeline: +
1 |
|
To validate all pipelines in the vertex/pipelines
folder:
+
1 |
|
create
¶You can create all files needed for a pipeline using the create
command:
+
1 |
|
This will create a my_new_pipeline.py
file in the vertex/pipelines
folder and a vertex/config/my_new_pipeline/
folder with mutliple config files in it.
list
¶You can list all pipelines in the vertex/pipelines
folder using the list
command:
+
1 |
|
config
¶You can check your vertex-deployer
configuration options using the config
command.
+Fields set in pyproject.toml
will overwrite default values and will be displayed differently:
+
1 |
|
1 |
|
To see package version: +
1 |
|
To adapt log level, use the --log-level
option. Default is INFO
.
+
1 |
|