diff --git a/code_snippets/api-guide/audit-log/operation.html b/code_snippets/api-guide/audit-log/operation.html index bbdaffbaed..f980b2b6d6 100644 --- a/code_snippets/api-guide/audit-log/operation.html +++ b/code_snippets/api-guide/audit-log/operation.html @@ -16,7 +16,7 @@ const raw = JSON.stringify({ "query": { - "operations": [300, 400, 600] + "operations": ["MODEL_CREATE", "WORKFLOW_CREATE", "APPLICATION_CREATE"] } }); diff --git a/code_snippets/api-guide/audit-log/operation.js b/code_snippets/api-guide/audit-log/operation.js index 132793095b..aac89ca449 100644 --- a/code_snippets/api-guide/audit-log/operation.js +++ b/code_snippets/api-guide/audit-log/operation.js @@ -27,7 +27,7 @@ stub.PostAuditLogSearches( user_id: USER_ID, }, query: { - operations: [300, 400, 600], + operations: ["MODEL_CREATE", "WORKFLOW_CREATE", "APPLICATION_CREATE"] }, }, metadata, diff --git a/code_snippets/api-guide/audit-log/operation.py b/code_snippets/api-guide/audit-log/operation.py index 0b4a1bc83a..4040af6954 100644 --- a/code_snippets/api-guide/audit-log/operation.py +++ b/code_snippets/api-guide/audit-log/operation.py @@ -26,7 +26,7 @@ service_pb2.PostAuditLogSearchesRequest( user_app_id=userDataObject, # The userDataObject is created in the overview and is required when using a PAT query=resources_pb2.AuditLogQuery( - operations=[300, 400, 600] + operations=[resources_pb2.EventType.MODEL_CREATE, resources_pb2.EventType.WORKFLOW_CREATE, resources_pb2.EventType.APPLICATION_CREATE] ) ), metadata=metadata diff --git a/code_snippets/api-guide/audit-log/operation.sh b/code_snippets/api-guide/audit-log/operation.sh index e9b9cf392f..70fe94d58f 100644 --- a/code_snippets/api-guide/audit-log/operation.sh +++ b/code_snippets/api-guide/audit-log/operation.sh @@ -3,6 +3,6 @@ curl -X POST "https://api.clarifai.com/v2/users/YOUR_USER_ID_HERE/audit_log/sear -H "Content-Type: application/json" \ -d '{ "query": { - "operations": [300, 400, 600] + "operations": ["MODEL_CREATE", 
"WORKFLOW_CREATE", "APPLICATION_CREATE"] } }' diff --git a/code_snippets/python-sdk/cli/predict_by_yaml.yaml b/code_snippets/python-sdk/cli/predict_by_yaml.yaml index 0e44074b3b..2d1cbc65d3 100644 --- a/code_snippets/python-sdk/cli/predict_by_yaml.yaml +++ b/code_snippets/python-sdk/cli/predict_by_yaml.yaml @@ -1,3 +1,3 @@ model_url: "https://clarifai.com/anthropic/completion/models/claude-v2" bytes: "Human: Write a tweet on future of AI\nAssistant:" -input_type: "text" +input_type: "text" \ No newline at end of file diff --git a/docs/api-guide/audit-log/README.mdx b/docs/api-guide/audit-log/README.mdx index 742ce9b2ed..a7c90acaa8 100644 --- a/docs/api-guide/audit-log/README.mdx +++ b/docs/api-guide/audit-log/README.mdx @@ -308,18 +308,18 @@ You can define the target of your query; that is, specify the resource on which **Target Types Supported** -| Target |Code | -|-----------------|--------------------| -| `User user` | 1 | -| `Role role` | 2 | -| `Team team` | 3 | -| `App app` | 4 | -| `Module module` | 5 | -| `ModuleVersion module_version` | 6 | -| `Workflow workflow` | 7 | -| `WorkflowVersion workflow_version ` | 8 | -| `Model model` | 9 | -| `ModelVersion model_version` | 10 | +| Target | +|-----------------| +| `User user` | +| `Role role` | +| `Team team` | +| `App app` | +| `Module module` | +| `ModuleVersion module_version` | +| `Workflow workflow` | +| `WorkflowVersion workflow_version ` | +| `Model model` | +| `ModelVersion model_version` | diff --git a/docs/portal-guide/compute-orchestration/README.mdx b/docs/portal-guide/compute-orchestration/README.mdx index 5de11558f5..fd16a37550 100644 --- a/docs/portal-guide/compute-orchestration/README.mdx +++ b/docs/portal-guide/compute-orchestration/README.mdx @@ -47,6 +47,7 @@ If you’re not using Compute Orchestration, the Shared SaaS (Serverless) deploy ![ ](/img/compute-orchestration/intro-1.png) + ## Compute Clusters and Nodepools We use [clusters and 
nodepools](https://docs.clarifai.com/portal-guide/compute-orchestration/set-up-compute) to organize and manage the compute resources required for the Compute Orchestration capabilities. diff --git a/docs/sdk/Inference-from-AI-Models/Audio-as-Input.md b/docs/sdk/Inference-from-AI-Models/Audio-as-Input.md index 4f33740f59..334be4d445 100644 --- a/docs/sdk/Inference-from-AI-Models/Audio-as-Input.md +++ b/docs/sdk/Inference-from-AI-Models/Audio-as-Input.md @@ -23,7 +23,7 @@ The Clarifai SDKs for Audio Processing provides a comprehensive set of tools and :::tip Clarifai CLI -Learn how to use the Clarifai CLI (Command Line Interface) tool [here](https://docs.clarifai.com/sdk/Inference-from-AI-Models/#clarifai-cli). +Learn how to use the Clarifai CLI (Command Line Interface) tool [here](https://docs.clarifai.com/sdk/cli). ::: diff --git a/docs/sdk/Inference-from-AI-Models/Image-as-Input.md b/docs/sdk/Inference-from-AI-Models/Image-as-Input.md index 1f0c8a052f..33ce0b54ab 100644 --- a/docs/sdk/Inference-from-AI-Models/Image-as-Input.md +++ b/docs/sdk/Inference-from-AI-Models/Image-as-Input.md @@ -53,7 +53,7 @@ Clarifai SDKs empowers you to seamlessly integrate advanced image recognition fu :::tip Clarifai CLI -Learn how to use the Clarifai CLI (Command Line Interface) tool [here](https://docs.clarifai.com/sdk/Inference-from-AI-Models/#clarifai-cli). +Learn how to use the Clarifai CLI (Command Line Interface) tool [here](https://docs.clarifai.com/sdk/cli). ::: diff --git a/docs/sdk/Inference-from-AI-Models/README.mdx b/docs/sdk/Inference-from-AI-Models/README.mdx index 743a0ba7ff..c69bfd5726 100644 --- a/docs/sdk/Inference-from-AI-Models/README.mdx +++ b/docs/sdk/Inference-from-AI-Models/README.mdx @@ -10,148 +10,10 @@ sidebar_position: 6 You can leverage the Clarifai Python SDK or the Node.js SDK to make accurate predictions on your data. 
Whether you're working with images, videos, text, or other formats, the SDKs offer an intuitive and efficient way to interact with AI models and perform inferences seamlessly. -For an even simpler approach, Clarifai provides a user-friendly Command Line Interface (CLI), bundled within the Python SDK package. The CLI streamlines inferencing tasks, allowing you to quickly run predictions and view results — all without requiring extensive setup. +For an even simpler approach, Clarifai provides a user-friendly [Command Line Interface](https://docs.clarifai.com/sdk/cli) (CLI), bundled within the Python SDK package. The CLI streamlines inferencing tasks, allowing you to quickly run predictions and view results — all without requiring extensive setup. This combination of tools ensures flexibility and ease of use, empowering you to harness the full potential of Clarifai's AI capabilities. -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; -import CodeBlock from "@theme/CodeBlock"; - -import Login from "!!raw-loader!../../../code_snippets/python-sdk/cli/login.yaml"; -import PredictOptions from "!!raw-loader!../../../code_snippets/python-sdk/cli/predict_options.txt"; -import PredictByIds from "!!raw-loader!../../../code_snippets/python-sdk/cli/predict_by_ids.sh"; -import PredictByIdsOutput from "!!raw-loader!../../../code_snippets/python-sdk/cli/predict_by_ids_output.txt"; -import PredictByFilePath from "!!raw-loader!../../../code_snippets/python-sdk/cli/predict_by_file_path.sh"; -import PredictByURL from "!!raw-loader!../../../code_snippets/python-sdk/cli/predict_by_url.sh"; -import PredictByModelURL from "!!raw-loader!../../../code_snippets/python-sdk/cli/predict_by_model_url.sh"; -import PredictByInputID from "!!raw-loader!../../../code_snippets/python-sdk/cli/predict_by_input_id.sh"; -import PredictByYAML from "!!raw-loader!../../../code_snippets/python-sdk/cli/predict_by_yaml.yaml"; -import PredictByYAMLBash from 
"!!raw-loader!../../../code_snippets/python-sdk/cli/predict_by_yaml_bash.sh"; -import InferenceParameters from "!!raw-loader!../../../code_snippets/python-sdk/cli/inference_parameters.sh"; -import OutputConfig from "!!raw-loader!../../../code_snippets/python-sdk/cli/output_config.sh"; - -## Clarifai CLI - -To make predictions using the [Clarifai CLI](https://github.com/Clarifai/examples/blob/main/CLI/model.ipynb), you first need to create a login configuration file for storing your account credentials. - - - - {Login} - - - -Then, authenticate your CLI session with Clarifai using the stored credentials in the configuration file: - -```bash -clarifai login --config -``` - -You can perform model predictions using the Clarifai CLI in the following ways: - -- By specifying `user_id`, `app_id`, and `model_id` -- By providing the model URL -- By using a YAML configuration file - - -
- CLI Predict Options - {PredictOptions} -
- -### Predict by IDs - -You can use the `--bytes` argument along with specifying `user_id`, `app_id`, and `model_id`. - - - - {PredictByIds} - - - -
- Output Example - {PredictByIdsOutput} -
- -> You can also use the `--file_path` argument, which specifies the local path to the file that contains the instructions for the model to generate predictions. - - - - {PredictByFilePath} - - - -> You can also use the `--url` argument, which specifies the URL of the file that contains the instructions for the model to generate predictions. - - - - {PredictByURL} - - - - - -### Predict by Model URL - -You can make predictions by using the `--model_url` argument, which specifies the URL of the model to be used for generating predictions. - - - - {PredictByModelURL} - - - -### Predict by a YAML file - -You can provide the instructions for generating predictions in a YMAL configuration file. - -Here is an example: - - - - {PredictByYAML} - - - -Then, you need to specify the path to that file. - - - - {PredictByYAMLBash} - - - -### Specify Prediction Parameters - -You can specify prediction parameters to influence the output of some models. These settings allow you to control the model's behavior during prediction, influencing attributes such as creativity, coherence, and diversity in the results. - -You can get a description of the prediction parameters [here](https://docs.clarifai.com/sdk/Inference-from-AI-Models/Advance-Inference-Options/#prediction-paramaters). 
- -Here is how you can specify various inference parameters : - - - - {InferenceParameters} - - - -Here is how you can specify output configuration parameters: - - - - {OutputConfig} - - import DocCardList from '@theme/DocCardList'; import {useCurrentSidebarCategory} from '@docusaurus/theme-common'; diff --git a/docs/sdk/Inference-from-AI-Models/Text-as-Input.md b/docs/sdk/Inference-from-AI-Models/Text-as-Input.md index fba19395f9..de17ac5236 100644 --- a/docs/sdk/Inference-from-AI-Models/Text-as-Input.md +++ b/docs/sdk/Inference-from-AI-Models/Text-as-Input.md @@ -52,7 +52,7 @@ Unlock the potential of Clarifai's state-of-the-art text-based AI features, allo :::tip Clarifai CLI -Learn how to use the Clarifai CLI (Command Line Interface) tool [here](https://docs.clarifai.com/sdk/Inference-from-AI-Models/#clarifai-cli). +Learn how to use the Clarifai CLI (Command Line Interface) tool [here](https://docs.clarifai.com/sdk/cli). ::: diff --git a/docs/sdk/advance-model-operations/model-upload.md b/docs/sdk/advance-model-operations/model-upload.md index c073d96bff..67d742a2c9 100644 --- a/docs/sdk/advance-model-operations/model-upload.md +++ b/docs/sdk/advance-model-operations/model-upload.md @@ -224,7 +224,8 @@ Ensure your local environment has sufficient memory and compute resources to loa ::: -There are two types of CLI (command line interface) commands you can use to test your models in your local development environment. You can learn more about the Clarifai CLI tool [here](https://github.com/Clarifai/examples/blob/main/CLI/model.ipynb). + +There are two types of CLI (command line interface) commands you can use to test your models in your local development environment. You can learn more about the Clarifai CLI tool [here](https://docs.clarifai.com/sdk/cli). #### 1. 
Using the `test-locally` Command diff --git a/docs/sdk/cli.md b/docs/sdk/cli.md new file mode 100644 index 0000000000..60b0a3480f --- /dev/null +++ b/docs/sdk/cli.md @@ -0,0 +1,242 @@ +--- +description: Learn how to use the Clarifai Command Line Interface (CLI) +sidebar_position: 1.1 +--- + +# Clarifai CLI + +**Learn how to use the Clarifai Command Line Interface (CLI)** +
+ +Clarifai’s Command Line Interface (CLI) is a powerful, user-friendly tool designed to simplify and enhance your experience with our AI platform. By offering a streamlined way to execute tasks directly from the terminal, the CLI eliminates the need for extensive coding or constant reliance on the web interface. + +[The Clarifai CLI](https://github.com/Clarifai/examples/tree/main/CLI) supports a broad range of functionalities, from making model predictions to leveraging advanced Compute Orchestration capabilities, making it an essential tool for a wide range of use cases. + +Bundled within our Python SDK package, the CLI empowers both technical and non-technical users to efficiently manage tasks and boost productivity on the Clarifai platform. + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; +import CodeBlock from "@theme/CodeBlock"; + +import Login from "!!raw-loader!../../code_snippets/python-sdk/cli/login.yaml"; +import PredictOptions from "!!raw-loader!../../code_snippets/python-sdk/cli/predict_options.txt"; +import PredictByIds from "!!raw-loader!../../code_snippets/python-sdk/cli/predict_by_ids.sh"; +import PredictByIdsOutput from "!!raw-loader!../../code_snippets/python-sdk/cli/predict_by_ids_output.txt"; +import PredictByFilePath from "!!raw-loader!../../code_snippets/python-sdk/cli/predict_by_file_path.sh"; +import PredictByURL from "!!raw-loader!../../code_snippets/python-sdk/cli/predict_by_url.sh"; +import PredictByModelURL from "!!raw-loader!../../code_snippets/python-sdk/cli/predict_by_model_url.sh"; +import PredictByInputID from "!!raw-loader!../../code_snippets/python-sdk/cli/predict_by_input_id.sh"; +import PredictByYAML from "!!raw-loader!../../code_snippets/python-sdk/cli/predict_by_yaml.yaml"; +import PredictByYAMLBash from "!!raw-loader!../../code_snippets/python-sdk/cli/predict_by_yaml_bash.sh"; +import InferenceParameters from "!!raw-loader!../../code_snippets/python-sdk/cli/inference_parameters.sh"; +import 
OutputConfig from "!!raw-loader!../../code_snippets/python-sdk/cli/output_config.sh"; + +## Installation + +To begin, install the latest version of the `clarifai` Python package. + +```text +pip install --upgrade clarifai +``` + +## Basics + +The CLI tool is designed to help users manage various aspects of their compute resources, deployments, and models through a series of intuitive commands and aliases. + +```text +Usage: clarifai [OPTIONS] COMMAND [ARGS]... + + Clarifai CLI + +Options: + --help Show this message and exit. + +Commands: + cc Alias for 'computecluster' + computecluster Manage Compute Clusters: create, delete, list + deployment Manage Deployments: create, delete, list + dpl Alias for 'deployment' + login Login command to set PAT and other configurations. + model Manage models: upload, test locally, run_locally, predict + nodepool Manage Nodepools: create, delete, list + np Alias for 'nodepool' + +``` + +The `--help` option is particularly useful to quickly understand the available functionalities and how to use them. + + + + + clarifai COMMAND --help + + + + +For example: + + + + + clarifai model --help + + + + +Produces this output: + +```text +Usage: clarifai model [OPTIONS] COMMAND [ARGS]... + + Manage models: upload, test locally, run_locally, predict + +Options: + --help Show this message and exit. + +Commands: + predict Predict using the given model + run-locally Run the model locally and start a gRPC server to serve... + test-locally Test model locally. + upload Upload a model to Clarifai. + +``` + +:::tip + +You can learn how to use the `run-locally`, `test-locally`, and `upload` commands [here](https://docs.clarifai.com/sdk/advance-model-operations/model-upload#step-4-test-the-model-locally). + +::: + +## Login + +To use the Clarifai CLI, you must first log in using a [Personal Access Token](https://docs.clarifai.com/clarifai-basics/authentication/personal-access-tokens) (PAT). 
This requires creating a YMAL login configuration file to securely store your credentials. + + + + {Login} + + + +Once the configuration file is set up, you can authenticate your CLI session with Clarifai using the stored credentials. This ensures seamless access to the CLI's features and functionalities. + + + + + clarifai login --config `` + + + + +## Compute Orchestration + +The Clarifai CLI simplifies Compute Orchestration tasks. With the CLI, you can easily manage the infrastructure required for deploying and scaling machine learning models, even without extensive technical expertise. + +You can learn how to use the CLI for Compute Orchestration [here](https://docs.clarifai.com/sdk/compute-orchestration). + +## Model Operations + +You can perform model predictions using the Clarifai CLI in the following ways: + +- By specifying `user_id`, `app_id`, and `model_id` +- By providing the model URL +- By using a YAML configuration file + + +
+ CLI Predict Options + {PredictOptions} +
+ +### Predict by IDs + +You can use the `--bytes` argument along with specifying `user_id`, `app_id`, and `model_id`. + + + + {PredictByIds} + + + +
+ Output Example + {PredictByIdsOutput} +
+ +> You can also use the `--file_path` argument, which specifies the local path to the file that contains the instructions for the model to generate predictions. + + + + {PredictByFilePath} + + + +> You can also use the `--url` argument, which specifies the URL of the file that contains the instructions for the model to generate predictions. + + + + {PredictByURL} + + + + + +### Predict by Model URL + +You can make predictions by using the `--model_url` argument, which specifies the URL of the model to be used for generating predictions. + + + + {PredictByModelURL} + + + +### Predict by a YAML file + +You can provide the instructions for generating predictions in a YMAL configuration file. + +Here is an example: + + + + {PredictByYAML} + + + +Then, you need to specify the path to that file. + + + + {PredictByYAMLBash} + + + +### Specify Prediction Parameters + +You can specify prediction parameters to influence the output of some models. These settings allow you to control the model's behavior during prediction, influencing attributes such as creativity, coherence, and diversity in the results. + +You can get a description of the prediction parameters [here](https://docs.clarifai.com/sdk/Inference-from-AI-Models/Advance-Inference-Options/#prediction-paramaters). + +Here is how you can specify various inference parameters : + + + + {InferenceParameters} + + + +Here is how you can specify output configuration parameters: + + + + {OutputConfig} + + diff --git a/static/img/compute-orchestration/intro-1.png b/static/img/compute-orchestration/intro-1.png index 44a1908bc0..a08118d258 100644 Binary files a/static/img/compute-orchestration/intro-1.png and b/static/img/compute-orchestration/intro-1.png differ