Set default params for Watsonx chat & text models #260

Open · wants to merge 2 commits into main
Changes from 1 commit
2 changes: 1 addition & 1 deletion pyproject.toml
Review comment (Member):
We just removed the upper bound on LiteLLM per a user's request. See the latest in main, including changes to the watsonx examples. Please merge that.

@@ -13,7 +13,7 @@ dependencies = [
     "jinja2~=3.0",
     "PyYAML~=6.0",
     "jsonschema~=4.0",
-    "litellm>=1.49",
+    "litellm>=1.52.1",
     "termcolor~=2.0",
     "ipython~=8.0",
 ]
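The only change here is bumping the litellm floor from 1.49 to 1.52.1. As a quick, hypothetical sanity check (not part of the PR; assumes the `packaging` library is installed alongside litellm), an environment can be verified against the new floor like this:

```python
# Hypothetical check (not part of this PR): confirm the installed litellm
# satisfies the new lower bound declared in pyproject.toml.
from importlib.metadata import version

from packaging.version import Version  # assumes `packaging` is available

installed = Version(version("litellm"))
assert installed >= Version("1.52.1"), f"litellm {installed} is older than 1.52.1"
```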
81 changes: 44 additions & 37 deletions src/pdl/pdl_ast.py
@@ -655,41 +655,47 @@ def set_default_granite_model_parameters(
     if parameters is None:
         parameters = {}
 
-    if "watsonx" in model_id:
-        if "decoding_method" not in parameters:
-            parameters["decoding_method"] = (
-                DECODING_METHOD  # pylint: disable=attribute-defined-outside-init
-            )
-        if "max_tokens" in parameters and parameters["max_tokens"] is None:
-            parameters["max_tokens"] = (
-                MAX_NEW_TOKENS  # pylint: disable=attribute-defined-outside-init
-            )
-        if "min_new_tokens" not in parameters:
-            parameters["min_new_tokens"] = (
-                MIN_NEW_TOKENS  # pylint: disable=attribute-defined-outside-init
-            )
-        if "repetition_penalty" not in parameters:
-            parameters["repetition_penalty"] = (
-                REPETITION_PENATLY  # pylint: disable=attribute-defined-outside-init
-            )
-        if parameters["decoding_method"] == "sample":
-            if "temperature" not in parameters:
-                parameters["temperature"] = (
-                    TEMPERATURE_SAMPLING  # pylint: disable=attribute-defined-outside-init
-                )
-            if "top_k" not in parameters:
-                parameters["top_k"] = (
-                    TOP_K_SAMPLING  # pylint: disable=attribute-defined-outside-init
-                )
-            if "top_p" not in parameters:
-                parameters["top_p"] = (
-                    TOP_P_SAMPLING  # pylint: disable=attribute-defined-outside-init
-                )
-    if "replicate" in model_id and "granite-3.0" in model_id:
-        if "temperature" not in parameters or parameters["temperature"] is None:
-            parameters["temperature"] = 0  # setting to decoding greedy
-        if "roles" not in parameters:
-            parameters["roles"] = {
+    # see https://cloud.ibm.com/apidocs/watsonx-ai#text-chat-request
+    if model_id.startswith("watsonx/"):
+        parameters.setdefault("temperature", 0)  # setting to decoding greedy
+
+    # see https://cloud.ibm.com/apidocs/watsonx-ai#text-generation-request
+    if model_id.startswith("watsonx_text/"):
+        parameters.setdefault(
+            "decoding_method",
+            DECODING_METHOD,
+        )
+        parameters.setdefault(
+            "max_tokens",
+            MAX_NEW_TOKENS,
+        )
+        parameters.setdefault(
+            "min_new_tokens",
+            MIN_NEW_TOKENS,
+        )
+        parameters.setdefault(
+            "repetition_penalty",
+            REPETITION_PENATLY,
+        )
+        if parameters["decoding_method"] == "sample":
+            parameters.setdefault(
+                "temperature",
+                TEMPERATURE_SAMPLING,
+            )
+            parameters.setdefault(
+                "top_k",
+                TOP_K_SAMPLING,
+            )
+            parameters.setdefault(
+                "top_p",
+                TOP_P_SAMPLING,
+            )
+
+    if model_id.startswith("replicate/") and "granite-3.0" in model_id:
+        parameters.setdefault("temperature", 0)  # setting to decoding greedy
+        parameters.setdefault(
+            "roles",
+            {
                 "system": {
                     "pre_message": "<|start_of_role|>system<|end_of_role|>",
                     "post_message": "<|end_of_text|>",
@@ -710,10 +716,11 @@ def set_default_granite_model_parameters(
                     "pre_message": "<|start_of_role|>tool_response<|end_of_role|>",
                     "post_message": "<|end_of_text|>",
                 },
-            }
-        if "final_prompt_value" not in parameters:
-            parameters["final_prompt_value"] = (
-                "<|start_of_role|>assistant<|end_of_role|>"
-            )
+            },
+        )
+        parameters.setdefault(
+            "final_prompt_value",
+            "<|start_of_role|>assistant<|end_of_role|>",
+        )
 
     return parameters
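For readers new to the pattern: the refactor replaces the `if "key" not in parameters:` branches with `dict.setdefault`, which writes a default only when the caller has not supplied the key. A minimal standalone sketch of that behavior (made-up values, not the module constants from pdl_ast.py):

```python
# Standalone illustration of the dict.setdefault pattern used in the diff above.
# The values are made up; the real defaults come from constants such as
# DECODING_METHOD and MAX_NEW_TOKENS in pdl_ast.py.
parameters = {"temperature": 0.7}  # caller-supplied value

parameters.setdefault("temperature", 0)  # no-op: key already present
parameters.setdefault("decoding_method", "greedy")  # filled in: key was missing

print(parameters)  # {'temperature': 0.7, 'decoding_method': 'greedy'}
```

One nuance worth noting: `setdefault` leaves an explicit `None` in place, while some of the replaced branches (the old `max_tokens` check and the replicate `temperature` check) also overwrote `None` values.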