bug fix for LLM function calling (#178)
* bug fix for LLM function calling

Signed-off-by: Mandana Vaziri <[email protected]>
vazirim authored Nov 11, 2024
1 parent a61c3af commit 60f15a7
Showing 7 changed files with 32 additions and 32 deletions.

src/pdl/pdl_llms.py (7 changes: 5 additions & 2 deletions)
@@ -162,7 +162,10 @@ def generate_text(
         )
         msg = response.choices[0].message  # pyright: ignore
         if msg.content is None:
-            assert False, "TODO"  # XXX TODO XXX
+            return {
+                "role": msg.role,
+                "content": "",
+            }, response.json()  # pyright: ignore
         return {
             "role": msg.role,
             "content": msg.content,

@@ -190,6 +193,6 @@ def generate_text_stream(
             result.append(chunk.json())  # pyright: ignore
             msg = chunk.choices[0].delta  # pyright: ignore
             if msg.content is None:
-                break
+                continue
             yield {"role": msg.role, "content": msg.content}
         return result
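
For context on the fix above: with LiteLLM/OpenAI-style chat completions, message.content is typically None when the model responds with a function/tool call instead of text, so the old assert (non-streaming path) and break (streaming path) aborted exactly on function-calling responses. The patch substitutes an empty string and, in the streaming path, skips content-less chunks with continue so the rest of the stream is still consumed. A minimal, runnable sketch of that behavior; the _Message type and helper below are illustrative stand-ins, not PDL's actual classes:

from dataclasses import dataclass, field
from typing import Optional


@dataclass
class _Message:
    # Stand-in for a provider chat message: on a function call, content is None
    # and the call itself is carried in tool_calls.
    role: str
    content: Optional[str]
    tool_calls: list = field(default_factory=list)


def to_message_dict(msg: _Message) -> dict:
    # Mirrors the patched non-streaming path: fall back to "" instead of failing.
    return {"role": msg.role, "content": msg.content if msg.content is not None else ""}


if __name__ == "__main__":
    tool_call = _Message(
        role="assistant",
        content=None,
        tool_calls=[{"name": "Search", "arguments": {"topic": "Henry Hudson"}}],
    )
    print(to_message_dict(tool_call))  # {'role': 'assistant', 'content': ''}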

tests/data/line/hello16.pdl (2 changes: 1 addition & 1 deletion)
@@ -20,7 +20,7 @@ text:
     - 'Question: Write a JSON object with 2 fields "bob" and "carol" set to "20" and "30" respectively.'
   parser: yaml
   parameters:
-    decoding_method: greedy
+    temperature: 0
     stop:
     - '}'
     include_stop_sequence: true

tests/data/line/hello3.pdl (5 changes: 2 additions & 3 deletions)
@@ -1,13 +1,12 @@
 description: Hello world to call into a model
 defs:
-  model: watsonx/ibm/granite-34b-code-instruct
+  model: watsonx/ibm/granite-20b-code-instruct
 text:
 - Hello,
 - model: ${ model }
   spec: int
   parameters:
-    decoding_method: greedy
+    temperature: 0
     stop:
     - '!'
-    include_stop_sequence: true
     mock_response: " World!"

tests/results/examples/react/demo.result (2 changes: 2 additions & 0 deletions)
@@ -35,6 +35,8 @@ Act: {"name": "Search", "arguments": {"topic": "Henry Hudson"}}
 Obs: Henry Hudson (c. 1565 – disappeared 23 June 1611) was an English sea explorer and navigator during the early 17th century, best known for his explorations of present-day Canada and parts of the Northeastern United States.
 In 1607 and 1608, Hudson made two attempts on behalf of English merchants to find a rumoured Northeast Passage to Cathay via a route above the Arctic Circle. In 1609, he landed in North America on behalf of the Dutch East India Company and explored the region around the modern New York metropolitan area. Looking for a Northwest Passage to Asia on his ship Halve Maen ("Half Moon"), he sailed up the Hudson River, which was later named after him, and thereby laid the foundation for Dutch colonization of the region. His contributions to the exploration of the New World were significant and lasting. His voyages helped to establish European contact with the native peoples of North America and contributed to the development of trade and commerce.
 On his final expedition, while still searching for the Northwest Passage, Hudson became the first European to see Hudson Strait and the immense Hudson Bay. In 1611, after wintering on the shore of James Bay, Hudson wanted to press on to the west, but most of his crew mutinied. The mutineers cast Hudson, his son, and six others adrift; what then happened to the Hudsons and their companions is unknown.
+
+
 Tho: Henry Hudson was born around 1565. To find out how many years ago that was, I need to subtract his birth year from the current year, which is 2024.
 Act: {"name": "Calc", "arguments": {"expr": "2024 - 1565"}}
 Obs: 459

tests/test_model.py (35 changes: 17 additions & 18 deletions)
@@ -6,11 +6,10 @@
     "text": [
         "Hello,",
         {
-            "model": "watsonx/ibm/granite-34b-code-instruct",
+            "model": "watsonx/meta-llama/llama-3-8b-instruct",
             "parameters": {
-                "decoding_method": "greedy",
-                "stop_sequences": ["!"],
-                "include_stop_sequence": False,
+                "temperature": 0,
+                "stop": ["!"],
                 "mock_response": " World",
             },
         },

@@ -34,9 +33,9 @@ def test_model():
         "def": "SOMEONE",
         "text": [
             {
-                "model": "watsonx/ibm/granite-34b-code-instruct",
+                "model": "watsonx/meta-llama/llama-3-8b-instruct",
                 "parameters": {
-                    "decoding_method": "greedy",
+                    "temperature": 0,
                     "stop": ["!"],
                     "include_stop_sequence": False,
                     "mock_response": " World",

@@ -54,8 +53,8 @@ def test_model():
             {
                 "model": "watsonx/google/flan-t5-xl",
                 "parameters": {
-                    "decoding_method": "greedy",
-                    "stop_sequences": ["."],
+                    "temperature": 0,
+                    "stop": ["."],
                     "include_stop_sequence": True,
                     "roles": {"user": {"pre_message": "", "post_message": ""}},
                     "mock_response": 'World is a fictional character in the popular science fiction television series "The X-Files',

@@ -85,7 +84,7 @@ def test_model_chain():
         "def": "LOCATION",
         "text": [
             {
-                "model": "watsonx/ibm/granite-34b-code-instruct",
+                "model": "watsonx/meta-llama/llama-3-8b-instruct",
                 "input": {
                     "text": [
                         "Question: What is the weather in London?\n",

@@ -98,8 +97,8 @@
                     ]
                 },
                 "parameters": {
-                    "decoding_method": "greedy",
-                    "stop_sequences": ["Question"],
+                    "temperature": 0,
+                    "stop": ["Question"],
                     "include_stop_sequence": False,
                     "mock_response": "Armonk",
                 },

@@ -122,9 +121,9 @@ def test_multi_shot():
     "text": [
         "Hello,\n",
         {
-            "model": "watsonx/ibm/granite-34b-code-instruct",
+            "model": "watsonx/meta-llama/llama-3-8b-instruct",
             "parameters": {
-                "stop_sequences": ["."],
+                "stop": ["."],
                 "mock_response": '\nI have a question about the use of the word "in" in the sentence: "The cake was baked in the oven.',
             },
         },

@@ -144,12 +143,12 @@ def test_data_missing_parameters():
 
 model_parameter = {
     "description": "Hello world with a variable",
-    "defs": {"model": "watsonx/ibm/granite-34b-code-instruct"},
+    "defs": {"model": "watsonx/meta-llama/llama-3-8b-instruct"},
     "text": [
         "Hello,",
         {
             "model": "${ model }",
-            "parameters": {"stop_sequences": ["!"], "mock_response": " World!"},
+            "parameters": {"stop": ["!"], "mock_response": " World!"},
         },
     ],
 }

@@ -164,12 +163,12 @@ def test_model_parameter():
 
 model_parameter1 = {
     "description": "Hello world with a variable",
-    "defs": {"model": "granite-34b-code-instruct"},
+    "defs": {"model": "watsonx/meta-llama/llama-3-8b-instruct"},
     "text": [
         "Hello,",
         {
             "model": "watsonx/ibm/${ model }",
-            "parameters": {"stop_sequences": ["!"], "mock_response": " World!"},
+            "parameters": {"stop": ["!"], "mock_response": " World!"},
         },
     ],
 }

@@ -187,7 +186,7 @@ def test_model_parameter1():
     "text": [
         "Hello,",
         {
-            "model": "watsonx/ibm/granite-34b-code-instruct-v2",
+            "model": "watsonx/meta-llama/llama-3-8b-instruct",
             "platform": "litellm",
             "parameters": {"stop": ["!"], "mock_response": " World!"},
         },
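
A recurring pattern in the test updates above, and in the .pdl samples earlier in this commit: watsonx-specific generation options are swapped for their LiteLLM/OpenAI-style counterparts (temperature: 0 in place of decoding_method: "greedy", stop in place of stop_sequences, with include_stop_sequence dropped where it is no longer wanted). The helper below is a hypothetical illustration of that rename, not part of PDL:

def migrate_parameters(params: dict) -> dict:
    # Hypothetical helper: rewrite watsonx-style options into LiteLLM-style ones.
    out = dict(params)
    if out.pop("decoding_method", None) == "greedy":
        out["temperature"] = 0  # temperature 0 approximates greedy decoding
    if "stop_sequences" in out:
        out["stop"] = out.pop("stop_sequences")
    return out


print(migrate_parameters({"decoding_method": "greedy", "stop_sequences": ["!"]}))
# -> {'temperature': 0, 'stop': ['!']}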

tests/test_parser.py (3 changes: 1 addition & 2 deletions)
@@ -25,8 +25,7 @@
     },
     "parser": "json",
     "parameters": {
-        "stop_sequences": ["}"],
-        "include_stop_sequence": True,
+        "stop": ["}"],
         "mock_response": '{"bob": 20, "carol": 30}',
     },
 }

tests/test_var.py (10 changes: 4 additions & 6 deletions)
@@ -18,9 +18,8 @@
         {
             "model": "watsonx/ibm/granite-34b-code-instruct",
             "parameters": {
-                "decoding_method": "greedy",
-                "stop_sequences": ["!"],
-                "include_stop_sequence": False,
+                "temperature": 0,
+                "stop": ["!"],
                 "mock_response": " World",
             },
         }

@@ -51,9 +50,8 @@ def test_var():
         {
             "model": "watsonx/ibm/granite-34b-code-instruct",
             "parameters": {
-                "decoding_method": "greedy",
-                "stop_sequences": ["!"],
-                "include_stop_sequence": False,
+                "temperature": 0,
+                "stop": ["!"],
                 "mock_response": " World",
             },
         }
