diff --git a/src/pdl/pdl_llms.py b/src/pdl/pdl_llms.py
index 7ebe19ad..12988d6a 100644
--- a/src/pdl/pdl_llms.py
+++ b/src/pdl/pdl_llms.py
@@ -162,7 +162,10 @@ def generate_text(
         )
         msg = response.choices[0].message  # pyright: ignore
         if msg.content is None:
-            assert False, "TODO"  # XXX TODO XXX
+            return {
+                "role": msg.role,
+                "content": "",
+            }, response.json()  # pyright: ignore
         return {
             "role": msg.role,
             "content": msg.content,
@@ -190,6 +193,6 @@ def generate_text_stream(
             result.append(chunk.json())  # pyright: ignore
             msg = chunk.choices[0].delta  # pyright: ignore
             if msg.content is None:
-                break
+                continue
             yield {"role": msg.role, "content": msg.content}
         return result
diff --git a/tests/data/line/hello16.pdl b/tests/data/line/hello16.pdl
index 9e530add..9f590b71 100644
--- a/tests/data/line/hello16.pdl
+++ b/tests/data/line/hello16.pdl
@@ -20,7 +20,7 @@ text:
 - 'Question: Write a JSON object with 2 fields "bob" and "carol" set to "20" and "30" respectively.'
   parser: yaml
   parameters:
-    decoding_method: greedy
+    temperature: 0
     stop:
     - '}'
     include_stop_sequence: true
diff --git a/tests/data/line/hello3.pdl b/tests/data/line/hello3.pdl
index 5b8e194e..a1ca2ce8 100644
--- a/tests/data/line/hello3.pdl
+++ b/tests/data/line/hello3.pdl
@@ -1,13 +1,12 @@
 description: Hello world to call into a model
 defs:
-  model: watsonx/ibm/granite-34b-code-instruct
+  model: watsonx/ibm/granite-20b-code-instruct
 text:
 - Hello,
 - model: ${ model }
   spec: int
   parameters:
-    decoding_method: greedy
+    temperature: 0
     stop:
     - '!'
-    include_stop_sequence: true
     mock_response: " World!"
\ No newline at end of file
diff --git a/tests/results/examples/react/demo.result b/tests/results/examples/react/demo.result
index 10db7862..8760cf32 100644
--- a/tests/results/examples/react/demo.result
+++ b/tests/results/examples/react/demo.result
@@ -35,6 +35,8 @@ Act: {"name": "Search", "arguments": {"topic": "Henry Hudson"}}
 
 Obs: Henry Hudson (c. 1565 – disappeared 23 June 1611) was an English sea explorer and navigator during the early 17th century, best known for his explorations of present-day Canada and parts of the Northeastern United States. In 1607 and 1608, Hudson made two attempts on behalf of English merchants to find a rumoured Northeast Passage to Cathay via a route above the Arctic Circle. In 1609, he landed in North America on behalf of the Dutch East India Company and explored the region around the modern New York metropolitan area. Looking for a Northwest Passage to Asia on his ship Halve Maen ("Half Moon"), he sailed up the Hudson River, which was later named after him, and thereby laid the foundation for Dutch colonization of the region. His contributions to the exploration of the New World were significant and lasting. His voyages helped to establish European contact with the native peoples of North America and contributed to the development of trade and commerce. On his final expedition, while still searching for the Northwest Passage, Hudson became the first European to see Hudson Strait and the immense Hudson Bay. In 1611, after wintering on the shore of James Bay, Hudson wanted to press on to the west, but most of his crew mutinied. The mutineers cast Hudson, his son, and six others adrift; what then happened to the Hudsons and their companions is unknown.
 
+
+Tho: Henry Hudson was born around 1565. To find out how many years ago that was, I need to subtract his birth year from the current year, which is 2024. 
 Act: {"name": "Calc", "arguments": {"expr": "2024 - 1565"}}
 
 Obs: 459
diff --git a/tests/test_model.py b/tests/test_model.py
index 929445fb..99fea97d 100644
--- a/tests/test_model.py
+++ b/tests/test_model.py
@@ -6,11 +6,10 @@
     "text": [
         "Hello,",
         {
-            "model": "watsonx/ibm/granite-34b-code-instruct",
+            "model": "watsonx/meta-llama/llama-3-8b-instruct",
             "parameters": {
-                "decoding_method": "greedy",
-                "stop_sequences": ["!"],
-                "include_stop_sequence": False,
+                "temperature": 0,
+                "stop": ["!"],
                 "mock_response": " World",
             },
         },
@@ -34,9 +33,9 @@ def test_model():
         "def": "SOMEONE",
         "text": [
             {
-                "model": "watsonx/ibm/granite-34b-code-instruct",
+                "model": "watsonx/meta-llama/llama-3-8b-instruct",
                 "parameters": {
-                    "decoding_method": "greedy",
+                    "temperature": 0,
                     "stop": ["!"],
                     "include_stop_sequence": False,
                     "mock_response": " World",
@@ -54,8 +53,8 @@ def test_model():
         {
             "model": "watsonx/google/flan-t5-xl",
             "parameters": {
-                "decoding_method": "greedy",
-                "stop_sequences": ["."],
+                "temperature": 0,
+                "stop": ["."],
                 "include_stop_sequence": True,
                 "roles": {"user": {"pre_message": "", "post_message": ""}},
                 "mock_response": 'World is a fictional character in the popular science fiction television series "The X-Files',
@@ -85,7 +84,7 @@ def test_model_chain():
         "def": "LOCATION",
         "text": [
             {
-                "model": "watsonx/ibm/granite-34b-code-instruct",
+                "model": "watsonx/meta-llama/llama-3-8b-instruct",
                 "input": {
                     "text": [
                         "Question: What is the weather in London?\n",
@@ -98,8 +97,8 @@ def test_model_chain():
                     ]
                 },
                 "parameters": {
-                    "decoding_method": "greedy",
-                    "stop_sequences": ["Question"],
+                    "temperature": 0,
+                    "stop": ["Question"],
                     "include_stop_sequence": False,
                     "mock_response": "Armonk",
                 },
@@ -122,9 +121,9 @@ def test_multi_shot():
     "text": [
         "Hello,\n",
         {
-            "model": "watsonx/ibm/granite-34b-code-instruct",
+            "model": "watsonx/meta-llama/llama-3-8b-instruct",
             "parameters": {
-                "stop_sequences": ["."],
+                "stop": ["."],
                 "mock_response": '\nI have a question about the use of the word "in" in the sentence: "The cake was baked in the oven.',
             },
         },
@@ -144,12 +143,12 @@ def test_data_missing_parameters():
 
 model_parameter = {
     "description": "Hello world with a variable",
-    "defs": {"model": "watsonx/ibm/granite-34b-code-instruct"},
+    "defs": {"model": "watsonx/meta-llama/llama-3-8b-instruct"},
     "text": [
         "Hello,",
         {
             "model": "${ model }",
-            "parameters": {"stop_sequences": ["!"], "mock_response": " World!"},
+            "parameters": {"stop": ["!"], "mock_response": " World!"},
         },
     ],
 }
@@ -164,12 +163,12 @@ def test_model_parameter():
 
 model_parameter1 = {
     "description": "Hello world with a variable",
-    "defs": {"model": "granite-34b-code-instruct"},
+    "defs": {"model": "watsonx/meta-llama/llama-3-8b-instruct"},
     "text": [
         "Hello,",
         {
             "model": "watsonx/ibm/${ model }",
-            "parameters": {"stop_sequences": ["!"], "mock_response": " World!"},
+            "parameters": {"stop": ["!"], "mock_response": " World!"},
         },
     ],
 }
@@ -187,7 +186,7 @@ def test_model_parameter1():
     "text": [
         "Hello,",
         {
-            "model": "watsonx/ibm/granite-34b-code-instruct-v2",
+            "model": "watsonx/meta-llama/llama-3-8b-instruct",
             "platform": "litellm",
             "parameters": {"stop": ["!"], "mock_response": " World!"},
         },
diff --git a/tests/test_parser.py b/tests/test_parser.py
index ed8ba51b..349c13ef 100644
--- a/tests/test_parser.py
+++ b/tests/test_parser.py
@@ -25,8 +25,7 @@
         },
         "parser": "json",
         "parameters": {
-            "stop_sequences": ["}"],
-            "include_stop_sequence": True,
+            "stop": ["}"],
             "mock_response": '{"bob": 20, "carol": 30}',
         },
     }
diff --git a/tests/test_var.py b/tests/test_var.py
index 5d1b15aa..d2d477bf 100644
--- a/tests/test_var.py
+++ b/tests/test_var.py
@@ -18,9 +18,8 @@
         {
             "model": "watsonx/ibm/granite-34b-code-instruct",
             "parameters": {
-                "decoding_method": "greedy",
-                "stop_sequences": ["!"],
-                "include_stop_sequence": False,
+                "temperature": 0,
+                "stop": ["!"],
                 "mock_response": " World",
             },
         }
@@ -51,9 +50,8 @@ def test_var():
         {
             "model": "watsonx/ibm/granite-34b-code-instruct",
             "parameters": {
-                "decoding_method": "greedy",
-                "stop_sequences": ["!"],
-                "include_stop_sequence": False,
+                "temperature": 0,
+                "stop": ["!"],
                 "mock_response": " World",
             },
         }
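
The generate_text_stream hunk above swaps break for continue when a streamed chunk's delta carries no content (the non-streaming path similarly falls back to an empty content string instead of asserting). Below is a minimal standalone sketch of that streaming behavior; FakeDelta and stream_text are hypothetical stand-ins, not PDL's or LiteLLM's actual classes.

# Standalone sketch -- hypothetical FakeDelta stands in for a streaming delta.
from dataclasses import dataclass
from typing import Optional


@dataclass
class FakeDelta:
    role: Optional[str]
    content: Optional[str]


def stream_text(deltas):
    # Mirrors the patched loop: skip deltas with no content instead of
    # breaking, so text arriving after a content-less chunk is still yielded.
    for delta in deltas:
        if delta.content is None:
            continue  # with break here, ", World" below would be lost
        yield {"role": delta.role, "content": delta.content}


deltas = [
    FakeDelta("assistant", "Hello"),
    FakeDelta(None, None),  # bookkeeping chunk with no content
    FakeDelta("assistant", ", World"),
]
print("".join(m["content"] for m in stream_text(deltas)))  # prints: Hello, World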