-
Notifications
You must be signed in to change notification settings - Fork 1.1k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
7 changed files
with
93 additions
and
20 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -9,4 +9,5 @@ __pycache__/ | |
.ipynb_checkpoints | ||
node_modules | ||
/client | ||
.eggs/ | ||
.eggs/ | ||
.env |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,28 @@ | ||
from ._lite_llm import LiteLLM, LiteLLMCompletion, LiteLLMInstruct | ||
|
||
class Cohere(LiteLLM):
    """Guidance model wrapper for Cohere models served through LiteLLM.

    Resolves a HuggingFace ``tokenizers`` tokenizer for the given Cohere model
    (falling back to the generic ``command-nightly`` tokenizer) and delegates
    everything else to the ``LiteLLM`` base class.

    Parameters
    ----------
    model : str
        Cohere model name (e.g. ``"command-nightly"``).
    tokenizer : tokenizers.Tokenizer, optional
        Pre-built tokenizer; when ``None`` one is loaded from the
        ``Cohere/<model>`` repo on the HuggingFace Hub.
    echo : bool
        Whether to echo generated text (passed through to ``LiteLLM``).
    caching : bool
        Whether to cache model calls (passed through to ``LiteLLM``).
    api_base, api_key, custom_llm_provider : optional
        LiteLLM connection settings, forwarded to the base class.
    temperature : float
        Sampling temperature.
    max_streaming_tokens : int
        Cap on tokens produced per streamed call.

    Raises
    ------
    Exception
        If the ``tokenizers`` package is not installed.
    """

    def __init__(self, model, tokenizer=None, echo=True, caching=True, api_base=None, api_key=None, custom_llm_provider=None, temperature=0.0, max_streaming_tokens=1000, **kwargs):
        try:
            import tokenizers
        except ImportError as e:
            # Exception type kept for backward compatibility; chain the cause for debuggability.
            raise Exception("Please install the HuggingFace tokenizers package using `pip install tokenizers -U` in order to use guidance.models.Cohere!") from e

        # get the tokenizer
        if tokenizer is None:
            try:
                tokenizer = tokenizers.Tokenizer.from_pretrained("Cohere/" + model)
            except Exception:
                # No model-specific tokenizer on the Hub — fall back to the
                # generic Cohere command tokenizer. (Was a bare `except:`,
                # which also swallowed KeyboardInterrupt/SystemExit.)
                tokenizer = tokenizers.Tokenizer.from_pretrained("Cohere/command-nightly")

        # Forward api_base/api_key/custom_llm_provider: the original accepted
        # them but silently dropped them (named params never reach **kwargs),
        # so credentials passed to Cohere(...) were ignored.
        super().__init__(
            model, tokenizer=tokenizer, echo=echo,
            caching=caching, temperature=temperature,
            max_streaming_tokens=max_streaming_tokens,
            api_base=api_base, api_key=api_key,
            custom_llm_provider=custom_llm_provider,
            **kwargs
        )
|
||
|
||
class CohereCompletion(Cohere, LiteLLMCompletion):
    """Cohere model using the raw-completion interaction style.

    Combines the Cohere tokenizer/setup (from ``Cohere``) with the
    completion-mode behavior inherited from ``LiteLLMCompletion``;
    no additional logic of its own.
    """
    pass
|
||
class CohereInstruct(Cohere, LiteLLMInstruct):
    """Cohere model using the instruction-following interaction style.

    Combines the Cohere tokenizer/setup (from ``Cohere``) with the
    instruct-mode behavior inherited from ``LiteLLMInstruct``;
    no additional logic of its own.
    """
    pass
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,25 @@ | ||
import pytest | ||
import guidance | ||
from guidance import gen, capture, select, user, system, assistant, instruction | ||
|
||
def test_lite_llm_basic():
    """Smoke-test completion-style generation through CohereCompletion.

    Skips when the model cannot be constructed (no API key / no network).
    """
    try:
        lm = guidance.models.CohereCompletion("command-nightly")
    except Exception:
        # Narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit during collection.
        pytest.skip("Skipping Cohere test because we can't load the model!")
    lm += "Count to 20: 1,2,3,4,"
    nl = "\n"
    lm += f"""\
5,6,7"""
    lm += f"""{gen(max_tokens=1, suffix=nl)}aaaaaa"""
    # The literal "aaaaaa" tail must survive generation; check the last 5 chars.
    assert str(lm)[-5:] == "aaaaa"
|
||
def test_lite_llm_instruct():
    """Smoke-test instruct-style generation through CohereInstruct.

    Skips when the model cannot be constructed (no API key / no network).
    """
    try:
        lm = guidance.models.CohereInstruct("command-nightly")
    except Exception:
        # Narrowed from a bare `except:`; message fixed to say "Cohere"
        # (this test loads CohereInstruct, not a generic LiteLLM model),
        # matching the sibling test's wording.
        pytest.skip("Skipping Cohere test because we can't load the model!")
    with instruction():
        lm += "Count to 20."
    lm += gen('val', max_tokens=1)
    # At least one token must have been captured under 'val'.
    assert len(lm['val']) > 0
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters