Skip to content

Commit

Permalink
Fix #481, remove token limit
Browse files Browse the repository at this point in the history
  • Loading branch information
slundberg committed Nov 29, 2023
1 parent 7923cb6 commit e9157f6
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 7 deletions.
2 changes: 1 addition & 1 deletion guidance/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
__version__ = "0.1.4"
__version__ = "0.1.5"

import nest_asyncio
nest_asyncio.apply()
Expand Down
9 changes: 3 additions & 6 deletions guidance/models/_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -375,7 +375,7 @@ def tool_def(self, functions):

return self

def _run_stateless(lm, stateless_function, max_tokens=1000, temperature=0.0, top_p=1.0, n=1):
def _run_stateless(lm, stateless_function, temperature=0.0, top_p=1.0, n=1):
assert Model._grammar_only == 0, "We can't run grammar parsing while in context free mode! (for example inside a block closer)"

logger.debug("start Model._run_stateless")
Expand All @@ -389,10 +389,7 @@ def _run_stateless(lm, stateless_function, max_tokens=1000, temperature=0.0, top_p=1.0, n=1):
replacements = replace_model_variables(stateless_function, lm)

# start the generation stream
gen_obj = lm(
grammar=stateless_function, max_tokens=max_tokens, n=n,
temperature=temperature, top_p=top_p
)
gen_obj = lm(grammar=stateless_function, n=n, temperature=temperature, top_p=top_p)

# single generation
if n == 1:
Expand Down Expand Up @@ -539,7 +536,7 @@ def _cleanup_tokens(self, token_ids, token_byte_positions):
return token_ids, token_byte_positions


def __call__(self, grammar, max_tokens=100, n=1, top_p=1, temperature=0.0, ensure_bos_token=True, log_probs=False):
def __call__(self, grammar, max_tokens=1000000, n=1, top_p=1, temperature=0.0, ensure_bos_token=True, log_probs=False):
assert n == 1, "Still need to add support for n > 1!"

# get our current context in bytes
Expand Down

0 comments on commit e9157f6

Please sign in to comment.