Fix #122, and fix default silent mode to None
slundberg committed May 28, 2023
1 parent 8ecb237 commit 545223f
Showing 2 changed files with 14 additions and 7 deletions.
2 changes: 1 addition & 1 deletion guidance/__init__.py
@@ -17,7 +17,7 @@
 # This makes the guidance module callable
 class Guidance(types.ModuleType):
-    def __call__(self, template, llm=None, cache_seed=0, logprobs=None, silent='auto', async_mode=False, stream=None, caching=None, await_missing=False, **kwargs):
+    def __call__(self, template, llm=None, cache_seed=0, logprobs=None, silent=None, async_mode=False, stream=None, caching=None, await_missing=False, **kwargs):
         return Program(template, llm=llm, cache_seed=cache_seed, logprobs=logprobs, silent=silent, async_mode=async_mode, stream=stream, caching=caching, await_missing=await_missing, **kwargs)
 sys.modules[__name__].__class__ = Guidance
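This hunk changes the default of the silent argument from the string 'auto' to None, so code further down can tell "not specified" apart from an explicit choice, and it relies on the callable-module trick (swapping the module's class for a types.ModuleType subclass) so that calling guidance(...) builds a Program directly. A minimal usage sketch under that assumption; the template string and keyword values below are illustrative, not taken from this commit:

import guidance

# silent now defaults to None, leaving the display decision to the Program
program = guidance("Tell me a joke about {{topic}}")

# an explicit value still works as before
quiet_program = guidance("Tell me a joke about {{topic}}", silent=True)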

19 changes: 13 additions & 6 deletions guidance/library/_select.py
@@ -109,13 +109,20 @@ async def recursive_select(current_prefix, allow_token_extension=True):
             cache_seed=0,
             token_healing=False # we manage token boundary healing ourselves for this function
         )
-        logprobs_result = gen_obj["choices"][0]["logprobs"]
-
-        # convert the logprobs keys from string back to token ids
-        top_logprobs = {}
-        for k,v in logprobs_result["top_logprobs"][0].items():
-            id = parser.program.llm.token_to_id(k)
-            top_logprobs[id] = v
+        gen_obj = gen_obj["choices"][0] # get the first choice (we only asked for one)
+        if "logprobs" in gen_obj:
+            logprobs_result = gen_obj["logprobs"]
+
+            # convert the logprobs keys from string back to token ids if needed
+            top_logprobs = {}
+            for k,v in logprobs_result["top_logprobs"][0].items():
+                id = parser.program.llm.token_to_id(k)
+                top_logprobs[id] = v
+
+        # this happens if LLM does not return logprobs (like an OpenAI chat model)
+        else:
+            assert logprobs is None, "You cannot ask for the logprobs in a select call when using a model that does not return logprobs!"
+            top_logprobs = {parser.program.llm.token_to_id(gen_obj["text"]): 0}
 
         # no need to explore all branches if we are just taking the greedy max
         if logprobs is None:
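The new branch only runs when the backend's response actually contains a "logprobs" entry; chat-style backends that return none fall through to the else branch, where the single generated string is treated as the only candidate with log probability 0 (probability 1). A standalone sketch of that fallback, with an illustrative helper name and a token_to_id callable passed in rather than taken from the parser (both are assumptions, not the library's API):

def extract_top_logprobs(choice, token_to_id, logprobs=None):
    # choice is one element of gen_obj["choices"]
    if "logprobs" in choice:
        # convert the string keys reported by the backend back to token ids
        return {token_to_id(k): v
                for k, v in choice["logprobs"]["top_logprobs"][0].items()}
    # backend returned no logprobs (e.g. an OpenAI chat model): the generated
    # text is the only option we can consider, so give it log probability 0
    assert logprobs is None, "this backend does not return logprobs"
    return {token_to_id(choice["text"]): 0}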
