Commit

Merge pull request #1559 from stanfordnlp/typed_predictors_fixes_v2_5
Improve o1 support in dspy.LM (QoL errors)
okhat authored Sep 28, 2024
2 parents a052ff4 + 1de0db7 commit 1c94347
Showing 1 changed file with 4 additions and 2 deletions.
dspy/clients/lm.py (4 additions, 2 deletions)
@@ -30,8 +30,10 @@ def __init__(self, model, model_type='chat', temperature=0.0, max_tokens=1000, c
         self.kwargs = dict(temperature=temperature, max_tokens=max_tokens, **kwargs)
         self.history = []
 
-        # Exceptions for O-1 models, which requires a temperature of 1.0.
-        if "o1-" in model: self.kwargs['temperature'] = 1.0
+        if "o1-" in model:
+            assert max_tokens >= 5000 and temperature == 1.0, \
+                "OpenAI's o1-* models require passing temperature=1.0 and max_tokens >= 5000 to `dspy.LM(...)`"
 
 
     def __call__(self, prompt=None, messages=None, **kwargs):
         # Build the request.
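For context, a minimal usage sketch of what the new assertion expects from callers (the "openai/o1-mini" model string and the dspy.configure call are illustrative assumptions, not part of this commit): any model name containing "o1-" must now be constructed with temperature=1.0 and max_tokens >= 5000, otherwise dspy.LM raises at construction time.

import dspy

# Hypothetical model name for illustration; the check only looks for the substring "o1-".
# With the constructor defaults (temperature=0.0, max_tokens=1000), this call would now
# raise an AssertionError instead of silently forcing temperature to 1.0 as before.
lm = dspy.LM("openai/o1-mini", temperature=1.0, max_tokens=5000)
dspy.configure(lm=lm)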
