Skip to content

Commit

Permalink
fix: re-enable streaming for openai_plugin /v1/completions
Browse files Browse the repository at this point in the history
This PR fixes a bug where "stream": True was not set in the request body when
using the /v1/completions endpoint with the openai plugin.
  • Loading branch information
dagrayvid committed Jun 5, 2024
1 parent 833246f commit 3206598
Showing 1 changed file with 7 additions and 13 deletions.
20 changes: 7 additions & 13 deletions plugins/openai_plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,24 +128,18 @@ def request_http(self, query: dict, user_id: int, test_end_time: float = 0):
def streaming_request_http(self, query: dict, user_id: int, test_end_time: float):
headers = {"Content-Type": "application/json"}

if "/v1/chat/completions" in self.host:
data = {
"messages": [
{"role": "user", "content": query["text"]}
],
data = {
"max_tokens": query["output_tokens"],
"temperature": 0.1,
"stream": True,
}
if "/v1/chat/completions" in self.host:
data["messages"] = [
{"role": "user", "content": query["text"]}
]
else:
data = {
"prompt": query["text"],
"max_tokens": query["output_tokens"],
"min_tokens": query["output_tokens"],
"temperature": 0.1,
"top_p": 0.9,
"seed": 10,
}
data["prompt"] = query["text"],
data["min_tokens"] = query["output_tokens"]

# some runtimes only serve one model, won't check this.
if self.model_name is not None:
Expand Down

0 comments on commit 3206598

Please sign in to comment.