
Commit e264513

fix(generate): rm temperature for o1
1 parent 66d9499 commit e264513

1 file changed (+2, −2)

bigcodebench/gen/util/openai_request.py

Lines changed: 2 additions & 2 deletions
@@ -15,16 +15,16 @@ def make_request(
 ) -> ChatCompletion:
     kwargs["top_p"] = 0.95
     kwargs["max_completion_tokens"] = max_tokens
+    kwargs["temperature"] = temperature
     if model.startswith("o1-"): # pop top-p and max_completion_tokens
         kwargs.pop("top_p")
         kwargs.pop("max_completion_tokens")
-
+        kwargs.pop("temperature")
     return client.chat.completions.create(
         model=model,
         messages=[
             {"role": "user", "content": message},
         ],
-        temperature=temperature,
         n=n,
         **kwargs
     )
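
For context: OpenAI's o1-series models reject non-default sampling parameters such as temperature and top_p, so passing them explicitly makes the request fail server-side. This commit moves temperature into kwargs alongside top_p and max_completion_tokens so that all three can be popped before calling the API when the model name starts with "o1-". Below is a minimal sketch of what the resulting make_request looks like after the change; the parameter list above the hunk (client, message, model, max_tokens, temperature, n) is not shown in the diff and is assumed here.

    from openai import OpenAI
    from openai.types.chat import ChatCompletion


    def make_request(
        client: OpenAI,
        message: str,
        model: str,
        max_tokens: int = 512,
        temperature: float = 1.0,
        n: int = 1,
        **kwargs,
    ) -> ChatCompletion:
        # Stage the sampling parameters in kwargs so they can be removed uniformly.
        kwargs["top_p"] = 0.95
        kwargs["max_completion_tokens"] = max_tokens
        kwargs["temperature"] = temperature
        if model.startswith("o1-"):
            # o1 models only accept default sampling settings, so drop these client-side.
            kwargs.pop("top_p")
            kwargs.pop("max_completion_tokens")
            kwargs.pop("temperature")

        return client.chat.completions.create(
            model=model,
            messages=[
                {"role": "user", "content": message},
            ],
            n=n,
            **kwargs,
        )

With this shape, a call like make_request(client, prompt, model="o1-mini") drops the unsupported knobs locally instead of triggering an "unsupported value" error from the API, while other models keep the explicit temperature and top_p.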
