Skip to content

Commit 982a257

Browse files
authored
Merge pull request #273 from algorithmicsuperintelligence/fix-response-format-none
Fix response format none
2 parents 50273fc + 27d6d0a commit 982a257

File tree

3 files changed

+6
-3
lines changed

3 files changed

+6
-3
lines changed

optillm/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
 # Version information
-__version__ = "0.3.6"
+__version__ = "0.3.7"
 
 # Import from server module
 from .server import (

optillm/server.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -714,9 +714,12 @@ def proxy():
     request_config.update({
         "stream": stream,
         "n": n,
-        "response_format": response_format,  # Add response_format to config
     })
 
+    # Only add response_format if it's not None
+    if response_format is not None:
+        request_config['response_format'] = response_format
+
     # Add token limits to request_config with proper priority
     if max_completion_tokens is not None:
         request_config['max_completion_tokens'] = max_completion_tokens

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "optillm"
-version = "0.3.6"
+version = "0.3.7"
 description = "An optimizing inference proxy for LLMs."
 readme = "README.md"
 license = "Apache-2.0"

0 commit comments

Comments (0)