Commit 142923c

Simplify reasoning model detection logic
Refactored OpenAI reasoning model detection to rely solely on model name patterns, removing endpoint-based checks. Updated related tests to reflect the new logic, ensuring consistent behavior across all endpoints including Azure, OptiLLM, and OpenRouter. Bumped version to 0.2.20.
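
For illustration, a minimal sketch of the new detection logic. The helper name and the exact contents of OPENAI_REASONING_MODEL_PREFIXES are assumptions here; the diff only shows that the tuple ends with "gpt-oss-20b" and the tests imply that o1-/o3-/gpt-5-style names match while gpt-4 does not.

# Sketch only: model-name-based detection, independent of the API endpoint.
# The prefix tuple below is assumed from the test cases in this commit;
# the real tuple is defined in openevolve/llm/openai.py and may differ.
OPENAI_REASONING_MODEL_PREFIXES = ("o1", "o3", "gpt-5", "gpt-oss-20b")

def is_reasoning_model(model: str) -> bool:  # hypothetical helper, for illustration
    # Case-insensitive prefix match on the model name only; the API base is ignored.
    return str(model).lower().startswith(OPENAI_REASONING_MODEL_PREFIXES)

# e.g. is_reasoning_model("O1-MINI") -> True, is_reasoning_model("gpt-4") -> False
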
1 parent 648ccfb commit 142923c

File tree

3 files changed: +17 -38 lines changed

openevolve/_version.py

Lines changed: 1 addition & 1 deletion
@@ -1,3 +1,3 @@
 """Version information for openevolve package."""
 
-__version__ = "0.2.19"
+__version__ = "0.2.20"

openevolve/llm/openai.py

Lines changed: 3 additions & 16 deletions
@@ -87,23 +87,10 @@ async def generate_with_context(
             "gpt-oss-20b",
         )
 
-        # Check if this is an OpenAI reasoning model
+        # Check if this is an OpenAI reasoning model based on model name pattern
+        # This works for all endpoints (OpenAI, Azure, OptiLLM, OpenRouter, etc.)
         model_lower = str(self.model).lower()
-        api_base_lower = (self.api_base or "").lower()
-        #check for official OpenAI API endpoints
-        is_openai_api = (
-            api_base_lower.startswith("https://api.openai.com")
-            or api_base_lower.startswith("https://eu.api.openai.com")
-            or api_base_lower.startswith("https://apac.api.openai.com")
-            or api_base_lower.startswith("http://api.openai.com")  # Allow http for testing
-            or api_base_lower.startswith("http://eu.api.openai.com")
-            or api_base_lower.startswith("http://apac.api.openai.com")
-        )
-
-        is_openai_reasoning_model = (
-            is_openai_api
-            and model_lower.startswith(OPENAI_REASONING_MODEL_PREFIXES)
-        )
+        is_openai_reasoning_model = model_lower.startswith(OPENAI_REASONING_MODEL_PREFIXES)
 
         if is_openai_reasoning_model:
             # For OpenAI reasoning models
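
Downstream, this flag decides which token-limit parameter is sent. A hedged sketch of that choice, mirroring the param_used line in the test below; build_token_param and token_limit are illustrative names, not the shipped generate_with_context code.

# Sketch only: the flag selects between the two token-limit parameter names.
def build_token_param(is_openai_reasoning_model: bool, token_limit: int) -> dict:
    # Reasoning models take "max_completion_tokens"; other chat models take "max_tokens".
    if is_openai_reasoning_model:
        return {"max_completion_tokens": token_limit}
    return {"max_tokens": token_limit}

# e.g. build_token_param(True, 1024) -> {"max_completion_tokens": 1024}
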

tests/test_regional_endpoint.py

Lines changed: 13 additions & 21 deletions
@@ -18,10 +18,15 @@ def test_endpoint_detection():
         ("https://apac.api.openai.com/v1", "o3-mini", True, "APAC endpoint with o3-mini"),
         ("https://eu.api.openai.com/v1", "gpt-4", False, "EU endpoint with gpt-4"),
         ("https://api.openai.com/v1", "gpt-3.5-turbo", False, "US endpoint with gpt-3.5"),
-        ("https://azure.openai.com/", "o1-mini", False, "Azure endpoint (not OpenAI)"),
-        ("https://fake.com/api.openai.com", "o1-mini", False, "Fake endpoint with o1"),
-        (None, "o1-mini", False, "None endpoint"),
-        ("", "o1-mini", False, "Empty endpoint"),
+        ("https://azure.openai.com/", "o1-mini", True, "Azure endpoint with reasoning model"),
+        ("https://my-resource.openai.azure.com/", "gpt-5", True, "Azure with gpt-5"),
+        ("http://localhost:8000/v1", "o1-mini", True, "OptiLLM proxy with o1-mini"),
+        ("http://localhost:8000/v1", "gpt-5-nano", True, "OptiLLM proxy with gpt-5-nano"),
+        ("http://localhost:8000/v1", "gpt-4", False, "OptiLLM proxy with gpt-4"),
+        ("https://openrouter.ai/api/v1", "o3-mini", True, "OpenRouter with reasoning model"),
+        ("https://fake.com/api.openai.com", "o1-mini", True, "Any endpoint with reasoning model"),
+        (None, "o1-mini", True, "None endpoint with reasoning model"),
+        ("", "o1-mini", True, "Empty endpoint with reasoning model"),
         ("https://eu.api.openai.com/v1", "O1-MINI", True, "EU with uppercase model"),
         ("HTTPS://EU.API.OPENAI.COM/v1", "o1-mini", True, "Uppercase URL"),
     ]
@@ -33,23 +38,11 @@ def test_endpoint_detection():
     failed = 0
 
     for api_base, model, expected_result, description in test_cases:
-        # This is the exact logic from your fixed code
+        # This is the exact logic from the fixed code
         model_lower = str(model).lower()
-        api_base_lower = (api_base or "").lower()
-
-        is_openai_api = (
-            api_base_lower.startswith("https://api.openai.com")
-            or api_base_lower.startswith("https://eu.api.openai.com")
-            or api_base_lower.startswith("https://apac.api.openai.com")
-            or api_base_lower.startswith("http://api.openai.com")
-            or api_base_lower.startswith("http://eu.api.openai.com")
-            or api_base_lower.startswith("http://apac.api.openai.com")
-        )
-
-        is_openai_reasoning_model = (
-            is_openai_api
-            and model_lower.startswith(OPENAI_REASONING_MODEL_PREFIXES)
-        )
+
+        # Model-pattern based detection (works for all endpoints)
+        is_openai_reasoning_model = model_lower.startswith(OPENAI_REASONING_MODEL_PREFIXES)
 
         # Determine which parameter would be used
         param_used = "max_completion_tokens" if is_openai_reasoning_model else "max_tokens"
@@ -66,7 +59,6 @@ def test_endpoint_detection():
         print(f"\n{status} | {description}")
        	print(f"  API Base: {api_base}")
         print(f"  Model: {model}")
-        print(f"  is_openai_api: {is_openai_api}")
         print(f"  is_reasoning_model: {is_openai_reasoning_model}")
         print(f"  Parameter used: {param_used}")
         print(f"  Expected: {expected_param}")
