@@ -53,69 +53,8 @@ func (e *CodexExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, re
 	to := sdktranslator.FromString("codex")
 	body := sdktranslator.TranslateRequest(from, to, req.Model, bytes.Clone(req.Payload), false)
 
-	if util.InArray([]string{"gpt-5", "gpt-5-minimal", "gpt-5-low", "gpt-5-medium", "gpt-5-high"}, req.Model) {
-		body, _ = sjson.SetBytes(body, "model", "gpt-5")
-		switch req.Model {
-		case "gpt-5-minimal":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "minimal")
-		case "gpt-5-low":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "low")
-		case "gpt-5-medium":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
-		case "gpt-5-high":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
-		}
-	} else if util.InArray([]string{"gpt-5-codex", "gpt-5-codex-low", "gpt-5-codex-medium", "gpt-5-codex-high"}, req.Model) {
-		body, _ = sjson.SetBytes(body, "model", "gpt-5-codex")
-		switch req.Model {
-		case "gpt-5-codex-low":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "low")
-		case "gpt-5-codex-medium":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
-		case "gpt-5-codex-high":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
-		}
-	} else if util.InArray([]string{"gpt-5-codex-mini", "gpt-5-codex-mini-medium", "gpt-5-codex-mini-high"}, req.Model) {
-		body, _ = sjson.SetBytes(body, "model", "gpt-5-codex-mini")
-		switch req.Model {
-		case "gpt-5-codex-mini-medium":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
-		case "gpt-5-codex-mini-high":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
-		default:
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
-		}
-	} else if util.InArray([]string{"gpt-5.1", "gpt-5.1-low", "gpt-5.1-medium", "gpt-5.1-high"}, req.Model) {
-		body, _ = sjson.SetBytes(body, "model", "gpt-5.1")
-		switch req.Model {
-		case "gpt-5.1-low":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "low")
-		case "gpt-5.1-medium", "gpt-5.1":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
-		case "gpt-5.1-high":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
-		}
-	} else if util.InArray([]string{"gpt-5.1-codex", "gpt-5.1-codex-low", "gpt-5.1-codex-medium", "gpt-5.1-codex-high"}, req.Model) {
-		body, _ = sjson.SetBytes(body, "model", "gpt-5.1-codex")
-		switch req.Model {
-		case "gpt-5.1-codex-low":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "low")
-		case "gpt-5.1-codex-medium":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
-		case "gpt-5.1-codex-high":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
-		}
-	} else if util.InArray([]string{"gpt-5.1-codex-mini", "gpt-5.1-codex-mini-medium", "gpt-5.1-codex-mini-high"}, req.Model) {
-		body, _ = sjson.SetBytes(body, "model", "gpt-5.1-codex-mini")
-		switch req.Model {
-		case "gpt-5.1-codex-mini-medium":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
-		case "gpt-5.1-codex-mini-high":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
-		default:
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
-		}
-	}
+	body = e.setReasoningEffortByAlias(req.Model, body)
+
 	body = applyPayloadConfig(e.cfg, req.Model, body)
 
 	body, _ = sjson.SetBytes(body, "stream", true)
@@ -207,67 +146,7 @@ func (e *CodexExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.Au
 	to := sdktranslator.FromString("codex")
 	body := sdktranslator.TranslateRequest(from, to, req.Model, bytes.Clone(req.Payload), true)
 
-	if util.InArray([]string{"gpt-5", "gpt-5-minimal", "gpt-5-low", "gpt-5-medium", "gpt-5-high"}, req.Model) {
-		body, _ = sjson.SetBytes(body, "model", "gpt-5")
-		switch req.Model {
-		case "gpt-5-minimal":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "minimal")
-		case "gpt-5-low":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "low")
-		case "gpt-5-medium":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
-		case "gpt-5-high":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
-		}
-	} else if util.InArray([]string{"gpt-5-codex", "gpt-5-codex-low", "gpt-5-codex-medium", "gpt-5-codex-high"}, req.Model) {
-		body, _ = sjson.SetBytes(body, "model", "gpt-5-codex")
-		switch req.Model {
-		case "gpt-5-codex-low":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "low")
-		case "gpt-5-codex-medium":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
-		case "gpt-5-codex-high":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
-		}
-	} else if util.InArray([]string{"gpt-5-codex-mini", "gpt-5-codex-mini-medium", "gpt-5-codex-mini-high"}, req.Model) {
-		body, _ = sjson.SetBytes(body, "model", "gpt-5-codex-mini")
-		switch req.Model {
-		case "gpt-5-codex-mini-medium":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
-		case "gpt-5-codex-mini-high":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
-		}
-	} else if util.InArray([]string{"gpt-5.1", "gpt-5.1-low", "gpt-5.1-medium", "gpt-5.1-high"}, req.Model) {
-		body, _ = sjson.SetBytes(body, "model", "gpt-5.1")
-		switch req.Model {
-		case "gpt-5.1-low":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "low")
-		case "gpt-5.1-medium", "gpt-5.1":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
-		case "gpt-5.1-high":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
-		}
-	} else if util.InArray([]string{"gpt-5.1-codex", "gpt-5.1-codex-low", "gpt-5.1-codex-medium", "gpt-5.1-codex-high"}, req.Model) {
-		body, _ = sjson.SetBytes(body, "model", "gpt-5.1-codex")
-		switch req.Model {
-		case "gpt-5.1-codex-low":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "low")
-		case "gpt-5.1-codex-medium":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
-		case "gpt-5.1-codex-high":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
-		}
-	} else if util.InArray([]string{"gpt-5.1-codex-mini", "gpt-5.1-codex-mini-medium", "gpt-5.1-codex-mini-high"}, req.Model) {
-		body, _ = sjson.SetBytes(body, "model", "gpt-5.1-codex-mini")
-		switch req.Model {
-		case "gpt-5.1-codex-mini-medium":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
-		case "gpt-5.1-codex-mini-high":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
-		default:
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
-		}
-	}
+	body = e.setReasoningEffortByAlias(req.Model, body)
 	body = applyPayloadConfig(e.cfg, req.Model, body)
 	body, _ = sjson.DeleteBytes(body, "previous_response_id")
 
@@ -363,46 +242,7 @@ func (e *CodexExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.Auth
 
 	modelForCounting := req.Model
 
-	if util.InArray([]string{"gpt-5", "gpt-5-minimal", "gpt-5-low", "gpt-5-medium", "gpt-5-high"}, req.Model) {
-		modelForCounting = "gpt-5"
-		body, _ = sjson.SetBytes(body, "model", "gpt-5")
-		switch req.Model {
-		case "gpt-5-minimal":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "minimal")
-		case "gpt-5-low":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "low")
-		case "gpt-5-medium":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
-		case "gpt-5-high":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
-		default:
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "low")
-		}
-	} else if util.InArray([]string{"gpt-5-codex", "gpt-5-codex-low", "gpt-5-codex-medium", "gpt-5-codex-high"}, req.Model) {
-		modelForCounting = "gpt-5"
-		body, _ = sjson.SetBytes(body, "model", "gpt-5-codex")
-		switch req.Model {
-		case "gpt-5-codex-low":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "low")
-		case "gpt-5-codex-medium":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
-		case "gpt-5-codex-high":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
-		default:
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "low")
-		}
-	} else if util.InArray([]string{"gpt-5-codex-mini", "gpt-5-codex-mini-medium", "gpt-5-codex-mini-high"}, req.Model) {
-		modelForCounting = "gpt-5"
-		body, _ = sjson.SetBytes(body, "model", "codex-mini-latest")
-		switch req.Model {
-		case "gpt-5-codex-mini-medium":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
-		case "gpt-5-codex-mini-high":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
-		default:
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
-		}
-	}
+	body = e.setReasoningEffortByAlias(req.Model, body)
 
 	body, _ = sjson.DeleteBytes(body, "previous_response_id")
 	body, _ = sjson.SetBytes(body, "stream", false)
@@ -422,6 +262,71 @@ func (e *CodexExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.Auth
 	return cliproxyexecutor.Response{Payload: []byte(translated)}, nil
 }
 
+func (e *CodexExecutor) setReasoningEffortByAlias(modelName string, payload []byte) []byte {
+	if util.InArray([]string{"gpt-5", "gpt-5-minimal", "gpt-5-low", "gpt-5-medium", "gpt-5-high"}, modelName) {
+		payload, _ = sjson.SetBytes(payload, "model", "gpt-5")
+		switch modelName {
+		case "gpt-5-minimal":
+			payload, _ = sjson.SetBytes(payload, "reasoning.effort", "minimal")
+		case "gpt-5-low":
+			payload, _ = sjson.SetBytes(payload, "reasoning.effort", "low")
+		case "gpt-5-medium":
+			payload, _ = sjson.SetBytes(payload, "reasoning.effort", "medium")
+		case "gpt-5-high":
+			payload, _ = sjson.SetBytes(payload, "reasoning.effort", "high")
+		}
+	} else if util.InArray([]string{"gpt-5-codex", "gpt-5-codex-low", "gpt-5-codex-medium", "gpt-5-codex-high"}, modelName) {
+		payload, _ = sjson.SetBytes(payload, "model", "gpt-5-codex")
+		switch modelName {
+		case "gpt-5-codex-low":
+			payload, _ = sjson.SetBytes(payload, "reasoning.effort", "low")
+		case "gpt-5-codex-medium":
+			payload, _ = sjson.SetBytes(payload, "reasoning.effort", "medium")
+		case "gpt-5-codex-high":
+			payload, _ = sjson.SetBytes(payload, "reasoning.effort", "high")
+		}
+	} else if util.InArray([]string{"gpt-5-codex-mini", "gpt-5-codex-mini-medium", "gpt-5-codex-mini-high"}, modelName) {
+		payload, _ = sjson.SetBytes(payload, "model", "gpt-5-codex-mini")
+		switch modelName {
+		case "gpt-5-codex-mini-medium":
+			payload, _ = sjson.SetBytes(payload, "reasoning.effort", "medium")
+		case "gpt-5-codex-mini-high":
+			payload, _ = sjson.SetBytes(payload, "reasoning.effort", "high")
+		}
+	} else if util.InArray([]string{"gpt-5.1", "gpt-5.1-none", "gpt-5.1-low", "gpt-5.1-medium", "gpt-5.1-high"}, modelName) {
+		payload, _ = sjson.SetBytes(payload, "model", "gpt-5.1")
+		switch modelName {
+		case "gpt-5.1-none":
+			payload, _ = sjson.SetBytes(payload, "reasoning.effort", "none")
+		case "gpt-5.1-low":
+			payload, _ = sjson.SetBytes(payload, "reasoning.effort", "low")
+		case "gpt-5.1-medium":
+			payload, _ = sjson.SetBytes(payload, "reasoning.effort", "medium")
+		case "gpt-5.1-high":
+			payload, _ = sjson.SetBytes(payload, "reasoning.effort", "high")
+		}
+	} else if util.InArray([]string{"gpt-5.1-codex", "gpt-5.1-codex-low", "gpt-5.1-codex-medium", "gpt-5.1-codex-high"}, modelName) {
+		payload, _ = sjson.SetBytes(payload, "model", "gpt-5.1-codex")
+		switch modelName {
+		case "gpt-5.1-codex-low":
+			payload, _ = sjson.SetBytes(payload, "reasoning.effort", "low")
+		case "gpt-5.1-codex-medium":
+			payload, _ = sjson.SetBytes(payload, "reasoning.effort", "medium")
+		case "gpt-5.1-codex-high":
+			payload, _ = sjson.SetBytes(payload, "reasoning.effort", "high")
+		}
+	} else if util.InArray([]string{"gpt-5.1-codex-mini", "gpt-5.1-codex-mini-medium", "gpt-5.1-codex-mini-high"}, modelName) {
+		payload, _ = sjson.SetBytes(payload, "model", "gpt-5.1-codex-mini")
+		switch modelName {
+		case "gpt-5.1-codex-mini-medium":
+			payload, _ = sjson.SetBytes(payload, "reasoning.effort", "medium")
+		case "gpt-5.1-codex-mini-high":
+			payload, _ = sjson.SetBytes(payload, "reasoning.effort", "high")
+		}
+	}
+	return payload
+}
+
 func tokenizerForCodexModel(model string) (tokenizer.Codec, error) {
 	sanitized := strings.ToLower(strings.TrimSpace(model))
 	switch {
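For context, here is a minimal standalone sketch (not part of the diff) of the payload rewrite the consolidated helper performs, assuming tidwall/sjson semantics as used above; the applyAlias function, the sample request JSON, and the choice of the gpt-5.1-codex family are illustrative only:

package main

import (
	"fmt"

	"github.com/tidwall/sjson"
)

// applyAlias is an illustrative stand-in for setReasoningEffortByAlias,
// covering only the gpt-5.1-codex family: pin the base model name, then
// write reasoning.effort according to the alias suffix.
func applyAlias(payload []byte, model string) []byte {
	payload, _ = sjson.SetBytes(payload, "model", "gpt-5.1-codex")
	switch model {
	case "gpt-5.1-codex-low":
		payload, _ = sjson.SetBytes(payload, "reasoning.effort", "low")
	case "gpt-5.1-codex-medium":
		payload, _ = sjson.SetBytes(payload, "reasoning.effort", "medium")
	case "gpt-5.1-codex-high":
		payload, _ = sjson.SetBytes(payload, "reasoning.effort", "high")
	}
	return payload
}

func main() {
	body := []byte(`{"model":"gpt-5.1-codex-high","stream":true}`)
	fmt.Println(string(applyAlias(body, "gpt-5.1-codex-high")))
	// Expected output (sjson creates the nested reasoning object and appends it):
	// {"model":"gpt-5.1-codex","stream":true,"reasoning":{"effort":"high"}}
}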