Skip to content

Commit c4b678d

Browse files
committed
Add SupportsTemperature capability to model handling
Introduced a new `SupportsTemperature` property in the `ModelCapability` structure to specify whether a model supports temperature control. Updated the capability map in `OpenAIModelExtensions` to set this property for every model. Added two new methods to check temperature support: a `SupportsTemperature` extension method on `OpenAIModel`, and a `SupportsTemperature` static overload that accepts a string model ID. Updated `OpenAIService` to set temperature dynamically based on model capabilities, replacing hardcoded assumptions about specific model families (e.g., the O-series). Extended the `ModelCapability` structure while preserving backward compatibility, and improved flexibility and maintainability by aligning the temperature logic with the updated capability map.
1 parent a20f930 commit c4b678d

2 files changed

Lines changed: 19 additions & 13 deletions

File tree

src/DesktopApp/ImageToPose.Core/Models/OperatingMode.cs

Lines changed: 13 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -51,12 +51,12 @@ static OpenAIModelExtensions()
5151
// Conservative defaults: many o-series and 4.1-family models do not allow token logprobs.
5252
var caps = new Dictionary<OpenAIModel, ModelCapability>
5353
{
54-
[OpenAIModel.Gpt41Nano] = new ModelCapability(SupportsLogProbs: true),
55-
[OpenAIModel.Gpt41Mini] = new ModelCapability(SupportsLogProbs: true),
56-
[OpenAIModel.Gpt41] = new ModelCapability(SupportsLogProbs: true),
57-
[OpenAIModel.O4Mini] = new ModelCapability(SupportsLogProbs: false),
58-
[OpenAIModel.Gpt5] = new ModelCapability(SupportsLogProbs: false),
59-
[OpenAIModel.O3] = new ModelCapability(SupportsLogProbs: false)
54+
[OpenAIModel.Gpt41Nano] = new ModelCapability(SupportsLogProbs: true, SupportsTemperature: true),
55+
[OpenAIModel.Gpt41Mini] = new ModelCapability(SupportsLogProbs: true, SupportsTemperature: true),
56+
[OpenAIModel.Gpt41] = new ModelCapability(SupportsLogProbs: true, SupportsTemperature: true),
57+
[OpenAIModel.O4Mini] = new ModelCapability(SupportsLogProbs: false, SupportsTemperature: false),
58+
[OpenAIModel.Gpt5] = new ModelCapability(SupportsLogProbs: false, SupportsTemperature: true),
59+
[OpenAIModel.O3] = new ModelCapability(SupportsLogProbs: false, SupportsTemperature: false)
6060
};
6161
_capabilities = new ReadOnlyDictionary<OpenAIModel, ModelCapability>(caps);
6262
All = Array.AsReadOnly(allValues);
@@ -88,9 +88,15 @@ public static bool SupportsLogProbs(this OpenAIModel model)
8888

8989
public static bool SupportsLogProbs(string modelId)
9090
=> TryParse(modelId, out var m) && m.SupportsLogProbs();
91+
92+
public static bool SupportsTemperature(this OpenAIModel model)
93+
=> _capabilities.TryGetValue(model, out var cap) && cap.SupportsTemperature;
94+
95+
public static bool SupportsTemperature(string modelId)
96+
=> TryParse(modelId, out var m) && m.SupportsTemperature();
9197
}
9298

93-
public readonly record struct ModelCapability(bool SupportsLogProbs);
99+
public readonly record struct ModelCapability(bool SupportsLogProbs, bool SupportsTemperature);
94100

95101
public static class ModeModelMap
96102
{

src/DesktopApp/ImageToPose.Core/Services/IOpenAIService.cs

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -220,14 +220,14 @@ public async Task<ExtendedPose> AnalyzePoseAsync(PoseInput input, CancellationTo
220220
MaxOutputTokenCount = 1000
221221
};
222222

223-
// O-series models only support default temperature (1)
224-
if (!IsOSeriesModel(model))
223+
// Set temperature only if the resolved model supports it
224+
if (OpenAIModelExtensions.SupportsTemperature(model))
225225
{
226226
chatOptions.Temperature = temperature;
227227
}
228228
else
229229
{
230-
temperature = 1.0f; // Use default for O-series models
230+
temperature = 1.0f; // Use default for models that do not support temperature control
231231
}
232232

233233
// Enable logprobs only when explicitly supported by our capability map
@@ -347,14 +347,14 @@ public async Task<PoseRig> GenerateRigAsync(string extendedPoseText, Cancellatio
347347
MaxOutputTokenCount = 2000
348348
};
349349

350-
// O-series models only support default temperature (1)
351-
if (!IsOSeriesModel(model))
350+
// Set temperature only if the resolved model supports it
351+
if (OpenAIModelExtensions.SupportsTemperature(model))
352352
{
353353
chatOptions.Temperature = temperature;
354354
}
355355
else
356356
{
357-
temperature = 1.0f; // Use default for O-series models
357+
temperature = 1.0f; // Use default for models that do not support temperature control
358358
}
359359

360360
// Enable logprobs only when explicitly supported by our capability map

0 commit comments

Comments (0)