
ModelCapabilities

The ModelCapabilities class represents the capabilities, constraints, and parameters for a specific OpenAI model.
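
Instances are normally obtained from the registry rather than constructed directly; a minimal sketch (the model name is illustrative):

from openai_model_registry import ModelRegistry

# Look up capabilities for a registered model
capabilities = ModelRegistry.get_default().get_capabilities("gpt-4o")
print(capabilities.context_window, capabilities.max_output_tokens)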

Class Reference

openai_model_registry.registry.ModelCapabilities

Represents the capabilities of a model.

Source code in src/openai_model_registry/registry.py
class ModelCapabilities:
    """Represents the capabilities of a model."""

    def __init__(
        self,
        model_name: str,
        openai_model_name: str,
        context_window: int,
        max_output_tokens: int,
        deprecation: DeprecationInfo,
        supports_vision: bool = False,
        supports_functions: bool = False,
        supports_streaming: bool = False,
        supports_structured: bool = False,
        supports_web_search: bool = False,
        min_version: Optional[ModelVersion] = None,
        aliases: Optional[List[str]] = None,
        supported_parameters: Optional[List[ParameterReference]] = None,
        constraints: Optional[
            Dict[
                str, Union[NumericConstraint, EnumConstraint, ObjectConstraint]
            ]
        ] = None,
    ):
        """Initialize model capabilities.

        Args:
            model_name: The model identifier in the registry
            openai_model_name: The model name to use with OpenAI API
            context_window: Maximum context window size in tokens
            max_output_tokens: Maximum output tokens
            deprecation: Deprecation metadata (mandatory in schema v2)
            supports_vision: Whether the model supports vision inputs
            supports_functions: Whether the model supports function calling
            supports_streaming: Whether the model supports streaming
            supports_structured: Whether the model supports structured output
            supports_web_search: Whether the model supports web search (Chat API search-preview models or Responses API tool)
            min_version: Minimum version for dated model variants
            aliases: List of aliases for this model
            supported_parameters: List of parameter references supported by this model
            constraints: Dictionary of constraints for validation
        """
        self.model_name = model_name
        self.openai_model_name = openai_model_name
        self.context_window = context_window
        self.max_output_tokens = max_output_tokens
        self.deprecation = deprecation
        self.supports_vision = supports_vision
        self.supports_functions = supports_functions
        self.supports_streaming = supports_streaming
        self.supports_structured = supports_structured
        self.supports_web_search = supports_web_search
        self.min_version = min_version
        self.aliases = aliases or []
        self.supported_parameters = supported_parameters or []
        self._constraints = constraints or {}

    @property
    def is_sunset(self) -> bool:
        """Check if the model is sunset."""
        return self.deprecation.status == "sunset"

    @property
    def is_deprecated(self) -> bool:
        """Check if the model is deprecated or sunset."""
        return self.deprecation.status in ["deprecated", "sunset"]

    def get_constraint(
        self, ref: str
    ) -> Optional[Union[NumericConstraint, EnumConstraint, ObjectConstraint]]:
        """Get a constraint by reference.

        Args:
            ref: Constraint reference (key in constraints dict)

        Returns:
            The constraint or None if not found
        """
        return self._constraints.get(ref)

    def validate_parameter(
        self, name: str, value: Any, used_params: Optional[Set[str]] = None
    ) -> None:
        """Validate a parameter against constraints.

        Args:
            name: Parameter name
            value: Parameter value to validate
            used_params: Optional set to track used parameters

        Raises:
            ParameterNotSupportedError: If the parameter is not supported
            ConstraintNotFoundError: If a constraint reference is invalid
            ModelRegistryError: If validation fails for other reasons
        """
        # Track used parameters if requested
        if used_params is not None:
            used_params.add(name)

        # Find matching parameter reference
        param_ref = next(
            (
                p
                for p in self.supported_parameters
                if p.ref.split(".")[-1] == name
            ),
            None,
        )

        if not param_ref:
            # If we're validating a parameter explicitly, it should be supported
            raise ParameterNotSupportedError(
                f"Parameter '{name}' is not supported for model '{self.model_name}'",
                param_name=name,
                value=value,
                model=self.model_name,
            )

        constraint = self.get_constraint(param_ref.ref)
        if not constraint:
            # If a parameter references a constraint, the constraint should exist
            raise ConstraintNotFoundError(
                f"Constraint reference '{param_ref.ref}' not found for parameter '{name}'",
                ref=param_ref.ref,
            )

        # Validate based on constraint type
        if isinstance(constraint, NumericConstraint):
            constraint.validate(name=name, value=value)
        elif isinstance(constraint, EnumConstraint):
            constraint.validate(name=name, value=value)
        elif isinstance(constraint, ObjectConstraint):
            constraint.validate(name=name, value=value)
        else:
            # This shouldn't happen with proper type checking, but just in case
            raise TypeError(
                f"Unknown constraint type for '{name}': {type(constraint).__name__}"
            )

    def validate_parameters(
        self, params: Dict[str, Any], used_params: Optional[Set[str]] = None
    ) -> None:
        """Validate multiple parameters against constraints.

        Args:
            params: Dictionary of parameter names and values to validate
            used_params: Optional set to track used parameters

        Raises:
            ModelRegistryError: If validation fails for any parameter
        """
        for name, value in params.items():
            self.validate_parameter(name, value, used_params)

Attributes

is_deprecated property

Check if the model is deprecated or sunset.

is_sunset property

Check if the model is sunset.
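
Both properties read the deprecation metadata; a brief sketch of how they differ (the model name is illustrative):

from openai_model_registry import ModelRegistry

capabilities = ModelRegistry.get_default().get_capabilities("gpt-4o")

# is_deprecated is True for both "deprecated" and "sunset" status;
# is_sunset is True only once the model has reached "sunset".
print(capabilities.deprecation.status)
print(capabilities.is_deprecated, capabilities.is_sunset)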

Functions

__init__(model_name, openai_model_name, context_window, max_output_tokens, deprecation, supports_vision=False, supports_functions=False, supports_streaming=False, supports_structured=False, supports_web_search=False, min_version=None, aliases=None, supported_parameters=None, constraints=None)

Initialize model capabilities.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| model_name | str | The model identifier in the registry | required |
| openai_model_name | str | The model name to use with OpenAI API | required |
| context_window | int | Maximum context window size in tokens | required |
| max_output_tokens | int | Maximum output tokens | required |
| deprecation | DeprecationInfo | Deprecation metadata (mandatory in schema v2) | required |
| supports_vision | bool | Whether the model supports vision inputs | False |
| supports_functions | bool | Whether the model supports function calling | False |
| supports_streaming | bool | Whether the model supports streaming | False |
| supports_structured | bool | Whether the model supports structured output | False |
| supports_web_search | bool | Whether the model supports web search (Chat API search-preview models or Responses API tool) | False |
| min_version | Optional[ModelVersion] | Minimum version for dated model variants | None |
| aliases | Optional[List[str]] | List of aliases for this model | None |
| supported_parameters | Optional[List[ParameterReference]] | List of parameter references supported by this model | None |
| constraints | Optional[Dict[str, Union[NumericConstraint, EnumConstraint, ObjectConstraint]]] | Dictionary of constraints for validation | None |
Source code in src/openai_model_registry/registry.py
def __init__(
    self,
    model_name: str,
    openai_model_name: str,
    context_window: int,
    max_output_tokens: int,
    deprecation: DeprecationInfo,
    supports_vision: bool = False,
    supports_functions: bool = False,
    supports_streaming: bool = False,
    supports_structured: bool = False,
    supports_web_search: bool = False,
    min_version: Optional[ModelVersion] = None,
    aliases: Optional[List[str]] = None,
    supported_parameters: Optional[List[ParameterReference]] = None,
    constraints: Optional[
        Dict[
            str, Union[NumericConstraint, EnumConstraint, ObjectConstraint]
        ]
    ] = None,
):
    """Initialize model capabilities.

    Args:
        model_name: The model identifier in the registry
        openai_model_name: The model name to use with OpenAI API
        context_window: Maximum context window size in tokens
        max_output_tokens: Maximum output tokens
        deprecation: Deprecation metadata (mandatory in schema v2)
        supports_vision: Whether the model supports vision inputs
        supports_functions: Whether the model supports function calling
        supports_streaming: Whether the model supports streaming
        supports_structured: Whether the model supports structured output
        supports_web_search: Whether the model supports web search (Chat API search-preview models or Responses API tool)
        min_version: Minimum version for dated model variants
        aliases: List of aliases for this model
        supported_parameters: List of parameter references supported by this model
        constraints: Dictionary of constraints for validation
    """
    self.model_name = model_name
    self.openai_model_name = openai_model_name
    self.context_window = context_window
    self.max_output_tokens = max_output_tokens
    self.deprecation = deprecation
    self.supports_vision = supports_vision
    self.supports_functions = supports_functions
    self.supports_streaming = supports_streaming
    self.supports_structured = supports_structured
    self.supports_web_search = supports_web_search
    self.min_version = min_version
    self.aliases = aliases or []
    self.supported_parameters = supported_parameters or []
    self._constraints = constraints or {}

get_constraint(ref)

Get a constraint by reference.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| ref | str | Constraint reference (key in constraints dict) | required |

Returns:

| Type | Description |
| --- | --- |
| Optional[Union[NumericConstraint, EnumConstraint, ObjectConstraint]] | The constraint or None if not found |

Source code in src/openai_model_registry/registry.py
def get_constraint(
    self, ref: str
) -> Optional[Union[NumericConstraint, EnumConstraint, ObjectConstraint]]:
    """Get a constraint by reference.

    Args:
        ref: Constraint reference (key in constraints dict)

    Returns:
        The constraint or None if not found
    """
    return self._constraints.get(ref)
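
Because get_constraint is a plain dictionary lookup, an unknown reference returns None rather than raising; a short sketch (the reference key is illustrative and depends on the registry data):

from openai_model_registry import ModelRegistry

capabilities = ModelRegistry.get_default().get_capabilities("gpt-4o")

constraint = capabilities.get_constraint("temperature")
if constraint is None:
    print("No constraint registered under that reference")
else:
    print(f"Found {type(constraint).__name__}")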

validate_parameter(name, value, used_params=None)

Validate a parameter against constraints.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| name | str | Parameter name | required |
| value | Any | Parameter value to validate | required |
| used_params | Optional[Set[str]] | Optional set to track used parameters | None |

Raises:

| Type | Description |
| --- | --- |
| ParameterNotSupportedError | If the parameter is not supported |
| ConstraintNotFoundError | If a constraint reference is invalid |
| ModelRegistryError | If validation fails for other reasons |

Source code in src/openai_model_registry/registry.py
def validate_parameter(
    self, name: str, value: Any, used_params: Optional[Set[str]] = None
) -> None:
    """Validate a parameter against constraints.

    Args:
        name: Parameter name
        value: Parameter value to validate
        used_params: Optional set to track used parameters

    Raises:
        ParameterNotSupportedError: If the parameter is not supported
        ConstraintNotFoundError: If a constraint reference is invalid
        ModelRegistryError: If validation fails for other reasons
    """
    # Track used parameters if requested
    if used_params is not None:
        used_params.add(name)

    # Find matching parameter reference
    param_ref = next(
        (
            p
            for p in self.supported_parameters
            if p.ref.split(".")[-1] == name
        ),
        None,
    )

    if not param_ref:
        # If we're validating a parameter explicitly, it should be supported
        raise ParameterNotSupportedError(
            f"Parameter '{name}' is not supported for model '{self.model_name}'",
            param_name=name,
            value=value,
            model=self.model_name,
        )

    constraint = self.get_constraint(param_ref.ref)
    if not constraint:
        # If a parameter references a constraint, the constraint should exist
        raise ConstraintNotFoundError(
            f"Constraint reference '{param_ref.ref}' not found for parameter '{name}'",
            ref=param_ref.ref,
        )

    # Validate based on constraint type
    if isinstance(constraint, NumericConstraint):
        constraint.validate(name=name, value=value)
    elif isinstance(constraint, EnumConstraint):
        constraint.validate(name=name, value=value)
    elif isinstance(constraint, ObjectConstraint):
        constraint.validate(name=name, value=value)
    else:
        # This shouldn't happen with proper type checking, but just in case
        raise TypeError(
            f"Unknown constraint type for '{name}': {type(constraint).__name__}"
        )
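
An unsupported parameter is rejected before any constraint lookup; a sketch that catches the package-level base error (this assumes ParameterNotSupportedError derives from ModelRegistryError, and the parameter name is deliberately invalid):

from openai_model_registry import ModelRegistry, ModelRegistryError

capabilities = ModelRegistry.get_default().get_capabilities("gpt-4o")

try:
    capabilities.validate_parameter("not_a_real_parameter", 1)
except ModelRegistryError as e:
    # Assumed: ParameterNotSupportedError is a ModelRegistryError subclass
    print(f"Rejected: {e}")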

validate_parameters(params, used_params=None)

Validate multiple parameters against constraints.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| params | Dict[str, Any] | Dictionary of parameter names and values to validate | required |
| used_params | Optional[Set[str]] | Optional set to track used parameters | None |

Raises:

| Type | Description |
| --- | --- |
| ModelRegistryError | If validation fails for any parameter |

Source code in src/openai_model_registry/registry.py
def validate_parameters(
    self, params: Dict[str, Any], used_params: Optional[Set[str]] = None
) -> None:
    """Validate multiple parameters against constraints.

    Args:
        params: Dictionary of parameter names and values to validate
        used_params: Optional set to track used parameters

    Raises:
        ModelRegistryError: If validation fails for any parameter
    """
    for name, value in params.items():
        self.validate_parameter(name, value, used_params)


Usage Examples

Accessing Basic Properties

from openai_model_registry import ModelRegistry

registry = ModelRegistry.get_default()
capabilities = registry.get_capabilities("gpt-4o")

# Access basic properties
print(f"Model name: {capabilities.openai_model_name}")
print(f"Context window: {capabilities.context_window}")
print(f"Max output tokens: {capabilities.max_output_tokens}")
print(f"Supports streaming: {capabilities.supports_streaming}")
print(f"Supports structured output: {capabilities.supports_structured}")

# Check for aliases
if capabilities.aliases:
    print(f"Aliases: {', '.join(capabilities.aliases)}")
# Expected output: Model name: gpt-4o
#                  Context window: 128000
#                  Max output tokens: 16384
#                  Supports streaming: True
#                  Supports structured output: True

Validating Parameters

from openai_model_registry import ModelRegistry, ModelRegistryError

registry = ModelRegistry.get_default()
capabilities = registry.get_capabilities("gpt-4o")

# Validate a parameter
try:
    capabilities.validate_parameter("temperature", 0.7)
    print("Temperature 0.7 is valid")
except ModelRegistryError as e:
    print(f"Invalid parameter: {e}")

# Validate with context (tracking used parameters)
used_params = set()
capabilities.validate_parameter("temperature", 0.7, used_params)
print(f"Used parameters: {used_params}")  # Contains 'temperature'
# Expected output: Temperature 0.7 is valid
#                  Used parameters: {'temperature'}

# Validate multiple parameters
params_to_validate = {"temperature": 0.7, "top_p": 0.9, "max_completion_tokens": 500}

for param_name, value in params_to_validate.items():
    try:
        capabilities.validate_parameter(param_name, value, used_params)
        print(f"✓ {param_name}={value} is valid")
    except ModelRegistryError as e:
        print(f"✗ {param_name}={value} is invalid: {e}")

Working with Parameter Constraints

from openai_model_registry import ModelRegistry

registry = ModelRegistry.get_default()
capabilities = registry.get_capabilities("gpt-4o")

# Get a specific constraint
temperature_constraint = capabilities.get_constraint("temperature")
if temperature_constraint:
    print(f"Type: {type(temperature_constraint).__name__}")
    print(f"Min value: {temperature_constraint.min_value}")
    print(f"Max value: {temperature_constraint.max_value}")
    print(f"Description: {temperature_constraint.description}")

# List all parameter references
for param_ref in capabilities.supported_parameters:
    constraint = capabilities.get_constraint(param_ref.ref)
    print(f"Parameter: {param_ref.ref}")
    print(f"  Description: {param_ref.description}")
    print(f"  Constraint type: {type(constraint).__name__ if constraint else 'None'}")

Creating Custom Capabilities

from openai_model_registry import ModelRegistry
from openai_model_registry.registry import ModelCapabilities
from openai_model_registry.constraints import NumericConstraint, EnumConstraint
from typing import Dict, Union

# Get existing constraints for reference
registry = ModelRegistry.get_default()
base_capabilities = registry.get_capabilities("gpt-4o")

# Create custom capabilities (deprecation metadata is required in schema v2,
# so we reuse the base model's DeprecationInfo here)
custom_capabilities = ModelCapabilities(
    model_name="custom-model",
    openai_model_name="custom-model",
    context_window=8192,
    max_output_tokens=4096,
    deprecation=base_capabilities.deprecation,
    supports_streaming=True,
    supports_structured=True,
)

# Add aliases
custom_capabilities.aliases = ["custom-alias"]

# Copy supported parameters from base model
custom_capabilities.supported_parameters = base_capabilities.supported_parameters

# Add constraints manually
constraints: Dict[str, Union[NumericConstraint, EnumConstraint]] = {
    "temperature": NumericConstraint(
        min_value=0.0,
        max_value=1.0,
        allow_float=True,
        allow_int=True,
        description="Custom temperature description",
    ),
    "response_format": EnumConstraint(
        allowed_values=["text", "json_schema"],
        description="Custom response format description",
    ),
}
custom_capabilities._constraints = constraints

# Use custom capabilities
custom_capabilities.validate_parameter("temperature", 0.7)

Checking Deprecation Status

from openai_model_registry import ModelRegistry

registry = ModelRegistry.get_default()
capabilities = registry.get_capabilities("gpt-4o")

# Check whether the model is deprecated and when it will be sunset
if capabilities.is_deprecated:
    print(f"⚠️  Model is deprecated since {capabilities.deprecation.deprecation_date}")
    if capabilities.deprecation.sunset_date:
        print(f"🚫 Model will be sunset on {capabilities.deprecation.sunset_date}")

Handling Validation Errors

from openai_model_registry import ModelRegistry, ModelRegistryError

registry = ModelRegistry.get_default()
capabilities = registry.get_capabilities("gpt-4o")

# Validate a single parameter and handle failures
try:
    capabilities.validate_parameter("temperature", 0.7)
    print("✅ Temperature value is valid")
except ModelRegistryError as e:
    print(f"❌ Invalid temperature: {e}")

Checking Feature Support

from openai_model_registry import ModelRegistry

registry = ModelRegistry.get_default()
capabilities = registry.get_capabilities("gpt-4o")

# Check feature support
if capabilities.supports_structured:
    print("✅ Model supports structured output")
if capabilities.supports_streaming:
    print("✅ Model supports streaming")