ModelCapabilities

The ModelCapabilities class represents the capabilities, constraints, and parameters for a specific OpenAI model.

WebSearchBilling

Web search billing is represented by a dedicated dataclass:

openai_model_registry.registry.WebSearchBilling dataclass

Web search billing policy and rates for a model.

  • call_fee_per_1000: flat fee per 1000 calls
  • content_token_policy: whether content tokens are included or billed at model rate
  • currency: ISO currency code (default USD)
  • notes: optional free-form notes
Source code in src/openai_model_registry/registry.py
@dataclass(frozen=True)
class WebSearchBilling:
    """Web search billing policy and rates for a model.

    - call_fee_per_1000: flat fee per 1000 calls
    - content_token_policy: whether content tokens are included or billed at model rate
    - currency: ISO currency code (default USD)
    - notes: optional free-form notes
    """

    call_fee_per_1000: float
    content_token_policy: Literal["included_in_call_fee", "billed_at_model_rate"]
    currency: str = "USD"
    notes: Optional[str] = None

    def __post_init__(self) -> None:
        if self.call_fee_per_1000 < 0:
            raise ValueError("call_fee_per_1000 must be non-negative")
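
A minimal sketch of reading this policy from a model's capabilities. Whether web_search_billing is populated for a particular model depends on the registry data, so the example guards for None:

from openai_model_registry import ModelRegistry

registry = ModelRegistry.get_default()
capabilities = registry.get_capabilities("gpt-4o")

billing = capabilities.web_search_billing
if billing is not None:
    # The flat fee is defined per 1000 calls; estimate the cost of a batch of 250 calls
    estimated_fee = billing.call_fee_per_1000 * 250 / 1000
    print(f"Estimated fee for 250 calls: {estimated_fee:.4f} {billing.currency}")
    print(f"Content token policy: {billing.content_token_policy}")
else:
    print("No web-search billing policy recorded for this model")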


Class Reference

openai_model_registry.registry.ModelCapabilities

Represents the capabilities of a model.

Source code in src/openai_model_registry/registry.py
class ModelCapabilities:
    """Represents the capabilities of a model."""

    def __init__(
        self,
        model_name: str,
        openai_model_name: str,
        context_window: int,
        max_output_tokens: int,
        deprecation: DeprecationInfo,
        supports_vision: bool = False,
        supports_functions: bool = False,
        supports_streaming: bool = False,
        supports_structured: bool = False,
        supports_web_search: bool = False,
        supports_audio: bool = False,
        supports_json_mode: bool = False,
        pricing: Optional["PricingInfo"] = None,
        input_modalities: Optional[List[str]] = None,
        output_modalities: Optional[List[str]] = None,
        min_version: Optional[ModelVersion] = None,
        aliases: Optional[List[str]] = None,
        supported_parameters: Optional[List[ParameterReference]] = None,
        constraints: Optional[Dict[str, Union[NumericConstraint, EnumConstraint, ObjectConstraint]]] = None,
        inline_parameters: Optional[Dict[str, Dict[str, Any]]] = None,
        web_search_billing: Optional["WebSearchBilling"] = None,
    ):
        """Initialize model capabilities.

        Args:
            model_name: The model identifier in the registry
            openai_model_name: The model name to use with OpenAI API
            context_window: Maximum context window size in tokens
            max_output_tokens: Maximum output tokens
            deprecation: Deprecation metadata (mandatory in current schema)
            supports_vision: Whether the model supports vision inputs
            supports_functions: Whether the model supports function calling
            supports_streaming: Whether the model supports streaming
            supports_structured: Whether the model supports structured output
            supports_web_search: Whether the model supports web search
                (Chat API search-preview models or Responses API tool)
            supports_audio: Whether the model supports audio inputs
            supports_json_mode: Whether the model supports JSON mode
            pricing: Pricing information for the model
            input_modalities: List of supported input modalities (e.g., ["text", "image"]).
            output_modalities: List of supported output modalities (e.g., ["text", "image"]).
            min_version: Minimum version for dated model variants
            aliases: List of aliases for this model
            supported_parameters: List of parameter references supported by this model
            constraints: Dictionary of constraints for validation
            inline_parameters: Dictionary of inline parameter configurations from schema
            web_search_billing: Optional web-search billing policy and rates for the model
        """
        self.model_name = model_name
        self.openai_model_name = openai_model_name
        self.context_window = context_window
        self.max_output_tokens = max_output_tokens
        self.deprecation = deprecation
        self.supports_vision = supports_vision
        self.supports_functions = supports_functions
        self.supports_streaming = supports_streaming
        self.supports_structured = supports_structured
        self.supports_web_search = supports_web_search
        self.supports_audio = supports_audio
        self.supports_json_mode = supports_json_mode
        self.pricing = pricing
        self.input_modalities = input_modalities or []
        self.output_modalities = output_modalities or []
        self.min_version = min_version
        self.aliases = aliases or []
        self.supported_parameters = supported_parameters or []
        self._constraints = constraints or {}
        self._inline_parameters = inline_parameters or {}
        self.web_search_billing = web_search_billing

    @property
    def inline_parameters(self) -> Dict[str, Any]:
        """Inline parameter definitions for this model (if any)."""
        return self._inline_parameters

    @property
    def is_sunset(self) -> bool:
        """Check if the model is sunset."""
        return self.deprecation.status == "sunset"

    @property
    def is_deprecated(self) -> bool:
        """Check if the model is deprecated or sunset."""
        return self.deprecation.status in ["deprecated", "sunset"]

    def get_constraint(self, ref: str) -> Optional[Union[NumericConstraint, EnumConstraint, ObjectConstraint]]:
        """Get a constraint by reference.

        Args:
            ref: Constraint reference (key in constraints dict)

        Returns:
            The constraint or None if not found
        """
        return self._constraints.get(ref)

    def validate_parameter(self, name: str, value: Any, used_params: Optional[Set[str]] = None) -> None:
        """Validate a parameter against constraints.

        Args:
            name: Parameter name
            value: Parameter value to validate
            used_params: Optional set to track used parameters

        Raises:
            ParameterNotSupportedError: If the parameter is not supported
            ConstraintNotFoundError: If a constraint reference is invalid
            ModelRegistryError: If validation fails for other reasons
        """
        # Track used parameters if requested
        if used_params is not None:
            used_params.add(name)

        # Check if we have inline parameter constraints
        if name in self._inline_parameters:
            self._validate_inline_parameter(name, value)
            return

        # Find matching parameter reference
        param_ref = next(
            (p for p in self.supported_parameters if p.ref == name or p.ref.split(".")[-1] == name),
            None,
        )

        if not param_ref:
            # If we're validating a parameter explicitly, it should be supported
            raise ParameterNotSupportedError(
                f"Parameter '{name}' is not supported for model '{self.model_name}'",
                param_name=name,
                value=value,
                model=self.model_name,
            )

        constraint = self.get_constraint(param_ref.ref)
        if not constraint:
            # If a parameter references a constraint, the constraint should exist
            raise ConstraintNotFoundError(
                f"Constraint reference '{param_ref.ref}' not found for parameter '{name}'",
                ref=param_ref.ref,
            )

        # Validate based on constraint type
        if isinstance(constraint, NumericConstraint):
            constraint.validate(name=name, value=value)
        elif isinstance(constraint, EnumConstraint):
            constraint.validate(name=name, value=value)
        elif isinstance(constraint, ObjectConstraint):
            constraint.validate(name=name, value=value)
        else:
            # This shouldn't happen with proper type checking, but just in case
            raise TypeError(f"Unknown constraint type for '{name}': {type(constraint).__name__}")

    def validate_parameters(self, params: Dict[str, Any], used_params: Optional[Set[str]] = None) -> None:
        """Validate multiple parameters against constraints.

        Args:
            params: Dictionary of parameter names and values to validate
            used_params: Optional set to track used parameters

        Raises:
            ModelRegistryError: If validation fails for any parameter
        """
        for name, value in params.items():
            self.validate_parameter(name, value, used_params)

    def _validate_inline_parameter(self, name: str, value: Any) -> None:
        """Validate a parameter using inline parameter constraints.

        Args:
            name: Parameter name
            value: Parameter value to validate

        Raises:
            ValidationError: If validation fails
        """
        from .errors import ParameterValidationError

        param_config = self._inline_parameters[name]
        param_type = param_config.get("type")

        # Handle numeric parameters (temperature, top_p, etc.)
        if param_type == "number":
            if not isinstance(value, (int, float)):
                raise ParameterValidationError(
                    f"Parameter '{name}' expects a numeric value",
                    param_name=name,
                    value=value,
                    model=self.model_name,
                )
            min_val = param_config.get("min")
            max_val = param_config.get("max")

            if min_val is not None and value < min_val:
                raise ParameterValidationError(
                    f"Parameter '{name}' value {value} is below minimum {min_val}",
                    param_name=name,
                    value=value,
                    model=self.model_name,
                )

            if max_val is not None and value > max_val:
                raise ParameterValidationError(
                    f"Parameter '{name}' value {value} is above maximum {max_val}",
                    param_name=name,
                    value=value,
                    model=self.model_name,
                )

        # Handle canonical numeric schema (min_value/max_value)
        elif param_type == "numeric":
            allow_float = param_config.get("allow_float", True)
            allow_int = param_config.get("allow_int", True)

            if not isinstance(value, (int, float)):
                raise ParameterValidationError(
                    f"Parameter '{name}' expects a numeric value",
                    param_name=name,
                    value=value,
                    model=self.model_name,
                )

            # Enforce numeric subtype rules when provided
            if isinstance(value, float) and not allow_float:
                raise ParameterValidationError(
                    f"Parameter '{name}' does not allow float values",
                    param_name=name,
                    value=value,
                    model=self.model_name,
                )
            if isinstance(value, int) and not allow_int:
                raise ParameterValidationError(
                    f"Parameter '{name}' does not allow integer values",
                    param_name=name,
                    value=value,
                    model=self.model_name,
                )

            min_val = param_config.get("min_value")
            max_val = param_config.get("max_value")

            if min_val is not None and value < min_val:
                raise ParameterValidationError(
                    f"Parameter '{name}' value {value} is below minimum {min_val}",
                    param_name=name,
                    value=value,
                    model=self.model_name,
                )

            if max_val is not None and value > max_val:
                raise ParameterValidationError(
                    f"Parameter '{name}' value {value} is above maximum {max_val}",
                    param_name=name,
                    value=value,
                    model=self.model_name,
                )

        # Handle integer parameters (max_tokens, etc.)
        elif param_type == "integer":
            if not isinstance(value, int):
                raise ParameterValidationError(
                    f"Parameter '{name}' expects an integer value",
                    param_name=name,
                    value=value,
                    model=self.model_name,
                )
            # Support both min/max (models.yaml) and min_value/max_value (constraints)
            min_val = param_config.get("min") or param_config.get("min_value")
            max_val = param_config.get("max") or param_config.get("max_value")

            if min_val is not None and value < min_val:
                raise ParameterValidationError(
                    f"Parameter '{name}' value {value} is below minimum {min_val}",
                    param_name=name,
                    value=value,
                    model=self.model_name,
                )

            if max_val is not None and value > max_val:
                raise ParameterValidationError(
                    f"Parameter '{name}' value {value} is above maximum {max_val}",
                    param_name=name,
                    value=value,
                    model=self.model_name,
                )
        # Handle enum parameters declared inline
        elif param_type == "enum":
            allowed_values = param_config.get("enum", [])
            if value not in allowed_values:
                raise ParameterValidationError(
                    f"Parameter '{name}' value '{value}' is not one of: {', '.join(map(str, allowed_values))}",
                    param_name=name,
                    value=value,
                    model=self.model_name,
                )

Attributes

inline_parameters property

Inline parameter definitions for this model (if any).

is_deprecated property

Check if the model is deprecated or sunset.

is_sunset property

Check if the model is sunset.
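
A brief, illustrative sketch of reading these properties; the values simply reflect the registry data loaded for the model:

from openai_model_registry import ModelRegistry

registry = ModelRegistry.get_default()
capabilities = registry.get_capabilities("gpt-4o")

print(f"Deprecated or sunset: {capabilities.is_deprecated}")
print(f"Sunset: {capabilities.is_sunset}")
# Empty mapping when the model defines no inline parameters
print(f"Inline parameters: {capabilities.inline_parameters}")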

Functions

__init__(model_name, openai_model_name, context_window, max_output_tokens, deprecation, supports_vision=False, supports_functions=False, supports_streaming=False, supports_structured=False, supports_web_search=False, supports_audio=False, supports_json_mode=False, pricing=None, input_modalities=None, output_modalities=None, min_version=None, aliases=None, supported_parameters=None, constraints=None, inline_parameters=None, web_search_billing=None)

Initialize model capabilities.

Parameters:

  • model_name (str): The model identifier in the registry. Required.
  • openai_model_name (str): The model name to use with OpenAI API. Required.
  • context_window (int): Maximum context window size in tokens. Required.
  • max_output_tokens (int): Maximum output tokens. Required.
  • deprecation (DeprecationInfo): Deprecation metadata (mandatory in current schema). Required.
  • supports_vision (bool): Whether the model supports vision inputs. Default: False.
  • supports_functions (bool): Whether the model supports function calling. Default: False.
  • supports_streaming (bool): Whether the model supports streaming. Default: False.
  • supports_structured (bool): Whether the model supports structured output. Default: False.
  • supports_web_search (bool): Whether the model supports web search (Chat API search-preview models or Responses API tool). Default: False.
  • supports_audio (bool): Whether the model supports audio inputs. Default: False.
  • supports_json_mode (bool): Whether the model supports JSON mode. Default: False.
  • pricing (Optional[PricingInfo]): Pricing information for the model. Default: None.
  • input_modalities (Optional[List[str]]): List of supported input modalities (e.g., ["text", "image"]). Default: None.
  • output_modalities (Optional[List[str]]): List of supported output modalities (e.g., ["text", "image"]). Default: None.
  • min_version (Optional[ModelVersion]): Minimum version for dated model variants. Default: None.
  • aliases (Optional[List[str]]): List of aliases for this model. Default: None.
  • supported_parameters (Optional[List[ParameterReference]]): List of parameter references supported by this model. Default: None.
  • constraints (Optional[Dict[str, Union[NumericConstraint, EnumConstraint, ObjectConstraint]]]): Dictionary of constraints for validation. Default: None.
  • inline_parameters (Optional[Dict[str, Dict[str, Any]]]): Dictionary of inline parameter configurations from schema. Default: None.
  • web_search_billing (Optional[WebSearchBilling]): Optional web-search billing policy and rates for the model. Default: None.

Source code in src/openai_model_registry/registry.py
def __init__(
    self,
    model_name: str,
    openai_model_name: str,
    context_window: int,
    max_output_tokens: int,
    deprecation: DeprecationInfo,
    supports_vision: bool = False,
    supports_functions: bool = False,
    supports_streaming: bool = False,
    supports_structured: bool = False,
    supports_web_search: bool = False,
    supports_audio: bool = False,
    supports_json_mode: bool = False,
    pricing: Optional["PricingInfo"] = None,
    input_modalities: Optional[List[str]] = None,
    output_modalities: Optional[List[str]] = None,
    min_version: Optional[ModelVersion] = None,
    aliases: Optional[List[str]] = None,
    supported_parameters: Optional[List[ParameterReference]] = None,
    constraints: Optional[Dict[str, Union[NumericConstraint, EnumConstraint, ObjectConstraint]]] = None,
    inline_parameters: Optional[Dict[str, Dict[str, Any]]] = None,
    web_search_billing: Optional["WebSearchBilling"] = None,
):
    """Initialize model capabilities.

    Args:
        model_name: The model identifier in the registry
        openai_model_name: The model name to use with OpenAI API
        context_window: Maximum context window size in tokens
        max_output_tokens: Maximum output tokens
        deprecation: Deprecation metadata (mandatory in current schema)
        supports_vision: Whether the model supports vision inputs
        supports_functions: Whether the model supports function calling
        supports_streaming: Whether the model supports streaming
        supports_structured: Whether the model supports structured output
        supports_web_search: Whether the model supports web search
            (Chat API search-preview models or Responses API tool)
        supports_audio: Whether the model supports audio inputs
        supports_json_mode: Whether the model supports JSON mode
        pricing: Pricing information for the model
        input_modalities: List of supported input modalities (e.g., ["text", "image"]).
        output_modalities: List of supported output modalities (e.g., ["text", "image"]).
        min_version: Minimum version for dated model variants
        aliases: List of aliases for this model
        supported_parameters: List of parameter references supported by this model
        constraints: Dictionary of constraints for validation
        inline_parameters: Dictionary of inline parameter configurations from schema
        web_search_billing: Optional web-search billing policy and rates for the model
    """
    self.model_name = model_name
    self.openai_model_name = openai_model_name
    self.context_window = context_window
    self.max_output_tokens = max_output_tokens
    self.deprecation = deprecation
    self.supports_vision = supports_vision
    self.supports_functions = supports_functions
    self.supports_streaming = supports_streaming
    self.supports_structured = supports_structured
    self.supports_web_search = supports_web_search
    self.supports_audio = supports_audio
    self.supports_json_mode = supports_json_mode
    self.pricing = pricing
    self.input_modalities = input_modalities or []
    self.output_modalities = output_modalities or []
    self.min_version = min_version
    self.aliases = aliases or []
    self.supported_parameters = supported_parameters or []
    self._constraints = constraints or {}
    self._inline_parameters = inline_parameters or {}
    self.web_search_billing = web_search_billing

get_constraint(ref)

Get a constraint by reference.

Parameters:

  • ref (str): Constraint reference (key in constraints dict). Required.

Returns:

  • Optional[Union[NumericConstraint, EnumConstraint, ObjectConstraint]]: The constraint or None if not found.

Source code in src/openai_model_registry/registry.py
def get_constraint(self, ref: str) -> Optional[Union[NumericConstraint, EnumConstraint, ObjectConstraint]]:
    """Get a constraint by reference.

    Args:
        ref: Constraint reference (key in constraints dict)

    Returns:
        The constraint or None if not found
    """
    return self._constraints.get(ref)

validate_parameter(name, value, used_params=None)

Validate a parameter against constraints.

Parameters:

  • name (str): Parameter name. Required.
  • value (Any): Parameter value to validate. Required.
  • used_params (Optional[Set[str]]): Optional set to track used parameters. Default: None.

Raises:

  • ParameterNotSupportedError: If the parameter is not supported.
  • ConstraintNotFoundError: If a constraint reference is invalid.
  • ModelRegistryError: If validation fails for other reasons.

Source code in src/openai_model_registry/registry.py
def validate_parameter(self, name: str, value: Any, used_params: Optional[Set[str]] = None) -> None:
    """Validate a parameter against constraints.

    Args:
        name: Parameter name
        value: Parameter value to validate
        used_params: Optional set to track used parameters

    Raises:
        ParameterNotSupportedError: If the parameter is not supported
        ConstraintNotFoundError: If a constraint reference is invalid
        ModelRegistryError: If validation fails for other reasons
    """
    # Track used parameters if requested
    if used_params is not None:
        used_params.add(name)

    # Check if we have inline parameter constraints
    if name in self._inline_parameters:
        self._validate_inline_parameter(name, value)
        return

    # Find matching parameter reference
    param_ref = next(
        (p for p in self.supported_parameters if p.ref == name or p.ref.split(".")[-1] == name),
        None,
    )

    if not param_ref:
        # If we're validating a parameter explicitly, it should be supported
        raise ParameterNotSupportedError(
            f"Parameter '{name}' is not supported for model '{self.model_name}'",
            param_name=name,
            value=value,
            model=self.model_name,
        )

    constraint = self.get_constraint(param_ref.ref)
    if not constraint:
        # If a parameter references a constraint, the constraint should exist
        raise ConstraintNotFoundError(
            f"Constraint reference '{param_ref.ref}' not found for parameter '{name}'",
            ref=param_ref.ref,
        )

    # Validate based on constraint type
    if isinstance(constraint, NumericConstraint):
        constraint.validate(name=name, value=value)
    elif isinstance(constraint, EnumConstraint):
        constraint.validate(name=name, value=value)
    elif isinstance(constraint, ObjectConstraint):
        constraint.validate(name=name, value=value)
    else:
        # This shouldn't happen with proper type checking, but just in case
        raise TypeError(f"Unknown constraint type for '{name}': {type(constraint).__name__}")

validate_parameters(params, used_params=None)

Validate multiple parameters against constraints.

Parameters:

  • params (Dict[str, Any]): Dictionary of parameter names and values to validate. Required.
  • used_params (Optional[Set[str]]): Optional set to track used parameters. Default: None.

Raises:

  • ModelRegistryError: If validation fails for any parameter.

Source code in src/openai_model_registry/registry.py
def validate_parameters(self, params: Dict[str, Any], used_params: Optional[Set[str]] = None) -> None:
    """Validate multiple parameters against constraints.

    Args:
        params: Dictionary of parameter names and values to validate
        used_params: Optional set to track used parameters

    Raises:
        ModelRegistryError: If validation fails for any parameter
    """
    for name, value in params.items():
        self.validate_parameter(name, value, used_params)


Notes

  • input_modalities and output_modalities are provided in addition to the legacy modalities (input) field, for clarity.
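
A short example of reading the modality lists; the exact values depend on the registry data for the model:

from openai_model_registry import ModelRegistry

registry = ModelRegistry.get_default()
capabilities = registry.get_capabilities("gpt-4o")

# Both lists default to [] when the registry provides no modality data
print(f"Input modalities: {capabilities.input_modalities}")
print(f"Output modalities: {capabilities.output_modalities}")

if "image" in capabilities.input_modalities or capabilities.supports_vision:
    print("Model accepts image inputs")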

Usage Examples

Accessing Basic Properties

from openai_model_registry import ModelRegistry

registry = ModelRegistry.get_default()
capabilities = registry.get_capabilities("gpt-4o")

# Access basic properties
print(f"Model name: {capabilities.openai_model_name}")
print(f"Context window: {capabilities.context_window}")
print(f"Max output tokens: {capabilities.max_output_tokens}")
print(f"Supports streaming: {capabilities.supports_streaming}")
print(f"Supports structured output: {capabilities.supports_structured}")

# Expected output: Model name: gpt-4o
#                  Context window: 128000
#                  Max output tokens: 16384
#                  Supports streaming: True
#                  Supports structured output: True

Validating Parameters

from openai_model_registry import ModelRegistry, ModelRegistryError

registry = ModelRegistry.get_default()
capabilities = registry.get_capabilities("gpt-4o")

# Validate a parameter
try:
    capabilities.validate_parameter("temperature", 0.7)
    print("Temperature 0.7 is valid")
except ModelRegistryError as e:
    print(f"Invalid parameter: {e}")

# Validate with context (tracking used parameters)
used_params = set()
capabilities.validate_parameter("temperature", 0.7, used_params)
print(f"Used parameters: {used_params}")  # Contains 'temperature'
# Expected output: Temperature 0.7 is valid
#                  Used parameters: {'temperature'}

# Validate multiple parameters
params_to_validate = {"temperature": 0.7, "top_p": 0.9, "max_completion_tokens": 500}

for param_name, value in params_to_validate.items():
    try:
        capabilities.validate_parameter(param_name, value, used_params)
        print(f"✓ {param_name}={value} is valid")
    except ModelRegistryError as e:
        print(f"✗ {param_name}={value} is invalid: {e}")

Working with Parameter Constraints

from openai_model_registry import ModelRegistry
from openai_model_registry.constraints import NumericConstraint

registry = ModelRegistry.get_default()
capabilities = registry.get_capabilities("gpt-4o")

# Get a specific constraint (returns None if no such constraint is registered for this model)
temperature_constraint = capabilities.get_constraint("temperature")
if isinstance(temperature_constraint, NumericConstraint):
    print(f"Type: {type(temperature_constraint).__name__}")
    print(f"Min value: {temperature_constraint.min_value}")
    print(f"Max value: {temperature_constraint.max_value}")
    print(f"Description: {temperature_constraint.description}")

# List all parameter references
for param_ref in capabilities.supported_parameters:
    constraint = capabilities.get_constraint(param_ref.ref)
    print(f"Parameter: {param_ref.ref}")
    print(f"  Description: {param_ref.description}")
    print(f"  Constraint type: {type(constraint).__name__ if constraint else 'None'}")

Creating Custom Capabilities

from openai_model_registry import ModelRegistry
from openai_model_registry.registry import ModelCapabilities
from openai_model_registry.constraints import NumericConstraint, EnumConstraint
from typing import Dict, Union

# Get existing constraints for reference
registry = ModelRegistry.get_default()
base_capabilities = registry.get_capabilities("gpt-4o")

# Create custom capabilities (deprecation metadata is required, so reuse the base model's here)
custom_capabilities = ModelCapabilities(
    model_name="custom-model",
    openai_model_name="custom-model",
    context_window=8192,
    max_output_tokens=4096,
    deprecation=base_capabilities.deprecation,
    supports_streaming=True,
    supports_structured=True,
)

# Copy supported parameters from base model
custom_capabilities.supported_parameters = base_capabilities.supported_parameters

# Add constraints manually
constraints: Dict[str, Union[NumericConstraint, EnumConstraint]] = {
    "temperature": NumericConstraint(
        min_value=0.0,
        max_value=1.0,
        allow_float=True,
        allow_int=True,
        description="Custom temperature description",
    ),
    "response_format": EnumConstraint(
        allowed_values=["text", "json_schema"],
        description="Custom response format description",
    ),
}
# _constraints is a private attribute; assigning it directly is shown here for illustration only
custom_capabilities._constraints = constraints

# Use custom capabilities
custom_capabilities.validate_parameter("temperature", 0.7)

Checking Deprecation Status

from openai_model_registry import ModelRegistry

registry = ModelRegistry.get_default()
capabilities = registry.get_capabilities("gpt-4o")

# Check if model is deprecated
if capabilities.is_deprecated:
    print(f"⚠️  Model is deprecated since {capabilities.deprecation.deprecation_date}")
    if capabilities.deprecation.sunset_date:
        print(f"🚫 Model will be sunset on {capabilities.deprecation.sunset_date}")

Handling Validation Errors

from openai_model_registry import ModelRegistry, ModelRegistryError

registry = ModelRegistry.get_default()
capabilities = registry.get_capabilities("gpt-4o")

# Validate parameters
try:
    capabilities.validate_parameter("temperature", 0.7)
    print("✅ Temperature value is valid")
except ModelRegistryError as e:
    print(f"❌ Invalid temperature: {e}")

Checking Feature Support

from openai_model_registry import ModelRegistry

registry = ModelRegistry.get_default()
capabilities = registry.get_capabilities("gpt-4o")

# Check feature support
if capabilities.supports_structured:
    print("✅ Model supports structured output")
if capabilities.supports_streaming:
    print("✅ Model supports streaming")