models method
Gets the models available from the provider.
cancelToken - Optional token used to cancel the request.
Returns a list of available models, or throws an LLMError on failure.
Implementation
@override
Future<List<AIModel>> models({CancelToken? cancelToken}) async {
  final responseData = await client.get('models', cancelToken: cancelToken);
  // responseData is already Map<String, dynamic> from client.get()
  final modelsData = responseData['data'] as List?;
  if (modelsData == null) {
    return [];
  }
  // Convert OpenAI model format to AIModel
  final models = modelsData
      .map((modelData) {
        if (modelData is! Map<String, dynamic>) return null;
        try {
          return AIModel(
            id: modelData['id'] as String,
            description: modelData['description'] as String?,
            object: modelData['object'] as String? ?? 'model',
            ownedBy: modelData['owned_by'] as String?,
          );
        } catch (e) {
          client.logger.warning('Failed to parse model: $e');
          return null;
        }
      })
      .whereType<AIModel>() // drop entries that failed to parse
      .toList();
  client.logger.fine('Retrieved ${models.length} models from OpenAI');
  return models;
}
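Example usage
A minimal sketch of calling this method. Here `provider` stands in for an instance of the implementing provider class, and `CancelToken` is assumed to be the HTTP client's cancellation token (for example, dio's); these names are illustrative and not part of the documented API, and the necessary imports come from the surrounding package.
// Hypothetical caller: fetch the model list and print each id.
Future<void> printAvailableModels(dynamic provider) async {
  final cancelToken = CancelToken(); // assumed: dio-style cancel token
  try {
    final models = await provider.models(cancelToken: cancelToken);
    for (final model in models) {
      print('${model.id} (owned by: ${model.ownedBy ?? 'unknown'})');
    }
  } on LLMError catch (e) {
    // Failures surface as LLMError, per the contract above.
    print('Failed to fetch models: $e');
  }
}
The try/on clause mirrors the documented contract: a successful call yields the parsed AIModel list, and request failures surface as LLMError.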