getCapabilities static method
Returns the set of capabilities supported by the provider identified by providerId. Unknown provider IDs yield an empty set.
Implementation
static Set<LLMCapability> getCapabilities(String providerId) {
  switch (providerId) {
    case 'openai':
      return {
        LLMCapability.chat,
        LLMCapability.streaming,
        LLMCapability.embedding,
        LLMCapability.modelListing,
        LLMCapability.toolCalling,
        LLMCapability.reasoning,
        LLMCapability.vision,
        LLMCapability.textToSpeech,
        LLMCapability.speechToText,
        LLMCapability.imageGeneration,
        // Note: openaiResponses is added dynamically based on configuration
      };
    case 'anthropic':
      return {
        LLMCapability.chat,
        LLMCapability.streaming,
        LLMCapability.toolCalling,
        LLMCapability.reasoning,
        LLMCapability.vision,
      };
    case 'google':
      return {
        LLMCapability.chat,
        LLMCapability.streaming,
        LLMCapability.toolCalling,
        LLMCapability.reasoning,
        LLMCapability.vision,
        LLMCapability.imageGeneration,
      };
    case 'deepseek':
      return {
        LLMCapability.chat,
        LLMCapability.streaming,
        LLMCapability.toolCalling,
        LLMCapability.reasoning,
      };
    case 'groq':
      return {
        LLMCapability.chat,
        LLMCapability.streaming,
        LLMCapability.toolCalling,
      };
    case 'xai':
      return {
        LLMCapability.chat,
        LLMCapability.streaming,
        LLMCapability.toolCalling,
        LLMCapability.reasoning,
        LLMCapability.embedding,
      };
    case 'phind':
      return {
        LLMCapability.chat,
        LLMCapability.streaming,
        LLMCapability.toolCalling,
      };
    case 'elevenlabs':
      return {
        LLMCapability.textToSpeech,
        LLMCapability.speechToText,
      };
    case 'ollama':
      return {
        LLMCapability.chat,
        LLMCapability.streaming,
        LLMCapability.embedding,
        LLMCapability.modelListing,
      };
    // OpenAI-compatible providers
    case 'openrouter':
      return {
        LLMCapability.chat,
        LLMCapability.streaming,
        LLMCapability.toolCalling,
        LLMCapability.vision,
      };
    case 'github-copilot':
      return {
        LLMCapability.chat,
        LLMCapability.streaming,
        LLMCapability.toolCalling,
      };
    case 'together-ai':
      return {
        LLMCapability.chat,
        LLMCapability.streaming,
        LLMCapability.toolCalling,
      };
    default:
      return <LLMCapability>{};
  }
}
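
For illustration, a minimal sketch of how a caller might use this method to branch on provider capabilities before making a request. The enclosing class name ProviderRegistry is an assumption (the declaring class is not shown in this section); substitute the class that actually defines getCapabilities.

// Minimal usage sketch; `ProviderRegistry` is a placeholder for the class
// that declares getCapabilities.
void main() {
  // Query the static capability set for a provider ID.
  final caps = ProviderRegistry.getCapabilities('anthropic');

  // Branch on a specific capability before sending image content.
  if (caps.contains(LLMCapability.vision)) {
    print('anthropic supports vision input');
  }

  // Unknown provider IDs return an empty set rather than throwing.
  assert(ProviderRegistry.getCapabilities('unknown-provider').isEmpty);
}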