OllamaConfig.fromLLMConfig constructor

OllamaConfig.fromLLMConfig(
  LLMConfig config
)

Creates an OllamaConfig from a unified LLMConfig.

Implementation

/// Creates an [OllamaConfig] from a unified [LLMConfig].
///
/// Shared fields are copied across directly; Ollama-specific options are
/// looked up on the config's extension map by key. The source [config] is
/// retained as `originalConfig`.
factory OllamaConfig.fromLLMConfig(LLMConfig config) {
  // Shorthand for reading a typed extension value by key.
  T? ext<T>(String key) => config.getExtension<T>(key);

  return OllamaConfig(
    // Fields shared with the unified config.
    baseUrl: config.baseUrl,
    apiKey: config.apiKey,
    model: config.model,
    maxTokens: config.maxTokens,
    temperature: config.temperature,
    systemPrompt: config.systemPrompt,
    timeout: config.timeout,
    topP: config.topP,
    topK: config.topK,
    tools: config.tools,
    // Ollama-specific extensions.
    jsonSchema: ext<StructuredOutputFormat>('jsonSchema'),
    numCtx: ext<int>('numCtx'),
    numGpu: ext<int>('numGpu'),
    numThread: ext<int>('numThread'),
    numa: ext<bool>('numa'),
    numBatch: ext<int>('numBatch'),
    keepAlive: ext<String>('keepAlive'),
    raw: ext<bool>('raw'),
    reasoning: ext<bool>('reasoning'),
    // Retain the original config alongside the mapped values.
    originalConfig: config,
  );
}