OllamaConfig constructor
const OllamaConfig({
- String baseUrl = ProviderDefaults.ollamaBaseUrl,
- String? apiKey,
- String model = ProviderDefaults.ollamaDefaultModel,
- int? maxTokens,
- double? temperature,
- String? systemPrompt,
- Duration? timeout,
- double? topP,
- int? topK,
- List<Tool>? tools,
- StructuredOutputFormat? jsonSchema,
- int? numCtx,
- int? numGpu,
- int? numThread,
- bool? numa,
- int? numBatch,
- String? keepAlive,
- bool? raw,
- bool? reasoning,
- LLMConfig? originalConfig,
Implementation
/// Creates an Ollama provider configuration.
///
/// All parameters are optional named parameters. [baseUrl] defaults to
/// [ProviderDefaults.ollamaBaseUrl] and [model] defaults to
/// [ProviderDefaults.ollamaDefaultModel]; every other parameter is nullable
/// and defaults to `null` (i.e. "not set").
///
/// [originalConfig] is not exposed as a public field: the initializer list
/// stores it in the private `_originalConfig` member. Presumably this keeps
/// a reference to the unified [LLMConfig] this config was derived from —
/// verify against the enclosing class, which is outside this view.
///
/// The constructor is `const`, so all fields it initializes must be final.
const OllamaConfig({
this.baseUrl = ProviderDefaults.ollamaBaseUrl,
this.apiKey,
this.model = ProviderDefaults.ollamaDefaultModel,
this.maxTokens,
this.temperature,
this.systemPrompt,
this.timeout,
this.topP,
this.topK,
this.tools,
this.jsonSchema,
// Ollama-specific parameters
// NOTE(review): names suggest these map to Ollama model options
// (num_ctx, num_gpu, num_thread, numa, num_batch, keep_alive, raw) —
// confirm against the serialization code elsewhere in the file.
this.numCtx,
this.numGpu,
this.numThread,
this.numa,
this.numBatch,
this.keepAlive,
this.raw,
this.reasoning,
LLMConfig? originalConfig,
}) : _originalConfig = originalConfig;