init method
Future<void> init({
  int maxTokens = 1024,
  double temperature = 1.0,
  int randomSeed = 1,
  int topK = 1,
  int? numOfSupportedLoraRanks,
  List<int>? supportedLoraRanks,
  String? loraPath,
})
override
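A minimal usage sketch, assuming `inference` is an instance of the class that declares this override; the argument values and the commented-out LoRA options are illustrative only, not defaults taken from the package.

// Minimal usage sketch. `inference` is assumed to be an instance of the
// class that declares this `init` override; argument values are illustrative.
Future<void> initWithCustomOptions(dynamic inference) async {
  await inference.init(
    maxTokens: 2048,
    temperature: 0.8,
    randomSeed: 42,
    topK: 40,
    // Pass LoRA options only if the model was prepared with LoRA support:
    // numOfSupportedLoraRanks: 2,
    // supportedLoraRanks: [4, 8],
    // loraPath: 'assets/lora_weights.bin',
  );
}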
Implementation
@override
Future<void> init({
  int maxTokens = 1024,
  double temperature = 1.0,
  int randomSeed = 1,
  int topK = 1,
  int? numOfSupportedLoraRanks,
  List<int>? supportedLoraRanks,
  String? loraPath,
}) async {
  try {
    // Load the MediaPipe GenAI WASM assets from the CDN.
    final fileset = await promiseToFuture<FilesetResolver>(
      FilesetResolver.forGenAiTasks(
        'https://cdn.jsdelivr.net/npm/@mediapipe/tasks-genai/wasm',
      ),
    );
    // Create the LLM inference engine, including the optional LoRA
    // settings only when the caller has provided them.
    llmInference = await promiseToFuture<LlmInference>(
      LlmInference.createFromOptions(
        fileset,
        jsify({
          'baseOptions': {'modelAssetPath': _path},
          'maxTokens': maxTokens,
          'randomSeed': randomSeed,
          'topK': topK,
          'temperature': temperature,
          if (numOfSupportedLoraRanks != null)
            'numOfSupportedLoraRanks': numOfSupportedLoraRanks,
          if (supportedLoraRanks != null)
            'supportedLoraRanks': supportedLoraRanks,
          if (loraPath != null) 'loraPath': loraPath,
        }),
      ),
    );
    // Signal that initialization finished successfully.
    if (!_initCompleter.isCompleted) {
      _initCompleter.complete(true);
    }
  } catch (e) {
    throw Exception('Failed to initialize inference: $e');
  }
}
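Because init wraps any failure in a plain Exception, the caller decides how to surface it. The sketch below shows one assumed call-site pattern; it is not part of the package itself.

// Error-handling sketch: `inference` again stands in for an instance of the
// enclosing class. init rethrows failures as Exception, so a simple
// try/catch at the call site is enough to report them.
Future<bool> tryInit(dynamic inference) async {
  try {
    await inference.init();
    return true;
  } on Exception catch (e) {
    // e carries 'Failed to initialize inference: <original error>'.
    print('Model initialization failed: $e');
    return false;
  }
}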