flama 0.0.2+2
Flama is a Dart package designed to streamline interactions with Large Language Models (LLMs). The example below loads a local GGUF model and streams the generated tokens to stdout.
import 'dart:io';

import 'package:flama/flama.dart';

Future<void> main(List<String> args) async {
  // Download your model from Hugging Face first, e.g.:
  // huggingface-cli download stabilityai/stablelm-2-zephyr-1_6b stablelm-2-zephyr-1_6b-Q4_0.gguf --local-dir .

  // Load the local GGUF model file.
  final llama = await LlamaLocal.create(
    const LlamaLocalParams(model: 'stablelm-2-zephyr-1_6b-Q4_0.gguf'),
  );

  const prompt = 'How do I build ML systems?';

  // Calling the instance with a prompt returns a stream of generated tokens.
  final tokenStream = llama(prompt);

  // Print each token as it arrives.
  await for (final token in tokenStream) {
    stdout.write(token);
  }
  stdout.writeln();
  await stdout.flush();

  // Release the model's resources when done.
  await llama.dispose();
}
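If you prefer the full response as a single string instead of streaming it to stdout, you can collect the token stream yourself with plain Dart. The sketch below is illustrative, not part of the package's documented API surface: it reuses only the calls shown above (LlamaLocal.create, LlamaLocalParams, invoking the instance with a prompt, and dispose), and the model filename and prompt text are assumptions carried over from the example.

import 'package:flama/flama.dart';

Future<void> main() async {
  // Assumes the same GGUF model file as the example above.
  final llama = await LlamaLocal.create(
    const LlamaLocalParams(model: 'stablelm-2-zephyr-1_6b-Q4_0.gguf'),
  );

  // Collect the streamed tokens into a single response string.
  final buffer = StringBuffer();
  await for (final token in llama('Summarize what a GGUF file is.')) {
    buffer.write(token);
  }
  final answer = buffer.toString();
  print(answer);

  await llama.dispose();
}

Collecting into a StringBuffer trades the incremental output of the streaming example for a simpler return value, which is convenient when the response is passed on to other code rather than shown to a user as it is generated.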