// picolm_flutter 0.0.1 — example app.
// picolm_flutter: ^0.0.1
// On-device LLM inference engine for Flutter. Run LLaMA-architecture models
// via Dart FFI — no cloud, no internet, no API keys.
import 'dart:io';
import 'package:flutter/material.dart';
import 'package:http/http.dart' as http;
import 'package:path_provider/path_provider.dart';
import 'package:picolm_flutter/picolm_flutter.dart';
// Quantized TinyLlama chat model (GGUF, Q4_K_M) hosted on Hugging Face.
const _modelUrl =
'https://huggingface.co/TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF/resolve/main/tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf';
// File name the model is saved under in the app documents directory.
const _modelFileName = 'tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf';
// Approximate download size in MB; used for UI text and as a fallback
// denominator when the server omits Content-Length.
const _modelSizeMB = 638;
/// Entry point: boots the PicoLM example application.
void main() => runApp(const PicoLMExampleApp());
/// Root widget: configures a Material 3 dark theme and shows the home page.
class PicoLMExampleApp extends StatelessWidget {
  const PicoLMExampleApp({super.key});

  @override
  Widget build(BuildContext context) {
    // Build the theme once as a local for readability.
    final darkTheme = ThemeData(
      brightness: Brightness.dark,
      useMaterial3: true,
      colorSchemeSeed: Colors.deepPurple,
    );
    return MaterialApp(
      home: const PicoLMHomePage(),
      theme: darkTheme,
      title: 'PicoLM Flutter',
    );
  }
}
/// Home screen: lets the user download the model, load it, and run
/// text generation. All logic lives in the companion [State] object.
class PicoLMHomePage extends StatefulWidget {
  const PicoLMHomePage({super.key});

  @override
  State<PicoLMHomePage> createState() => _PicoLMHomePageState();
}
/// State for the home screen: downloads the GGUF model file, loads it
/// through the PicoLM FFI bindings, and streams generated tokens into
/// the output pane.
///
/// Fixes vs. the previous version:
/// * the [http.Client] is now closed in `finally` (it was leaked);
/// * the file sink is closed even when the download stream throws;
/// * every `setState`/`context` use after an `await` is guarded with
///   [mounted] to avoid "setState after dispose" crashes;
/// * a model loaded after the widget was disposed is released instead
///   of leaked;
/// * a zero `Content-Length` no longer produces a bogus progress value.
class _PicoLMHomePageState extends State<PicoLMHomePage> {
  // Loaded inference handle; null until _loadModel succeeds.
  PicoLM? _model;
  final _promptController = TextEditingController(
    text: 'Explain gravity in simple terms.',
  );
  String _output = '';
  // Absolute path of the downloaded GGUF file, once available on disk.
  String? _modelPath;
  bool _isDownloading = false;
  bool _isLoading = false;
  bool _isGenerating = false;
  // Fraction in [0, 1] driving the LinearProgressIndicator.
  double _downloadProgress = 0;
  String _statusText = 'No model downloaded';

  @override
  void initState() {
    super.initState();
    _checkExistingModel();
  }

  @override
  void dispose() {
    // Release native model memory and the text controller.
    _model?.dispose();
    _promptController.dispose();
    super.dispose();
  }

  /// Directory where the model file is stored (app documents directory).
  Future<String> get _modelDir async {
    final dir = await getApplicationDocumentsDirectory();
    return dir.path;
  }

  /// Detects a previously downloaded model so the user can skip the download.
  Future<void> _checkExistingModel() async {
    final dir = await _modelDir;
    final file = File('$dir/$_modelFileName');
    if (!await file.exists()) return;
    final size = await file.length();
    // Guard: the widget may have been disposed during the awaits above.
    if (!mounted) return;
    setState(() {
      _modelPath = file.path;
      _statusText =
          'Model ready (${(size / 1024 / 1024).toStringAsFixed(0)} MB)';
    });
  }

  /// Streams the GGUF file from Hugging Face to disk, updating progress.
  ///
  /// Reuses a complete previous download; deletes and re-fetches a
  /// truncated one. Any failure (network, disk) is reported in the
  /// status line rather than thrown.
  Future<void> _downloadModel() async {
    setState(() {
      _isDownloading = true;
      _downloadProgress = 0;
      _statusText = 'Connecting...';
    });
    final client = http.Client();
    try {
      final dir = await _modelDir;
      final filePath = '$dir/$_modelFileName';
      final file = File(filePath);
      // Check if already downloaded.
      if (await file.exists()) {
        final size = await file.length();
        if (size > 600 * 1024 * 1024) {
          if (!mounted) return;
          setState(() {
            _modelPath = filePath;
            _statusText =
                'Model already downloaded (${(size / 1024 / 1024).toStringAsFixed(0)} MB)';
            _isDownloading = false;
          });
          return;
        }
        // Incomplete download: delete and retry.
        await file.delete();
      }
      // Stream download with progress.
      final request = http.Request('GET', Uri.parse(_modelUrl));
      final response = await client.send(request);
      if (response.statusCode != 200) {
        throw Exception('HTTP ${response.statusCode}');
      }
      // Fall back to the known size when Content-Length is absent or zero,
      // so the progress fraction never divides by zero.
      final contentLength = response.contentLength ?? 0;
      final totalBytes =
          contentLength > 0 ? contentLength : _modelSizeMB * 1024 * 1024;
      var receivedBytes = 0;
      final sink = file.openWrite();
      try {
        await for (final chunk in response.stream) {
          sink.add(chunk);
          receivedBytes += chunk.length;
          // Widget gone: stop updating; the finally blocks clean up.
          if (!mounted) return;
          setState(() {
            _downloadProgress = receivedBytes / totalBytes;
            final mb = (receivedBytes / 1024 / 1024).toStringAsFixed(0);
            _statusText = 'Downloading: $mb / $_modelSizeMB MB';
          });
        }
      } finally {
        // Flush and close even if the stream errored mid-transfer.
        await sink.close();
      }
      if (!mounted) return;
      setState(() {
        _modelPath = filePath;
        _statusText = 'Download complete! ($_modelSizeMB MB)';
      });
    } catch (e) {
      // Best-effort: surface any failure in the status line.
      if (mounted) setState(() => _statusText = 'Download failed: $e');
    } finally {
      client.close();
      if (mounted) setState(() => _isDownloading = false);
    }
  }

  /// Loads the downloaded model into memory via the PicoLM FFI bindings.
  Future<void> _loadModel() async {
    if (_modelPath == null) {
      _showSnackBar('Download the model first');
      return;
    }
    setState(() {
      _isLoading = true;
      _statusText = 'Loading model...';
    });
    try {
      // Dispose any prior instance before replacing it.
      _model?.dispose();
      _model = null;
      final loaded = await PicoLM.load(_modelPath!);
      if (!mounted) {
        // dispose() already ran; free the freshly loaded model ourselves.
        loaded.dispose();
        return;
      }
      _model = loaded;
      setState(() {
        _statusText =
            'Model loaded! Vocab: ${loaded.vocabSize}, '
            'Context: ${loaded.contextLength}';
      });
    } on PicoLMException catch (e) {
      if (mounted) setState(() => _statusText = 'Load error: ${e.message}');
    } finally {
      if (mounted) setState(() => _isLoading = false);
    }
  }

  /// Streams generated tokens for the current prompt into [_output].
  Future<void> _generate() async {
    final model = _model;
    if (model == null) {
      _showSnackBar('Load the model first');
      return;
    }
    setState(() {
      _output = '';
      _isGenerating = true;
    });
    try {
      // TinyLlama-Chat requires ChatML template formatting.
      final chatMlPrompt =
          '<|user|>\n${_promptController.text}</s>\n<|assistant|>\n';
      await for (final token in model.generate(
        chatMlPrompt,
        maxTokens: 200,
      )) {
        if (!mounted) return;
        setState(() => _output += token);
      }
    } on PicoLMException catch (e) {
      _showSnackBar('Error: ${e.message}');
    } finally {
      if (mounted) setState(() => _isGenerating = false);
    }
  }

  /// Shows a transient message; a no-op if the widget is already disposed.
  void _showSnackBar(String message) {
    if (!mounted) return;
    ScaffoldMessenger.of(
      context,
    ).showSnackBar(SnackBar(content: Text(message)));
  }

  @override
  Widget build(BuildContext context) {
    final theme = Theme.of(context);
    return Scaffold(
      appBar: AppBar(title: const Text('PicoLM Flutter'), centerTitle: true),
      body: Padding(
        padding: const EdgeInsets.all(16),
        child: Column(
          crossAxisAlignment: CrossAxisAlignment.stretch,
          children: [
            // ---- Model status card ----
            Card(
              child: Padding(
                padding: const EdgeInsets.all(16),
                child: Column(
                  crossAxisAlignment: CrossAxisAlignment.start,
                  children: [
                    Text(
                      'TinyLlama 1.1B (Q4_K_M)',
                      style: theme.textTheme.titleMedium,
                    ),
                    const SizedBox(height: 4),
                    Text(
                      _statusText,
                      style: theme.textTheme.bodySmall?.copyWith(
                        color: theme.colorScheme.onSurfaceVariant,
                      ),
                    ),
                    if (_isDownloading) ...[
                      const SizedBox(height: 8),
                      LinearProgressIndicator(value: _downloadProgress),
                    ],
                    const SizedBox(height: 12),
                    Row(
                      children: [
                        Expanded(
                          child: FilledButton.icon(
                            onPressed: _isDownloading || _modelPath != null
                                ? null
                                : _downloadModel,
                            icon: const Icon(Icons.download),
                            label: Text(
                              _modelPath != null
                                  ? 'Downloaded'
                                  : _isDownloading
                                      ? '${(_downloadProgress * 100).toInt()}%'
                                      : 'Download ($_modelSizeMB MB)',
                            ),
                          ),
                        ),
                        const SizedBox(width: 8),
                        Expanded(
                          child: FilledButton.tonalIcon(
                            // Enabled only once downloaded and not yet loaded.
                            onPressed: _isLoading ||
                                    _modelPath == null ||
                                    _model != null
                                ? null
                                : _loadModel,
                            icon: _isLoading
                                ? const SizedBox(
                                    height: 16,
                                    width: 16,
                                    child: CircularProgressIndicator(
                                      strokeWidth: 2,
                                    ),
                                  )
                                : const Icon(Icons.memory),
                            label: Text(
                              _model != null ? 'Loaded ✓' : 'Load Model',
                            ),
                          ),
                        ),
                      ],
                    ),
                  ],
                ),
              ),
            ),
            const SizedBox(height: 16),
            // ---- Prompt ----
            TextField(
              controller: _promptController,
              maxLines: 3,
              decoration: const InputDecoration(
                labelText: 'Prompt',
                border: OutlineInputBorder(),
              ),
            ),
            const SizedBox(height: 8),
            FilledButton.icon(
              onPressed: _isGenerating || _model == null ? null : _generate,
              icon: _isGenerating
                  ? const SizedBox(
                      height: 16,
                      width: 16,
                      child: CircularProgressIndicator(
                        strokeWidth: 2,
                        color: Colors.white,
                      ),
                    )
                  : const Icon(Icons.auto_awesome),
              label: Text(_isGenerating ? 'Generating...' : 'Generate'),
            ),
            const SizedBox(height: 16),
            // ---- Output ----
            Text('Output', style: theme.textTheme.titleSmall),
            const SizedBox(height: 4),
            Expanded(
              child: Container(
                padding: const EdgeInsets.all(12),
                decoration: BoxDecoration(
                  color: theme.colorScheme.surfaceContainerHighest,
                  borderRadius: BorderRadius.circular(12),
                ),
                child: SingleChildScrollView(
                  child: SelectableText(
                    _output.isEmpty ? '(output will appear here)' : _output,
                    style: TextStyle(
                      fontFamily: 'monospace',
                      fontSize: 14,
                      color: _output.isEmpty
                          ? theme.colorScheme.onSurfaceVariant
                          : theme.colorScheme.onSurface,
                    ),
                  ),
                ),
              ),
            ),
          ],
        ),
      ),
    );
  }
}