noc_llm_dart 1.0.0
Add to pubspec.yaml: noc_llm_dart: ^1.0.0
Lightweight, asynchronous Dart & Flutter library to interact with LLMs from Cloud (OpenAI, Gemini, Groq, Sumopod) and Local APIs (LM Studio, Ollama). Features SSE streaming and auto-provider detection.
example/noc_llm_dart_example.dart
// ignore_for_file: avoid_print
/// NocLLM Dart - Main Example
///
/// This is the default example shown on pub.dev.
/// It demonstrates the simplest possible usage of NocLLM.
import 'dart:io';
import 'package:noc_llm_dart/noc_llm_dart.dart';
void main() async {
  // Initialize with any OpenAI-compatible API.
  final ai = NocAI(
    apiKey: 'YOUR_API_KEY',
    baseUrl: 'https://api.openai.com/v1',
    model: 'gpt-3.5-turbo',
  );

  // Network calls can throw; release the client in `finally` so the
  // underlying HTTP connection is not leaked on error.
  try {
    // --- Streaming (real-time token display) ---
    print('Streaming:');
    await for (final chunk in ai.stream('Tell me a short fable')) {
      stdout.write(chunk);
    }
    print('\n');

    // --- Non-Streaming (full response) ---
    print('Chat:');
    final response = await ai.chat('Tell me a joke');
    print(response);
  } finally {
    ai.dispose();
  }

  // --- Gemini Native (auto-detected from URL!) ---
  // No requests are made here; we only inspect the resolved config.
  final gemini = NocAI(
    apiKey: 'YOUR_GEMINI_KEY',
    baseUrl: 'https://generativelanguage.googleapis.com',
    model: 'gemini-1.5-flash',
  );
  try {
    print('\nGemini detected: ${gemini.config.isGeminiNative}'); // true!
  } finally {
    gemini.dispose();
  }

  // --- Local LM Studio (zero SSL overhead) ---
  final local = NocAI(
    apiKey: '', // No key needed!
    baseUrl: 'http://localhost:1234/v1',
    model: 'local-model',
  );
  try {
    print('Local HTTPS: ${local.config.isHTTPS}'); // false!
  } finally {
    local.dispose();
  }
}