listModels method
Lists available models.
Results are cached after the first call. The cache is cleared on stop
or forceStop. Concurrent calls share the same fetch (lock-based).
Set forceRefresh to true to clear the cache and fetch again; note that if a fetch is already in flight, its result is returned rather than a new request being issued.
Implementation
/// Lists the models available on the connected server.
///
/// Results are cached in `_modelsCache` after the first successful call.
/// Concurrent callers share a single in-flight fetch via `_modelsCacheLock`:
/// later callers await the lock's future and then read the cache instead of
/// issuing their own request.
///
/// Set [forceRefresh] to true to clear the cache before fetching. If a fetch
/// is already in flight, that fetch's result is returned rather than a new
/// request being issued.
///
/// Returns a defensive copy of the cached list, so callers may mutate the
/// returned list without corrupting the cache.
///
/// Errors from the RPC propagate to the caller; in that case the cache stays
/// empty and the next caller retries the fetch.
Future<List<ModelInfo>> listModels({bool forceRefresh = false}) async {
  // NOTE(review): presumably throws if not connected — confirm contract.
  _ensureConnected();
  // Clearing before (not after) waiting means a forceRefresh call that
  // arrives during an in-flight fetch accepts that fetch's result instead
  // of starting another request once it finishes.
  if (forceRefresh) {
    _modelsCache = null;
  }
  // Wait for any in-flight fetch to complete. The loop re-checks the lock
  // after each await because a woken waiter below may itself start a new
  // fetch (e.g. when the previous one failed and left the cache empty).
  while (_modelsCacheLock != null) {
    await _modelsCacheLock!.future;
  }
  // Return cached copy if available
  if (_modelsCache != null) {
    return List.of(_modelsCache!);
  }
  // Acquire lock and fetch. This section is synchronous up to the
  // sendRequest await, so in Dart's single-threaded event loop no other
  // caller can interleave between the null check in the while-loop above
  // and the assignment here.
  final lock = Completer<void>();
  _modelsCacheLock = lock;
  try {
    // NOTE(review): if the connection can drop while awaiting the lock
    // above, `_connection!` here could throw — verify that the guarantee
    // from _ensureConnected holds across awaits.
    final result = await _connection!.sendRequest(
      'models.list',
      <String, dynamic>{},
      const Duration(seconds: 10),
    );
    final map = result as Map<String, dynamic>;
    final models = (map['models'] as List<dynamic>)
        .map((m) => ModelInfo.fromJson(m as Map<String, dynamic>))
        .toList();
    _modelsCache = models;
    // Hand out a copy so caller mutations cannot corrupt the cache.
    return List.of(models);
  } finally {
    // Release on success or failure. On failure the cache stays null, so
    // exactly one of the woken waiters will retry the fetch. The lock is
    // cleared before completing so resumed waiters see it as released.
    _modelsCacheLock = null;
    lock.complete();
  }
}