streamQueryBatched method
Runs sql in the worker using native batched streaming.
This path uses odbc_stream_start_batched + odbc_stream_fetch and yields
parsed chunks progressively as complete protocol messages arrive.
maxBufferBytes, when set, caps how many bytes may sit in the internal
framing buffer; exceeding the cap fails the stream with an error.
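
For example, a caller might consume the stream like this. The connection
setup, the worker variable, and the rows getter on ParsedRowBuffer are
illustrative assumptions for the sketch, not part of this documented API:

// Hypothetical usage; `worker`, `connect`, and `buffer.rows` are
// illustrative names only.
final connectionId = await worker.connect('DSN=sales;UID=app');
var totalRows = 0;
await for (final buffer in worker.streamQueryBatched(
  connectionId,
  'SELECT id, amount FROM orders',
  fetchSize: 500,
  maxBufferBytes: 8 * 1024 * 1024, // fail fast if framing backs up
)) {
  totalRows += buffer.rows.length;
}

Because the body is an async* generator, breaking out of the await-for loop
early cancels the subscription; the finally block then runs with completed
still false, so the native stream is cancelled and closed.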
Implementation
Stream<ParsedRowBuffer> streamQueryBatched(
  int connectionId,
  String sql, {
  int fetchSize = 1000,
  int chunkSize = 64 * 1024,
  int? maxBufferBytes,
}) async* {
  // Start a native batched stream; the worker returns 0 on failure.
  final streamId = await _streamStartBatched(
    connectionId,
    sql,
    fetchSize: fetchSize,
    chunkSize: chunkSize,
  );
  if (streamId == 0) {
    final workerError = await _safeGetWorkerError();
    throw AsyncError(
      code: AsyncErrorCode.queryFailed,
      message: workerError ?? 'Failed to start batched stream',
    );
  }
  // Buffers raw bytes until complete protocol frames can be drained.
  final pending = BinaryFrameAccumulator();
  final limit = maxBufferBytes;
  var completed = false;
  try {
    while (true) {
      final fetched = await _streamFetch(streamId);
      if (!fetched.success) {
        final workerError = fetched.error ?? await _safeGetWorkerError();
        throw AsyncError(
          code: AsyncErrorCode.queryFailed,
          message: workerError ?? 'Batched stream fetch failed',
        );
      }
      final data = fetched.data;
      if (data != null && data.isNotEmpty) {
        pending.add(data);
        // Enforce the optional cap on buffered, not-yet-framed bytes.
        if (limit != null && pending.length > limit) {
          throw const AsyncError(
            code: AsyncErrorCode.queryFailed,
            message: 'Streaming buffer exceeded maxBufferBytes',
          );
        }
        // Emit every complete protocol message accumulated so far.
        for (final msg in pending.drainFrames()) {
          yield BinaryProtocolParser.parse(msg);
        }
      }
      if (!fetched.hasMore) {
        break;
      }
    }
    // Leftover bytes mean the worker sent a truncated protocol message.
    if (pending.length > 0) {
      throw const FormatException(
        'Leftover bytes after stream; expected complete protocol messages',
      );
    }
    completed = true;
  } finally {
    // Cancel only when the loop did not finish cleanly, then always close.
    if (!completed) {
      await streamCancel(streamId);
    }
    await _streamClose(streamId);
  }
}
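
The framing behavior above depends on BinaryFrameAccumulator buffering bytes
across fetches and handing back only complete protocol frames. The actual
wire format is not shown in this source; a minimal sketch of such an
accumulator, assuming a hypothetical 4-byte little-endian length prefix per
frame, could look like:

import 'dart:typed_data';

// Illustrative sketch only: assumes each frame is a 4-byte little-endian
// length prefix followed by the payload. The real protocol may differ.
class BinaryFrameAccumulator {
  final BytesBuilder _buffer = BytesBuilder(copy: false);

  // Total bytes currently buffered, including any partial trailing frame.
  int get length => _buffer.length;

  void add(List<int> bytes) => _buffer.add(bytes);

  // Returns every complete frame buffered so far and keeps any trailing
  // partial frame for the next add()/drainFrames() cycle.
  List<Uint8List> drainFrames() {
    final bytes = _buffer.takeBytes(); // empties the builder
    final frames = <Uint8List>[];
    var offset = 0;
    while (bytes.length - offset >= 4) {
      final payloadLength = ByteData.sublistView(bytes, offset, offset + 4)
          .getUint32(0, Endian.little);
      if (bytes.length - offset - 4 < payloadLength) break; // incomplete
      frames.add(Uint8List.sublistView(
          bytes, offset + 4, offset + 4 + payloadLength));
      offset += 4 + payloadLength;
    }
    if (offset < bytes.length) {
      _buffer.add(Uint8List.sublistView(bytes, offset)); // keep remainder
    }
    return frames;
  }
}

Keeping the partial remainder in the accumulator is what makes the
leftover-bytes check after the loop meaningful: a non-zero length once
hasMore is false indicates a truncated message.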