writeBytes method
Writes a CLDF archive to an in-memory ZIP and returns the encoded bytes.
Implementation
/// Writes [archive] to an in-memory CLDF ZIP and returns the encoded bytes.
///
/// The archive always contains `manifest.json`, `locations.json`, and
/// `checksums.json`; the remaining JSON files and embedded media files are
/// added only when present on [archive]. SHA-256 checksums are collected for
/// every entry except `checksums.json` itself.
///
/// Rethrows any error after logging it at severe level.
Future<List<int>> writeBytes(CLDFArchive archive) async {
  _logger.info('Starting CLDF export to bytes');
  _logger.fine(
    'Archive info: version=${archive.manifest.version}, '
    'format=${archive.manifest.format}, platform=${archive.manifest.platform}',
  );

  final zipArchive = Archive();
  // path -> hex SHA-256 digest; populated by _addJsonFile and the media loop.
  final checksums = <String, String>{};

  try {
    // Calculate and set stats if not already present. The computed stats are
    // injected into the serialized manifest map, not the manifest object.
    final manifestData = archive.manifest.toJson();
    if (archive.manifest.stats == null) {
      _logger.fine('Calculating archive statistics');
      manifestData['stats'] = _calculateStats(archive).toJson();
    }

    // Add manifest
    _logger.fine('Adding manifest.json');
    await _addJsonFile(zipArchive, 'manifest.json', manifestData, checksums);

    // Add locations (required)
    _logger.fine(
      'Adding locations.json with ${archive.locations.length} locations',
    );
    await _addJsonFile(zipArchive, 'locations.json', {
      'locations': archive.locations.map((l) => l.toJson()).toList(),
    }, checksums);

    // Add optional files
    if (archive.hasSectors) {
      _logger.fine(
        'Adding sectors.json with ${archive.sectors!.length} sectors',
      );
      await _addJsonFile(zipArchive, 'sectors.json', {
        'sectors': archive.sectors!.map((s) => s.toJson()).toList(),
      }, checksums);
    }

    if (archive.hasRoutes) {
      _logger.fine(
        'Adding routes.json with ${archive.routes!.length} routes',
      );
      await _addJsonFile(zipArchive, 'routes.json', {
        'routes': archive.routes!.map((r) => r.toJson()).toList(),
      }, checksums);
    }

    if (archive.hasClimbs) {
      _logger.fine(
        'Adding climbs.json with ${archive.climbs!.length} climbs',
      );
      await _addJsonFile(zipArchive, 'climbs.json', {
        'climbs': archive.climbs!.map((c) => c.toJson()).toList(),
      }, checksums);
    }

    if (archive.hasSessions) {
      _logger.fine(
        'Adding sessions.json with ${archive.sessions!.length} sessions',
      );
      await _addJsonFile(zipArchive, 'sessions.json', {
        'sessions': archive.sessions!.map((s) => s.toJson()).toList(),
      }, checksums);
    }

    if (archive.hasTags) {
      _logger.fine('Adding tags.json with ${archive.tags!.length} tags');
      await _addJsonFile(zipArchive, 'tags.json', {
        'tags': archive.tags!.map((t) => t.toJson()).toList(),
      }, checksums);
    }

    if (archive.hasMedia) {
      _logger.fine(
        'Adding media_metadata.json with ${archive.mediaItems!.length} media items',
      );
      await _addJsonFile(zipArchive, 'media_metadata.json', {
        'media': archive.mediaItems!.map((m) => m.toJson()).toList(),
      }, checksums);
    }

    // Add embedded media files
    if (archive.hasEmbeddedMedia) {
      _logger.fine(
        'Adding ${archive.mediaFiles!.length} embedded media files',
      );
      for (final entry in archive.mediaFiles!.entries) {
        final path = entry.key;
        final bytes = entry.value;
        _logger.finer('Adding media file: $path (${bytes.length} bytes)');

        // Calculate checksum
        final digest = sha256.convert(bytes);
        checksums[path] = digest.toString();

        // Add to archive
        zipArchive.addFile(ArchiveFile(path, bytes.length, bytes));
      }
    }

    // Add checksums file (intentionally not checksummed itself).
    _logger.fine('Adding checksums.json');
    final checksumsData = Checksums(algorithm: 'SHA-256', files: checksums);
    final checksumsJson = json.encode(checksumsData.toJson());
    // Fix: the entry size must be the UTF-8 *byte* length, not the string's
    // code-unit length — they diverge when the JSON contains non-ASCII
    // characters (e.g. accented media file paths), which previously yielded
    // a malformed ZIP entry.
    final checksumsBytes = utf8.encode(checksumsJson);
    zipArchive.addFile(
      ArchiveFile('checksums.json', checksumsBytes.length, checksumsBytes),
    );

    // Encode to zip
    _logger.fine('Encoding ZIP archive');
    final encodedBytes = ZipEncoder().encode(zipArchive);

    _logger.info('Export completed successfully. Summary:');
    _logger.info('  - Archive size: ${encodedBytes.length} bytes');
    _logger.info('  - Locations: ${archive.locations.length}');
    _logger.info('  - Sectors: ${archive.sectors?.length ?? 0}');
    _logger.info('  - Routes: ${archive.routes?.length ?? 0}');
    _logger.info('  - Sessions: ${archive.sessions?.length ?? 0}');
    _logger.info('  - Climbs: ${archive.climbs?.length ?? 0}');
    _logger.info('  - Tags: ${archive.tags?.length ?? 0}');
    _logger.info('  - Media items: ${archive.mediaItems?.length ?? 0}');
    _logger.info('  - Media files: ${archive.mediaFiles?.length ?? 0}');

    return encodedBytes;
  } catch (e, stackTrace) {
    _logger.severe('Export failed with error', e, stackTrace);
    rethrow;
  }
}