processFragmentsBatch method

Future<List<FragmentDefinitionNode>> processFragmentsBatch(
  List<FragmentDefinitionNode> fragments,
  List<TransformingVisitor> transformers,
)

Processes fragments in batch with an optimized traversal.

Implementation

/// Processes [fragments] in batch, applying each visitor in [transformers].
///
/// Returns the transformed fragments. Unless an AppendTypename transformer
/// is present (which takes a specialized path), each fragment is wrapped in
/// a single-definition [DocumentNode] and routed through [processBatch],
/// then unwrapped from the processed documents.
///
/// Throws [BatchValidationError] when validation is enabled and a fragment
/// has an empty name (never recovered from). For other failures, recovery is
/// attempted via [_attemptFragmentRecovery]; if recovery also fails, the
/// original error is re-thrown as (or wrapped in) a [BatchProcessingError]
/// with its original stack trace preserved.
Future<List<FragmentDefinitionNode>> processFragmentsBatch(
  List<FragmentDefinitionNode> fragments,
  List<TransformingVisitor> transformers,
) async {
  final processingStart = DateTime.now();

  try {
    if (fragments.isEmpty) return [];

    // Count fragments even if processing later fails.
    _metrics.totalFragments += fragments.length;

    _logDebug(
      'Starting fragment batch processing',
      context: {
        'fragmentCount': fragments.length,
        'transformerCount': transformers.length,
      },
    );

    // Fail fast on structurally invalid input before doing any work.
    if (enableValidation) {
      for (var i = 0; i < fragments.length; i++) {
        if (fragments[i].name.value.isEmpty) {
          throw BatchValidationError(
            'Fragment at index $i has empty name',
            'fragment_validation',
            validationDetails: {'fragmentIndex': i},
          );
        }
      }
    }

    // AppendTypename requires fragment-aware handling; use the dedicated path.
    if (_hasAppendTypenameTransformer(transformers)) {
      return await _processFragmentsBatchWithAppendTypename(
        fragments,
        transformers,
      );
    }

    // Wrap each fragment in its own document so the generic batch pipeline
    // can process them uniformly.
    final fragmentDocs = fragments
        .map((fragment) => DocumentNode(definitions: [fragment]))
        .toList();

    final processedDocs = await processBatch(fragmentDocs, transformers);

    // Unwrap the fragments back out of the processed documents.
    final results = processedDocs
        .expand((doc) => doc.definitions.whereType<FragmentDefinitionNode>())
        .toList();

    _logDebug(
      'Fragment batch processing completed successfully',
      context: {
        'processedFragments': results.length,
        'processingTimeMs': DateTime.now()
            .difference(processingStart)
            .inMilliseconds,
      },
    );

    return results;
  } catch (error, stackTrace) {
    _logError(
      'Fragment batch processing failed',
      error,
      context: {
        'fragmentCount': fragments.length,
        'transformerCount': transformers.length,
      },
    );

    // Re-throw validation errors immediately - they should not be recovered from
    if (error is BatchValidationError) {
      rethrow;
    }

    // Attempt recovery for other types of errors
    try {
      return await _attemptFragmentRecovery(fragments, transformers, error);
    } on Exception catch (recoveryError) {
      _logError('Fragment recovery failed', recoveryError);

      // Surface the ORIGINAL error (not the recovery error), preserving its
      // stack trace instead of replacing it with this throw site's trace.
      if (error is BatchProcessingError) {
        Error.throwWithStackTrace(error, stackTrace);
      } else {
        Error.throwWithStackTrace(
          BatchProcessingError(
            'Fragment batch processing failed and recovery unsuccessful',
            batchSize: fragments.length,
            originalError: error,
          ),
          stackTrace,
        );
      }
    }
  } finally {
    // Account wall-clock time for this batch even on early return or error.
    _metrics.totalProcessingTime += DateTime.now().difference(
      processingStart,
    );
  }
}