processBatch method

Future<List<DocumentNode>> processBatch(
  List<DocumentNode> documents,
  List<TransformingVisitor> transformers, {
  bool enableLazyEvaluation = true,
})

Process multiple documents in a single batch operation. This reduces the overhead of individual AST traversals.

Implementation

/// Processes multiple [documents] in a single batch operation.
///
/// Applying every transformer in [transformers] across the whole batch
/// reduces the overhead of individual AST traversals. Results are cached
/// per batch key, so an identical (documents, transformers) batch is
/// served from `_batchCache` without reprocessing.
///
/// When [enableLazyEvaluation] is `true` and the transformer set is
/// deferrable, documents are queued for lazy evaluation instead of being
/// processed immediately. Batches containing an AppendTypename transformer
/// always take a specialized processing path to guarantee correct
/// sequencing with other transformers.
///
/// Throws [BatchValidationError] for invalid input (never recovered from),
/// and [BatchProcessingError] when processing fails and recovery is also
/// unsuccessful.
Future<List<DocumentNode>> processBatch(
  List<DocumentNode> documents,
  List<TransformingVisitor> transformers, {
  bool enableLazyEvaluation = true,
}) async {
  // Stopwatch is monotonic, unlike DateTime.now(): elapsed measurements
  // cannot be skewed (or go negative) when the system clock is adjusted
  // by NTP, DST, or the user mid-batch.
  final stopwatch = Stopwatch()..start();

  try {
    _logDebug(
      'Starting processBatch',
      context: {
        'documentCount': documents.length,
        'transformerCount': transformers.length,
      },
    );

    // Validate input parameters first, before early returns, so invalid
    // calls fail loudly even for empty batches.
    _validateBatchInput(documents, transformers);

    if (documents.isEmpty) return [];

    // Update aggregate metrics for this batch.
    _metrics.totalBatches++;
    _metrics.totalDocuments += documents.length;

    // Track how often each transformer type is used, keyed by type name.
    for (final transformer in transformers) {
      final transformerName = transformer.runtimeType.toString();
      _metrics.transformerUsage[transformerName] =
          (_metrics.transformerUsage[transformerName] ?? 0) + 1;
    }

    // Generate batch key for caching.
    final batchKey = _generateBatchKey(documents, transformers);

    // Serve a previously-processed identical batch straight from cache.
    if (_batchCache.containsKey(batchKey)) {
      _metrics.cacheHits++;
      _logDebug(
        'Cache hit for batch',
        context: {
          'batchKey': batchKey,
          'documentCount': documents.length,
        },
      );
      return _batchCache[batchKey]!;
    }

    _metrics.cacheMisses++;
    _logDebug(
      'Cache miss for batch, processing...',
      context: {
        'batchKey': batchKey,
        'documentCount': documents.length,
        'transformerCount': transformers.length,
      },
    );

    List<DocumentNode> results;

    // Detect AppendTypename transformers in the transformer list.
    final hasAppendTypename = _hasAppendTypenameTransformer(transformers);

    if (hasAppendTypename) {
      // Use specialized processing path when AppendTypename is present to
      // ensure proper sequencing when it is combined with other transformers.
      results = await _processBatchWithAppendTypename(
        documents,
        transformers,
      );
    } else if (enableLazyEvaluation &&
        _shouldDeferTransformation(transformers)) {
      // Queue for lazy evaluation.
      results = _queueForLazyEvaluation(documents, transformers);
    } else {
      // Process immediately with batched optimization.
      results = await _processBatchImmediate(documents, transformers);
    }

    // Cache the results so an identical batch is not reprocessed.
    _batchCache[batchKey] = results;

    _logDebug(
      'Batch processing completed successfully',
      context: {
        'processedDocuments': results.length,
        'processingTimeMs': stopwatch.elapsedMilliseconds,
      },
    );

    return results;
  } catch (error) {
    _logError(
      'Batch processing failed',
      error,
      context: {
        'documentCount': documents.length,
        'transformerCount': transformers.length,
      },
    );

    // Re-throw validation errors immediately - they should not be
    // recovered from.
    if (error is BatchValidationError) {
      rethrow;
    }

    // Attempt recovery for other types of errors.
    try {
      return await _attemptBatchRecovery(documents, transformers, error);
    } on Exception catch (recoveryError) {
      _logError('Batch recovery failed', recoveryError);

      // Re-throw the ORIGINAL error (not the recovery error) with
      // additional context. `rethrow` is not usable here because it would
      // rethrow `recoveryError` instead.
      if (error is BatchProcessingError) {
        throw error;
      } else {
        throw BatchProcessingError(
          'Batch processing failed and recovery unsuccessful',
          batchSize: documents.length,
          originalError: error,
        );
      }
    }
  } finally {
    // Runs on every exit path (success, cache hit, rethrow, recovery) so
    // total processing time always includes this batch.
    _metrics.totalProcessingTime += stopwatch.elapsed;
  }
}