adk_flutter 1.0.0 copy "adk_flutter: ^1.0.0" to clipboard
adk_flutter: ^1.0.0 copied to clipboard

ADKflutter — the official Flutter package by 20Centra for connecting to the Gemini Live API server, with WebSocket support for multimodal AI interactions.

example/lib/main.dart

import 'dart:async';

import 'dart:io';
import 'dart:math';
import 'package:flutter/material.dart' hide ConnectionState;
import 'package:flutter/services.dart';
import 'package:siri_wave/siri_wave.dart';

import 'package:flutter_keyboard_visibility/flutter_keyboard_visibility.dart';

// Import the Gemini Live client
import 'package:adk_flutter/adk_flutter.dart';

// Import extracted models

import 'models/app_state.dart';
import 'models/chat_message.dart';

// Import extracted services

import 'services/network/network_quality_manager.dart';
import 'services/audio/audio_level_manager.dart';
import 'services/platform/native_peripheral_manager.dart';
import 'services/logic/connection_service.dart';
import 'services/logic/audio_recording_service.dart';
import 'services/logic/camera_service.dart';
import 'services/audio/audio_player_service.dart';

// Import extracted widgets
import 'widgets/camera/camera_preview_widget.dart';

// Import BLoC state management
import 'package:flutter_bloc/flutter_bloc.dart';
import 'blocs/connection/connection_bloc.dart';
import 'blocs/audio/audio_bloc.dart';
import 'blocs/camera/camera_bloc.dart';
import 'blocs/chat/chat_bloc.dart';
// Import BLoC events (needed for .add() calls)
import 'blocs/chat/chat_event.dart';

import 'blocs/audio/audio_event.dart'; // ✅ AudioBloc events
import 'blocs/audio/audio_state.dart'; // ✅ AudioBloc states
import 'blocs/camera/camera_event.dart'; // ✅ CameraBloc events
import 'blocs/camera/camera_state.dart'; // ✅ CameraBloc states
import 'blocs/connection/connection_event.dart'; // ✅ ConnectionBloc events
import 'blocs/connection/connection_state.dart'; // ✅ ConnectionBloc states

// Audio buffering simplified - ai-backend-services handles audio streaming directly

// Conversation memory removed - ai-backend-services handles session state internally

// NativeAudioStreamPlayer moved to services/audio/audio_player_service.dart

/// ==========================================
/// APPLICATION ENTRY POINT
/// ==========================================

void main() {
  runApp(const GeminiLiveExampleApp());
}

/// ==========================================
/// ROOT APPLICATION WIDGET
/// ==========================================

class GeminiLiveExampleApp extends StatelessWidget {
  const GeminiLiveExampleApp({super.key});

  @override
  Widget build(BuildContext context) {
    // Dark theme seeded from blue, shared across the whole app.
    final theme = ThemeData.dark().copyWith(
      colorScheme: ColorScheme.fromSeed(
        seedColor: Colors.blue,
        brightness: Brightness.dark,
      ),
    );

    // All BLoCs are created eagerly (lazy: false) so they exist before the
    // chat screen first reads them from the context.
    return MultiBlocProvider(
      providers: [
        BlocProvider<ConnectionBloc>(
          lazy: false,
          create: (_) => ConnectionBloc(
            client: AdkFlutterClient(
              serverUrl: 'wss://aiapi.20c.org',
              apiKey: const String.fromEnvironment('API_KEY'),
            ),
            networkQualityManager:
                NetworkQualityManager('https://aiapi.20c.org/health'),
          ),
        ),
        BlocProvider<AudioBloc>(
          lazy: false,
          create: (_) => AudioBloc(audioLevelManager: AudioLevelManager()),
        ),
        BlocProvider<CameraBloc>(
          lazy: false,
          create: (_) =>
              CameraBloc(peripheralManager: NativePeripheralManager()),
        ),
        BlocProvider<ChatBloc>(
          lazy: false,
          create: (_) => ChatBloc(),
        ),
      ],
      child: MaterialApp(
        title: 'Gemini Live Example',
        theme: theme,
        home: const ChatScreen(),
      ),
    );
  }
}

/// ==========================================
/// CHAT SCREEN WIDGET
/// ==========================================

/// Hosts the main chat UI; all state lives in [_ChatScreenState].
class ChatScreen extends StatefulWidget {
  const ChatScreen({super.key});

  @override
  State<ChatScreen> createState() {
    return _ChatScreenState();
  }
}

/// ==========================================
/// CHAT SCREEN STATE MANAGEMENT
/// ==========================================

class _ChatScreenState extends State<ChatScreen> with TickerProviderStateMixin {
  // ==========================================
  // SECTION: STATE VARIABLES
  // ==========================================

  // Core state: high-level app lifecycle (disconnected/connecting/recording/...).
  AppState _currentState = AppState.disconnected;

  // State synchronization to prevent race conditions.
  // NOTE(review): _stateLock is only an identity object here; the actual
  // guard appears to be the _isStateChanging flag — confirm in
  // _changeStateSafely (defined outside this visible chunk).
  final _stateLock = Object();
  bool _isStateChanging = false;

  // ==========================================
  // CORE COMPONENTS
  // ==========================================

  // Initialized in _initializeClient() / initState(); reading them earlier
  // would throw a LateInitializationError.
  late final AdkFlutterClient _client;
  late final NativePeripheralManager _nativePeripheralManager;
  late final AudioPlayerService _audioPlayerService;

  // ==========================================
  // UNIFIED AUDIO LEVEL SYSTEM
  // ==========================================

  // Single source of truth for audio levels driving the Jarvis / Siri-wave
  // animations (see _updateJarvisScale and _updateSiriWaveAmplitude).
  final AudioLevelManager _audioLevelManager = AudioLevelManager();

  // ==========================================
  // AUDIO BUFFERING SYSTEM - FIX SMALL CHUNKS
  // ==========================================

  // ==========================================
  // MULTIMODAL IMAGE PREVIEW SYSTEM (OPTIMIZED)
  // ==========================================

  // Most recent camera frame; cleared by _forceMemoryCleanup when the
  // preview is inactive.
  Uint8List? _lastCapturedImage;

  bool _isImagePreviewActive = false;
  Timer? _imagePreviewThrottleTimer; // OPTIMIZATION: Throttle image sending
  int? _cameraTextureId; // Texture ID for high-performance preview

  // ==========================================
  // KEYBOARD VISIBILITY & CAMERA MANAGEMENT
  // ==========================================

  late final StreamSubscription<bool> _keyboardSubscription;
  bool _isKeyboardVisible = false;

  // ==========================================
  // SIRI WAVE CONTROLLER - SINGLE INSTANCE (FIXED: Use iOS7 style)
  // ==========================================

  // NOTE(review): never reassigned in the visible code — could likely be
  // `final`; confirm nothing outside this chunk replaces it.
  IOS7SiriWaveformController _siriWaveController = IOS7SiriWaveformController(
    amplitude: 0.5,
    speed: 0.15,
  );

  // ==========================================
  // CONSOLIDATED STATE MANAGEMENT
  // ==========================================

  // AppState _currentState = AppState.disconnected; // MOVED TO TOP
  StatusPriority _currentStatusPriority = StatusPriority.idle;
  String _statusText = 'Tap Jarvis to start';
  String _transcriptText = '';
  DateTime _lastAIAudioReceivedTime =
      DateTime.now(); // Track for animation sync (see _updateJarvisScale)
  late TextEditingController _textController;

  // ==========================================
  // NETWORK QUALITY MONITORING
  // ==========================================

  NetworkQualityManager? _networkQualityManager; // Nullable - initialized async
  late final ConnectionService _connectionService; // PHASE 3: Logic extraction
  late final AudioRecordingService
      _audioRecordingService; // PHASE 3: Logic extraction
  late final CameraService _cameraService; // PHASE 3: Logic extraction
  // bool _showCamera = false; // REMOVED: Duplicate state, use _isImagePreviewActive

  // ==========================================
  // UI STATE & DATA
  // ==========================================

  // PHASE 4.1: ROLLBACK - Re-enabled for UI rendering

  final List<WebSocketLog> _websocketLogs = [];
  bool _showLogs = false;

  // ==========================================
  // AUDIO & ANIMATION
  // ==========================================

  double _jarvisScale = 1.0;
  AnimationController? _animationController;
  int _animationFrameCounter =
      0; // OPTIMIZATION: Frame counter for selective UI updates
  StreamSubscription<Uint8List>? _audioStreamSubscription;
  StreamSubscription<Uint8List>?
      _cameraStreamSubscription; // Track camera stream subscription

  // ==========================================
  // PHASE 3A: Fix Stream Subscription Accumulation
  // ==========================================

  // Central registry so every listener can be cancelled on dispose.
  final List<StreamSubscription> _activeSubscriptions = [];

  // ==========================================
  // PHASE 6: Fix Tap-to-Stop Recording Race Conditions
  // ==========================================

  bool _isTapProcessing = false; // Prevent rapid successive taps

  // ==========================================
  // SECTION: LIFECYCLE METHODS
  // ==========================================

  @override
  void initState() {
    super.initState();

    // Synchronous construction of core collaborators.
    _nativePeripheralManager = NativePeripheralManager();
    _textController = TextEditingController();

    // NOTE: _networkQualityManager and _connectionService are created later,
    // inside _initializeClient(), once the WebSocket client exists.

    // Fire-and-forget setup; these currently only emit log entries.
    _initializeAudioSession();
    _initializeFlutterSound();

    // All remaining initialization waits for the microphone permission.
    _requestPermissions();

    // Track keyboard visibility so the layout can adapt.
    _keyboardSubscription = KeyboardVisibilityController()
        .onChange
        .listen(_onKeyboardVisibilityChanged);
  }

  /// (Re)starts the vsync-driven animation loop that advances audio levels,
  /// scales the Jarvis orb, updates the Siri wave, and throttles UI rebuilds.
  void _startAnimationSystem() {
    // PHASE 3B: replace any previous controller so only one ticker is live.
    _animationController?.dispose();

    // Long cycle keeps the repeating animation cheap; the listener still
    // fires every frame while the controller is running.
    final controller = AnimationController(
      vsync: this,
      duration: const Duration(milliseconds: 5000),
    );
    _animationController = controller;

    controller.addListener(() {
      // Keep the audio-level queue in sync with playback.
      _audioLevelManager.tick();

      // Refresh animation-driven values.
      _updateJarvisScale();
      _updateSiriWaveAmplitude();

      _animationFrameCounter++;

      // Rebuild every frame while actively recording/speaking; otherwise only
      // every 20th frame so idle network stats (ping display) still refresh.
      final shouldUpdate =
          _needsUIUpdate() || _animationFrameCounter % 20 == 0;

      if (mounted && shouldUpdate) {
        setState(() {}); // Rebuilds UI for animation
      }
    });

    controller.repeat();
  }

  /// Whether the current app state needs per-frame UI rebuilds.
  bool _needsUIUpdate() {
    // Only the actively-animated states rebuild every frame; everything else
    // (including 'processing') is throttled to reduce memory pressure.
    switch (_currentState) {
      case AppState.recording:
      case AppState.ai_speaking:
        return true;
      default:
        return false;
    }
  }

  /// Logs that combined WebSocket + HTTP-ping quality monitoring is active.
  void _startConnectionMonitoring() {
    // Nothing to announce until the manager has been created.
    if (_networkQualityManager == null) {
      return;
    }
    _addWebSocketLog(
        'Network quality monitoring started (WebSocket + HTTP ping)',
        LogType.info);
  }

  /// Audio-session configuration is delegated to native Platform Channels;
  /// the Dart side only records that fact.
  Future<void> _initializeAudioSession() async => _addWebSocketLog(
      'Audio session handled by native Platform Channels', LogType.info);

  /// Native peripheral access needs no Dart-side setup; just log it.
  Future<void> _initializeFlutterSound() async =>
      _addWebSocketLog('Native peripheral access initialized', LogType.info);

  /// Resolves the microphone permission, then kicks off full initialization.
  ///
  /// If the permission is already granted, initialization runs immediately.
  /// Otherwise a rationale dialog is shown, the native result listener is
  /// registered, and the system prompt is requested; the listener completes
  /// initialization when the user grants access.
  Future<void> _requestPermissions() async {
    // Check silently first so the system dialog is not shown needlessly.
    final micGranted = await _isMicrophonePermissionGranted();
    if (micGranted) {
      _addWebSocketLog(
          'Microphone permission already granted - initializing WebSocket',
          LogType.success);
      await _proceedWithInitialization();
    } else {
      // PHASE 3B: explain why we need the permission before requesting it.
      await _showPermissionRationaleDialog();
      // Register the native callback before triggering the prompt.
      _setupPermissionListener();
      // FIX: log BEFORE the request — previously this was logged only after
      // _requestMicrophonePermission() had already completed, which made the
      // log timeline misleading.
      _addWebSocketLog('Requesting microphone permission...', LogType.info);
      await _requestMicrophonePermission();
    }
  }

  /// PHASE 3B: explains the microphone permission before the system prompt.
  Future<void> _showPermissionRationaleDialog() {
    return showDialog(
      context: context,
      builder: (dialogContext) => AlertDialog(
        title: const Text('Microphone Permission Required'),
        content: const Text(
            'This app needs microphone access to record your voice for real-time conversation with AI. '
            'Your voice data is processed locally and sent securely to our servers.'),
        actions: [
          TextButton(
            onPressed: () => Navigator.of(dialogContext).pop(),
            child: const Text('Continue'),
          ),
        ],
      ),
    );
  }

  /// Registers the native callback that reports the permission prompt result.
  void _setupPermissionListener() {
    // PHASE 3A: a single handler per channel avoids method-channel conflicts.
    const channel = MethodChannel('com.example.gemini_live/audio_recording');
    channel.setMethodCallHandler((call) async {
      if (call.method != 'onMicrophonePermissionResult') return;

      final granted = call.arguments['granted'] as bool?;
      if (granted == true) {
        _addWebSocketLog(
            'Microphone permission granted - initializing WebSocket',
            LogType.success);
        await _proceedWithInitialization();
      } else {
        _addWebSocketLog(
            'Microphone permission denied - WebSocket will not initialize',
            LogType.warning);
        await _changeStateSafely(AppState.error);
      }
    });
  }

  /// Returns true when the microphone permission is already granted.
  ///
  /// iOS is assumed granted (handled natively); Android asks the platform.
  /// Any platform-channel failure is logged and treated as "not granted".
  Future<bool> _isMicrophonePermissionGranted() async {
    try {
      if (!Platform.isAndroid) return true; // iOS handles automatically
      const channel =
          MethodChannel('com.example.gemini_live/audio_recording');
      final result =
          await channel.invokeMethod('isMicrophonePermissionGranted');
      return result == true;
    } catch (e) {
      _addWebSocketLog('Microphone permission check failed: $e', LogType.error);
      return false;
    }
  }

  /// Runs the full startup sequence once microphone permission is granted.
  Future<void> _proceedWithInitialization() async {
    await _initializeClient(); // WebSocket client + services
    _startAnimationSystem(); // Animation system
    _startConnectionMonitoring(); // Safe connection monitoring (no ping/pong)
    // Camera is on-demand only (user must tap camera button to enable).
    // FIX: await the auto-connect instead of dropping the Future, so this
    // method's Future completes only when startup has actually finished.
    // (_autoConnectOnStart handles its own errors internally, so awaiting
    // cannot introduce an unhandled exception here.)
    await _autoConnectOnStart();
  }

  /// Connects on launch and, if successful, sends an opening greeting.
  Future<void> _autoConnectOnStart() async {
    try {
      await _connectToServer();
      // NOTE(review): fixed 500 ms grace period to let the connection settle —
      // fragile; consider awaiting an explicit connected event instead.
      await Future.delayed(const Duration(milliseconds: 500));
      if (_currentState != AppState.connected_idle) return;

      // Kick off the conversation with an initial greeting.
      await _sendTextMessage('halo, kamu siapa ya');
      // PHASE 3A: synchronized state change avoids race conditions.
      await _changeStateSafely(AppState.processing);
    } catch (e) {
      _addWebSocketLog('Auto-connect failed: $e', LogType.error);
      await _changeStateSafely(AppState.error);
    }
  }

  /// Shows the system microphone permission prompt (Android only).
  ///
  /// Returns true when granted; failures are logged and reported as false.
  Future<bool> _requestMicrophonePermission() async {
    try {
      if (!Platform.isAndroid) return true; // iOS handles automatically
      const channel =
          MethodChannel('com.example.gemini_live/audio_recording');
      final result = await channel.invokeMethod('requestMicrophonePermission');
      return result == true;
    } catch (e) {
      _addWebSocketLog(
          'Microphone permission request failed: $e', LogType.error);
      return false;
    }
  }

  /// Shows the system camera permission prompt (Android only).
  ///
  /// Returns true when granted; failures are logged and reported as false.
  Future<bool> _requestCameraPermission() async {
    try {
      if (!Platform.isAndroid) return true; // iOS handles automatically
      const channel = MethodChannel('com.example.gemini_live/camera');
      final result = await channel.invokeMethod('requestCameraPermission');
      return result == true;
    } catch (e) {
      _addWebSocketLog('Camera permission request failed: $e', LogType.error);
      return false;
    }
  }

  /// Toggles the camera via [CameraService] after resolving its permission.
  Future<void> _toggleCamera() async {
    final hasPermission = await _requestCameraPermission();
    await _cameraService.toggleCamera(hasPermission);
    // PHASE 4.3: mirror the toggle into CameraBloc; guard against the widget
    // having been disposed while we awaited.
    if (!mounted) return;
    context.read<CameraBloc>().add(const CameraToggled());
  }

  /// PHASE 3B: explains the camera permission before the system prompt.
  Future<void> _showCameraPermissionRationaleDialog() {
    return showDialog(
      context: context,
      builder: (dialogContext) => AlertDialog(
        title: const Text('Camera Permission Required'),
        content: const Text(
            'Camera access is needed to show your video feed during conversations. '
            'This helps create a more interactive experience. Your camera data stays private and is not stored.'),
        actions: [
          TextButton(
            onPressed: () => Navigator.of(dialogContext).pop(),
            child: const Text('OK'),
          ),
        ],
      ),
    );
  }

  /// Handles keyboard show/hide events.
  ///
  /// The camera preview intentionally stays active while the keyboard is
  /// visible so users can type text and stream video simultaneously.
  void _onKeyboardVisibilityChanged(bool visible) {
    // FIX: _isKeyboardVisible was previously assigned twice — once outside
    // setState and once inside. A single assignment inside setState is
    // sufficient and keeps the mutation tied to the rebuild.
    setState(() {
      _isKeyboardVisible = visible;
    });

    _addWebSocketLog(
        'Keyboard ${visible ? 'shown' : 'hidden'} - camera preview remains active',
        LogType.info);
  }

  // ==========================================
  // UNIFIED ANIMATION SYSTEM
  // ==========================================

  /// Recomputes the Jarvis orb scale for the current app state.
  void _updateJarvisScale() {
    switch (_currentState) {
      case AppState.recording:
      case AppState.ai_speaking:
        // Scale 1.0..1.5x proportionally to the smoothed audio level.
        _jarvisScale = 1.0 + 0.5 * _audioLevelManager.smoothedLevel;

        if (_currentState == AppState.ai_speaking) {
          // ANIMATION SYNC: if no AI audio arrived for 1.5 s and the level is
          // near-silent, assume playback finished and fall back to idle.
          final idleGap =
              DateTime.now().difference(_lastAIAudioReceivedTime);
          final isSilent = _audioLevelManager.smoothedLevel < 0.05;
          if (idleGap.inMilliseconds > 1500 && isSilent) {
            // Microtask avoids calling setState during build/layout.
            Future.microtask(
                () => _changeStateSafely(AppState.connected_idle));
          }
        }
        break;

      case AppState.processing:
        // Gentle sinusoidal "breathing" while waiting for the AI.
        _jarvisScale =
            1.0 + 0.05 * sin(DateTime.now().millisecondsSinceEpoch * 0.005);
        break;

      default:
        _jarvisScale = 1.0; // Idle: no animation.
        break;
    }
  }

  /// Mirrors the smoothed audio level onto the IOS7 Siri-wave controller.
  void _updateSiriWaveAmplitude() {
    final level = _audioLevelManager.smoothedLevel;
    _siriWaveController.amplitude = level.clamp(0.0, 1.0);
  }

  // ==========================================
  // MULTIMODAL IMAGE PREVIEW METHODS
  // ==========================================

  /// Send latest captured image as preview to server (DISABLED - images sent with text/audio)

  /// OPTIMIZATION: frame counter used to throttle the memory cleanup below.
  /// NOTE(review): no increment of this counter is visible in this chunk —
  /// if nothing else bumps it, the modulo guard in _forceMemoryCleanup always
  /// passes (0 % 100 == 0) and cleanup runs on every call. Confirm the
  /// increment site elsewhere in the file.
  int _imageFrameCounter = 0;

  /// OPTIMIZATION: drops the cached camera frame to reduce memory/code-cache
  /// pressure, at most once per 100 frames instead of on every send.
  void _forceMemoryCleanup() {
    // Throttle: only run the cleanup on every 100th frame.
    if (_imageFrameCounter % 100 != 0) return;

    // Release the cached frame only when the preview is inactive; while
    // previewing, keeping it avoids re-capture churn.
    if (_lastCapturedImage != null && !_isImagePreviewActive) {
      _lastCapturedImage = null;

      debugPrint('🧹 Memory cleanup: Cleared unused image data');
    }
  }

  // ==========================================
  // STATUS TEXT MANAGEMENT
  // ==========================================

  /// Applies [text] to the status line, but only when [priority] is at least
  /// as high as the currently displayed priority.
  void _updateStatusText(String text, StatusPriority priority) {
    final outranked = priority.index < _currentStatusPriority.index;
    if (outranked) return;

    setState(() {
      _statusText = text;
      _currentStatusPriority = priority;
    });
  }

  /// Restores the default status line for the current app state.
  void _resetStatusText() {
    // While the AI is speaking the live transcript stays on screen.
    if (_currentState == AppState.ai_speaking) return;

    String text;
    StatusPriority priority;
    switch (_currentState) {
      case AppState.disconnected:
        text = 'Disconnected';
        priority = StatusPriority.idle;
        break;
      case AppState.connecting:
        text = 'Wait, Ruru is Preparing...';
        priority = StatusPriority.idle;
        break;
      case AppState.connected_idle:
        text = 'Tap Ruru for Talking';
        priority = StatusPriority.idle;
        break;
      case AppState.recording:
        text = 'Speak now, I\'m Hearing...';
        priority = StatusPriority.listening;
        break;
      case AppState.processing:
        text = 'Wait, I\'m Thinking...';
        priority = StatusPriority.thinking;
        break;
      case AppState.error:
        text = 'Something problem with your connections...';
        priority = StatusPriority.error;
        break;
      default:
        return;
    }
    _updateStatusText(text, priority);
  }

  Future<void> _initializeClient() async {
    _client = AdkFlutterClient(serverUrl: 'wss://aiapi.20c.org');

    // PHASE 3: Initialize NetworkQualityManager and ConnectionService after client created
    // NOTE: _networkQualityManager is nullable but will be initialized here before any usage
    _networkQualityManager = NetworkQualityManager(
        'https://aiapi.20c.org/health'); // Hardcoded - client doesn't have healthUrl
    _networkQualityManager!
        .startMonitoring(); // Safe to use ! because we just initialized it above
    _startConnectionMonitoring();

    _connectionService = ConnectionService(
      client: _client,
      networkQualityManager:
          _networkQualityManager!, // Safe to use ! because we just initialized it above
      connectionBloc: context.read<ConnectionBloc>(),
      onLog: _addWebSocketLog,
      onStateChanged: (state) {
        // Connection state changes handled in caller methods
      },
    );

    // PHASE 3: Initialize AudioRecordingService for audio recording logic extraction
    _audioRecordingService = AudioRecordingService(
      client: _client,
      nativePeripheralManager: _nativePeripheralManager,
      audioLevelManager: _audioLevelManager,
      context: context,
      onLog: _addWebSocketLog,
      onStateChanged: (state, {statusText, priority}) async {
        await _changeStateSafely(state,
            statusText: statusText, priority: priority ?? StatusPriority.idle);
      },
      activeSubscriptions: _activeSubscriptions,
      isCameraEnabled: () => _isImagePreviewActive,
      getLastCapturedImage: () => _lastCapturedImage,
      isCameraInitialized: () => _nativePeripheralManager.isCameraInitialized,
      getCurrentState: () => _currentState,
    );

    // PHASE 3: Initialize CameraService for camera logic extraction
    _cameraService = CameraService(
      client: _client,
      nativePeripheralManager: _nativePeripheralManager,
      context: context,
      onLog: _addWebSocketLog,
      onCameraStateChanged: (show) =>
          setState(() => _isImagePreviewActive = show),
      onTextureIdChanged: (id) => setState(() => _cameraTextureId = id),
      onImageCaptured: (data) => _lastCapturedImage = data,
      onPermissionDenied: _showCameraPermissionRationaleDialog,
      activeSubscriptions: _activeSubscriptions,
      isShowingCamera: () => _isImagePreviewActive,
      isCameraInitialized: () => _nativePeripheralManager.isCameraInitialized,
    );

    _audioPlayerService = AudioPlayerService(
      onLog: (msg) => _addWebSocketLog(msg, LogType.info),
    );
    // ==========================================
    // CLIENT EVENT LISTENERS
    // ==========================================

    // Interrupt handler for barge-in functionality
    _client.onInterrupt.listen((interrupted) {
      if (interrupted) {
        _addWebSocketLog('Interrupt received - stopping audio playback',
            LogType.info); // Stop audio playback if active (barge-in)
        _audioPlayerService.stop();
        setState(() {
          _currentState = AppState.recording;
          _updateStatusText(
              'Speak now, I\'m Hearing...', StatusPriority.listening);
        });
      }
    });

    // Connection state listener
    _client.onConnectionState.listen((state) async {
      // PHASE 3A: Fix State Management Race Conditions - Use synchronized state change
      await _changeStateSafely(
          state == WebSocketConnectionState.disconnected
              ? AppState.disconnected
              : state == WebSocketConnectionState.connecting
                  ? AppState.connecting
                  : state == WebSocketConnectionState.connected
                      ? AppState.connected_idle
                      : AppState.disconnected, // Default fallback
          priority: StatusPriority.idle);

      if (state == WebSocketConnectionState.connected) {
        _transcriptText = '';
      }

      if (state != WebSocketConnectionState.disconnected) {
        _addWebSocketLog('Connection state: $state', LogType.info);
      }
    });

    // Text responses - don't show in status text
    _client.onText.listen((text) {
      // New text response means a new turn has started (or is about to start audio)
      // So we can stop ignoring audio chunks
      _audioPlayerService.resetInterruptionFlag();

      _addWebSocketLog('Text response: ${text.length} chars', LogType.info);
      // Update network quality manager on message receipt
      _networkQualityManager?.onMessageReceived();

      // Add message to ChatBloc
      context.read<ChatBloc>().add(MessageAdded(
          ChatMessage(text: text, isUser: false, timestamp: DateTime.now())));
      // Don't update status text with AI responses per simplified logic
      // ==========================================
    });

// ==========================================
// END DUPLICATE CODE SECTION
// ==========================================

    // Transcriptions - accumulate user input text
    _client.onTranscription.listen((transcription) {
      _addWebSocketLog(
          'Transcription: ${transcription.type.name}', LogType.info);
      // Update network quality manager on message receipt
      _networkQualityManager?.onMessageReceived();
      final transcriptText = transcription.isPartial
          ? '🎤 ${transcription.type == TranscriptionType.input ? 'You' : 'AI'}: ${transcription.text}...'
          : '🎤 ${transcription.type == TranscriptionType.input ? 'You' : 'AI'}: ${transcription.text}';

      // ==========================================
      // STEP 2.2: CHATBLOC MIGRATION - Transcription Messages
      // ==========================================
      // NEW CODE (ChatBloc event):
      context.read<ChatBloc>().add(MessageAdded(ChatMessage(
          text: transcriptText,
          isUser: transcription.type == TranscriptionType.input,
          timestamp: transcription.timestamp,
          isTranscription: true)));

      // Keep transcript accumulation (not part of ChatBloc yet)
      if (transcription.type == TranscriptionType.input) {
        setState(() {
          if (transcription.text.trim().isNotEmpty) {
            _transcriptText = transcription.text;
          }
        });
      }
      // ==========================================
      if (transcription.type == TranscriptionType.input) {
        if (transcription.text.trim().isNotEmpty) {
          _updateStatusText(_transcriptText, StatusPriority.transcription);
        }
      }
    });

    // Turn complete events - auto-start recording after AI response
    _client.onTurnCompleteEvent.listen((turnEvent) async {
      _addWebSocketLog('Turn complete - interrupted: ${turnEvent.interrupted}',
          LogType.info);

      // PHASE 3A: Fix State Management Race Conditions - Use synchronized state change
      // FIX ANIMATION SYNC: Don't reset state if AI is speaking (audio might still be playing)
      // The state will be reset by the animation system when audio level drops
      if (_currentState != AppState.ai_speaking) {
        await _changeStateSafely(AppState.connected_idle);
      }

      // ==========================================
      // STEP 2.3: CHATBLOC MIGRATION - Turn Complete Messages
      // ==========================================

      // NEW CODE (ChatBloc event):
      context.read<ChatBloc>().add(MessageAdded(ChatMessage(
          text: turnEvent.interrupted ? '⚡ Interrupted' : '✅ Turn completed',
          isUser: false,
          timestamp: turnEvent.timestamp,
          isSystemMessage: true)));
      // ==========================================

      // PHASE 3: Reset image sent flag for the NEXT turn (delegated to AudioRecordingService)
      _audioRecordingService.resetImageContextFlag();

      // Auto-start recording after AI response completes (only if not interrupted and not user-stopped)
      if (!turnEvent.interrupted && !_userInitiatedStop) {
        Future.delayed(const Duration(milliseconds: 500), () {
          _startRecording();
        });
      }
    });

    // Handle server-side interruption (VAD)
    // This triggers when the server detects the user is speaking while AI is speaking
    _client.onInterrupt.listen((_) async {
      _addWebSocketLog('⚡ Server signaled interruption (VAD)', LogType.warning);

      // Handle interruption via service (sets ignore flag and stops audio)
      await _audioPlayerService.handleInterruption();

      // Clear AI audio queue in AudioLevelManager to stop animation
      // (We can't clear the private queue directly, but stopping audio player helps)

      // Transition to idle/recording state
      await _changeStateSafely(AppState.connected_idle);

      // Notify user via UI
      context.read<ChatBloc>().add(MessageAdded(ChatMessage(
          text: '⚡ Interrupted by user',
          isUser: false,
          timestamp: DateTime.now(),
          isSystemMessage: true)));

      // Ensure recording is active (since user is speaking)
      if (_currentState != AppState.recording) {
        _startRecording();
      }
    });

    // Audio responses - keep transcript visible during playback
    _client.onAudio.listen((audioData) async {
      // AudioPlayerService handles the ignore flag internally now

      _addWebSocketLog(
          'Audio response: ${audioData.length} bytes', LogType.info);
      // Update network quality manager on message receipt
      _networkQualityManager?.onMessageReceived();

      // ==========================================
      // STEP 5.2: AUDIOBLOC MIGRATION - AI Audio Handling
      // ==========================================
      // Keep old code ACTIVE for Siri Wave animation (hybrid approach)
      // _aiAudioLevel calculation removed (handled internally by AudioLevelManager)
      _audioLevelManager.updateFromAIAudio(audioData);
      _lastAIAudioReceivedTime =
          DateTime.now(); // Update timestamp for animation sync

      // ALSO send to AudioBloc for state tracking (Phase 3 ready)
      context.read<AudioBloc>().add(AIAudioReceived(audioData));
      // ==========================================

      // PHASE 3A: Fix State Management Race Conditions - Use synchronized state change
      await _changeStateSafely(AppState.ai_speaking,
          statusText: 'Wait, I\'m Thinking...',
          priority: StatusPriority.thinking);

      await _audioPlayerService.playAudioChunk(audioData);
      if (_currentState == AppState.recording)
        _addWebSocketLog('Barge-in active', LogType.info);
    });

    // Error handling - More selective auto-reconnect
    _client.onError.listen((error) {
      _addWebSocketLog('WebSocket error: $error', LogType.error);

      // Only auto-reconnect for actual connection loss, not parsing errors
      final errorStr = error.toString().toLowerCase();
      final isConnectionError = errorStr.contains('connection lost') ||
          errorStr.contains('websocketexception') ||
          errorStr.contains('connection reset') ||
          errorStr.contains('connection refused') ||
          errorStr.contains('network is unreachable');

      if (isConnectionError && _currentState != AppState.connecting) {
        _addWebSocketLog(
            'Auto-reconnecting in 5 seconds due to connection loss',
            LogType.warning);
        Future.delayed(const Duration(seconds: 5), () async {
          // Only reconnect if still disconnected
          if (_currentState == AppState.disconnected) {
            await _connectToServer();
          }
        });
      }

      // NEW CODE: Dispatch to ChatBloc
      context.read<ChatBloc>().add(MessageAdded(ChatMessage(
          text: 'Error: $error',
          isUser: false,
          timestamp: DateTime.now(),
          isError: true)));
    });

    // REMOVED: Manual pong handler - WebSocket handles ping/pong automatically

    _addWebSocketLog('Client initialized', LogType.info);
  }

  // ==========================================
  // WEB SOCKET CONNECTION METHODS
  // ==========================================

  // ==========================================
  // SECTION: LOGIC - CONNECTION
  // ==========================================

  /// Connects to the Gemini Live server.
  ///
  /// Dispatches the intent to the ConnectionBloc (PHASE 3 migration) and
  /// performs the actual handshake through the ConnectionService; any
  /// failure is logged and surfaced in the chat as an error message.
  Future<void> _connectToServer() async {
    context.read<ConnectionBloc>().add(const ConnectRequested());

    try {
      await _connectionService.connect();
    } catch (e) {
      // Same text goes to the WebSocket log and the chat transcript.
      final failureNote = 'Connection failed: $e';
      _addWebSocketLog(failureNote, LogType.error);
      context.read<ChatBloc>().add(MessageAdded(ChatMessage(
          text: failureNote,
          isUser: false,
          timestamp: DateTime.now(),
          isError: true)));
    }
  }

  /// Starts microphone capture (delegated to AudioRecordingService) and
  /// mirrors the request into the AudioBloc (PHASE 4.2 hybrid migration).
  Future<void> _startRecording() async {
    await _audioRecordingService.startRecording();
    // The widget may have been disposed while awaiting; guard the context use.
    if (!mounted) return;
    context.read<AudioBloc>().add(StartRecordingRequested());
  }

  /// Stops microphone capture (delegated to AudioRecordingService) and
  /// mirrors the request into the AudioBloc (PHASE 4.2 hybrid migration).
  Future<void> _stopRecording() async {
    await _audioRecordingService.stopRecording();
    // The widget may have been disposed while awaiting; guard the context use.
    if (!mounted) return;
    context.read<AudioBloc>().add(StopRecordingRequested());
  }

  /// Toggles continuous (tap-to-talk) recording mode.
  ///
  /// A tap while recording performs a user-initiated stop (which suppresses
  /// auto-restart in the turn-complete handler); otherwise a recording is
  /// started, but only from a state that allows it. Rapid successive taps
  /// are debounced via [_isTapProcessing] (PHASE 6).
  Future<void> _toggleContinuousMode() async {
    // Debounce: ignore taps while a previous one is still being handled.
    if (_isTapProcessing) {
      _addWebSocketLog(
          'Tap already being processed, ignoring', LogType.warning);
      return;
    }
    _isTapProcessing = true;

    try {
      if (_currentState == AppState.recording) {
        // Tap during recording => user-initiated stop (blocks auto-restart).
        await _stopRecordingUserInitiated();
        _addWebSocketLog(
            'Continuous mode disabled (user initiated)', LogType.info);
        return;
      }

      // States in which starting a recording is not allowed, each with the
      // explanation that gets logged. States are mutually exclusive, so a
      // single lookup replaces the original chain of checks.
      const blockedStates = <AppState, String>{
        AppState.disconnected:
            'Cannot start recording - not connected. Please wait for initialization.',
        AppState.connecting:
            'Cannot start recording - still connecting. Please wait.',
        AppState.processing:
            'Cannot start recording - AI is processing. Please wait.',
        AppState.ai_speaking:
            'Cannot start recording - AI is speaking. Please wait or interrupt.',
      };
      final blockedReason = blockedStates[_currentState];
      if (blockedReason != null) {
        _addWebSocketLog(blockedReason, LogType.warning);
        return;
      }

      // Guard against duplicate starts while an audio stream is already live.
      if (_audioStreamSubscription != null) {
        _addWebSocketLog('Recording already in progress', LogType.warning);
        return;
      }

      await _startRecording();
      _addWebSocketLog('Continuous mode enabled', LogType.success);
    } finally {
      // Release the debounce flag after a short delay to absorb rapid re-taps.
      Future.delayed(const Duration(milliseconds: 300), () {
        _isTapProcessing = false;
      });
    }
  }

  /// PHASE 3A: Fix State Management Race Conditions
  /// Whether the current recording stop was explicitly requested by the
  /// user (tap-to-stop). While true, the turn-complete handler skips its
  /// auto-restart of recording.
  bool _userInitiatedStop = false;

  /// Stops recording on the user's behalf, raising [_userInitiatedStop] for
  /// the duration so the turn-complete handler does not auto-restart.
  ///
  /// FIX: the flag is now reset in a `finally` block — previously, if
  /// [_stopRecording] threw, the flag stayed `true` forever and permanently
  /// disabled auto-restart of recording.
  Future<void> _stopRecordingUserInitiated() async {
    _userInitiatedStop = true;
    try {
      await _stopRecording();
    } finally {
      _userInitiatedStop = false;
    }
  }

  /// Synchronized state change method to prevent race conditions.
  ///
  /// Transitions the app to [newState] and refreshes the status banner —
  /// [statusText] at [priority] when given, otherwise the default text.
  /// A call arriving while another transition is mid-flight is dropped.
  Future<void> _changeStateSafely(AppState newState,
      {String? statusText,
      StatusPriority priority = StatusPriority.idle}) async {
    await synchronized(_stateLock, () async {
      // Re-entrancy guard: it is this flag (not the lock object) that
      // actually prevents overlapping transitions here.
      if (_isStateChanging) {
        debugPrint('State change already in progress, skipping: $newState');
        return;
      }

      _isStateChanging = true;
      try {
        _currentState = newState;
        if (statusText == null) {
          _resetStatusText();
        } else {
          _updateStatusText(statusText, priority);
        }
        // Push the new state into the widget tree.
        if (mounted) {
          setState(() {});
        }
      } finally {
        _isStateChanging = false;
      }
    });
  }

  /// Helper method for "synchronized" execution.
  ///
  /// NOTE: despite the name, this provides NO mutual exclusion — [lock] is
  /// never inspected and concurrent callers are not queued; [block] is
  /// executed (and awaited) immediately. Real re-entrancy protection in this
  /// widget comes from the `_isStateChanging` flag in [_changeStateSafely],
  /// together with Dart's single-threaded event loop.
  ///
  /// The previous Completer-based implementation forwarded results and
  /// errors unchanged, so it added nothing over a plain awaited call and has
  /// been simplified away. The [lock] parameter is kept only for interface
  /// compatibility with existing call sites.
  Future<T> synchronized<T>(Object lock, Future<T> Function() block) async {
    return await block();
  }

  /// Sends [text] to the server, atomically attaching a freshly captured
  /// camera frame when the live preview is active.
  ///
  /// The user message is echoed into the ChatBloc first; send failures are
  /// logged and surfaced in the chat as error messages. A failed camera
  /// capture degrades gracefully to a text-only send.
  Future<void> _sendTextMessage(String text) async {
    if (text.trim().isEmpty) return;

    // Log a preview truncated to 50 characters.
    final preview = text.length > 50 ? '${text.substring(0, 50)}...' : text;
    _addWebSocketLog('Sending text: "$preview"', LogType.info);

    // Echo the user's message into the chat transcript.
    context.read<ChatBloc>().add(MessageAdded(
        ChatMessage(text: text, isUser: true, timestamp: DateTime.now())));

    final cameraLive =
        _isImagePreviewActive && _nativePeripheralManager.isCameraInitialized;

    try {
      if (!cameraLive) {
        // No active camera: plain text send.
        await _client.sendText(text);
        _addWebSocketLog('Text dikirim (tanpa kamera)', LogType.success);
      } else {
        // Camera is active: capture in real time and send text + image
        // as one atomic message.
        try {
          // 1. Trigger the capture.
          await _nativePeripheralManager.captureImage();
          _addWebSocketLog(
              'Camera capture initiated for atomic message', LogType.info);

          // 2. Give the capture stream time to update _lastCapturedImage.
          //    No server roundtrip is needed anymore — just a short delay.
          await Future.delayed(const Duration(milliseconds: 500));

          // 3. Send the atomic message (text + image).
          final frame = _lastCapturedImage;
          if (frame == null) {
            throw Exception('Image capture timed out or returned null');
          }
          await _client.sendText(text, cameraImage: frame);
          _addWebSocketLog(
              'Text sent with atomic camera image', LogType.success);
        } catch (e) {
          // Graceful degradation: fall back to a text-only send.
          _addWebSocketLog(
              'Camera capture failed ($e), sending text only', LogType.warning);
          await _client.sendText(text);
        }
      }
    } catch (e) {
      _addWebSocketLog('Send text failed: $e', LogType.error);
      // STEP 2.9: CHATBLOC MIGRATION - surface the failure in the chat.
      context.read<ChatBloc>().add(MessageAdded(ChatMessage(
          text: 'Failed to send message: $e',
          isUser: false,
          timestamp: DateTime.now(),
          isError: true)));
    }
  }

  /// Disconnects from the server.
  ///
  /// Mirrors the request into the ConnectionBloc (PHASE 3 migration), then
  /// performs the actual teardown through the ConnectionService.
  Future<void> _disconnectFromServer() async {
    context.read<ConnectionBloc>().add(const DisconnectRequested());
    await _connectionService.disconnect();
  }

  // ==========================================
  // WEB SOCKET LOGGING METHODS
  // ==========================================

  /// Add WebSocket log entry
  /// PHASE 4.1: HYBRID - Both setState + BLoC dispatch
  ///
  /// FIX: a single [WebSocketLog] (with a single timestamp) is now shared
  /// between the local list and the ChatBloc — previously two objects with
  /// two different `DateTime.now()` values were created, so the two views
  /// of the log could disagree. Also guards against use after dispose,
  /// since listeners may fire once the widget is gone.
  void _addWebSocketLog(String message, LogType type) {
    final entry = WebSocketLog(
        message: message, timestamp: DateTime.now(), type: type);

    if (mounted) {
      setState(() {
        _websocketLogs.add(entry);
      });
      // Also dispatch to ChatBloc for future use
      context.read<ChatBloc>().add(LogAdded(entry));
    } else {
      // Widget disposed: keep the backing list consistent, skip UI/BLoC.
      _websocketLogs.add(entry);
    }

    // print('[${type.name.toUpperCase()}] $message'); // DISABLED: Focus on image debug - too verbose
  }

  // ==========================================
  // LEGACY METHODS - TO BE REMOVED
  // ==========================================

  /// Get appropriate hint text based on camera state.
  ///
  /// When the camera preview is live, the hint tells the user that a frame
  /// will be captured and attached when the message is sent.
  String _getTextFieldHint() => _isImagePreviewActive
      ? 'Ketik pesan (kamera aktif - gambar akan di-capture saat kirim)...'
      : 'Ketik pesan...';

  /// Clear all WebSocket logs.
  /// PHASE 4.1: HYBRID - Both setState + BLoC dispatch.
  void _clearLogs() {
    // Tear-off does the mutation inside setState's callback.
    setState(_websocketLogs.clear);

    // Also dispatch to ChatBloc for future use.
    context.read<ChatBloc>().add(LogsCleared());
  }

  // ==========================================
  // UI METHODS
  // ==========================================

  // Settings dialog removed - ai-backend-services manages all AI configuration internally

  /// Show WebSocket logs dialog
  ///
  /// Displays [_websocketLogs] newest-first in a scrollable list.
  /// FIX: the LogType -> color/icon mapping was previously triplicated
  /// inline (background, icon, label); it is factored into local helpers so
  /// the three usages cannot drift apart. The timestamp's hour is now
  /// zero-padded like its minutes/seconds (it was inconsistent before).
  void _showLogsDialog() {
    // Accent color for a log type (used for both the icon and the label).
    Color accentFor(LogType type) => type == LogType.error
        ? Colors.red
        : type == LogType.success
            ? Colors.green
            : Colors.blue;

    // Tile background tint for a log type.
    Color tintFor(LogType type) => type == LogType.error
        ? Colors.red.shade50
        : type == LogType.success
            ? Colors.green.shade50
            : Colors.grey.shade50;

    // Status icon for a log type.
    IconData iconFor(LogType type) => type == LogType.error
        ? Icons.error
        : type == LogType.success
            ? Icons.check_circle
            : Icons.info;

    // HH:MM:SS, all components zero-padded.
    String clock(DateTime t) => '${t.hour.toString().padLeft(2, '0')}'
        ':${t.minute.toString().padLeft(2, '0')}'
        ':${t.second.toString().padLeft(2, '0')}';

    showDialog(
      context: context,
      builder: (context) => AlertDialog(
        title: Row(
          children: [
            const Text('WebSocket Logs'),
            const Spacer(),
            IconButton(
              icon: const Icon(Icons.clear),
              onPressed: () {
                // Clear, then reopen so the now-empty list is shown.
                _clearLogs();
                Navigator.of(context).pop();
                _showLogsDialog();
              },
              tooltip: 'Clear logs',
            ),
          ],
        ),
        content: SizedBox(
          width: double.maxFinite,
          height: 300,
          child: ListView.builder(
            itemCount: _websocketLogs.length,
            itemBuilder: (context, index) {
              // Show newest first.
              final log = _websocketLogs[_websocketLogs.length - 1 - index];
              return Container(
                margin: const EdgeInsets.symmetric(vertical: 2),
                padding: const EdgeInsets.all(8),
                decoration: BoxDecoration(
                  color: tintFor(log.type),
                  borderRadius: BorderRadius.circular(4),
                ),
                child: Column(
                  crossAxisAlignment: CrossAxisAlignment.start,
                  children: [
                    Row(
                      children: [
                        Icon(iconFor(log.type),
                            size: 16, color: accentFor(log.type)),
                        const SizedBox(width: 4),
                        Text(
                          log.type.name.toUpperCase(),
                          style: TextStyle(
                            fontSize: 10,
                            fontWeight: FontWeight.bold,
                            color: accentFor(log.type),
                          ),
                        ),
                        const Spacer(),
                        Text(
                          clock(log.timestamp),
                          style:
                              const TextStyle(fontSize: 8, color: Colors.grey),
                        ),
                      ],
                    ),
                    const SizedBox(height: 4),
                    Text(
                      log.message,
                      style: const TextStyle(fontSize: 11),
                    ),
                  ],
                ),
              );
            },
          ),
        ),
        actions: [
          TextButton(
            onPressed: () => Navigator.of(context).pop(),
            child: const Text('Close'),
          ),
        ],
      ),
    );
  }

  // ==========================================
  // WIDGET BUILD METHOD
  // ==========================================
  // SECTION: UI BUILD METHOD
  // ==========================================

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      // PHASE 6A: Remove AppBar for full-screen overlay design
      // appBar: AppBar(...), // Removed for overlay layout

      body: Stack(
        children: [
          // PHASE 6B: Full Page Background Layer (Jarvis/Camera)
          Positioned.fill(
            child: GestureDetector(
              onTap: _currentState != AppState.disconnected
                  ? () => _toggleContinuousMode()
                  : null,
              // PHASE 4.3: BlocBuilder for Camera Preview Visibility
              child: BlocBuilder<CameraBloc, CameraState>(
                builder: (context, state) {
                  final isCameraReady = state is CameraReady;
                  final isCameraActive = isCameraReady && state.isActive;

                  // Hybrid fallback: use local state if BLoC not ready
                  final showCamera = isCameraActive ||
                      (_isImagePreviewActive &&
                          _nativePeripheralManager.isCameraInitialized);

                  return showCamera
                      ? Stack(
                          children: [
                            // Camera preview - always visible when camera is active
                            CameraPreviewWidget(
                              textureId: _cameraTextureId,
                            ),
                            // Optional overlay when keyboard is visible to indicate camera is still active
                            if (_isKeyboardVisible)
                              Positioned(
                                top: 100,
                                left: 20,
                                right: 20,
                                child: Container(
                                  padding: const EdgeInsets.symmetric(
                                      horizontal: 16, vertical: 8),
                                  decoration: BoxDecoration(
                                    color:
                                        const Color.fromRGBO(33, 150, 243, 0.8),
                                    borderRadius: BorderRadius.circular(20),
                                  ),
                                  child: const Text(
                                    '📷 Camera active - multimodal input enabled',
                                    style: TextStyle(
                                      fontSize: 14,
                                      fontWeight: FontWeight.bold,
                                      color: Colors.white,
                                    ),
                                    textAlign: TextAlign.center,
                                  ),
                                ),
                              ),
                          ],
                        )
                      : AnimatedScale(
                          scale: _jarvisScale,
                          duration: const Duration(milliseconds: 100),
                          curve: Curves.easeOut,
                          child: Container(
                            color: Colors.black, // Background color for Jarvis
                            child: Center(
                              child: Image.asset(
                                'jarvis.gif',
                                width: MediaQuery.of(context).size.width *
                                    0.8, // 80% of screen width
                                height: MediaQuery.of(context).size.height *
                                    0.6, // 60% of screen height
                                fit: BoxFit.contain,
                              ),
                            ),
                          ),
                        );
                },
              ),
            ),
          ),

          // PHASE 6C: Header Overlay (AppBar content)
          Positioned(
            top: 0,
            left: 0,
            right: 0,
            child: SafeArea(
              child: Container(
                padding:
                    const EdgeInsets.symmetric(horizontal: 16, vertical: 8),
                decoration: BoxDecoration(
                  gradient: LinearGradient(
                    begin: Alignment.topCenter,
                    end: Alignment.bottomCenter,
                    colors: [
                      const Color.fromRGBO(0, 0, 0, 0.8),
                      const Color.fromRGBO(0, 0, 0, 0.4),
                      Colors.transparent,
                    ],
                  ),
                ),
                child: Row(
                  children: [
                    // PHASE 4.4: BlocBuilder for Network Quality
                    BlocBuilder<ConnectionBloc, ConnectionState>(
                      builder: (context, state) {
                        // Hybrid: Use BLoC state if connected
                        if (state is ConnectionConnected) {
                          final quality = state.networkQuality;

                          // Inline logic
                          Color color = Colors.green;
                          IconData icon = Icons.wifi;
                          String text = 'Good';

                          if (quality < 50) {
                            color = Colors.red;
                            icon = Icons.wifi_off;
                            text = 'Poor';
                          } else if (quality < 80) {
                            color = Colors.orange;
                            icon = Icons.network_check;
                            text = 'Fair';
                          }

                          return Container(
                            padding: const EdgeInsets.symmetric(
                                horizontal: 8, vertical: 4),
                            decoration: BoxDecoration(
                              color: color,
                              borderRadius: BorderRadius.circular(8),
                            ),
                            child: Row(
                              children: [
                                Icon(icon, size: 16, color: Colors.white),
                                const SizedBox(width: 4),
                                Text('$text (${quality}ms)',
                                    style: const TextStyle(
                                        color: Colors.white, fontSize: 12)),
                              ],
                            ),
                          );
                        }

                        // Fallback: Use local manager with ValueListenableBuilder for reactivity
                        if (_networkQualityManager != null) {
                          return ValueListenableBuilder<int>(
                            valueListenable:
                                _networkQualityManager!.latencyNotifier,
                            builder: (context, quality, _) {
                              // Inline logic
                              Color color = Colors.green;
                              IconData icon = Icons.wifi;
                              String text = 'Good';

                              if (quality < 50) {
                                color = Colors.red;
                                icon = Icons.wifi_off;
                                text = 'Poor';
                              } else if (quality < 80) {
                                color = Colors.orange;
                                icon = Icons.network_check;
                                text = 'Fair';
                              }

                              return Container(
                                padding: const EdgeInsets.symmetric(
                                    horizontal: 8, vertical: 4),
                                decoration: BoxDecoration(
                                  color: color,
                                  borderRadius: BorderRadius.circular(8),
                                ),
                                child: Row(
                                  children: [
                                    Icon(icon, size: 16, color: Colors.white),
                                    const SizedBox(width: 4),
                                    Text('$text (${quality}ms)',
                                        style: const TextStyle(
                                            color: Colors.white, fontSize: 12)),
                                  ],
                                ),
                              );
                            },
                          );
                        }

                        // Default/Loading state
                        return Container(
                          padding: const EdgeInsets.symmetric(
                              horizontal: 8, vertical: 4),
                          decoration: BoxDecoration(
                            color: Colors.grey,
                            borderRadius: BorderRadius.circular(8),
                          ),
                          child: const Row(
                            children: [
                              Icon(Icons.wifi_off,
                                  size: 16, color: Colors.white),
                              SizedBox(width: 4),
                              Text('Init (0ms)',
                                  style: TextStyle(
                                      color: Colors.white, fontSize: 12)),
                            ],
                          ),
                        );
                      },
                    ),
                    const Spacer(),
                    IconButton(
                      icon: Icon(
                          _showLogs ? Icons.visibility_off : Icons.visibility,
                          color: Colors.white),
                      onPressed: () => setState(() => _showLogs = !_showLogs),
                      tooltip: _showLogs ? 'Hide logs' : 'Show logs',
                    ),
                    IconButton(
                      icon: const Icon(Icons.bug_report, color: Colors.white),
                      onPressed: _showLogsDialog,
                      tooltip: 'View WebSocket logs',
                    ),
                    // PHASE 4.4: BlocBuilder for Connection Status
                    BlocBuilder<ConnectionBloc, ConnectionState>(
                      builder: (context, state) {
                        // Hybrid: Map BLoC state to UI properties
                        Color color = Colors.red;
                        String text = 'Disconnected';
                        VoidCallback? onTap = _connectToServer;

                        if (state is ConnectionConnected) {
                          color = Colors.green;
                          text = 'Connected';
                          onTap = _disconnectFromServer;
                        } else if (state is ConnectionConnecting) {
                          color = Colors.orange;
                          text = 'Preparing....';
                          onTap = _disconnectFromServer; // Allow cancelling
                        } else {
                          // Fallback to local state if BLoC is initial/disconnected
                          if (_currentState == AppState.connecting) {
                            color = Colors.orange;
                            text = 'Preparing....';
                          } else if (_currentState != AppState.disconnected) {
                            color = Colors.green;
                            text = 'Connected';
                            onTap = _disconnectFromServer;
                          }
                        }

                        return GestureDetector(
                          onTap: onTap,
                          child: Container(
                            padding: const EdgeInsets.symmetric(
                                horizontal: 12, vertical: 4),
                            decoration: BoxDecoration(
                              color: color,
                              borderRadius: BorderRadius.circular(12),
                            ),
                            child: Text(
                              text,
                              style: const TextStyle(
                                  color: Colors.white, fontSize: 12),
                            ),
                          ),
                        );
                      },
                    ),
                  ],
                ),
              ),
            ),
          ),

          // PHASE 6D: Status Text Overlay
          Positioned(
            top: MediaQuery.of(context).size.height * 0.25, // 25% from top
            left: 20,
            right: 20,
            child: Container(
              padding: const EdgeInsets.symmetric(horizontal: 16, vertical: 8),
              decoration: BoxDecoration(
                color: const Color.fromRGBO(0, 0, 0, 0.6),
                borderRadius: BorderRadius.circular(20),
              ),
              child: Text(
                _statusText,
                style: const TextStyle(
                  fontSize: 18,
                  fontWeight: FontWeight.bold,
                  color: Colors.white,
                ),
                textAlign: TextAlign.center,
                maxLines: 2,
                overflow: TextOverflow.ellipsis,
              ),
            ),
          ),

          // PHASE 6E: Siri Wave Overlay - Using direct audio manager (Animation working!)
          Positioned(
            bottom: MediaQuery.of(context).size.height *
                0.15, // Moved lower (15% from bottom instead of 25%)
            left: 20,
            right: 20,
            child: SizedBox(
              height: 90, // Reduced height for better spacing
              child: LayoutBuilder(
                builder: (context, constraints) {
                  final availableWidth = constraints.maxWidth;
                  final safeWidth = availableWidth > 0 ? availableWidth : 360.0;

                  // PHASE 4.2: BlocBuilder for Audio Visualization
                  return BlocBuilder<AudioBloc, AudioState>(
                    builder: (context, state) {
                      // Determine amplitude based on state
                      double currentAmplitude = 0.0;
                      if (state is AudioRecording) {
                        currentAmplitude = state.level;
                      } else if (state is AudioPlaying) {
                        currentAmplitude = state.aiLevel;
                      }

                      // Update controller
                      _siriWaveController.amplitude =
                          currentAmplitude.clamp(0.0, 1.0);

                      return SiriWaveform.ios7(
                        key: const ValueKey('siri_wave_overlay'),
                        controller: _siriWaveController,
                        options: IOS7SiriWaveformOptions(
                          height: 90,
                          width: safeWidth,
                        ),
                      );
                    },
                  );
                },
              ),
            ),
          ),

          // PHASE 6F: Chat Input Overlay - Adjust position when keyboard is visible
          Positioned(
            bottom: _isKeyboardVisible
                ? MediaQuery.of(context).viewInsets.bottom
                : 0,
            left: 0,
            right: 0,
            child: Container(
              padding: const EdgeInsets.all(16),
              decoration: BoxDecoration(
                color: const Color.fromRGBO(0, 0, 0, 0.9),
                border: _isKeyboardVisible
                    ? Border(
                        top: BorderSide(
                            color: const Color.fromRGBO(255, 255, 255, 0.2),
                            width: 1),
                      )
                    : null,
              ),
              child: Row(
                children: [
                  Expanded(
                    child: Container(
                      decoration: BoxDecoration(
                        color: const Color.fromRGBO(255, 255, 255, 0.9),
                        borderRadius: BorderRadius.circular(25),
                      ),
                      child: TextField(
                        controller: _textController,
                        decoration: InputDecoration(
                          hintText: _getTextFieldHint(),
                          border: InputBorder.none,
                          contentPadding: const EdgeInsets.symmetric(
                              horizontal: 20, vertical: 12),
                        ),
                        style:
                            const TextStyle(color: Colors.black, fontSize: 14),
                        maxLines: 1,
                      ),
                    ),
                  ),
                  const SizedBox(width: 12),
                  // Camera toggle button
                  // PHASE 4.3: BlocBuilder for Camera Button
                  BlocBuilder<CameraBloc, CameraState>(
                    builder: (context, state) {
                      final isCameraActive =
                          state is CameraReady && state.isActive;
                      // Hybrid fallback: use _isImagePreviewActive if BLoC not ready/synced
                      final isActive = isCameraActive || _isImagePreviewActive;

                      return Container(
                        width: 48,
                        height: 48,
                        decoration: BoxDecoration(
                          color: isActive ? Colors.blue : Colors.grey,
                          shape: BoxShape.circle,
                        ),
                        child: IconButton(
                          onPressed: _toggleCamera,
                          icon: Icon(
                            isActive
                                ? Icons.camera_alt
                                : Icons.camera_alt_outlined,
                            color: Colors.white,
                            size: 20,
                          ),
                        ),
                      );
                    },
                  ),
                  const SizedBox(width: 8),
                  Container(
                    width: 48,
                    height: 48,
                    decoration: const BoxDecoration(
                      color: Colors.blue,
                      shape: BoxShape.circle,
                    ),
                    child: IconButton(
                      onPressed: () {
                        final text = _textController.text.trim();
                        if (text.isNotEmpty) {
                          _sendTextMessage(text);
                          _textController.clear();
                        }
                      },
                      icon:
                          const Icon(Icons.send, color: Colors.white, size: 20),
                    ),
                  ),
                ],
              ),
            ),
          ),

          // WebSocket Logs Panel (overlay when shown)
          if (_showLogs && _websocketLogs.isNotEmpty)
            Positioned(
              top: 80,
              left: 16,
              right: 16,
              bottom: MediaQuery.of(context).size.height * 0.4,
              child: Container(
                decoration: BoxDecoration(
                  color: const Color.fromRGBO(0, 0, 0, 0.8),
                  borderRadius: BorderRadius.circular(12),
                ),
                child: Column(
                  children: [
                    Container(
                      padding: const EdgeInsets.symmetric(
                          horizontal: 16, vertical: 8),
                      child: Row(
                        children: [
                          const Text(
                            'WebSocket Logs',
                            style: TextStyle(
                                color: Colors.white,
                                fontWeight: FontWeight.bold),
                          ),
                          const Spacer(),
                          Text(
                            '${_websocketLogs.length} entries',
                            style: const TextStyle(
                                color: Colors.grey, fontSize: 12),
                          ),
                        ],
                      ),
                    ),
                    Expanded(
                      child: ListView.builder(
                        padding: const EdgeInsets.symmetric(horizontal: 16),
                        itemCount: min(10, _websocketLogs.length),
                        itemBuilder: (context, index) {
                          final log =
                              _websocketLogs[_websocketLogs.length - 1 - index];
                          return Text(
                            '[${log.timestamp.hour}:${log.timestamp.minute.toString().padLeft(2, '0')}] ${log.type.name.toUpperCase()}: ${log.message}',
                            style: TextStyle(
                              color: log.type == LogType.error
                                  ? Colors.red.shade300
                                  : log.type == LogType.success
                                      ? Colors.green.shade300
                                      : Colors.white,
                              fontSize: 10,
                            ),
                          );
                        },
                      ),
                    ),
                  ],
                ),
              ),
            ), // Positioned close
        ],
      ),
    );
  }

  @override
  void dispose() {
    // PHASE 3C: Fix Memory Leaks in Development - proper cleanup order.
    // Teardown sequence: interaction flags → timers/animations → stream
    // subscriptions → managers/services → controllers → client, with
    // super.dispose() last, per Flutter's State lifecycle contract.

    // PHASE 6: Fix Tap-to-Stop Recording Race Conditions - reset the tap
    // processing flag immediately so no in-flight tap handler acts on a
    // disposing widget.
    _isTapProcessing = false;

    // Cancel animations and timers first.
    _animationController?.dispose();
    _imagePreviewThrottleTimer
        ?.cancel(); // OPTIMIZATION: Cancel image throttle timer

    // Cancel keyboard visibility subscription.
    _keyboardSubscription.cancel();

    // FIX MEMORY LEAK: Cancel media stream subscriptions.
    // NOTE(review): the original cancelled _audioStreamSubscription twice
    // (harmless but redundant); the duplicate call has been removed.
    _cameraStreamSubscription?.cancel();
    _audioStreamSubscription?.cancel();

    // Siri Wave controller doesn't need explicit disposal.

    // Dispose the network quality manager before the final memory sweep.
    _networkQualityManager?.dispose();

    // OPTIMIZATION: Final memory cleanup.
    _forceMemoryCleanup();

    // Cancel any remaining tracked subscriptions and clear the registry.
    for (final subscription in _activeSubscriptions) {
      subscription.cancel();
    }
    _activeSubscriptions.clear();

    // Dispose platform and audio managers.
    _nativePeripheralManager.dispose();
    _audioPlayerService.dispose();

    // Clean up text input controller.
    _textController.dispose();

    // Dispose the Gemini Live client last so earlier teardown steps can
    // still reference it if needed.
    _client.dispose();

    super.dispose();
  }
}
1
like
160
points
31
downloads

Publisher

unverified uploader

Weekly Downloads

ADKflutter - Official Flutter package by 20Centra for connecting to Gemini Live API server with WebSocket support for multimodal AI interactions.

Homepage

Documentation

API reference

License

MIT (license)

Dependencies

flutter, http, image, rxdart, uuid, web_socket_channel

More

Packages that depend on adk_flutter