transformMap top-level property

final Map<String, MethodTransFormer> transformMap
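
transformMap maps native callback names (the DelegateMethod and MediaStatsDelegate constants) to transformer functions that unpack the raw event payload and forward it to the registered sinks (sink_, stats_sink_, audio_mixing_sink_, audio_effect_sink_). The MethodTransFormer typedef is not shown on this page; judging from the handler signatures in the implementation below, it is compatible with a function of the following shape (an inference from this listing, not the declared definition):

// Inferred from the handler signatures below; the actual typedef may differ.
typedef MethodTransFormer = void Function(Map<String, dynamic> values);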

Implementation

final Map<String, MethodTransFormer> transformMap = {
  DelegateMethod.kNERtcOnJoinChannel: (Map<String, dynamic> values) {
    sink_?.onJoinChannel(values['result'], values['cid'].toInt(),
        values['elapsed'], values['uid'].toInt());
  },
  DelegateMethod.kNERtcOnLeaveChannel: (Map<String, dynamic> values) {
    sink_?.onLeaveChannel(values['result']);
  },
  DelegateMethod.kNERtcOnUserJoined: (Map<String, dynamic> values) {
    UserJoinedEvent event = UserJoinedEvent(uid: values['uid']);
    if (values.containsKey('join_extra_info')) {
      final joinExtraInfo = values['join_extra_info'] as String;
      if (joinExtraInfo.isNotEmpty) {
        event.joinExtraInfo = NERtcUserJoinExtraInfo(customInfo: joinExtraInfo);
      }
    }
    sink_?.onUserJoined(event);
  },
  DelegateMethod.kNERtcOnUserLeave: (Map<String, dynamic> values) {
    UserLeaveEvent event =
        UserLeaveEvent(uid: values['uid'], reason: values['reason']);
    if (values.containsKey('leave_extra_info')) {
      final leaveExtraInfo = values['leave_extra_info'] as String;
      if (leaveExtraInfo.isNotEmpty) {
        event.leaveExtraInfo =
            NERtcUserLeaveExtraInfo(customInfo: leaveExtraInfo);
      }
    }
    sink_?.onUserLeave(event);
  },
  DelegateMethod.kNERtcOnUserAudioStart: (Map<String, dynamic> values) {
    sink_?.onUserAudioStart(values['uid']);
  },
  DelegateMethod.kNERtcOnUserAudioStop: (Map<String, dynamic> values) {
    sink_?.onUserAudioStop(values['uid']);
  },
  DelegateMethod.kNERtcOnUserVideoStart: (Map<String, dynamic> values) {
    sink_?.onUserVideoStart(values['uid'], values['max_profile']);
  },
  DelegateMethod.kNERtcOnUserVideoStop: (Map<String, dynamic> values) {
    sink_?.onUserVideoStop(values['uid']);
  },
  DelegateMethod.kNERtcOnError: (Map<String, dynamic> values) {
    sink_?.onError(values['error_code']);
  },
  DelegateMethod.kNERtcOnWarning: (Map<String, dynamic> values) {
    sink_?.onWarning(values['warn_code']);
  },
  DelegateMethod.kNERtcOnReconnectingStart: (Map<String, dynamic> values) {
    sink_?.onReconnectingStart();
  },
  DelegateMethod.kNERtcOnConnectionStateChange: (Map<String, dynamic> values) {
    sink_?.onConnectionStateChanged(values['state'], values['reason']);
  },
  DelegateMethod.kNERtcOnRejoinChannel: (Map<String, dynamic> values) {
    sink_?.onReJoinChannel(values['result'], values['cid']);
  },
  DelegateMethod.kNERtcOnDisconnect: (Map<String, dynamic> values) {
    sink_?.onDisconnect(values['reason']);
  },
  DelegateMethod.kNERtcOnClientRoleChanged: (Map<String, dynamic> values) {
    sink_?.onClientRoleChange(values['oldRole'], values['newRole']);
  },
  DelegateMethod.kNERtcOnUserSubStreamAudioStart:
      (Map<String, dynamic> values) {
    sink_?.onUserSubStreamAudioStart(values['uid']);
  },
  DelegateMethod.kNERtcOnUserSubStreamAudioStop: (Map<String, dynamic> values) {
    sink_?.onUserSubStreamAudioStop(values['uid']);
  },
  DelegateMethod.kNERtcOnUserAudioMute: (Map<String, dynamic> values) {
    sink_?.onUserAudioMute(values['uid'], values['mute']);
  },
  DelegateMethod.kNERtcOnUserVideoMute: (Map<String, dynamic> values) {
    UserVideoMuteEvent event =
        UserVideoMuteEvent(uid: values['uid'], muted: values['mute']);
    event.streamType = values['streamType'];
    sink_?.onUserVideoMute(event);
  },
  DelegateMethod.kNERtcOnUserSubStreamAudioMute: (Map<String, dynamic> values) {
    sink_?.onUserSubStreamAudioMute(values['uid'], values['mute']);
  },
  DelegateMethod.kNERtcOnFirstAudioDataReceived: (Map<String, dynamic> values) {
    sink_?.onFirstAudioDataReceived(values['uid']);
  },
  DelegateMethod.kNERtcOnFirstVideoDataReceived: (Map<String, dynamic> values) {
    FirstVideoDataReceivedEvent event =
        FirstVideoDataReceivedEvent(uid: values['uid']);
    event.streamType = values['type'];
    sink_?.onFirstVideoDataReceived(event);
  },
  DelegateMethod.kNERtcOnFirstAudioFrameDecoded: (Map<String, dynamic> values) {
    sink_?.onFirstAudioFrameDecoded(values['uid']);
  },
  DelegateMethod.kNERtcOnFirstVideoFrameDecoded: (Map<String, dynamic> values) {
    FirstVideoFrameDecodedEvent event = FirstVideoFrameDecodedEvent(
        uid: values['uid'], width: values['width'], height: values['height']);
    event.streamType = values['streamType'];
    sink_?.onFirstVideoFrameDecoded(event);
  },
  DelegateMethod.kNERtcOnVirtualBackgroundSourceEnabled:
      (Map<String, dynamic> values) {
    VirtualBackgroundSourceEnabledEvent event =
        VirtualBackgroundSourceEnabledEvent(
            enabled: values['enabled'], reason: values['reason']);
    sink_?.onVirtualBackgroundSourceEnabled(event);
  },
  DelegateMethod.kNERtcOnNetworkConnectionTypeChanged:
      (Map<String, dynamic> values) {
    sink_?.onConnectionTypeChanged(values['newConnectionType']);
  },
  DelegateMethod.kNERtcOnLocalAudioVolumeIndication:
      (Map<String, dynamic> values) {
    sink_?.onLocalAudioVolumeIndication(values['volume'], values['enable_vad']);
  },
  DelegateMethod.kNERtcOnRemoteAudioVolumeIndication:
      (Map<String, dynamic> values) {
    if (values['volume_info'] == null) return;
    final List<dynamic> volumeInfos = values['volume_info'];

    RemoteAudioVolumeIndicationEvent event =
        RemoteAudioVolumeIndicationEvent(totalVolume: values['total_volume']);
    List<AudioVolumeInfo> infos = volumeInfos.map((item) {
      final map = item as Map<String, dynamic>;
      return AudioVolumeInfo(
          uid: map['uid'] as int,
          volume: map['volume'] as int,
          subStreamVolume: map['sub_stream_volume'] as int);
    }).toList();
    event.volumeList = infos;
    event.totalVolume = values['total_volume'];
    sink_?.onRemoteAudioVolumeIndication(event);
  },
  DelegateMethod.kNERtcOnAudioHowling: (Map<String, dynamic> values) {
    bool howling = values['howling'];
    if (howling) {
      sink_?.onAudioHasHowling();
    }
  },
  DelegateMethod.kNERtcOnLastMileQuality: (Map<String, dynamic> values) {
    sink_?.onLastmileQuality(values['quality']);
  },
  DelegateMethod.kNERtcOnLastMileProbeResult: (Map<String, dynamic> values) {
    NERtcLastmileProbeOneWayResult uplinkResult =
        NERtcLastmileProbeOneWayResult(
            packetLossRate: values['uplink_report']['packet_loss_rate'],
            jitter: values['uplink_report']['jitter'],
            availableBandwidth: values['uplink_report']
                ['available_band_width']);
    NERtcLastmileProbeOneWayResult downlinkResult =
        NERtcLastmileProbeOneWayResult(
            packetLossRate: values['downlink_report']['packet_loss_rate'],
            jitter: values['downlink_report']['jitter'],
            availableBandwidth: values['downlink_report']
                ['available_band_width']);
    NERtcLastmileProbeResult result = NERtcLastmileProbeResult(
        state: values['state'],
        rtt: values['rtt'],
        uplinkReport: uplinkResult,
        downlinkReport: downlinkResult);
    sink_?.onLastmileProbeResult(result);
  },
  DelegateMethod.kNERtcOnAddLiveStreamTask: (Map<String, dynamic> values) {
    print(
        "onAddLiveStreamTask, task_id: ${values['task_id']}, url: ${values['url']}, error_code: ${values['error_code']}");
  },
  DelegateMethod.kNERtcOnUpdateLiveStreamTask: (Map<String, dynamic> values) {
    print(
        "onUpdateLiveStreamTask, task_id: ${values['task_id']}, url: ${values['url']}, error_code: ${values['error_code']}");
  },
  DelegateMethod.kNERtcOnRemoveLiveStreamTask: (Map<String, dynamic> values) {
    print(
        "onRemoveLiveStreamTask, task_id: ${values['task_id']}, error_code: ${values['error_code']}");
  },
  DelegateMethod.kNERtcOnLiveStreamStateChanged: (Map<String, dynamic> values) {
    sink_?.onLiveStreamState(values['task_id'], values['url'], values['state']);
  },
  DelegateMethod.kNERtcOnRecvSEIMsg: (Map<String, dynamic> values) {
    sink_?.onRecvSEIMsg(values['uid'], values['data']);
  },
  DelegateMethod.kNERtcOnAudioRecording: (Map<String, dynamic> values) {
    sink_?.onAudioRecording(values['code'], values['file_path']);
  },
  DelegateMethod.kNERtcOnMediaRightChange: (Map<String, dynamic> values) {
    sink_?.onMediaRightChange(
        values['is_audio_banned'], values['is_video_banned']);
  },
  DelegateMethod.kNERtcOnMediaRelayStateChanged: (Map<String, dynamic> values) {
    sink_?.onMediaRelayStatesChange(values['state'], values['channel_name']);
  },
  DelegateMethod.kNERtcOnMediaRelayEvent: (Map<String, dynamic> values) {
    sink_?.onMediaRelayReceiveEvent(
        values['event'], values['error'], values['channel_name']);
  },
  DelegateMethod.kNERtcOnLocalPublishFallbackToAudioOnly:
      (Map<String, dynamic> values) {
    sink_?.onLocalPublishFallbackToAudioOnly(
        values['is_fallback'], values['stream_type']);
  },
  DelegateMethod.kNERtcOnRemoteSubscribeFallbackAudioOnly:
      (Map<String, dynamic> values) {
    sink_?.onRemoteSubscribeFallbackToAudioOnly(
        values['uid'], values['is_fallback'], values['stream_type']);
  },
  DelegateMethod.kNERtcOnTakeSnapshotResult: (Map<String, dynamic> values) {
    sink_?.onTakeSnapshotResult(values['code'], values['path']);
  },
  DelegateMethod.kNERtcOnUserSubVideoStreamStart:
      (Map<String, dynamic> values) {
    sink_?.onUserSubStreamVideoStart(values['uid'], values['max_profile']);
  },
  DelegateMethod.kNERtcOnUserSubVideoStreamStop: (Map<String, dynamic> values) {
    sink_?.onUserSubStreamVideoStop(values['uid']);
  },
  DelegateMethod.kNERtcOnAudioMixingStateChanged:
      (Map<String, dynamic> values) {
    audio_mixing_sink_?.onAudioMixingStateChanged(values['state']);
  },
  DelegateMethod.kNERtcOnAudioMixingTimestampUpdate:
      (Map<String, dynamic> values) {
    audio_mixing_sink_?.onAudioMixingTimestampUpdate(values['timestamp_ms']);
  },
  DelegateMethod.kNERtcOnAudioEffectFinished: (Map<String, dynamic> values) {
    audio_effect_sink_?.onAudioEffectFinished(values['effect_id']);
  },
  DelegateMethod.kNERtcOnAudioEffectTimestampUpdate:
      (Map<String, dynamic> values) {
    audio_effect_sink_?.onAudioEffectTimestampUpdate(
        values['effect_id'], values['timestamp_ms']);
  },
  DelegateMethod.kNERtcOnLocalVideoWatermarkState:
      (Map<String, dynamic> values) {
    sink_?.onLocalVideoWatermarkState(
        values['videoStreamType'], values['state']);
  },
  MediaStatsDelegate.kNERtcOnStats: (Map<String, dynamic> values) {
    stats_sink_?.onRtcStats(values, "");
  },
  MediaStatsDelegate.kNERtcOnNetworkQuality: (Map<String, dynamic> values) {
    stats_sink_?.onNetworkQuality(values, "");
  },
  MediaStatsDelegate.kNERtcOnLocalAudioStats: (Map<String, dynamic> values) {
    stats_sink_?.onLocalAudioStats(values, "");
  },
  MediaStatsDelegate.kNERtcOnRemoteAudioStats: (Map<String, dynamic> values) {
    stats_sink_?.onRemoteAudioStats(values, "");
  },
  MediaStatsDelegate.kNERtcOnLocalVideoStats: (Map<String, dynamic> values) {
    stats_sink_?.onLocalVideoStats(values, "");
  },
  MediaStatsDelegate.kNERtcOnRemoteVideoStats: (Map<String, dynamic> values) {
    stats_sink_?.onRemoteVideoStats(values, "");
  },
  DelegateMethod.kNERtcOnAsrCaptionStateChanged: (Map<String, dynamic> values) {
    sink_?.onAsrCaptionStateChanged(values['asrState'] as int,
        values['code'] as int, values['message'] as String);
  },
  DelegateMethod.kNERtcOnAsrCaptionResult: (Map<String, dynamic> values) {
    final List<dynamic> resultList = values['result'] as List<dynamic>;
    final List<Map<Object?, Object?>> result = resultList
        .map((item) => Map<Object?, Object?>.from(item as Map))
        .toList();
    sink_?.onAsrCaptionResult(result, values['resultCount'] as int);
  },
  DelegateMethod.kNERtcOnPlayStreamingStateChange:
      (Map<String, dynamic> values) {
    sink_?.onPlayStreamingStateChange(values['streamId'] as String,
        values['state'] as int, values['reason'] as int);
  },
  DelegateMethod.kNERtcOnPlayStreamingReceiveSeiMessage:
      (Map<String, dynamic> values) {
    sink_?.onPlayStreamingReceiveSeiMessage(
        values['streamId'] as String, values['message'] as String);
  },
  DelegateMethod.kNERtcOnPlayStreamingFirstAudioFramePlayed:
      (Map<String, dynamic> values) {
    sink_?.onPlayStreamingFirstAudioFramePlayed(
        values['streamId'] as String, values['timeMs'] as int);
  },
  DelegateMethod.kNERtcOnPlayStreamingFirstVideoFrameRender:
      (Map<String, dynamic> values) {
    sink_?.onPlayStreamingFirstVideoFrameRender(
        values['streamId'] as String,
        values['timeMs'] as int,
        values['width'] as int,
        values['height'] as int);
  },
  DelegateMethod.kNERtcOnFirstVideoFrameRender: (Map<String, dynamic> values) {
    sink_?.onFirstVideoFrameRender(
        values['userID'] as int,
        values['streamType'] as int,
        values['width'] as int,
        values['height'] as int,
        values['elapsedTime'] as int);
  },
  DelegateMethod.kNERtcOnLocalVideoRenderSizeChanged:
      (Map<String, dynamic> values) {
    sink_?.onLocalVideoRenderSizeChanged(values['videoType'] as int,
        values['width'] as int, values['height'] as int);
  },
  DelegateMethod.kNERtcOnUserVideoProfileUpdate: (Map<String, dynamic> values) {
    sink_?.onUserVideoProfileUpdate(
        values['uid'] as int, values['maxProfile'] as int);
  },
  DelegateMethod.kNERtcOnAudioDeviceStateChange: (Map<String, dynamic> values) {
    sink_?.onAudioDeviceStateChange(
        values['deviceType'] as int, values['deviceState'] as int);
  },
  DelegateMethod.kNERtcOnRemoteVideoSizeChanged: (Map<String, dynamic> values) {
    sink_?.onRemoteVideoSizeChanged(
        values['uid'] as int,
        values['streamType'] as int,
        values['width'] as int,
        values['height'] as int);
  },
  DelegateMethod.kNERtcOnUserDataStart: (Map<String, dynamic> values) {
    sink_?.onUserDataStart(values['uid'] as int);
  },
  DelegateMethod.kNERtcOnUserDataStop: (Map<String, dynamic> values) {
    sink_?.onUserDataStop(values['uid'] as int);
  },
  DelegateMethod.kNERtcOnUserDataStateChanged: (Map<String, dynamic> values) {
    sink_?.onUserDataStateChanged(values['uid'] as int);
  },
  DelegateMethod.kNERtcOnUserDataBufferedAmountChanged:
      (Map<String, dynamic> values) {
    sink_?.onUserDataBufferedAmountChanged(
        values['uid'] as int, values['previousAmount'] as int);
  },
  DelegateMethod.kNERtcOnLabFeatureCallback: (Map<String, dynamic> values) {
    final param = values['param'];
    Map<Object?, Object?> paramMap;
    if (param is Map) {
      paramMap = Map<Object?, Object?>.from(param);
    } else {
      paramMap = {};
    }
    sink_?.onLabFeatureCallback(values['key'] as String, paramMap);
  },
  DelegateMethod.kNERtcOnAiData: (Map<String, dynamic> values) {
    sink_?.onAiData(values['type'] as String, values['data'] as String);
  },
  DelegateMethod.kNERtcOnStartPushStreaming: (Map<String, dynamic> values) {
    sink_?.onStartPushStreaming(
        values['result'] as int, values['channelId'] as int);
  },
  DelegateMethod.kNERtcOnStopPushStreaming: (Map<String, dynamic> values) {
    sink_?.onStopPushStreaming(values['result'] as int);
  },
  DelegateMethod.kNERtcOnPushStreamingReconnecting:
      (Map<String, dynamic> values) {
    sink_?.onPushStreamingReconnecting(values['reason'] as int);
  },
  DelegateMethod.kNERtcOnPushStreamingReconnectedSuccess:
      (Map<String, dynamic> values) {
    sink_?.onPushStreamingReconnectedSuccess();
  },
  DelegateMethod.kNERtcOnScreenCaptureStatus: (Map<String, dynamic> values) {
    sink_?.onScreenCaptureStatus(values['status'] as int);
  },
  DelegateMethod.kNERtcOnScreenCaptureSourceDataUpdate:
      (Map<String, dynamic> values) {
    final captureRectMap = values['capture_rect'] as Map<String, dynamic>;
    final captureRect = Rectangle(
        x: captureRectMap['x'] as int,
        y: captureRectMap['y'] as int,
        width: captureRectMap['width'] as int,
        height: captureRectMap['height'] as int);
    final data = ScreenCaptureSourceData(
        type: values['type'] as int,
        sourceId: 0,
        status: values['status'] as int,
        action: values['action'] as int,
        captureRect: captureRect,
        level: values['level'] as int);
    sink_?.onScreenCaptureSourceDataUpdate(data);
  },
  DelegateMethod.kNERtcOnLocalRecorderStatus: (Map<String, dynamic> values) {
    sink_?.onLocalRecorderStatus(
        values['status'] as int, values['task_id'] as String);
  },
  DelegateMethod.kNERtcOnLocalRecorderError: (Map<String, dynamic> values) {
    sink_?.onLocalRecorderError(
        values['error'] as int, values['task_id'] as String);
  },
  DelegateMethod.kNERtcOnCheckNECastAudioDriverResult:
      (Map<String, dynamic> values) {
    sink_?.onCheckNECastAudioDriverResult(values['result'] as int);
  },
  DelegateMethod.kNERtcOnLocalAudioFirstPacketSent:
      (Map<String, dynamic> values) {
    sink_?.onLocalAudioFirstPacketSent(values['audioStreamType'] as int);
  },
  DelegateMethod.kNERtcOnReleasedHwResources: (Map<String, dynamic> values) {
    sink_?.onReleasedHwResources(values['result']);
  },
  DelegateMethod.kNERtcOnApiCallExecuted: (Map<String, dynamic> values) {
    sink_?.onApiCallExecuted(
        values['api_name'], values['error'], values['message']);
  }
};
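
A minimal sketch of how a map like this is typically consumed: the platform layer delivers a callback name together with its argument map, and the matching transformer (if any) normalizes the payload and forwards it to the sink. The handleCallback function and the surrounding plumbing below are illustrative assumptions for the sketch, not part of this file.

// Illustrative dispatch only; handleCallback is an assumed name, not SDK API.
void handleCallback(String method, Map<dynamic, dynamic> arguments) {
  final transformer = transformMap[method];
  if (transformer == null) {
    // No handler registered for this callback name; ignore it.
    return;
  }
  // The handlers expect Map<String, dynamic>, so normalize the raw payload
  // coming from the platform channel before invoking the transformer.
  transformer(Map<String, dynamic>.from(arguments));
}

With this table-driven shape, supporting a new native callback only requires adding one entry to transformMap instead of extending a switch statement over method names.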