answer method

void answer(
  Map<String, dynamic> options
)

Answer the incoming call.

The options map may include extraHeaders, mediaConstraints, mediaStream, pcConfig, rtcConstraints, rtcAnswerConstraints, rtcOfferConstraints, sessionTimersExpires and data; keys that are omitted fall back to the defaults shown in the implementation below.
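
Example

A minimal usage sketch, answering with audio only. The session variable, the callback it comes from, and the STUN server URL are illustrative assumptions, not part of this API:

// Assumes `session` is an incoming RTCSession handed to the application
// (for example through the UA's new-session callback).
session.answer(<String, dynamic>{
  'mediaConstraints': <String, dynamic>{
    'audio': true,
    'video': false,
  },
  'pcConfig': <String, dynamic>{
    'iceServers': <dynamic>[
      // Any reachable STUN/TURN server can be used here.
      <String, dynamic>{'urls': 'stun:stun.l.google.com:19302'},
    ],
  },
});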

Implementation

void answer(Map<String, dynamic> options) async {
  logger.debug('answer()');
  dynamic request = _request;
  List<dynamic> extraHeaders = utils.cloneArray(options['extraHeaders']);
  Map<String, dynamic> mediaConstraints =
      options['mediaConstraints'] ?? <String, dynamic>{};
  MediaStream? mediaStream = options['mediaStream'];
  Map<String, dynamic> pcConfig =
      options['pcConfig'] ?? <String, dynamic>{'iceServers': <dynamic>[]};
  Map<String, dynamic> rtcConstraints =
      options['rtcConstraints'] ?? <String, dynamic>{};
  Map<String, dynamic> rtcAnswerConstraints =
      options['rtcAnswerConstraints'] ?? <String, dynamic>{};

  List<MediaStreamTrack> tracks;
  bool peerHasAudioLine = false;
  bool peerHasVideoLine = false;
  bool peerOffersFullAudio = false;
  bool peerOffersFullVideo = false;

  // In future versions, unified-plan will be used by default
  String? sdpSemantics = 'unified-plan';
  if (pcConfig['sdpSemantics'] != null) {
    sdpSemantics = pcConfig['sdpSemantics'];
  }

  _rtcAnswerConstraints = rtcAnswerConstraints;
  _rtcOfferConstraints = options['rtcOfferConstraints'];

  data = options['data'] ?? data;

  // Check Session Direction.
  if (_direction != 'incoming') {
    throw exceptions.NotSupportedError(
        '"answer" not supported for outgoing RTCSession');
  }

  // Check Session status.
  if (_status != C.statusWaitingForAnswer) {
    throw exceptions.InvalidStateError(_status);
  }

  // Session Timers.
  if (_sessionTimers.enabled) {
    if (utils.isDecimal(options['sessionTimersExpires'])) {
      if (options['sessionTimersExpires'] >= dart_sip_c.MIN_SESSION_EXPIRES) {
        _sessionTimers.defaultExpires = options['sessionTimersExpires'];
      } else {
        _sessionTimers.defaultExpires = dart_sip_c.SESSION_EXPIRES;
      }
    }
  }

  _status = C.statusAnswered;

  // An error on dialog creation will fire 'failed' event.
  if (!_createDialog(request, 'UAS')) {
    request.reply(500, 'Error creating dialog');

    return;
  }

  clearTimeout(_timers.userNoAnswerTimer);
  extraHeaders.insert(0, 'Contact: $_contact');

  // Determine incoming media from incoming SDP offer (if any).
  Map<String, dynamic> sdp = request.parseSDP();

  // Make sure sdp['media'] is a list; it will not be when the offer contains only one media section.
  if (sdp['media'] is! List) {
    sdp['media'] = <dynamic>[sdp['media']];
  }

  // Go through all media sections in the SDP to find the offered capabilities to answer with.
  for (Map<String, dynamic> m in sdp['media']) {
    if (m['type'] == 'audio') {
      peerHasAudioLine = true;
      if (m['direction'] == null || m['direction'] == 'sendrecv') {
        peerOffersFullAudio = true;
      }
    }
    if (m['type'] == 'video') {
      peerHasVideoLine = true;
      if (m['direction'] == null || m['direction'] == 'sendrecv') {
        peerOffersFullVideo = true;
      }
    }
  }

  // Remove audio from mediaStream if suggested by mediaConstraints.
  if (mediaStream != null && mediaConstraints['audio'] == false) {
    tracks = mediaStream.getAudioTracks();
    for (MediaStreamTrack track in tracks) {
      mediaStream.removeTrack(track);
    }
  }

  // Remove video from mediaStream if suggested by mediaConstraints.
  if (mediaStream != null && mediaConstraints['video'] == false) {
    tracks = mediaStream.getVideoTracks();
    for (MediaStreamTrack track in tracks) {
      mediaStream.removeTrack(track);
    }
  }

  // Set audio constraints based on incoming stream if not supplied.
  if (mediaStream == null && mediaConstraints['audio'] == null) {
    mediaConstraints['audio'] = peerOffersFullAudio;
  }

  // Set video constraints based on incoming stream if not supplied.
  if (mediaStream == null && mediaConstraints['video'] == null) {
    mediaConstraints['video'] = peerOffersFullVideo;
  }

  // Don't ask for audio if the incoming offer has no audio section.
  if (mediaStream == null && !peerHasAudioLine) {
    mediaConstraints['audio'] = false;
  }

  // Don't ask for video if the incoming offer has no video section.
  if (mediaStream == null && !peerHasVideoLine) {
    mediaConstraints['video'] = false;
  }

  // Create a RTCPeerConnection instance.
  // TODO(cloudwebrtc): This may throw an error, should react.
  await _createRTCConnection(pcConfig, rtcConstraints);

  MediaStream? stream;
  // A local MediaStream is given, use it.
  if (mediaStream != null) {
    stream = mediaStream;
    emit(EventStream(session: this, originator: 'local', stream: stream));
  }
  // Audio and/or video requested, prompt getUserMedia.
  else if (mediaConstraints['audio'] != null ||
      mediaConstraints['video'] != null) {
    _localMediaStreamLocallyGenerated = true;
    try {
      stream = await navigator.mediaDevices.getUserMedia(mediaConstraints);
      emit(EventStream(session: this, originator: 'local', stream: stream));
    } catch (error) {
      if (_status == C.statusTerminated) {
        throw exceptions.InvalidStateError('terminated');
      }
      request.reply(480);
      _failed(
          'local',
          null,
          null,
          null,
          480,
          dart_sip_c.CausesType.USER_DENIED_MEDIA_ACCESS,
          'User Denied Media Access');
      logger.error('emit "getusermediafailed" [error:${error.toString()}]');
      emit(EventGetUserMediaFailed(exception: error));
      throw exceptions.InvalidStateError('getUserMedia() failed');
    }
  }

  if (_status == C.statusTerminated) {
    throw exceptions.InvalidStateError('terminated');
  }

  // Attach MediaStream to RTCPeerConnection.
  _localMediaStream = stream;

  if (stream != null) {
    switch (sdpSemantics) {
      case 'unified-plan':
        stream.getTracks().forEach((MediaStreamTrack track) {
          _connection!.addTrack(track, stream!);
        });
        break;
      case 'plan-b':
        _connection!.addStream(stream);
        break;
      default:
        logger.error('Unknown sdp semantics $sdpSemantics');
        throw exceptions.NotReadyError('Unknown sdp semantics $sdpSemantics');
    }
  }

  // Set remote description (skipped when the incoming INVITE carried no SDP
  // offer; in that late-offer case a local offer is created below instead).
  if (!_late_sdp) {
    logger.debug('emit "sdp"');
    emit(EventSdp(originator: 'remote', type: 'offer', sdp: request.body));

    RTCSessionDescription offer =
        RTCSessionDescription(request.body, 'offer');
    try {
      await _connection!.setRemoteDescription(offer);
    } catch (error) {
      request.reply(488);
      _failed(
          'system',
          null,
          null,
          null,
          488,
          dart_sip_c.CausesType.WEBRTC_ERROR,
          'SetRemoteDescription(offer) failed');
      logger.error(
          'emit "peerconnection:setremotedescriptionfailed" [error:${error.toString()}]');
      emit(EventSetRemoteDescriptionFailed(exception: error));
      throw exceptions.TypeError(
          'peerconnection.setRemoteDescription() failed');
    }
  }

  // Create local description.
  if (_status == C.statusTerminated) {
    throw exceptions.InvalidStateError('terminated');
  }

  // TODO(cloudwebrtc): Is this event already useful?
  _connecting(request);
  RTCSessionDescription desc;
  try {
    if (!_late_sdp) {
      desc = await _createLocalDescription('answer', rtcAnswerConstraints);
    } else {
      desc = await _createLocalDescription('offer', _rtcOfferConstraints);
    }
  } catch (e) {
    request.reply(500);
    throw exceptions.TypeError('_createLocalDescription() failed');
  }

  if (_status == C.statusTerminated) {
    throw exceptions.InvalidStateError('terminated');
  }

  // Send reply.
  try {
    _handleSessionTimersInIncomingRequest(request, extraHeaders);
    request.reply(200, null, extraHeaders, desc.sdp, () {
      _status = C.statusWaitingForAck;
      _setInvite2xxTimer(request, desc.sdp);
      _setACKTimer();
      _accepted('local');
    }, () {
      _failed('system', null, null, null, 500,
          dart_sip_c.CausesType.CONNECTION_ERROR, 'Transport Error');
    });
  } catch (error, s) {
    if (_status == C.statusTerminated) {
      return;
    }
    logger.error('Failed to answer(): ${error.toString()}', error, s);
  }
}