prepareWebrtc method

dynamic prepareWebrtc(
  int handleId,
  bool offer,
  Callbacks callbacks
)

Implementation

/// Prepares the WebRTC media for the plugin handle identified by [handleId].
///
/// [offer]     – `true` to create an SDP offer locally, `false` to answer the
///               remote JSEP carried in [callbacks].
/// [callbacks] – carries the optional `jsep`, the `media` flag map
///               (audio/video/add/remove/replace/keep flags), an optional
///               external `stream`, the `trickle`/`simulcast` options and the
///               `error` reporter used for every failure path.
///
/// Flow: validate the JSEP, normalise `callbacks.media`, strip tracks that are
/// being removed/replaced during a renegotiation, then either reuse the
/// externally provided stream or capture one (getDisplayMedia for screen
/// sharing, getUserMedia otherwise) and hand everything to `streamsDone`.
prepareWebrtc(int handleId, bool offer, Callbacks callbacks) {
  RTCSessionDescription jsep;
  if (callbacks.jsep != null) {
    jsep =
        RTCSessionDescription(callbacks.jsep['sdp'], callbacks.jsep['type']);
  }

  // Offers are generated locally (a caller-supplied JSEP is a mistake);
  // answers require a complete remote JSEP.
  if (offer && jsep != null) {
    Janus.error("Provided a JSEP to a createOffer");
    callbacks.error("Provided a JSEP to a createOffer");
    return;
  } else if (!offer &&
      (jsep == null || jsep.type == null || jsep.sdp == null)) {
    Janus.error("A valid JSEP is required for createAnswer");
    callbacks.error("A valid JSEP is required for createAnswer");
    return;
  }

  /* Check that callbacks.media is a (not null) Object */
  callbacks.media = (callbacks.media != null)
      ? callbacks.media
      : {'audio': true, 'video': true};
  Map<String, dynamic> media = callbacks.media;
  Plugin pluginHandle = this.pluginHandles[handleId.toString()];
  if (pluginHandle == null) {
    Janus.warn("Invalid handle");
    callbacks.error("Invalid handle");
    return;
  }
  pluginHandle.trickle = isTrickleEnabled(callbacks.trickle);
  // Are we updating a session?
  if (pluginHandle.pc == null) {
    // Nope, new PeerConnection
    media['update'] = false;
    media['keepAudio'] = false;
    media['keepVideo'] = false;
  } else {
    Janus.log("Updating existing media session");
    media['update'] = true;
    // Check if there's anything to add/remove/replace, or if we
    // can go directly to preparing the new SDP offer or answer
    if (callbacks.stream != null) {
      // External stream: is this the same as the one we were using before?
      if (callbacks.stream != pluginHandle.myStream) {
        Janus.log("Renegotiation involves a new external stream");
      }
    } else {
      // Check if there are changes on audio.
      // NOTE: the flag keys may be absent from the map, so compare against
      // `true` explicitly — using a null/dynamic value directly as a
      // condition throws at runtime in Dart (unlike JS truthiness).
      if (media['addAudio'] == true) {
        media['keepAudio'] = false;
        media['replaceAudio'] = false;
        media['removeAudio'] = false;
        media['audioSend'] = true;
        if (pluginHandle.myStream != null &&
            pluginHandle.myStream.getAudioTracks() != null &&
            pluginHandle.myStream.getAudioTracks().length > 0) {
          Janus.error("Can't add audio stream, there already is one");
          callbacks.error("Can't add audio stream, there already is one");
          return;
        }
      } else if (media['removeAudio'] == true) {
        media['keepAudio'] = false;
        media['replaceAudio'] = false;
        media['addAudio'] = false;
        media['audioSend'] = false;
      } else if (media['replaceAudio'] == true) {
        media['keepAudio'] = false;
        media['addAudio'] = false;
        media['removeAudio'] = false;
        media['audioSend'] = true;
      }

      if (pluginHandle.myStream == null) {
        // No media stream: if we were asked to replace, it's actually an "add"
        if (media['replaceAudio'] == true) {
          media['keepAudio'] = false;
          media['replaceAudio'] = false;
          media['addAudio'] = true;
          media['audioSend'] = true;
        }
        if (isAudioSendEnabled(media)) {
          media['keepAudio'] = false;
          media['addAudio'] = true;
        }
      } else {
        if (pluginHandle.myStream.getAudioTracks() == null ||
            pluginHandle.myStream.getAudioTracks().length == 0) {
          // No audio track: if we were asked to replace, it's actually an "add"
          if (media['replaceAudio'] == true) {
            media['keepAudio'] = false;
            media['replaceAudio'] = false;
            media['addAudio'] = true;
            media['audioSend'] = true;
          }
          if (isAudioSendEnabled(media)) {
            media['keepAudio'] = false;
            media['addAudio'] = true;
          }
        } else {
          // We have an audio track: should we keep it as it is?
          if (isAudioSendEnabled(media) &&
              media['removeAudio'] != true &&
              media['replaceAudio'] != true) {
            media['keepAudio'] = true;
          }
        }
      }
      // Check if there are changes on video
      if (media['addVideo'] == true) {
        media['keepVideo'] = false;
        media['replaceVideo'] = false;
        media['removeVideo'] = false;
        media['videoSend'] = true;
        if (pluginHandle.myStream != null &&
            pluginHandle.myStream.getVideoTracks() != null &&
            pluginHandle.myStream.getVideoTracks().length > 0) {
          Janus.error("Can't add video stream, there already is one");
          callbacks.error("Can't add video stream, there already is one");
          return;
        }
      } else if (media['removeVideo'] == true) {
        media['keepVideo'] = false;
        media['replaceVideo'] = false;
        media['addVideo'] = false;
        media['videoSend'] = false;
      } else if (media['replaceVideo'] == true) {
        media['keepVideo'] = false;
        media['addVideo'] = false;
        media['removeVideo'] = false;
        media['videoSend'] = true;
      }
      if (pluginHandle.myStream == null) {
        // No media stream: if we were asked to replace, it's actually an "add"
        if (media['replaceVideo'] == true) {
          media['keepVideo'] = false;
          media['replaceVideo'] = false;
          media['addVideo'] = true;
          media['videoSend'] = true;
        }
        if (isVideoSendEnabled(media)) {
          media['keepVideo'] = false;
          media['addVideo'] = true;
        }
      } else {
        if (pluginHandle.myStream.getVideoTracks() == null ||
            pluginHandle.myStream.getVideoTracks().length == 0) {
          // No video track: if we were asked to replace, it's actually an "add"
          if (media['replaceVideo'] == true) {
            media['keepVideo'] = false;
            media['replaceVideo'] = false;
            media['addVideo'] = true;
            media['videoSend'] = true;
          }
          if (isVideoSendEnabled(media)) {
            media['keepVideo'] = false;
            media['addVideo'] = true;
          }
        } else {
          // We have a video track: should we keep it as it is?
          if (isVideoSendEnabled(media) &&
              media['removeVideo'] != true &&
              media['replaceVideo'] != true) {
            media['keepVideo'] = true;
          }
        }
      }
      // Data channels can only be added
      if (media['addData'] == true) {
        media['data'] = true;
      }
    }
    // If we're updating and keeping all tracks, let's skip the getUserMedia part
    if ((isAudioSendEnabled(media) && media['keepAudio'] == true) &&
        (isVideoSendEnabled(media) && media['keepVideo'] == true)) {
      // pluginHandle.consentDialog(false);
      streamsDone(handleId, jsep, media, callbacks, pluginHandle.myStream);
      return;
    }
  }

  // If we're updating, check if we need to remove/replace one of the tracks.
  // Only streams we captured ourselves may be stripped here: upstream
  // janus.js guards with `!streamExternal` (the original port inverted this).
  if (media['update'] == true && pluginHandle.streamExternal != true) {
    if (media['removeAudio'] == true || media['replaceAudio'] == true) {
      if (pluginHandle.myStream != null &&
          pluginHandle.myStream.getAudioTracks() != null &&
          pluginHandle.myStream.getAudioTracks().length > 0) {
        var at = pluginHandle.myStream.getAudioTracks()[0];
        Janus.log("Removing audio track:" + at.toString());
        pluginHandle.myStream.removeTrack(at);
        try {
          // at.stop();
        } catch (e) {}
      }
      // FIX ME
      // if (pluginHandle.pc.getSenders() && pluginHandle.pc.getSenders().length) {
      //   var ra = true;
      //   if (media['replaceAudio'] && Janus.unifiedPlan) {
      //     // We can use replaceTrack
      //     ra = false;
      //   }
      //   if (ra) {
      //     for (var asnd in pluginHandle.pc.getSenders()) {
      //       if (asnd != null &&
      //           asnd.track != null &&
      //           asnd.track.kind == "audio") {
      //         Janus.log("Removing audio sender:" + asnd.toString());
      //         pluginHandle.pc.removeTrack(asnd);
      //       }
      //     }
      //   }
      // }
    }
    if (media['removeVideo'] == true || media['replaceVideo'] == true) {
      if (pluginHandle.myStream != null &&
          pluginHandle.myStream.getVideoTracks() != null &&
          pluginHandle.myStream.getVideoTracks().length > 0) {
        var vt = pluginHandle.myStream.getVideoTracks()[0];
        Janus.log("Removing video track:", vt);
        pluginHandle.myStream.removeTrack(vt);
        try {
          // vt.stop();
        } catch (e) {}
      }
      // FIX ME
      // if (pluginHandle.pc.getSenders() && pluginHandle.pc.getSenders().length) {
      //   var rv = true;
      //   if (media['replaceVideo'] && Janus.unifiedPlan) {
      //     // We can use replaceTrack
      //     rv = false;
      //   }
      //   if (rv) {
      //     for (var vsnd in pc.getSenders()) {
      //       if (vsnd != null &&
      //           vsnd.track != null &&
      //           vsnd.track.kind == "video") {
      //         Janus.log("Removing video sender:", vsnd);
      //         pluginHandle.pc.removeTrack(vsnd);
      //       }
      //     }
      //   }
      // }
    }
  }

  // Was a MediaStream object passed, or do we need to take care of that?
  if (callbacks.stream != null) {
    MediaStream stream = callbacks.stream;
    Janus.log("MediaStream provided by the application");
    Janus.debug(stream);
    // If this is an update, let's check if we need to release the previous stream
    if (media['update'] == true) {
      // Release only if the previous stream was captured by us, not supplied
      // externally (the comment below states the intent; the original port
      // checked `streamExternal` without negation, the opposite of upstream).
      if (pluginHandle.myStream != null &&
          pluginHandle.myStream != callbacks.stream &&
          pluginHandle.streamExternal != true) {
        // We're replacing a stream we captured ourselves with an external one
        try {
          // Try a MediaStreamTrack.stop() for each track
          // List tracks = pluginHandle.myStream.getTracks();
          // for (MediaStreamTrack mst in tracks) {
          //   Janus.log(mst);
          //   if (mst != null) mst.dispose();
          // }
        } catch (e) {
          // Do nothing if this fails
        }
        pluginHandle.myStream = null;
      }
    }
    // Skip the getUserMedia part
    pluginHandle.streamExternal = true;
    // pluginHandle.consentDialog(false);
    streamsDone(handleId, jsep, media, callbacks, stream);
    return;
  }

  if (isAudioSendEnabled(media) || isVideoSendEnabled(media)) {
    if (!Janus.isGetUserMediaAvailable()) {
      callbacks.error("getUserMedia not available");
      return;
    }
    Map<String, dynamic> constraints = {
      'mandatory': {},
      'optional': [
        {'DtlsSrtpKeyAgreement': true},
      ]
    };
    // pluginHandle.consentDialog(true);
    // `audioSupport`/`videoSupport` end up in the getUserMedia constraints:
    // either a plain bool or a constraint map, hence `dynamic` for video.
    bool audioSupport = isAudioSendEnabled(media);
    if (audioSupport && media != null && media['audio'] is bool)
      audioSupport = media['audio']; // was shadowed by a re-declaration

    dynamic videoSupport = isVideoSendEnabled(media);
    if (videoSupport && media != null) {
      bool simulcast = (callbacks.simulcast == true);
      bool simulcast2 = (callbacks.simulcast2 == true);
      if ((simulcast || simulcast2) &&
          jsep == null &&
          (media['video'] == null || media['video'] == false))
        media['video'] = "hires";
      if (media['video'] != null &&
          media['video'] != false &&
          media['video'] != 'screen' &&
          media['video'] != 'window') {
        // A caller-provided constraint map passes through untouched; string
        // presets ('lowres', 'hires', ...) are mapped to a resolution below
        // (upstream janus.js checks for an object here, not a string).
        if (media['video'] is Map) {
          videoSupport = media['video'];
        } else {
          int width = 0;
          int height = 0;
          int maxHeight = 0;
          if (media['video'] == 'lowres') {
            // Small resolution, 4:3
            height = 240;
            maxHeight = 240;
            width = 320;
          } else if (media['video'] == 'lowres-16:9') {
            // Small resolution, 16:9
            height = 180;
            maxHeight = 180;
            width = 320;
          } else if (media['video'] == 'hires' ||
              media['video'] == 'hires-16:9' ||
              media['video'] == 'hdres') {
            // High(HD) resolution is only 16:9
            height = 720;
            maxHeight = 720;
            width = 1280;
          } else if (media['video'] == 'fhdres') {
            // Full HD resolution is only 16:9
            height = 1080;
            maxHeight = 1080;
            width = 1920;
          } else if (media['video'] == '4kres') {
            // 4K resolution is only 16:9
            height = 2160;
            maxHeight = 2160;
            width = 3840;
          } else if (media['video'] == 'stdres') {
            // Normal resolution, 4:3
            height = 480;
            maxHeight = 480;
            width = 640;
          } else if (media['video'] == 'stdres-16:9') {
            // Normal resolution, 16:9
            height = 360;
            maxHeight = 360;
            width = 640;
          } else {
            Janus.log("Default video setting is stdres 4:3");
            height = 480;
            maxHeight = 480;
            width = 640;
          }
          Janus.log("Adding media constraint:", media['video'].toString());
          // Assign to the outer variable (a shadowing `Map videoSupport`
          // declaration here previously made this computation dead code).
          videoSupport = {
            'height': {'ideal': height},
            'width': {'ideal': width}
          };
          Janus.log("Adding video constraint:", videoSupport);
        }
      } else if (media['video'] == 'screen' || media['video'] == 'window') {
        if (MediaDevices != null && MediaDevices.getDisplayMedia != null) {
          // The new experimental getDisplayMedia API is available, let's use that
          // https://groups.google.com/forum/#!topic/discuss-webrtc/Uf0SrR4uxzk
          // https://webrtchacks.com/chrome-screensharing-getdisplaymedia/
          constraints['video'] = {};
          if (media['screenshareFrameRate'] != null) {
            constraints['video']['frameRate'] = media['screenshareFrameRate'];
          }
          if (media['screenshareHeight'] != null) {
            constraints['video']['height'] = media['screenshareHeight'];
          }
          if (media['screenshareWidth'] != null) {
            constraints['video']['width'] = media['screenshareWidth'];
          }
          constraints['audio'] = media['captureDesktopAudio'];
          MediaDevices.getDisplayMedia(constraints)
              .then((MediaStream stream) {
            //pluginHandle.consentDialog(false);
            if (isAudioSendEnabled(media) && media['keepAudio'] != true) {
              MediaDevices.getUserMedia({'audio': true, 'video': false})
                  .then((MediaStream stream) {
                // stream.addTrack(stream.getAudioTracks()[0]);
                streamsDone(handleId, jsep, media, callbacks, stream);
              });
            } else {
              streamsDone(handleId, jsep, media, callbacks, stream);
            }
          }).catchError((error, StackTrace stackTrace) {
            // pluginHandle.consentDialog(false);
            callbacks.error(error);
          });
          return;
        }
        // We're going to try and use the extension for Chrome 34+, the old approach
        // for older versions of Chrome, or the experimental support in Firefox 33+
        // Relays the getUserMedia outcome to the caller's callbacks.
        callbackUserMedia(error, stream) {
          // pluginHandle.consentDialog(false);
          if (error != null) {
            callbacks.error(error);
          } else {
            streamsDone(handleId, jsep, media, callbacks, stream);
          }
        }

        // Captures the screen and, when requested, mixes in a microphone
        // audio track before invoking gsmCallback(error, stream).
        getScreenMedia(constraints, gsmCallback, useAudio) {
          Janus.log("Adding media constraint (screen capture)");
          Janus.debug(constraints);
          MediaDevices.getUserMedia(constraints).then((MediaStream stream) {
            if (useAudio) {
              MediaDevices.getUserMedia({'audio': true, 'video': false})
                  .then((audioStream) {
                stream.addTrack(audioStream.getAudioTracks()[0]);
                gsmCallback(null, stream);
              });
            } else {
              gsmCallback(null, stream);
            }
          }).catchError((error, StackTrace stackTrace) {
            // pluginHandle.consentDialog(false);
            gsmCallback(error);
          });
        }

        if (Janus.webRTCAdapter['browserDetails']['browser'] == 'chrome') {
          var chromever = Janus.webRTCAdapter['browserDetails']['version'];
          var maxver = 33;
          // FIXME: `window` is never initialised, so the legacy Chrome
          // 26-33 branch below would throw on `window['screen']`; the
          // screen dimensions need to come from a real platform API.
          Map<String, dynamic> window;
          // if (navigator.userAgent.match('Linux'))
          //   maxver = 35; // "known" crash in chrome 34 and 35 on linux
          if (chromever >= 26 && chromever <= maxver) {
            // Chrome 26->33 requires some awkward chrome://flags manipulation
            constraints = {
              'video': {
                'mandatory': {
                  'googLeakyBucket': true,
                  'maxWidth': window['screen']['width'],
                  'maxHeight': window['screen']['height'],
                  'minFrameRate': media['screenshareFrameRate'],
                  'maxFrameRate': media['screenshareFrameRate'],
                  'chromeMediaSource': 'screen'
                }
              },
              'audio': isAudioSendEnabled(media) && media['keepAudio'] != true
            };
            getScreenMedia(constraints, callbackUserMedia,
                isAudioSendEnabled(media) && media['keepAudio'] != true);
          } else {
            // Chrome 34+ requires an extension
            // Janus.extension.getScreen((error, sourceId) {
            //   if (error) {
            //     pluginHandle.consentDialog(false);
            //     return callbacks.error(error);
            //   }
            //   constraints = {
            //     'audio': false,
            //     'video': {
            //       'mandatory': {
            //         'chromeMediaSource': 'desktop',
            //         'maxWidth': window['screen']['width'],
            //         'maxHeight': window['screen']['height'],
            //         'minFrameRate': media['screenshareFrameRate'],
            //         'maxFrameRate': media['screenshareFrameRate'],
            //       },
            //       'optional': [
            //         {'googLeakyBucket': true},
            //         {'googTemporalLayeredScreencast': true}
            //       ]
            //     }
            //   };
            //   constraints['video']['mandatory']['chromeMediaSourceId'] =
            //       sourceId;
            //   getScreenMedia(constraints, callbackUserMedia,
            //       isAudioSendEnabled(media) && !media['keepAudio']);
            // });
          }
        } else if (Janus.webRTCAdapter['browserDetails']['browser'] ==
            'firefox') {
          if (Janus.webRTCAdapter['browserDetails']['version'] >= 33) {
            // Firefox 33+ has experimental support for screen sharing
            constraints = {
              'video': {
                'mozMediaSource': media['video'],
                'mediaSource': media['video']
              },
              'audio': isAudioSendEnabled(media) && media['keepAudio'] != true
            };
            getScreenMedia(constraints, (err, stream) {
              callbackUserMedia(err, stream);
              // Workaround for https://bugzilla.mozilla.org/show_bug.cgi?id=1045810
              // NOTE(review): upstream uses a setInterval polling loop here;
              // this one-shot Timer runs the checks immediately, so the
              // workaround is effectively a no-op — TODO port properly.
              if (err == null) {
                var lastTime = stream.currentTime;
                Timer polly = Timer(Duration(milliseconds: 500), () {});
                if (stream == null) polly.cancel();
                if (stream.currentTime == lastTime) {
                  polly.cancel();
                  if (stream.onended != null) {
                    stream.onended();
                  }
                }
                lastTime = stream.currentTime;
              }
            }, isAudioSendEnabled(media) && media['keepAudio'] != true);
          } else {
            Map<String, String> error = {'type': 'NavigatorUserMediaError'};
            error['name'] =
                'Your version of Firefox does not support screen sharing, please install Firefox 33 (or more recent versions)';
            // pluginHandle.consentDialog(false);
            callbacks.error(error);
            return;
          }
        }
        return;
      }
    }

    // If we got here, we're not screensharing
    if (media == null || media['video'] != 'screen') {
      // Check whether all media sources are actually available or not
      MediaDevices.getSources().then((devices) {
        Janus.debug(devices.toString());
        bool audioExist = devices.any((device) {
          return device['kind'] == 'audioinput';
        });

        bool videoExist = isScreenSendEnabled(media) ||
            devices.any((device) {
              return device['kind'] == 'videoinput';
            });

        // Check whether a missing device is really a problem
        bool audioSend = isAudioSendEnabled(media);
        bool videoSend = isVideoSendEnabled(media);
        bool needAudioDevice = isAudioSendRequired(media);
        bool needVideoDevice = isVideoSendRequired(media);

        if (audioSend || videoSend || needAudioDevice || needVideoDevice) {
          // We need to send either audio or video
          var haveAudioDevice = audioSend ? audioExist : false;
          var haveVideoDevice = videoSend ? videoExist : false;
          if (!haveAudioDevice && !haveVideoDevice) {
            // FIXME Should we really give up, or just assume recvonly for both?
            // pluginHandle.consentDialog(false);
            callbacks.error('No capture device found');
            return false;
          } else if (!haveAudioDevice && needAudioDevice) {
            // pluginHandle.consentDialog(false);
            callbacks.error(
                'Audio capture is required, but no capture device found');
            return false;
          } else if (!haveVideoDevice && needVideoDevice) {
            // pluginHandle.consentDialog(false);
            callbacks.error(
                'Video capture is required, but no capture device found');
            return false;
          }
        }

        // `videoSupport` may be a bool or a constraint map here.
        Map<String, dynamic> gumConstraints = {
          'audio':
              (audioExist && media['keepAudio'] != true) ? audioSupport : false,
          'video':
              (videoExist && media['keepVideo'] != true) ? videoSupport : false
        };
        Janus.debug("getUserMedia constraints", gumConstraints.toString());
        if (gumConstraints['audio'] == false &&
            gumConstraints['video'] == false) {
          // pluginHandle.consentDialog(false);
          streamsDone(handleId, jsep, media, callbacks, callbacks.stream);
        } else {
          // Override mediaConstraints
          if (gumConstraints['video'] != false) {
            gumConstraints['video'] = {
              "mandatory": {
                "minWidth":
                    '640', // Provide your own width, height and frame rate here
                "minHeight": '480',
                "minFrameRate": '30',
              },
              "facingMode": "user",
              "optional": [],
            };
          }
          Janus.debug(gumConstraints);
          MediaDevices.getUserMedia(gumConstraints)
              .then((MediaStream stream) {
            // pluginHandle.consentDialog(false);
            streamsDone(handleId, jsep, media, callbacks, stream);
          }).catchError((error, StackTrace stackTrace) {
            Janus.log(error);
            // pluginHandle.consentDialog(false);
            callbacks.error({
              'code': error.code,
              'name': error.name,
              'message': error.message
            });
          });
        }
      }).catchError((error, StackTrace stackTrace) {
        // pluginHandle.consentDialog(false);
        Janus.log(error);
        callbacks.error('enumerateDevices error', error);
      });
    }
  } else {
    // No need to do a getUserMedia, create offer/answer right away
    streamsDone(handleId, jsep, media, callbacks, null);
  }
}