AudioPlayer constructor

AudioPlayer(
  {String? userAgent,
  bool handleInterruptions = true,
  bool androidApplyAudioAttributes = true,
  bool handleAudioSessionActivation = true,
  AudioLoadConfiguration? audioLoadConfiguration,
  AudioPipeline? audioPipeline}
)

Creates an AudioPlayer.
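
A minimal usage sketch (the URL is illustrative):

import 'package:just_audio/just_audio.dart';

Future<void> example() async {
  final player = AudioPlayer();
  await player.setUrl('https://example.com/track.mp3'); // illustrative URL
  await player.play(); // completes when playback completes or is paused
  await player.dispose();
}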

If userAgent is specified, it will be sent as the User-Agent header of all HTTP requests on Android, iOS and macOS to identify your agent to the server. If set, just_audio will create a cleartext local HTTP proxy on your device to forward HTTP requests with headers included. If userAgent is not specified, this will default to Apple's Core Audio user agent on iOS/macOS and to just_audio's own user agent on Android. On Web, the browser will override any specified user-agent string with its own.
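
For example, a user agent can be passed at construction time (the string itself is illustrative):

final player = AudioPlayer(
  userAgent: 'myradioapp/1.0 (Linux; Android 13) https://myradioapp.com',
);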

The player will automatically pause/duck and resume/unduck when audio interruptions occur (e.g. a phone call) or when headphones are unplugged. If you wish to handle audio interruptions manually, set handleInterruptions to false and interface directly with the audio session via the audio_session package. If you do not wish just_audio to automatically activate the audio session when playing audio, set handleAudioSessionActivation to false. If you do not want just_audio to respect the global AndroidAudioAttributes configured by audio_session, set androidApplyAudioAttributes to false.
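
A minimal sketch of manual interruption handling, assuming the audio_session package is added as a dependency; the streams used here are the same ones the implementation below listens to:

import 'package:audio_session/audio_session.dart';
import 'package:just_audio/just_audio.dart';

Future<AudioPlayer> createManuallyManagedPlayer() async {
  final player = AudioPlayer(handleInterruptions: false);
  final session = await AudioSession.instance;
  session.interruptionEventStream.listen((event) {
    // Apply your own pause/duck policy on interruption begin/end.
    if (event.begin) player.pause();
  });
  // Pause when headphones are unplugged ("becoming noisy").
  session.becomingNoisyEventStream.listen((_) => player.pause());
  return player;
}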

The default audio loading and buffering behaviour can be configured via the audioLoadConfiguration parameter.
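
As a sketch, the iOS/macOS loading behaviour can be tuned like this (DarwinLoadControl is the option read by the constructor below; AudioLoadConfiguration also accepts Android-specific options):

final player = AudioPlayer(
  audioLoadConfiguration: AudioLoadConfiguration(
    darwinLoadControl: DarwinLoadControl(
      automaticallyWaitsToMinimizeStalling: true,
    ),
  ),
);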

Implementation

AudioPlayer({
  String? userAgent,
  bool handleInterruptions = true,
  bool androidApplyAudioAttributes = true,
  bool handleAudioSessionActivation = true,
  AudioLoadConfiguration? audioLoadConfiguration,
  AudioPipeline? audioPipeline,
})  : _id = _uuid.v4(),
      _userAgent = userAgent,
      _androidApplyAudioAttributes =
          androidApplyAudioAttributes && _isAndroid(),
      _handleAudioSessionActivation = handleAudioSessionActivation,
      _audioLoadConfiguration = audioLoadConfiguration,
      _audioPipeline = audioPipeline ?? AudioPipeline() {
  _audioPipeline._setup(this);
  if (_audioLoadConfiguration?.darwinLoadControl != null) {
    _automaticallyWaitsToMinimizeStalling = _audioLoadConfiguration!
        .darwinLoadControl!.automaticallyWaitsToMinimizeStalling;
  }
  _playbackEventSubject.add(_playbackEvent);
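  // Mirror individual fields of playbackEventStream onto dedicated streams;
  // errors are swallowed here rather than forwarded to each derived stream.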
  _processingStateSubject.addStream(playbackEventStream
      .map((event) => event.processingState)
      .distinct()
      .handleError((Object err, StackTrace stackTrace) {/* noop */}));
  _bufferedPositionSubject.addStream(playbackEventStream
      .map((event) => event.bufferedPosition)
      .distinct()
      .handleError((Object err, StackTrace stackTrace) {/* noop */}));
  _icyMetadataSubject.addStream(playbackEventStream
      .map((event) => event.icyMetadata)
      .distinct()
      .handleError((Object err, StackTrace stackTrace) {/* noop */}));
  _currentIndexSubject.addStream(playbackEventStream
      .map((event) => event.currentIndex)
      .distinct()
      .handleError((Object err, StackTrace stackTrace) {/* noop */}));
  _androidAudioSessionIdSubject.addStream(playbackEventStream
      .map((event) => event.androidAudioSessionId)
      .distinct()
      .handleError((Object err, StackTrace stackTrace) {/* noop */}));
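  // Combine the sequence, shuffle and loop state into a single
  // SequenceState, clamping currentIndex to the valid range.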
  _sequenceStateSubject.addStream(Rx.combineLatest5<List<IndexedAudioSource>?,
      List<int>?, int?, bool, LoopMode, SequenceState?>(
    sequenceStream,
    shuffleIndicesStream,
    currentIndexStream,
    shuffleModeEnabledStream,
    loopModeStream,
    (sequence, shuffleIndices, currentIndex, shuffleModeEnabled, loopMode) {
      if (sequence == null) return null;
      if (shuffleIndices == null) return null;
      currentIndex ??= 0;
      currentIndex = max(min(sequence.length - 1, max(0, currentIndex)), 0);
      return SequenceState(
        sequence,
        currentIndex,
        shuffleIndices,
        shuffleModeEnabled,
        loopMode,
      );
    },
  ).distinct().handleError((Object err, StackTrace stackTrace) {/* noop */}));
  _playerStateSubject.addStream(
      Rx.combineLatest2<bool, PlaybackEvent, PlayerState>(
              playingStream,
              playbackEventStream,
              (playing, event) => PlayerState(playing, event.processingState))
          .distinct()
          .handleError((Object err, StackTrace stackTrace) {/* noop */}));
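  // Seed initial defaults and start with the platform in the idle state.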
  _shuffleModeEnabledSubject.add(false);
  _loopModeSubject.add(LoopMode.off);
  _setPlatformActive(false, force: true)?.catchError((dynamic e) {});
  _sequenceSubject.add(null);
  // Respond to changes to AndroidAudioAttributes configuration.
  if (androidApplyAudioAttributes && _isAndroid()) {
    AudioSession.instance.then((audioSession) {
      audioSession.configurationStream
          .map((conf) => conf.androidAudioAttributes)
          .where((attributes) => attributes != null)
          .cast<AndroidAudioAttributes>()
          .distinct()
          .listen(setAndroidAudioAttributes);
    });
  }
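  // Pause or duck on interruptions and unplugged headphones, and restore
  // playback state afterwards.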
  if (handleInterruptions) {
    AudioSession.instance.then((session) {
      session.becomingNoisyEventStream.listen((_) {
        pause();
      });
      session.interruptionEventStream.listen((event) {
        if (event.begin) {
          switch (event.type) {
            case AudioInterruptionType.duck:
              assert(_isAndroid());
              if (session.androidAudioAttributes!.usage ==
                  AndroidAudioUsage.game) {
                setVolume(volume / 2);
              }
              _playInterrupted = false;
              break;
            case AudioInterruptionType.pause:
            case AudioInterruptionType.unknown:
              if (playing) {
                pause();
                // pause() resets _playInterrupted to false in its synchronous
                // portion, so setting it to true here is not overwritten.
                _playInterrupted = true;
              }
              break;
          }
        } else {
          switch (event.type) {
            case AudioInterruptionType.duck:
              assert(_isAndroid());
              setVolume(min(1.0, volume * 2));
              _playInterrupted = false;
              break;
            case AudioInterruptionType.pause:
              if (_playInterrupted) play();
              _playInterrupted = false;
              break;
            case AudioInterruptionType.unknown:
              _playInterrupted = false;
              break;
          }
        }
      });
    });
  }
  _removeOldAssetCacheDir();
}