cookies = connection2.getHeaderFields().get("Set-Cookie");
+ for (String cookie : cookies) {
+ if (cookie.contains("token-test=")) {
+ return cookie;
+ }
+ }
+
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ if (outputStream != null) {
+ try {
+ outputStream.close();
+ } catch (IOException e) {
+ System.out.println("输出流关闭失败");
+ }
+ }
+ }
+ return null;
+ }
+}
+
+
diff --git a/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/PeerConnectionClient.java b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/PeerConnectionClient.java
new file mode 100644
index 00000000..460d9499
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/PeerConnectionClient.java
@@ -0,0 +1,1511 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package com.zlm.rtc.client;
+
+import android.content.Context;
+import android.os.Environment;
+import android.os.ParcelFileDescriptor;
+import android.util.Log;
+
+import androidx.annotation.Nullable;
+
+import org.webrtc.AudioSource;
+import org.webrtc.AudioTrack;
+import org.webrtc.CameraVideoCapturer;
+import org.webrtc.DataChannel;
+import org.webrtc.DefaultVideoDecoderFactory;
+import org.webrtc.DefaultVideoEncoderFactory;
+import org.webrtc.EglBase;
+import org.webrtc.IceCandidate;
+import org.webrtc.Logging;
+import org.webrtc.MediaConstraints;
+import org.webrtc.MediaStream;
+import org.webrtc.MediaStreamTrack;
+import org.webrtc.PeerConnection;
+import org.webrtc.PeerConnection.IceConnectionState;
+import org.webrtc.PeerConnectionFactory;
+import org.webrtc.RtpParameters;
+import org.webrtc.RtpReceiver;
+import org.webrtc.RtpSender;
+import org.webrtc.RtpTransceiver;
+import org.webrtc.SdpObserver;
+import org.webrtc.SessionDescription;
+import org.webrtc.SoftwareVideoDecoderFactory;
+import org.webrtc.SoftwareVideoEncoderFactory;
+import org.webrtc.StatsObserver;
+import org.webrtc.StatsReport;
+import org.webrtc.SurfaceTextureHelper;
+import org.webrtc.SurfaceViewRenderer;
+import org.webrtc.VideoCapturer;
+import org.webrtc.VideoDecoderFactory;
+import org.webrtc.VideoEncoderFactory;
+import org.webrtc.VideoFrame;
+import org.webrtc.VideoSink;
+import org.webrtc.VideoSource;
+import org.webrtc.VideoTrack;
+import org.webrtc.audio.AudioDeviceModule;
+import org.webrtc.audio.JavaAudioDeviceModule;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordErrorCallback;
+import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback;
+import org.webrtc.audio.LegacyAudioDeviceModule;
+import org.webrtc.voiceengine.WebRtcAudioManager;
+import org.webrtc.voiceengine.WebRtcAudioRecord;
+import org.webrtc.voiceengine.WebRtcAudioRecord.AudioRecordStartErrorCode;
+import org.webrtc.voiceengine.WebRtcAudioRecord.WebRtcAudioRecordErrorCallback;
+import org.webrtc.voiceengine.WebRtcAudioTrack;
+import org.webrtc.voiceengine.WebRtcAudioTrack.AudioTrackStartErrorCode;
+import org.webrtc.voiceengine.WebRtcAudioUtils;
+
+import java.io.File;
+import java.io.IOException;
+import java.math.BigInteger;
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+
+/**
+ * Peer connection client implementation.
+ *
+ * All public methods are routed to local looper thread.
+ * All PeerConnectionEvents callbacks are invoked from the same looper thread.
+ * This class is a singleton.CreateOffer
+ */
+public class PeerConnectionClient {
+ public static final String VIDEO_TRACK_ID = "ARDAMSv0";
+ public static final String AUDIO_TRACK_ID = "ARDAMSa0";
+ public static final String VIDEO_TRACK_TYPE = "video";
+ private static final String TAG = "PCRTCClient";
+ private static final String VIDEO_CODEC_VP8 = "VP8";
+ private static final String VIDEO_CODEC_VP9 = "VP9";
+ private static final String VIDEO_CODEC_H264 = "H264";
+ private static final String VIDEO_CODEC_H264_BASELINE = "H264 Baseline";
+ private static final String VIDEO_CODEC_H264_HIGH = "H264 High";
+ private static final String AUDIO_CODEC_OPUS = "opus";
+ private static final String AUDIO_CODEC_ISAC = "ISAC";
+ private static final String VIDEO_CODEC_PARAM_START_BITRATE = "x-google-start-bitrate";
+ private static final String VIDEO_FLEXFEC_FIELDTRIAL =
+ "WebRTC-FlexFEC-03-Advertised/Enabled/WebRTC-FlexFEC-03/Enabled/";
+ private static final String VIDEO_VP8_INTEL_HW_ENCODER_FIELDTRIAL = "WebRTC-IntelVP8/Enabled/";
+ private static final String DISABLE_WEBRTC_AGC_FIELDTRIAL =
+ "WebRTC-Audio-MinimizeResamplingOnMobile/Enabled/";
+ private static final String AUDIO_CODEC_PARAM_BITRATE = "maxaveragebitrate";
+ private static final String AUDIO_ECHO_CANCELLATION_CONSTRAINT = "googEchoCancellation";
+ private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT = "googAutoGainControl";
+ private static final String AUDIO_HIGH_PASS_FILTER_CONSTRAINT = "googHighpassFilter";
+ private static final String AUDIO_NOISE_SUPPRESSION_CONSTRAINT = "googNoiseSuppression";
+ private static final String DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT = "DtlsSrtpKeyAgreement";
+ private static final int HD_VIDEO_WIDTH = 1280;
+ private static final int HD_VIDEO_HEIGHT = 720;
+ private static final int BPS_IN_KBPS = 1000;
+ private static final String RTCEVENTLOG_OUTPUT_DIR_NAME = "rtc_event_log";
+
+ // Executor thread is started once in private ctor and is used for all
+ // peer connection API calls to ensure new peer connection factory is
+ // created on the same thread as previously destroyed factory.
+ private static final ExecutorService executor = Executors.newSingleThreadExecutor();
+
+ //private final PCObserver pcObserver = new PCObserver();
+ //private final SDPObserver sdpObserver = new SDPObserver();
+ private final Timer statsTimer = new Timer();
+ private final EglBase rootEglBase;
+ private final Context appContext;
+ private final PeerConnectionParameters peerConnectionParameters;
+ private final PeerConnectionEvents events;
+
+ @Nullable
+ private PeerConnectionFactory factory;
+ @Nullable
+ private ConcurrentHashMap peerConnectionMap;
+ @Nullable
+ private ConcurrentHashMap videoSinkMap;
+ @Nullable
+ private AudioSource audioSource;
+ @Nullable
+ private SurfaceTextureHelper surfaceTextureHelper;
+ @Nullable
+ private VideoSource videoSource;
+ private boolean preferIsac;
+ private boolean videoCapturerStopped;
+ private boolean isError;
+ @Nullable
+ private VideoSink localSink;
+ @Nullable
+ private List remoteSinks;
+ private int videoWidth;
+ private int videoHeight;
+ private int videoFps;
+ private MediaConstraints audioConstraints;
+ private MediaConstraints sdpMediaConstraints;
+ // Queued remote ICE candidates are consumed only after both local and
+ // remote descriptions are set. Similarly local ICE candidates are sent to
+ // remote peer after both local and remote description are set.
+ @Nullable
+ private List queuedRemoteCandidates;
+ private boolean isInitiator;
+ @Nullable
+ private SessionDescription localSdp; // either offer or answer SDP
+ @Nullable
+ private VideoCapturer videoCapturer;
+ // enableVideo is set to true if video should be rendered and sent.
+ private boolean renderVideo = true;
+ @Nullable
+ private VideoTrack localVideoTrack;
+ @Nullable
+ private VideoTrack remoteVideoTrack;
+ @Nullable
+ private RtpSender localVideoSender;
+ // enableAudio is set to true if audio should be sent.
+ private boolean enableAudio = true;
+ @Nullable
+ private AudioTrack localAudioTrack;
+ @Nullable
+ private DataChannel dataChannel;
+ private final boolean dataChannelEnabled;
+ // Enable RtcEventLog.
+ @Nullable
+ private RtcEventLog rtcEventLog;
+ // Implements the WebRtcAudioRecordSamplesReadyCallback interface and writes
+ // recorded audio samples to an output file.
+ @Nullable
+ private RecordedAudioToFileController saveRecordedAudioToFile = null;
+
+
+ /**
+ * Peer connection parameters.
+ */
+ public static class DataChannelParameters {
+ public final boolean ordered;
+ public final int maxRetransmitTimeMs;
+ public final int maxRetransmits;
+ public final String protocol;
+ public final boolean negotiated;
+ public final int id;
+
+ public DataChannelParameters(boolean ordered, int maxRetransmitTimeMs, int maxRetransmits,
+ String protocol, boolean negotiated, int id) {
+ this.ordered = ordered;
+ this.maxRetransmitTimeMs = maxRetransmitTimeMs;
+ this.maxRetransmits = maxRetransmits;
+ this.protocol = protocol;
+ this.negotiated = negotiated;
+ this.id = id;
+ }
+ }
+
+ /**
+ * Peer connection parameters.
+ */
+ public static class PeerConnectionParameters {
+ public final boolean videoCallEnabled;
+ public final boolean loopback;
+ public final boolean tracing;
+ public final int videoWidth;
+ public final int videoHeight;
+ public final int videoFps;
+ public final int videoMaxBitrate;
+ public final String videoCodec;
+ public final boolean videoCodecHwAcceleration;
+ public final boolean videoFlexfecEnabled;
+ public final int audioStartBitrate;
+ public final String audioCodec;
+ public final boolean noAudioProcessing;
+ public final boolean aecDump;
+ public final boolean saveInputAudioToFile;
+ public final boolean useOpenSLES;
+ public final boolean disableBuiltInAEC;
+ public final boolean disableBuiltInAGC;
+ public final boolean disableBuiltInNS;
+ public final boolean disableWebRtcAGCAndHPF;
+ public final boolean enableRtcEventLog;
+ public final boolean useLegacyAudioDevice;
+ private final DataChannelParameters dataChannelParameters;
+
+ public PeerConnectionParameters(boolean videoCallEnabled, boolean loopback, boolean tracing,
+ int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate, String videoCodec,
+ boolean videoCodecHwAcceleration, boolean videoFlexfecEnabled, int audioStartBitrate,
+ String audioCodec, boolean noAudioProcessing, boolean aecDump, boolean saveInputAudioToFile,
+ boolean useOpenSLES, boolean disableBuiltInAEC, boolean disableBuiltInAGC,
+ boolean disableBuiltInNS, boolean disableWebRtcAGCAndHPF, boolean enableRtcEventLog,
+ boolean useLegacyAudioDevice, DataChannelParameters dataChannelParameters) {
+ this.videoCallEnabled = videoCallEnabled;
+ this.loopback = loopback;
+ this.tracing = tracing;
+ this.videoWidth = videoWidth;
+ this.videoHeight = videoHeight;
+ this.videoFps = videoFps;
+ this.videoMaxBitrate = videoMaxBitrate;
+ this.videoCodec = videoCodec;
+ this.videoFlexfecEnabled = videoFlexfecEnabled;
+ this.videoCodecHwAcceleration = videoCodecHwAcceleration;
+ this.audioStartBitrate = audioStartBitrate;
+ this.audioCodec = audioCodec;
+ this.noAudioProcessing = noAudioProcessing;
+ this.aecDump = aecDump;
+ this.saveInputAudioToFile = saveInputAudioToFile;
+ this.useOpenSLES = useOpenSLES;
+ this.disableBuiltInAEC = disableBuiltInAEC;
+ this.disableBuiltInAGC = disableBuiltInAGC;
+ this.disableBuiltInNS = disableBuiltInNS;
+ this.disableWebRtcAGCAndHPF = disableWebRtcAGCAndHPF;
+ this.enableRtcEventLog = enableRtcEventLog;
+ this.useLegacyAudioDevice = useLegacyAudioDevice;
+ this.dataChannelParameters = dataChannelParameters;
+ }
+ }
+
+ /**
+ * Peer connection events.
+ */
+ public interface PeerConnectionEvents {
+ /**
+ * Callback fired once local SDP is created and set.
+ */
+ void onLocalDescription(final BigInteger handleId, final SessionDescription sdp);
+
+
+ /**
+ * Callback fired once local Ice candidate is generated.
+ */
+ void onIceCandidate(final BigInteger handleId, final IceCandidate candidate);
+
+ /**
+ * Callback fired once local ICE candidates are removed.
+ */
+ void onIceCandidatesRemoved(final BigInteger handleId, final IceCandidate[] candidates);
+
+ /**
+ * Callback fired once connection is established (IceConnectionState is
+ * CONNECTED).
+ */
+ void onIceConnected(final BigInteger handleId);
+
+ /**
+ * Callback fired once connection is closed (IceConnectionState is
+ * DISCONNECTED).
+ */
+ void onIceDisconnected(final BigInteger handleId);
+
+ /**
+ * Callback fired once peer connection is closed.
+ */
+ void onPeerConnectionClosed(final BigInteger handleId);
+
+ /**
+ * Callback fired once peer connection statistics is ready.
+ */
+ void onPeerConnectionStatsReady(final BigInteger handleId, final StatsReport[] reports);
+
+ /**
+ * Callback fired once peer connection error happened.
+ */
+ void onPeerConnectionError(final BigInteger handleId, final String description);
+
+ void onLocalRender(final BigInteger handleId);
+
+ void onRemoteRender(final BigInteger handleId);
+ }
+
+ public static class proxyVideoSinks implements VideoSink {
+ private VideoSink target = null;
+
+ @Override
+ synchronized public void onFrame(VideoFrame frame) {
+ if (target == null) {
+ Logging.d(TAG, "Dropping frame in proxy because target is null.");
+ return;
+ }
+
+ target.onFrame(frame);
+ }
+
+ synchronized public void setTarget(VideoSink target) {
+ this.target = target;
+ }
+
+ synchronized public void reset() {
+ target = null;
+ }
+ }
+
+ /**
+ * Create a PeerConnectionClient with the specified parameters. PeerConnectionClient takes
+ * ownership of |eglBase|.
+ */
/**
 * Creates a PeerConnectionClient with the specified parameters.
 * PeerConnectionClient takes ownership of |eglBase| and releases it in close().
 * WebRTC global initialization is scheduled once on the shared executor thread.
 */
public PeerConnectionClient(Context appContext, EglBase eglBase,
        PeerConnectionParameters peerConnectionParameters, PeerConnectionEvents events) {
    this.rootEglBase = eglBase;
    this.appContext = appContext;
    this.events = events;
    this.peerConnectionParameters = peerConnectionParameters;
    // A data channel is created only when its parameters were supplied.
    this.dataChannelEnabled = peerConnectionParameters.dataChannelParameters != null;
    this.peerConnectionMap = new ConcurrentHashMap<>();
    this.videoSinkMap = new ConcurrentHashMap<>();

    Log.d(TAG, "Preferred video codec: " + getSdpVideoCodecName(peerConnectionParameters));

    final String fieldTrials = getFieldTrials(peerConnectionParameters);
    executor.execute(() -> {
        Log.d(TAG, "Initialize WebRTC. Field trials: " + fieldTrials);
        PeerConnectionFactory.initialize(
                PeerConnectionFactory.InitializationOptions.builder(appContext)
                        .setFieldTrials(fieldTrials)
                        .setEnableInternalTracer(true)
                        .createInitializationOptions());
    });
}
+
+ /**
+ * This function should only be called once.
+ */
+ public void createPeerConnectionFactory(PeerConnectionFactory.Options options) {
+ if (factory != null) {
+ throw new IllegalStateException("PeerConnectionFactory has already been constructed");
+ }
+ executor.execute(() -> createPeerConnectionFactoryInternal(options));
+ }
+
+ public void createPeerConnection(final VideoCapturer videoCapturer, final BigInteger handleId) {
+ if (peerConnectionParameters.videoCallEnabled && videoCapturer == null) {
+ Log.w(TAG, "Video call enabled but no video capturer provided.");
+ }
+ if (peerConnectionParameters == null) {
+ Log.e(TAG, "Creating peer connection without initializing factory.");
+ return;
+ }
+ this.videoCapturer = videoCapturer;
+ executor.execute(() -> {
+ try {
+ createMediaConstraintsInternal();
+ createPeerConnectionInternal(handleId);
+ //maybeCreateAndStartRtcEventLog();
+ } catch (Exception e) {
+ reportError("Failed to create peer connection: " + e.getMessage());
+ throw e;
+ }
+ });
+ }
+
+ public void close() {
+ executor.execute(this::closeInternal);
+ }
+
+ private boolean isVideoCallEnabled() {
+ return peerConnectionParameters.videoCallEnabled && videoCapturer != null;
+ }
+
+ private void createPeerConnectionFactoryInternal(PeerConnectionFactory.Options options) {
+ isError = false;
+
+ if (peerConnectionParameters.tracing) {
+ PeerConnectionFactory.startInternalTracingCapture(
+ Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator
+ + "webrtc-trace.txt");
+ }
+
+ // Check if ISAC is used by default.
+ preferIsac = peerConnectionParameters.audioCodec != null
+ && peerConnectionParameters.audioCodec.equals(AUDIO_CODEC_ISAC);
+
+ final AudioDeviceModule adm = peerConnectionParameters.useLegacyAudioDevice
+ ? createLegacyAudioDevice()
+ : createJavaAudioDevice();
+
+ // Create peer connection factory.
+ if (options != null) {
+ Log.d(TAG, "Factory networkIgnoreMask option: " + options.networkIgnoreMask);
+ }
+ final boolean enableH264HighProfile =
+ VIDEO_CODEC_H264_HIGH.equals(peerConnectionParameters.videoCodec);
+ final VideoEncoderFactory encoderFactory;
+ final VideoDecoderFactory decoderFactory;
+
+ if (peerConnectionParameters.videoCodecHwAcceleration) {
+ encoderFactory = new DefaultVideoEncoderFactory(
+ rootEglBase.getEglBaseContext(), true /* enableIntelVp8Encoder */, enableH264HighProfile);
+ decoderFactory = new DefaultVideoDecoderFactory(rootEglBase.getEglBaseContext());
+ } else {
+ encoderFactory = new SoftwareVideoEncoderFactory();
+ decoderFactory = new SoftwareVideoDecoderFactory();
+ }
+
+ factory = PeerConnectionFactory.builder()
+ .setOptions(options)
+ .setAudioDeviceModule(adm)
+ .setVideoEncoderFactory(encoderFactory)
+ .setVideoDecoderFactory(decoderFactory)
+ .createPeerConnectionFactory();
+ Log.d(TAG, "Peer connection factory created.");
+ adm.release();
+ }
+
+ AudioDeviceModule createLegacyAudioDevice() {
+ // Enable/disable OpenSL ES playback.
+ if (!peerConnectionParameters.useOpenSLES) {
+ Log.d(TAG, "Disable OpenSL ES audio even if device supports it");
+ WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true /* enable */);
+ } else {
+ Log.d(TAG, "Allow OpenSL ES audio if device supports it");
+ WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(false);
+ }
+
+ if (peerConnectionParameters.disableBuiltInAEC) {
+ Log.d(TAG, "Disable built-in AEC even if device supports it");
+ WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(true);
+ } else {
+ Log.d(TAG, "Enable built-in AEC if device supports it");
+ WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(false);
+ }
+
+ if (peerConnectionParameters.disableBuiltInNS) {
+ Log.d(TAG, "Disable built-in NS even if device supports it");
+ WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(true);
+ } else {
+ Log.d(TAG, "Enable built-in NS if device supports it");
+ WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(false);
+ }
+
+ WebRtcAudioRecord.setOnAudioSamplesReady(saveRecordedAudioToFile);
+
+ // Set audio record error callbacks.
+ WebRtcAudioRecord.setErrorCallback(new WebRtcAudioRecordErrorCallback() {
+ @Override
+ public void onWebRtcAudioRecordInitError(String errorMessage) {
+ Log.e(TAG, "onWebRtcAudioRecordInitError: " + errorMessage);
+ reportError(errorMessage);
+ }
+
+ @Override
+ public void onWebRtcAudioRecordStartError(
+ AudioRecordStartErrorCode errorCode, String errorMessage) {
+ Log.e(TAG, "onWebRtcAudioRecordStartError: " + errorCode + ". " + errorMessage);
+ reportError(errorMessage);
+ }
+
+ @Override
+ public void onWebRtcAudioRecordError(String errorMessage) {
+ Log.e(TAG, "onWebRtcAudioRecordError: " + errorMessage);
+ reportError(errorMessage);
+ }
+ });
+
+ WebRtcAudioTrack.setErrorCallback(new WebRtcAudioTrack.ErrorCallback() {
+ @Override
+ public void onWebRtcAudioTrackInitError(String errorMessage) {
+ Log.e(TAG, "onWebRtcAudioTrackInitError: " + errorMessage);
+ reportError(errorMessage);
+ }
+
+ @Override
+ public void onWebRtcAudioTrackStartError(
+ AudioTrackStartErrorCode errorCode, String errorMessage) {
+ Log.e(TAG, "onWebRtcAudioTrackStartError: " + errorCode + ". " + errorMessage);
+ reportError(errorMessage);
+ }
+
+ @Override
+ public void onWebRtcAudioTrackError(String errorMessage) {
+ Log.e(TAG, "onWebRtcAudioTrackError: " + errorMessage);
+ reportError(errorMessage);
+ }
+ });
+
+
+ return new LegacyAudioDeviceModule();
+ }
+
+ AudioDeviceModule createJavaAudioDevice() {
+ // Enable/disable OpenSL ES playback.
+ if (!peerConnectionParameters.useOpenSLES) {
+ Log.w(TAG, "External OpenSLES ADM not implemented yet.");
+ // TODO(magjed): Add support for external OpenSLES ADM.
+ }
+
+ // Set audio record error callbacks.
+ AudioRecordErrorCallback audioRecordErrorCallback = new AudioRecordErrorCallback() {
+ @Override
+ public void onWebRtcAudioRecordInitError(String errorMessage) {
+ Log.e(TAG, "onWebRtcAudioRecordInitError: " + errorMessage);
+ reportError(errorMessage);
+ }
+
+ @Override
+ public void onWebRtcAudioRecordStartError(
+ JavaAudioDeviceModule.AudioRecordStartErrorCode errorCode, String errorMessage) {
+ Log.e(TAG, "onWebRtcAudioRecordStartError: " + errorCode + ". " + errorMessage);
+ reportError(errorMessage);
+ }
+
+ @Override
+ public void onWebRtcAudioRecordError(String errorMessage) {
+ Log.e(TAG, "onWebRtcAudioRecordError: " + errorMessage);
+ reportError(errorMessage);
+ }
+ };
+
+ AudioTrackErrorCallback audioTrackErrorCallback = new AudioTrackErrorCallback() {
+ @Override
+ public void onWebRtcAudioTrackInitError(String errorMessage) {
+ Log.e(TAG, "onWebRtcAudioTrackInitError: " + errorMessage);
+ reportError(errorMessage);
+ }
+
+ @Override
+ public void onWebRtcAudioTrackStartError(
+ JavaAudioDeviceModule.AudioTrackStartErrorCode errorCode, String errorMessage) {
+ Log.e(TAG, "onWebRtcAudioTrackStartError: " + errorCode + ". " + errorMessage);
+ reportError(errorMessage);
+ }
+
+ @Override
+ public void onWebRtcAudioTrackError(String errorMessage) {
+ Log.e(TAG, "onWebRtcAudioTrackError: " + errorMessage);
+ reportError(errorMessage);
+ }
+ };
+
+ return JavaAudioDeviceModule.builder(appContext)
+ .setSamplesReadyCallback(saveRecordedAudioToFile)
+ .setUseHardwareAcousticEchoCanceler(!peerConnectionParameters.disableBuiltInAEC)
+ .setUseHardwareNoiseSuppressor(!peerConnectionParameters.disableBuiltInNS)
+ .setAudioRecordErrorCallback(audioRecordErrorCallback)
+ .setAudioTrackErrorCallback(audioTrackErrorCallback)
+ .createAudioDeviceModule();
+ }
+
+ private void createMediaConstraintsInternal() {
+ // Create video constraints if video call is enabled.
+ if (isVideoCallEnabled()) {
+ videoWidth = peerConnectionParameters.videoWidth;
+ videoHeight = peerConnectionParameters.videoHeight;
+ videoFps = peerConnectionParameters.videoFps;
+
+ // If video resolution is not specified, default to HD.
+ if (videoWidth == 0 || videoHeight == 0) {
+ videoWidth = HD_VIDEO_WIDTH;
+ videoHeight = HD_VIDEO_HEIGHT;
+ }
+
+ // If fps is not specified, default to 30.
+ if (videoFps == 0) {
+ videoFps = 30;
+ }
+ Logging.d(TAG, "Capturing format: " + videoWidth + "x" + videoHeight + "@" + videoFps);
+ }
+
+ // Create audio constraints.
+ audioConstraints = new MediaConstraints();
+ // added for audio performance measurements
+ if (peerConnectionParameters.noAudioProcessing) {
+ Log.d(TAG, "Disabling audio processing");
+ audioConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
+ audioConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
+ audioConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
+ audioConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false"));
+ }
+ // Create SDP constraints.
+ sdpMediaConstraints = new MediaConstraints();
+ sdpMediaConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
+ sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
+ "OfferToReceiveVideo", Boolean.toString(isVideoCallEnabled())));
+ }
+
+ private void createPeerConnectionInternal(final BigInteger handleId) {
+ if (factory == null || isError) {
+ Log.e(TAG, "Peerconnection factory is not created");
+ return;
+ }
+ Log.d(TAG, "Create peer connection.");
+
+ queuedRemoteCandidates = new ArrayList<>();
+
+ Log.d(TAG, "createPeerConnectioning...");
+
+ PeerConnection peerConnection = createPeerConnection(handleId, true);
+
+ // Set INFO libjingle logging.
+ // NOTE: this _must_ happen while |factory| is alive!
+ Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO);
+
+ List mediaStreamLabels = Collections.singletonList("ARDAMS");
+ if (isVideoCallEnabled()) {
+ peerConnection.addTrack(createVideoTrack(handleId, videoCapturer), mediaStreamLabels);
+ events.onLocalRender(handleId);
+ // We can add the renderers right away because we don't need to wait for an
+ // answer to get the remote track.
+ /*remoteVideoTrack = getRemoteVideoTrack(handleId);
+ remoteVideoTrack.setEnabled(renderVideo);
+ for (VideoSink remoteSink : remoteSinks) {
+ remoteVideoTrack.addSink(remoteSink);
+ }*/
+ }
+
+ peerConnection.addTrack(createAudioTrack(), mediaStreamLabels);
+
+ if (isVideoCallEnabled()) {
+ findVideoSender(handleId);
+ }
+
+ if (peerConnectionParameters.aecDump) {
+ try {
+ ParcelFileDescriptor aecDumpFileDescriptor =
+ ParcelFileDescriptor.open(new File(Environment.getExternalStorageDirectory().getPath()
+ + File.separator + "Download/audio.aecdump"),
+ ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE
+ | ParcelFileDescriptor.MODE_TRUNCATE);
+ factory.startAecDump(aecDumpFileDescriptor.detachFd(), -1);
+ } catch (IOException e) {
+ Log.e(TAG, "Can not open aecdump file", e);
+ }
+ }
+
+ if (saveRecordedAudioToFile != null) {
+ if (saveRecordedAudioToFile.start()) {
+ Log.d(TAG, "Recording input audio to file is activated");
+ }
+ }
+ Log.d(TAG, "Peer connection created.");
+
+ }
+
+ private PeerConnection createPeerConnection(final BigInteger handleId, final boolean type) {
+ Log.d(TAG, "Create peer connection.");
+ PeerConnection.IceServer iceServer = new PeerConnection.IceServer("stun:stun.freeswitch.org");
+ List iceServers = new ArrayList<>();
+ iceServers.add(iceServer);
+ PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
+ //added by pcg
+ //下面这一行,如果加上,就不会发送本地的local ice candidates了,那我先不加,先发送本地ice再说
+ //rtcConfig.iceTransportsType = PeerConnection.IceTransportsType.RELAY;
+ //added end
+ // TCP candidates are only useful when connecting to a server that supports
+ // ICE-TCP.
+ rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED;
+ rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
+ rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;
+ rtcConfig.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY;
+ // Use ECDSA encryption.
+ rtcConfig.keyType = PeerConnection.KeyType.ECDSA;
+ // Enable DTLS for normal calls and disable for loopback calls.
+ rtcConfig.activeResetSrtpParams = true;//!peerConnectionParameters.loopback;
+ rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+
+ PCObserver pcObserver = new PCObserver();
+ SDPObserver sdpObserver = new SDPObserver();
+ PeerConnection peerConnection = factory.createPeerConnection(rtcConfig, pcObserver);
+
+ ZLMConnection JanusConnection2 = new ZLMConnection();
+ JanusConnection2.handleId = handleId;
+ JanusConnection2.sdpObserver = sdpObserver;
+ JanusConnection2.peerConnection = peerConnection;
+ JanusConnection2.type = type;
+ Log.d(TAG, "We are putting handleId=" + handleId);
+ peerConnectionMap.put(handleId, JanusConnection2);
+ videoSinkMap.put(handleId, new proxyVideoSinks());
+ pcObserver.setConnection(JanusConnection2);
+ sdpObserver.setConnection(JanusConnection2);
+ Log.d(TAG, "Peer connection created.");
+ return peerConnection;
+ }
+
+ private File createRtcEventLogOutputFile() {
+ DateFormat dateFormat = new SimpleDateFormat("yyyyMMdd_hhmm_ss", Locale.getDefault());
+ Date date = new Date();
+ final String outputFileName = "event_log_" + dateFormat.format(date) + ".log";
+ return new File(
+ appContext.getDir(RTCEVENTLOG_OUTPUT_DIR_NAME, Context.MODE_PRIVATE), outputFileName);
+ }
+
+
+ private void closeInternal() {
+ if (factory != null && peerConnectionParameters.aecDump) {
+ factory.stopAecDump();
+ }
+ Log.d(TAG, "Closing peer connection.");
+ statsTimer.cancel();
+
+ if (peerConnectionMap != null) {
+ for (ZLMConnection conn : peerConnectionMap.values()) {
+ if (conn.peerConnection != null) {
+ conn.peerConnection.dispose();
+ conn.peerConnection = null;
+ }
+ }
+ }
+ peerConnectionMap.clear();
+
+ if (videoSinkMap != null) {
+ for (proxyVideoSinks sink : videoSinkMap.values()) {
+ if (sink != null) {
+ sink.reset();
+ }
+ }
+ }
+ videoSinkMap.clear();
+
+
+ Log.d(TAG, "Closing audio source.");
+ if (audioSource != null) {
+ audioSource.dispose();
+ audioSource = null;
+ }
+ Log.d(TAG, "Stopping capture.");
+ if (videoCapturer != null) {
+ try {
+ videoCapturer.stopCapture();
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ videoCapturerStopped = true;
+ videoCapturer.dispose();
+ videoCapturer = null;
+ }
+ Log.d(TAG, "Closing video source.");
+ if (videoSource != null) {
+ videoSource.dispose();
+ videoSource = null;
+ }
+ if (surfaceTextureHelper != null) {
+ surfaceTextureHelper.dispose();
+ surfaceTextureHelper = null;
+ }
+
+ localSink = null;
+ remoteSinks = null;
+ Log.d(TAG, "Closing peer connection factory.");
+ if (factory != null) {
+ factory.dispose();
+ factory = null;
+ }
+ rootEglBase.release();
+ Log.d(TAG, "Closing peer connection done.");
+ //events.onPeerConnectionClosed(); fixme:
+ PeerConnectionFactory.stopInternalTracingCapture();
+ PeerConnectionFactory.shutdownInternalTracer();
+ }
+
+ public boolean isHDVideo() {
+ return isVideoCallEnabled() && videoWidth * videoHeight >= 1280 * 720;
+ }
+
+ @SuppressWarnings("deprecation") // TODO(sakal): getStats is deprecated.
+ private void getStats(final BigInteger handleId) {
+ PeerConnection peerConnection = peerConnectionMap.get(handleId).peerConnection;
+ if (peerConnection == null || isError) {
+ return;
+ }
+ boolean success = peerConnection.getStats(new StatsObserver() {
+ @Override
+ public void onComplete(final StatsReport[] reports) {
+ //events.onPeerConnectionStatsReady(reports); fixme:
+ }
+ }, null);
+ if (!success) {
+ Log.e(TAG, "getStats() returns false!");
+ }
+ }
+
+ public void enableStatsEvents(boolean enable, int periodMs, final BigInteger handleId) {
+ if (enable) {
+ try {
+ statsTimer.schedule(new TimerTask() {
+ @Override
+ public void run() {
+ executor.execute(() -> getStats(handleId));
+ }
+ }, 0, periodMs);
+ } catch (Exception e) {
+ Log.e(TAG, "Can not schedule statistics timer", e);
+ }
+ } else {
+ statsTimer.cancel();
+ }
+ }
+
+ public void setAudioEnabled(final boolean enable) {
+ executor.execute(() -> {
+ enableAudio = enable;
+ if (localAudioTrack != null) {
+ localAudioTrack.setEnabled(enableAudio);
+ }
+ });
+ }
+
+ public void setVideoEnabled(final boolean enable) {
+ executor.execute(() -> {
+ renderVideo = enable;
+ if (localVideoTrack != null) {
+ localVideoTrack.setEnabled(renderVideo);
+ }
+ if (remoteVideoTrack != null) {
+ remoteVideoTrack.setEnabled(renderVideo);
+ }
+ });
+ }
+
+ public void createOffer(final BigInteger handleId) {
+ Log.d(TAG, "peerConnectionMap get handleId=" + peerConnectionMap.size());
+ executor.execute(() -> {
+ Log.d(TAG, "peerConnectionMap get handleId=" + handleId);
+ ZLMConnection connection = peerConnectionMap.get(handleId);
+ PeerConnection peerConnection = connection.peerConnection;
+ if (peerConnection != null && !isError) {
+ Log.d(TAG, "PC Create OFFER");
+ peerConnection.createOffer(connection.sdpObserver, sdpMediaConstraints);
+ }
+ });
+ }
+
+ /**
+ * Applies a remote offer for a subscriber handle and creates the answer.
+ * createPeerConnection(handleId, false) presumably returns the existing
+ * connection or lazily creates one — TODO confirm get-or-create semantics.
+ */
+ public void subscriberHandleRemoteJsep(final BigInteger handleId, final SessionDescription sdp) {
+ executor.execute(() -> {
+ PeerConnection peerConnection = createPeerConnection(handleId, false);
+ SDPObserver sdpObserver = peerConnectionMap.get(handleId).sdpObserver;
+ if (peerConnection != null && !isError) {
+ Log.d(TAG, "PC create ANSWER");
+ ZLMConnection connection = peerConnectionMap.get(handleId);
+ // Order matters: set the remote description first, then answer it.
+ peerConnection.setRemoteDescription(sdpObserver, sdp);
+ peerConnection.createAnswer(connection.sdpObserver, sdpMediaConstraints);
+ }
+ });
+ }
+
+ /**
+ * Adds (or queues) a remote ICE candidate for the given handle. Candidates
+ * are queued while queuedRemoteCandidates is non-null (i.e. before the remote
+ * description is applied) and drained later by drainCandidates().
+ */
+ public void addRemoteIceCandidate(final IceCandidate candidate, final BigInteger handleId) {
+ executor.execute(() -> {
+ // Removed the unused SDPObserver lookup, which could NPE on an unknown handle.
+ PeerConnection peerConnection = createPeerConnection(handleId, false);
+ if (peerConnection != null && !isError) {
+ if (queuedRemoteCandidates != null) {
+ queuedRemoteCandidates.add(candidate);
+ } else {
+ peerConnection.addIceCandidate(candidate);
+ }
+ }
+ });
+ }
+
+ /**
+ * Removes the given remote ICE candidates after draining any queued ones so
+ * that candidates are processed in the proper order.
+ */
+ public void removeRemoteIceCandidates(final IceCandidate[] candidates, final BigInteger handleId) {
+ executor.execute(() -> {
+ // Null-guard the map entry (the original NPE'd on unknown handles) and
+ // drop the unused SDPObserver lookup.
+ ZLMConnection connection = peerConnectionMap.get(handleId);
+ if (connection == null || connection.peerConnection == null || isError) {
+ return;
+ }
+ // Drain the queued remote candidates if there is any so that
+ // they are processed in the proper order.
+ drainCandidates(handleId);
+ connection.peerConnection.removeIceCandidates(candidates);
+ });
+ }
+
+ /**
+ * Applies a remote SDP for the given handle after local munging: optionally
+ * prefers ISAC audio, prefers the configured video codec, and injects the
+ * opus start bitrate when audioStartBitrate > 0.
+ */
+ public void setRemoteDescription(BigInteger handleId, final SessionDescription sdp) {
+ executor.execute(() -> {
+ PeerConnection peerConnection = peerConnectionMap.get(handleId).peerConnection;
+ SDPObserver sdpObserver = peerConnectionMap.get(handleId).sdpObserver;
+ if (peerConnection == null || isError) {
+ return;
+ }
+ String sdpDescription = sdp.description;
+ if (preferIsac) {
+ sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true);
+ }
+ if (isVideoCallEnabled()) {
+ sdpDescription =
+ preferCodec(sdpDescription, getSdpVideoCodecName(peerConnectionParameters), false);
+ }
+ if (peerConnectionParameters.audioStartBitrate > 0) {
+ // Opus only: rewrites or adds the fmtp line carrying the start bitrate.
+ sdpDescription = setStartBitrate(
+ AUDIO_CODEC_OPUS, false, sdpDescription, peerConnectionParameters.audioStartBitrate);
+ }
+ Log.d(TAG, "Set remote SDP.");
+ SessionDescription sdpRemote = new SessionDescription(sdp.type, sdpDescription);
+ peerConnection.setRemoteDescription(sdpObserver, sdpRemote);
+ });
+ }
+
+ /** Re-targets the proxy video sink registered for {@code handleId} to the given renderer. */
+ public void setVideoRender(BigInteger handleId, SurfaceViewRenderer videoRender) {
+ executor.execute(() -> videoSinkMap.get(handleId).setTarget(videoRender));
+ }
+
+ /**
+ * Tears down the sink and peer connection for {@code handleId}.
+ * Safe to call for an unknown/already-disposed handle.
+ */
+ public void dispose(BigInteger handleId) {
+ executor.execute(() -> {
+ videoSinkMap.remove(handleId);
+ // remove() returns the entry (or null); the original get() NPE'd on a
+ // missing handle before reaching its own null check.
+ ZLMConnection conn = peerConnectionMap.remove(handleId);
+ if (conn != null && conn.peerConnection != null) {
+ conn.peerConnection.dispose();
+ conn.peerConnection = null;
+ }
+ });
+ }
+
+
+ /** Stops the capturer (if running) without releasing it; restartable via startVideoSource(). */
+ public void stopVideoSource() {
+ executor.execute(() -> {
+ if (videoCapturer != null && !videoCapturerStopped) {
+ Log.d(TAG, "Stop video source.");
+ try {
+ videoCapturer.stopCapture();
+ } catch (InterruptedException e) {
+ // Preserve the interrupt status instead of silently swallowing it.
+ Thread.currentThread().interrupt();
+ }
+ videoCapturerStopped = true;
+ }
+ });
+ }
+
+ /** Restarts a previously stopped capturer with the last configured resolution/fps. */
+ public void startVideoSource() {
+ executor.execute(() -> {
+ if (videoCapturer == null || !videoCapturerStopped) {
+ return;
+ }
+ Log.d(TAG, "Restart video source.");
+ videoCapturer.startCapture(videoWidth, videoHeight, videoFps);
+ videoCapturerStopped = false;
+ });
+ }
+
+ /**
+ * Caps (or uncaps, when null) the max send bitrate of the local video sender.
+ *
+ * @param maxBitrateKbps cap in kbps; null removes the limit.
+ * @param handleId connection whose sender is (indirectly) affected.
+ */
+ public void setVideoMaxBitrate(@Nullable final Integer maxBitrateKbps, final BigInteger handleId) {
+ executor.execute(() -> {
+ // Lookup moved inside the executor and null-guarded: the original resolved the
+ // map entry on the caller thread and NPE'd on unknown handles. The redundant
+ // second localVideoSender null check and the unused SDPObserver are removed.
+ ZLMConnection connection = peerConnectionMap.get(handleId);
+ if (connection == null || connection.peerConnection == null
+ || localVideoSender == null || isError) {
+ return;
+ }
+ Log.d(TAG, "Requested max video bitrate: " + maxBitrateKbps);
+
+ RtpParameters parameters = localVideoSender.getParameters();
+ if (parameters.encodings.isEmpty()) {
+ Log.w(TAG, "RtpParameters are not ready.");
+ return;
+ }
+
+ for (RtpParameters.Encoding encoding : parameters.encodings) {
+ // Null value means no limit.
+ encoding.maxBitrateBps = maxBitrateKbps == null ? null : maxBitrateKbps * BPS_IN_KBPS;
+ }
+ if (!localVideoSender.setParameters(parameters)) {
+ Log.e(TAG, "RtpSender.setParameters failed.");
+ }
+ Log.d(TAG, "Configured max video bitrate to: " + maxBitrateKbps);
+ });
+ }
+
+ /**
+ * Logs the error and latches {@code isError} on the executor thread so any
+ * future error callback would fire at most once per client lifetime.
+ */
+ private void reportError(final String errorMessage) {
+ Log.e(TAG, "Peerconnection error: " + errorMessage);
+ executor.execute(() -> {
+ if (!isError) {
+ //events.onPeerConnectionError(errorMessage); fixme:
+ isError = true;
+ }
+ });
+ }
+
+ /**
+ * Creates the local audio source/track pair and applies the current
+ * enableAudio flag. Presumably called once on the executor thread after the
+ * factory exists — TODO confirm call-site ordering.
+ */
+ @Nullable
+ private AudioTrack createAudioTrack() {
+ audioSource = factory.createAudioSource(audioConstraints);
+ localAudioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
+ localAudioTrack.setEnabled(enableAudio);
+ return localAudioTrack;
+ }
+
+ /**
+ * Creates the local video track from {@code capturer}: spins up a capture
+ * thread, starts capture at the configured width/height/fps, and sinks
+ * frames into the proxy sink registered for {@code handleId}.
+ */
+ @Nullable
+ private VideoTrack createVideoTrack(BigInteger handleId, VideoCapturer capturer) {
+ surfaceTextureHelper =
+ SurfaceTextureHelper.create("CaptureThread", rootEglBase.getEglBaseContext());
+ videoSource = factory.createVideoSource(capturer.isScreencast());
+ capturer.initialize(surfaceTextureHelper, appContext, videoSource.getCapturerObserver());
+ capturer.startCapture(videoWidth, videoHeight, videoFps);
+
+ localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
+ localVideoTrack.setEnabled(renderVideo);
+ localVideoTrack.addSink(videoSinkMap.get(handleId));
+ return localVideoTrack;
+ }
+
+ /** Scans the handle's senders and caches the one carrying the local video track. */
+ private void findVideoSender(final BigInteger handleId) {
+ PeerConnection pc = peerConnectionMap.get(handleId).peerConnection;
+ for (RtpSender sender : pc.getSenders()) {
+ MediaStreamTrack track = sender.track();
+ if (track == null) {
+ continue;
+ }
+ if (VIDEO_TRACK_TYPE.equals(track.kind())) {
+ Log.d(TAG, "Found video sender.");
+ localVideoSender = sender;
+ }
+ }
+ }
+
+ /** Returns the first remote VideoTrack found on any transceiver (assumes at most one). */
+ private @Nullable VideoTrack getRemoteVideoTrack(BigInteger handleId) {
+ PeerConnection pc = peerConnectionMap.get(handleId).peerConnection;
+ for (RtpTransceiver transceiver : pc.getTransceivers()) {
+ MediaStreamTrack candidate = transceiver.getReceiver().track();
+ if (candidate instanceof VideoTrack) {
+ return (VideoTrack) candidate;
+ }
+ }
+ return null;
+ }
+
+ private static String getSdpVideoCodecName(PeerConnectionParameters parameters) {
+ switch (parameters.videoCodec) {
+ case VIDEO_CODEC_VP8:
+ return VIDEO_CODEC_VP8;
+ case VIDEO_CODEC_VP9:
+ return VIDEO_CODEC_VP9;
+ case VIDEO_CODEC_H264_HIGH:
+ case VIDEO_CODEC_H264_BASELINE:
+ return VIDEO_CODEC_H264;
+ default:
+ return VIDEO_CODEC_VP8;
+ }
+ }
+
+ /** Assembles the WebRTC field-trial string from the enabled experiment flags. */
+ private static String getFieldTrials(PeerConnectionParameters peerConnectionParameters) {
+ StringBuilder fieldTrials = new StringBuilder();
+ if (peerConnectionParameters.videoFlexfecEnabled) {
+ fieldTrials.append(VIDEO_FLEXFEC_FIELDTRIAL);
+ Log.d(TAG, "Enable FlexFEC field trial.");
+ }
+ fieldTrials.append(VIDEO_VP8_INTEL_HW_ENCODER_FIELDTRIAL);
+ if (peerConnectionParameters.disableWebRtcAGCAndHPF) {
+ fieldTrials.append(DISABLE_WEBRTC_AGC_FIELDTRIAL);
+ Log.d(TAG, "Disable WebRTC AGC field trial.");
+ }
+ return fieldTrials.toString();
+ }
+
+ /**
+ * Injects a start/target bitrate for {@code codec} into an SDP blob: updates
+ * the codec's existing a=fmtp line when present, otherwise inserts a new one
+ * right after its a=rtpmap line. Returns the SDP unchanged if the codec has
+ * no rtpmap entry.
+ */
+ @SuppressWarnings("StringSplitter")
+ private static String setStartBitrate(
+ String codec, boolean isVideoCodec, String sdpDescription, int bitrateKbps) {
+ String[] lines = sdpDescription.split("\r\n");
+ int rtpmapLineIndex = -1;
+ boolean sdpFormatUpdated = false;
+ String codecRtpMap = null;
+ // Search for codec rtpmap in format
+ // a=rtpmap:<payload type> <codec name>/<clock rate> [/<encoding parameters>]
+ String regex = "^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$";
+ Pattern codecPattern = Pattern.compile(regex);
+ for (int i = 0; i < lines.length; i++) {
+ Matcher codecMatcher = codecPattern.matcher(lines[i]);
+ if (codecMatcher.matches()) {
+ codecRtpMap = codecMatcher.group(1);
+ rtpmapLineIndex = i;
+ break;
+ }
+ }
+ if (codecRtpMap == null) {
+ Log.w(TAG, "No rtpmap for " + codec + " codec");
+ return sdpDescription;
+ }
+ Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + " at " + lines[rtpmapLineIndex]);
+
+ // Check if a=fmtp string already exist in remote SDP for this codec and
+ // update it with new bitrate parameter.
+ regex = "^a=fmtp:" + codecRtpMap + " \\w+=\\d+.*[\r]?$";
+ codecPattern = Pattern.compile(regex);
+ for (int i = 0; i < lines.length; i++) {
+ Matcher codecMatcher = codecPattern.matcher(lines[i]);
+ if (codecMatcher.matches()) {
+ Log.d(TAG, "Found " + codec + " " + lines[i]);
+ if (isVideoCodec) {
+ lines[i] += "; " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
+ } else {
+ // Audio bitrate parameter is expressed in bps, hence the * 1000.
+ lines[i] += "; " + AUDIO_CODEC_PARAM_BITRATE + "=" + (bitrateKbps * 1000);
+ }
+ Log.d(TAG, "Update remote SDP line: " + lines[i]);
+ sdpFormatUpdated = true;
+ break;
+ }
+ }
+
+ StringBuilder newSdpDescription = new StringBuilder();
+ for (int i = 0; i < lines.length; i++) {
+ newSdpDescription.append(lines[i]).append("\r\n");
+ // Append new a=fmtp line if no such line exist for a codec.
+ if (!sdpFormatUpdated && i == rtpmapLineIndex) {
+ String bitrateSet;
+ if (isVideoCodec) {
+ bitrateSet =
+ "a=fmtp:" + codecRtpMap + " " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
+ } else {
+ bitrateSet = "a=fmtp:" + codecRtpMap + " " + AUDIO_CODEC_PARAM_BITRATE + "="
+ + (bitrateKbps * 1000);
+ }
+ Log.d(TAG, "Add remote SDP line: " + bitrateSet);
+ newSdpDescription.append(bitrateSet).append("\r\n");
+ }
+ }
+ return newSdpDescription.toString();
+ }
+
+ /**
+ * Returns the index of the first line starting with "m=audio " or
+ * "m=video ", or -1 if the SDP contains no such media description.
+ */
+ private static int findMediaDescriptionLine(boolean isAudio, String[] sdpLines) {
+ final String prefix = isAudio ? "m=audio " : "m=video ";
+ int index = 0;
+ for (String line : sdpLines) {
+ if (line.startsWith(prefix)) {
+ return index;
+ }
+ index++;
+ }
+ return -1;
+ }
+
+ /**
+ * Joins the char sequences with {@code delimiter}, optionally appending a
+ * trailing delimiter. Restored the wildcard generic bounds that were
+ * stripped by encoding mangling (raw "Iterable extends CharSequence>").
+ */
+ private static String joinString(
+ Iterable<? extends CharSequence> s, String delimiter, boolean delimiterAtEnd) {
+ Iterator<? extends CharSequence> iter = s.iterator();
+ if (!iter.hasNext()) {
+ return "";
+ }
+ StringBuilder buffer = new StringBuilder(iter.next());
+ while (iter.hasNext()) {
+ buffer.append(delimiter).append(iter.next());
+ }
+ if (delimiterAtEnd) {
+ buffer.append(delimiter);
+ }
+ return buffer.toString();
+ }
+
+ /**
+ * Rebuilds an SDP m= line with {@code preferredPayloadTypes} moved to the
+ * front of the payload list. Returns null on a malformed line. Restored the
+ * {@code <String>} type arguments lost to encoding mangling.
+ */
+ private static @Nullable String movePayloadTypesToFront(
+ List<String> preferredPayloadTypes, String mLine) {
+ // The format of the media description line should be: m=<media> <port> <proto> <fmt> ...
+ final List<String> origLineParts = Arrays.asList(mLine.split(" "));
+ if (origLineParts.size() <= 3) {
+ Log.e(TAG, "Wrong SDP media description format: " + mLine);
+ return null;
+ }
+ final List<String> header = origLineParts.subList(0, 3);
+ final List<String> unpreferredPayloadTypes =
+ new ArrayList<>(origLineParts.subList(3, origLineParts.size()));
+ unpreferredPayloadTypes.removeAll(preferredPayloadTypes);
+ // Reconstruct the line with |preferredPayloadTypes| moved to the beginning of the payload
+ // types.
+ final List<String> newLineParts = new ArrayList<>();
+ newLineParts.addAll(header);
+ newLineParts.addAll(preferredPayloadTypes);
+ newLineParts.addAll(unpreferredPayloadTypes);
+ return joinString(newLineParts, " ", false /* delimiterAtEnd */);
+ }
+
+ /**
+ * Moves all payload types named {@code codec} to the front of the matching
+ * m= line so the codec is preferred during negotiation. Returns the SDP
+ * unchanged when the media line or codec is absent. Restored the
+ * {@code <String>} type argument lost to encoding mangling.
+ */
+ private static String preferCodec(String sdpDescription, String codec, boolean isAudio) {
+ final String[] lines = sdpDescription.split("\r\n");
+ final int mLineIndex = findMediaDescriptionLine(isAudio, lines);
+ if (mLineIndex == -1) {
+ Log.w(TAG, "No mediaDescription line, so can't prefer " + codec);
+ return sdpDescription;
+ }
+ // A list with all the payload types with name |codec|. The payload types are integers in the
+ // range 96-127, but they are stored as strings here.
+ final List<String> codecPayloadTypes = new ArrayList<>();
+ // a=rtpmap:<payload type> <codec name>/<clock rate> [/<encoding parameters>]
+ final Pattern codecPattern = Pattern.compile("^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$");
+ for (String line : lines) {
+ Matcher codecMatcher = codecPattern.matcher(line);
+ if (codecMatcher.matches()) {
+ codecPayloadTypes.add(codecMatcher.group(1));
+ }
+ }
+ if (codecPayloadTypes.isEmpty()) {
+ Log.w(TAG, "No payload types with name " + codec);
+ return sdpDescription;
+ }
+
+ final String newMLine = movePayloadTypesToFront(codecPayloadTypes, lines[mLineIndex]);
+ if (newMLine == null) {
+ return sdpDescription;
+ }
+ Log.d(TAG, "Change media description from: " + lines[mLineIndex] + " to " + newMLine);
+ lines[mLineIndex] = newMLine;
+ return joinString(Arrays.asList(lines), "\r\n", true /* delimiterAtEnd */);
+ }
+
+ /**
+ * Flushes candidates queued before the remote description was set, then
+ * switches to direct delivery by nulling the queue.
+ */
+ private void drainCandidates(BigInteger handleId) {
+ PeerConnection peerConnection = peerConnectionMap.get(handleId).peerConnection;
+ if (queuedRemoteCandidates != null) {
+ Log.d(TAG, "Add " + queuedRemoteCandidates.size() + " remote candidates");
+ for (IceCandidate candidate : queuedRemoteCandidates) {
+ peerConnection.addIceCandidate(candidate);
+ }
+ queuedRemoteCandidates = null;
+ }
+ }
+
+ /**
+ * Switches front/back camera when the capturer is camera-backed; logs and
+ * returns otherwise (e.g. screen or file capture, or error state).
+ */
+ private void switchCameraInternal() {
+ if (videoCapturer instanceof CameraVideoCapturer) {
+ if (!isVideoCallEnabled() || isError) {
+ Log.e(TAG,
+ "Failed to switch camera. Video: " + isVideoCallEnabled() + ". Error : " + isError);
+ return; // No video is sent or only one camera is available or error happened.
+ }
+ Log.d(TAG, "Switch camera");
+ CameraVideoCapturer cameraVideoCapturer = (CameraVideoCapturer) videoCapturer;
+ cameraVideoCapturer.switchCamera(null);
+ } else {
+ // Fixed typo in the log message ("caputurer" -> "capturer").
+ Log.d(TAG, "Will not switch camera, video capturer is not a camera");
+ }
+ }
+
+ /** Asynchronously flips between the available cameras. */
+ public void switchCamera() {
+ executor.execute(() -> switchCameraInternal());
+ }
+
+ public void changeCaptureFormat(final int width, final int height, final int framerate) {
+ executor.execute(() -> changeCaptureFormatInternal(width, height, framerate));
+ }
+
+ /**
+ * Asks the video source to adapt its output to the new format; bails out
+ * when video is disabled, an error occurred, or there is no capturer.
+ */
+ private void changeCaptureFormatInternal(int width, int height, int framerate) {
+ if (!isVideoCallEnabled() || isError || videoCapturer == null) {
+ Log.e(TAG,
+ "Failed to change capture format. Video: " + isVideoCallEnabled()
+ + ". Error : " + isError);
+ return;
+ }
+ Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
+ // Scales/crops at the source rather than restarting the capturer.
+ videoSource.adaptOutputFormat(width, height, framerate);
+ }
+
+ // Implementation detail: observe ICE & stream changes and react accordingly.
+ private class PCObserver implements PeerConnection.Observer {
+ private ZLMConnection connection;
+ private PeerConnection peerConnection;
+
+ public void setConnection(ZLMConnection connection) {
+ this.connection = connection;
+ this.peerConnection = connection.peerConnection;
+ }
+
+ @Override
+ public void onIceCandidate(final IceCandidate candidate) {
+ executor.execute(() -> events.onIceCandidate(connection.handleId, candidate));
+ }
+
+ @Override
+ public void onIceCandidatesRemoved(final IceCandidate[] candidates) {
+ executor.execute(() -> events.onIceCandidatesRemoved(connection.handleId, candidates));
+ }
+
+ @Override
+ public void onSignalingChange(PeerConnection.SignalingState newState) {
+ Log.d(TAG, "SignalingState: " + newState);
+ }
+
+ @Override
+ public void onIceConnectionChange(final IceConnectionState newState) {
+ executor.execute(() -> {
+ Log.d(TAG, "IceConnectionState: " + newState);
+ if (newState == IceConnectionState.CONNECTED) {
+ events.onIceConnected(connection.handleId);
+ } else if (newState == IceConnectionState.DISCONNECTED) {
+ events.onIceDisconnected(connection.handleId);
+ } else if (newState == IceConnectionState.FAILED) {
+ reportError("ICE connection failed.");
+ }
+ });
+ }
+
+ @Override
+ public void onIceGatheringChange(PeerConnection.IceGatheringState newState) {
+ Log.d(TAG, "IceGatheringState: " + newState);
+ }
+
+ @Override
+ public void onIceConnectionReceivingChange(boolean receiving) {
+ Log.d(TAG, "IceConnectionReceiving changed to " + receiving);
+ }
+
+ @Override
+ public void onAddStream(final MediaStream stream) {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ if (peerConnection == null || isError) {
+ return;
+ }
+ Log.d(TAG, "==onAddStream tracks size:" + stream.videoTracks.size());
+ if (stream.videoTracks.size() == 1) {
+ remoteVideoTrack = stream.videoTracks.get(0);
+ remoteVideoTrack.setEnabled(true);
+ connection.videoTrack = remoteVideoTrack;
+ connection.videoTrack.addSink(videoSinkMap.get(connection.handleId));
+ events.onRemoteRender(connection.handleId);
+ }
+ }
+ });
+ }
+
+ @Override
+ public void onRemoveStream(final MediaStream stream) {
+ }
+
+ @Override
+ public void onDataChannel(final DataChannel dc) {
+ Log.d(TAG, "New Data channel " + dc.label());
+
+ if (!dataChannelEnabled)
+ return;
+
+ dc.registerObserver(new DataChannel.Observer() {
+ @Override
+ public void onBufferedAmountChange(long previousAmount) {
+ Log.d(TAG, "Data channel buffered amount changed: " + dc.label() + ": " + dc.state());
+ }
+
+ @Override
+ public void onStateChange() {
+ Log.d(TAG, "Data channel state changed: " + dc.label() + ": " + dc.state());
+ }
+
+ @Override
+ public void onMessage(final DataChannel.Buffer buffer) {
+ if (buffer.binary) {
+ Log.d(TAG, "Received binary msg over " + dc);
+ return;
+ }
+ ByteBuffer data = buffer.data;
+ final byte[] bytes = new byte[data.capacity()];
+ data.get(bytes);
+ String strData = new String(bytes, Charset.forName("UTF-8"));
+ Log.d(TAG, "Got msg: " + strData + " over " + dc);
+ }
+ });
+ }
+
+ @Override
+ public void onRenegotiationNeeded() {
+ // No need to do anything; AppRTC follows a pre-agreed-upon
+ // signaling/negotiation protocol.
+ }
+
+ @Override
+ public void onAddTrack(final RtpReceiver receiver, final MediaStream[] mediaStreams) {
+ Log.d(TAG, "==onAddTrack mediaStreams size:" + mediaStreams.length);
+ }
+ }
+
+ // Implementation detail: handle offer creation/signaling and answer setting,
+ // as well as adding remote ICE candidates once the answer SDP is set.
+ public class SDPObserver implements SdpObserver {
+ private PeerConnection peerConnection;
+ private SDPObserver sdpObserver;
+ private BigInteger handleId;
+ private SessionDescription localSdp;
+ private boolean type;
+
+ public void setConnection(ZLMConnection connection) {
+ this.peerConnection = connection.peerConnection;
+ this.sdpObserver = connection.sdpObserver;
+ this.handleId = connection.handleId;
+ this.type = connection.type;
+ }
+
+ @Override
+ public void onCreateSuccess(final SessionDescription origSdp) {
+ if (localSdp != null) {
+ reportError("Multiple SDP create.");
+ return;
+ }
+ String sdpDescription = origSdp.description;
+ if (preferIsac) {
+ sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true);
+ }
+ if (isVideoCallEnabled()) {
+ sdpDescription =
+ preferCodec(sdpDescription, getSdpVideoCodecName(peerConnectionParameters), false);
+ }
+ final SessionDescription sdp = new SessionDescription(origSdp.type, sdpDescription);
+ localSdp = sdp;
+ executor.execute(() -> {
+ if (peerConnection != null && !isError) {
+ Log.d(TAG, "Set local SDP from " + sdp.type);
+ peerConnection.setLocalDescription(sdpObserver, sdp);
+
+// MediaStream localMediaStream = factory.createLocalMediaStream("ARDAMS");
+// localMediaStream.addTrack(localAudioTrack);
+// peerConnection.addStream(localMediaStream);
+ }
+ });
+ }
+
+ @Override
+ public void onSetSuccess() {
+ executor.execute(() -> {
+ if (peerConnection == null || isError) {
+ return;
+ }
+ if (type) {
+ // For offering peer connection we first create offer and set
+ // local SDP, then after receiving answer set remote SDP.
+ if (peerConnection.getRemoteDescription() == null) {
+ // We've just set our local SDP so time to send it.
+ Log.d(TAG, "Local SDP set succesfully");
+ events.onLocalDescription(handleId, localSdp);
+ } else {
+ // We've just set remote description, so drain remote
+ // and send local ICE candidates.
+ Log.d(TAG, "Remote SDP set succesfully");
+ drainCandidates(handleId);
+ }
+ } else {
+ // For answering peer connection we set remote SDP and then
+ // create answer and set local SDP.
+ if (peerConnection.getLocalDescription() != null) {
+ // We've just set our local SDP so time to send it, drain
+ // remote and send local ICE candidates.
+ Log.d(TAG, "Local SDP set succesfully");
+ events.onLocalDescription(handleId, localSdp);
+ drainCandidates(handleId);
+ } else {
+ // We've just set remote SDP - do nothing for now -
+ // answer will be created soon.
+ Log.d(TAG, "Remote SDP set succesfully");
+ }
+ }
+ });
+ }
+
+ @Override
+ public void onCreateFailure(final String error) {
+ reportError("createSDP error: " + error);
+ }
+
+ @Override
+ public void onSetFailure(final String error) {
+ reportError("setSDP error: " + error);
+ }
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/RecordedAudioToFileController.java b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/RecordedAudioToFileController.java
new file mode 100644
index 00000000..0fda70bb
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/RecordedAudioToFileController.java
@@ -0,0 +1,158 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package com.zlm.rtc.client;
+
+import android.media.AudioFormat;
+import android.os.Environment;
+import android.util.Log;
+
+import androidx.annotation.Nullable;
+
+import org.webrtc.audio.JavaAudioDeviceModule;
+import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback;
+import org.webrtc.voiceengine.WebRtcAudioRecord;
+import org.webrtc.voiceengine.WebRtcAudioRecord.WebRtcAudioRecordSamplesReadyCallback;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.concurrent.ExecutorService;
+
+
+/**
+ * Implements the AudioRecordSamplesReadyCallback interface and writes
+ * recorded raw audio samples to an output file.
+ */
+/**
+ * Implements the AudioRecordSamplesReadyCallback interface and writes
+ * recorded raw audio samples to an output file. The file is opened lazily on
+ * the first sample callback so its name can embed the sample rate and channel
+ * count; total output is capped at MAX_FILE_SIZE_IN_BYTES.
+ */
+public class RecordedAudioToFileController
+ implements SamplesReadyCallback, WebRtcAudioRecordSamplesReadyCallback {
+ private static final String TAG = "RecordedAudioToFile";
+ // 58348800 bytes ~= 10 minutes of mono 16-bit PCM at 48 kHz.
+ private static final long MAX_FILE_SIZE_IN_BYTES = 58348800L;
+
+ // Guards isRunning, rawAudioFileOutputStream and fileSizeInBytes.
+ private final Object lock = new Object();
+ private final ExecutorService executor;
+ @Nullable
+ private OutputStream rawAudioFileOutputStream = null;
+ private boolean isRunning;
+ private long fileSizeInBytes = 0;
+
+ public RecordedAudioToFileController(ExecutorService executor) {
+ Log.d(TAG, "ctor");
+ this.executor = executor;
+ }
+
+ /**
+ * Should be called on the same executor thread as the one provided at
+ * construction. Returns false when external storage is not writable.
+ */
+ public boolean start() {
+ Log.d(TAG, "start");
+ if (!isExternalStorageWritable()) {
+ Log.e(TAG, "Writing to external media is not possible");
+ return false;
+ }
+ synchronized (lock) {
+ isRunning = true;
+ }
+ return true;
+ }
+
+ /**
+ * Should be called on the same executor thread as the one provided at
+ * construction. Closes the output file and resets the byte counter.
+ */
+ public void stop() {
+ Log.d(TAG, "stop");
+ synchronized (lock) {
+ isRunning = false;
+ if (rawAudioFileOutputStream != null) {
+ try {
+ rawAudioFileOutputStream.close();
+ } catch (IOException e) {
+ Log.e(TAG, "Failed to close file with saved input audio: " + e);
+ }
+ rawAudioFileOutputStream = null;
+ }
+ fileSizeInBytes = 0;
+ }
+ }
+
+ // Checks if external storage is available for read and write.
+ private boolean isExternalStorageWritable() {
+ String state = Environment.getExternalStorageState();
+ if (Environment.MEDIA_MOUNTED.equals(state)) {
+ return true;
+ }
+ return false;
+ }
+
+ // Utilizes audio parameters to create a file name which contains sufficient
+ // information so that the file can be played using an external file player.
+ // Example: /sdcard/recorded_audio_16bits_48000Hz_mono.pcm.
+ private void openRawAudioOutputFile(int sampleRate, int channelCount) {
+ final String fileName = Environment.getExternalStorageDirectory().getPath() + File.separator
+ + "recorded_audio_16bits_" + String.valueOf(sampleRate) + "Hz"
+ + ((channelCount == 1) ? "_mono" : "_stereo") + ".pcm";
+ final File outputFile = new File(fileName);
+ try {
+ rawAudioFileOutputStream = new FileOutputStream(outputFile);
+ } catch (FileNotFoundException e) {
+ Log.e(TAG, "Failed to open audio output file: " + e.getMessage());
+ }
+ Log.d(TAG, "Opened file for recording: " + fileName);
+ }
+
+ // Called when new audio samples are ready.
+ // Legacy voiceengine entry point; adapts the payload to the
+ // JavaAudioDeviceModule shape and delegates to the overload below.
+ @Override
+ public void onWebRtcAudioRecordSamplesReady(WebRtcAudioRecord.AudioSamples samples) {
+ onWebRtcAudioRecordSamplesReady(new JavaAudioDeviceModule.AudioSamples(samples.getAudioFormat(),
+ samples.getChannelCount(), samples.getSampleRate(), samples.getData()));
+ }
+
+ // Called when new audio samples are ready.
+ @Override
+ public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples samples) {
+ // The native audio layer on Android should use 16-bit PCM format.
+ if (samples.getAudioFormat() != AudioFormat.ENCODING_PCM_16BIT) {
+ Log.e(TAG, "Invalid audio format");
+ return;
+ }
+ synchronized (lock) {
+ // Abort early if stop() has been called.
+ if (!isRunning) {
+ return;
+ }
+ // Open a new file for the first callback only since it allows us to add audio parameters to
+ // the file name.
+ if (rawAudioFileOutputStream == null) {
+ openRawAudioOutputFile(samples.getSampleRate(), samples.getChannelCount());
+ fileSizeInBytes = 0;
+ }
+ }
+ // Append the recorded 16-bit audio samples to the open output file.
+ // NOTE(review): this lambda reads rawAudioFileOutputStream/fileSizeInBytes
+ // without holding `lock`, so it can race with stop() closing the stream —
+ // confirm whether stop() is always serialized on the same executor.
+ executor.execute(() -> {
+ if (rawAudioFileOutputStream != null) {
+ try {
+ // Set a limit on max file size. 58348800 bytes corresponds to
+ // approximately 10 minutes of recording in mono at 48kHz.
+ if (fileSizeInBytes < MAX_FILE_SIZE_IN_BYTES) {
+ // Writes samples.getData().length bytes to output stream.
+ rawAudioFileOutputStream.write(samples.getData());
+ fileSizeInBytes += samples.getData().length;
+ }
+ } catch (IOException e) {
+ Log.e(TAG, "Failed to write audio to file: " + e.getMessage());
+ }
+ }
+ });
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/RtcEventLog.java b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/RtcEventLog.java
new file mode 100644
index 00000000..94875cda
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/RtcEventLog.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package com.zlm.rtc.client;
+
+import android.os.ParcelFileDescriptor;
+import android.util.Log;
+
+import org.webrtc.PeerConnection;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * Thin wrapper around {@link PeerConnection}'s native RTC event logging with
+ * a simple INACTIVE/STARTED/STOPPED state machine.
+ */
+public class RtcEventLog {
+ private static final String TAG = "RtcEventLog";
+ private static final int OUTPUT_FILE_MAX_BYTES = 10_000_000;
+ private final PeerConnection peerConnection;
+ private RtcEventLogState state = RtcEventLogState.INACTIVE;
+
+ enum RtcEventLogState {
+ INACTIVE,
+ STARTED,
+ STOPPED,
+ }
+
+ public RtcEventLog(PeerConnection peerConnection) {
+ if (peerConnection == null) {
+ throw new NullPointerException("The peer connection is null.");
+ }
+ this.peerConnection = peerConnection;
+ }
+
+ /** Starts logging into {@code outputFile} (created/truncated); no-op if already started. */
+ public void start(final File outputFile) {
+ if (state == RtcEventLogState.STARTED) {
+ Log.e(TAG, "RtcEventLog has already started.");
+ return;
+ }
+ final ParcelFileDescriptor fileDescriptor;
+ try {
+ final int mode = ParcelFileDescriptor.MODE_READ_WRITE
+ | ParcelFileDescriptor.MODE_CREATE
+ | ParcelFileDescriptor.MODE_TRUNCATE;
+ fileDescriptor = ParcelFileDescriptor.open(outputFile, mode);
+ } catch (IOException e) {
+ Log.e(TAG, "Failed to create a new file", e);
+ return;
+ }
+
+ // Passes ownership of the file to WebRTC.
+ if (!peerConnection.startRtcEventLog(fileDescriptor.detachFd(), OUTPUT_FILE_MAX_BYTES)) {
+ Log.e(TAG, "Failed to start RTC event log.");
+ return;
+ }
+ state = RtcEventLogState.STARTED;
+ Log.d(TAG, "RtcEventLog started.");
+ }
+
+ /** Stops logging; no-op unless currently started. */
+ public void stop() {
+ if (state != RtcEventLogState.STARTED) {
+ Log.e(TAG, "RtcEventLog was not started.");
+ return;
+ }
+ peerConnection.stopRtcEventLog();
+ state = RtcEventLogState.STOPPED;
+ Log.d(TAG, "RtcEventLog stopped.");
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/SdpBean.kt b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/SdpBean.kt
new file mode 100644
index 00000000..26cc2439
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/SdpBean.kt
@@ -0,0 +1,13 @@
+package com.zlm.rtc.client
+
+/** Mutable holder for the signaling server's SDP exchange response. */
+class SdpBean {
+ var code: Int = 0
+
+ var id: String? = null
+
+ var msg: String? = null
+
+ var sdp: String? = null
+
+ var type: String? = null
+}
\ No newline at end of file
diff --git a/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/ZLMConnection.java b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/ZLMConnection.java
new file mode 100644
index 00000000..2946b4f3
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/ZLMConnection.java
@@ -0,0 +1,14 @@
+package com.zlm.rtc.client;
+
+import org.webrtc.PeerConnection;
+import org.webrtc.VideoTrack;
+
+import java.math.BigInteger;
+
+/** Bundles the per-handle WebRTC state managed by PeerConnectionClient. */
+public class ZLMConnection {
+ // Signaling handle this connection belongs to.
+ public BigInteger handleId;
+ public PeerConnection peerConnection;
+ public PeerConnectionClient.SDPObserver sdpObserver;
+ // Remote video track, set from PCObserver.onAddStream (at most one).
+ public VideoTrack videoTrack;
+ // Presumably true = offering side, false = answering side — inferred from
+ // SDPObserver.onSetSuccess branching; confirm with the producers.
+ public boolean type;
+}
diff --git a/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/play/ZLMRTCPlayerImpl.kt b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/play/ZLMRTCPlayerImpl.kt
index 9e66365a..ebc522e3 100644
--- a/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/play/ZLMRTCPlayerImpl.kt
+++ b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/play/ZLMRTCPlayerImpl.kt
@@ -2,54 +2,113 @@ package com.zlm.rtc.play
import android.content.Context
import android.graphics.Bitmap
+import android.util.Log
+import com.zlm.rtc.NativeLib
import com.zlm.rtc.ZLMRTCPlayer
-import org.webrtc.AudioSource
-import org.webrtc.AudioTrack
+import com.zlm.rtc.client.HttpClient
+import com.zlm.rtc.client.PeerConnectionClient
+import org.json.JSONObject
+import org.webrtc.Camera1Enumerator
+import org.webrtc.Camera2Enumerator
+import org.webrtc.CameraEnumerator
import org.webrtc.EglBase
-import org.webrtc.PeerConnection
+import org.webrtc.IceCandidate
import org.webrtc.PeerConnectionFactory
-import org.webrtc.SurfaceTextureHelper
+import org.webrtc.SessionDescription
+import org.webrtc.StatsReport
import org.webrtc.SurfaceViewRenderer
import org.webrtc.VideoCapturer
-import org.webrtc.VideoSource
-import org.webrtc.VideoTrack
+import java.math.BigInteger
-class ZLMRTCPlayerImpl: ZLMRTCPlayer() {
+class ZLMRTCPlayerImpl : ZLMRTCPlayer(), PeerConnectionClient.PeerConnectionEvents {
- private val context: Context? = null
+ private var context: Context? = null
- private val eglBase: EglBase? by lazy {
- EglBase.create()
+ private val peerConnectionClient: PeerConnectionClient? by lazy {
+ PeerConnectionClient(
+ context, EglBase.create(),
+ PeerConnectionClient.PeerConnectionParameters(
+ true,
+ false,
+ false,
+ 1080,
+ 960,
+ 0,
+ 0,
+ "VP8",
+ true,
+ false,
+ 0,
+ "OPUS",
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ false, false, false, null
+ ), this
+ )
}
- private var playUrl: String? = null
- private var peerConnection: PeerConnection? = null
- private var surfaceViewRenderer: SurfaceViewRenderer? = null
- private var peerConnectionFactory: PeerConnectionFactory? = null
- private var audioSource: AudioSource? = null
- private var videoSource: VideoSource? = null
- private var localAudioTrack: AudioTrack? = null
- private var localVideoTrack: VideoTrack? = null
- private var captureAndroid: VideoCapturer? = null
- private var surfaceTextureHelper: SurfaceTextureHelper? = null
- private var isShowCamera = true
- private var isPublishMode = false //isPublish true为推流 false为拉流
- private var defaultFps = 24
- private var isPreviewing = false
- private var isFirst = true
init {
}
+ private fun logger(msg: String) {
+ Log.i("ZLMRTCPlayerImpl", msg)
+ }
+
+ fun createVideoCapture(context: Context?): VideoCapturer? {
+ val videoCapturer: VideoCapturer? = if (Camera2Enumerator.isSupported(context)) {
+ createCameraCapture(Camera2Enumerator(context))
+ } else {
+ createCameraCapture(Camera1Enumerator(true))
+ }
+ return videoCapturer
+ }
+
+ /**
+ * 创建相机媒体流
+ */
+ private fun createCameraCapture(enumerator: CameraEnumerator): VideoCapturer? {
+ val deviceNames = enumerator.deviceNames
+
+ // Front facing camera not found, try something else
+ for (deviceName in deviceNames) {
+ if (!enumerator.isFrontFacing(deviceName)) {
+ val videoCapturer: VideoCapturer? = enumerator.createCapturer(deviceName, null)
+ if (videoCapturer != null) {
+ return videoCapturer
+ }
+ }
+ }
+ // First, try to find front facing camera
+ for (deviceName in deviceNames) {
+ if (enumerator.isFrontFacing(deviceName)) {
+ val videoCapturer: VideoCapturer? = enumerator.createCapturer(deviceName, null)
+ if (videoCapturer != null) {
+ return videoCapturer
+ }
+ }
+ }
+
+
+ return null
+ }
+ override fun bind(context: Context, surface: SurfaceViewRenderer, localPreview: Boolean) {
+ this.context = context
+ peerConnectionClient?.setAudioEnabled(true)
+ peerConnectionClient?.createPeerConnectionFactory(PeerConnectionFactory.Options())
+ peerConnectionClient?.createPeerConnection(createVideoCapture(context), BigInteger.ZERO)
+ peerConnectionClient?.createOffer((BigInteger.ZERO))
+
- override fun bind(surface: SurfaceViewRenderer, localPreview: Boolean) {
- this.surfaceViewRenderer = surface
- surfaceViewRenderer?.init(eglBase?.eglBaseContext,null)
}
override fun play(app: String, streamId: String) {
-
}
override fun setSpeakerphoneOn(on: Boolean) {
@@ -81,4 +140,71 @@ class ZLMRTCPlayerImpl: ZLMRTCPlayer() {
}
+ override fun onLocalDescription(handleId: BigInteger?, sdp: SessionDescription?) {
+
+ val url = NativeLib().makeUrl("live", "li")
+ logger("handleId: " + url)
+ logger("handleId: " + sdp?.description)
+ val doPost = HttpClient.doPost(
+ url,
+ mutableMapOf(Pair("sdp", sdp?.description)),
+ mutableMapOf()
+ )
+
+ val result = JSONObject(doPost)
+
+ val code = result.getInt("code")
+ if (code == 0) {
+ logger("handleId: " + doPost)
+ val sdp = result.getString("sdp")
+ peerConnectionClient?.setRemoteDescription(handleId,SessionDescription(SessionDescription.Type.ANSWER,sdp))
+ } else {
+ val msg = result.getString("msg")
+ logger("handleId: " + msg)
+ }
+ }
+
+ override fun onIceCandidate(handleId: BigInteger?, candidate: IceCandidate?) {
+
+ }
+
+ override fun onIceCandidatesRemoved(
+ handleId: BigInteger?,
+ candidates: Array<out IceCandidate>?
+ ) {
+
+ }
+
+ override fun onIceConnected(handleId: BigInteger?) {
+
+ }
+
+ override fun onIceDisconnected(handleId: BigInteger?) {
+
+ }
+
+ override fun onPeerConnectionClosed(handleId: BigInteger?) {
+
+ }
+
+ override fun onPeerConnectionStatsReady(
+ handleId: BigInteger?,
+ reports: Array<out StatsReport>?
+ ) {
+
+ }
+
+ override fun onPeerConnectionError(handleId: BigInteger?, description: String?) {
+
+ }
+
+ override fun onLocalRender(handleId: BigInteger?) {
+
+ }
+
+ override fun onRemoteRender(handleId: BigInteger?) {
+
+ }
+
+
}
\ No newline at end of file
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/AddIceObserver.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/AddIceObserver.java
new file mode 100644
index 00000000..ff2c6900
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/AddIceObserver.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Interface to handle completion of addIceCandidate */
+public interface AddIceObserver {
+ /** Called when ICE candidate added successfully.*/
+ @CalledByNative public void onAddSuccess();
+
+ /** Called when ICE candidate addition failed.*/
+ @CalledByNative public void onAddFailure(String error);
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/AndroidVideoDecoder.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/AndroidVideoDecoder.java
new file mode 100644
index 00000000..47cb5689
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/AndroidVideoDecoder.java
@@ -0,0 +1,673 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaFormat;
+import android.os.SystemClock;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.concurrent.BlockingDeque;
+import java.util.concurrent.LinkedBlockingDeque;
+import java.util.concurrent.TimeUnit;
+import org.webrtc.ThreadUtils.ThreadChecker;
+
+/**
+ * Android hardware video decoder.
+ */
+class AndroidVideoDecoder implements VideoDecoder, VideoSink {
+ private static final String TAG = "AndroidVideoDecoder";
+
+ // MediaCodec.release() occasionally hangs. Release stops waiting and reports failure after
+ // this timeout.
+ private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
+
+ // WebRTC queues input frames quickly in the beginning on the call. Wait for input buffers with a
+ // long timeout (500 ms) to prevent this from causing the codec to return an error.
+ private static final int DEQUEUE_INPUT_TIMEOUT_US = 500000;
+
+ // Dequeuing an output buffer will block until a buffer is available (up to 100 milliseconds).
+ // If this timeout is exceeded, the output thread will unblock and check if the decoder is still
+ // running. If it is, it will block on dequeue again. Otherwise, it will stop and release the
+ // MediaCodec.
+ private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
+
+ private final MediaCodecWrapperFactory mediaCodecWrapperFactory;
+ private final String codecName;
+ private final VideoCodecMimeType codecType;
+
+ private static class FrameInfo {
+ final long decodeStartTimeMs;
+ final int rotation;
+
+ FrameInfo(long decodeStartTimeMs, int rotation) {
+ this.decodeStartTimeMs = decodeStartTimeMs;
+ this.rotation = rotation;
+ }
+ }
+
+ private final BlockingDeque<FrameInfo> frameInfos;
+ private int colorFormat;
+
+ // Output thread runs a loop which polls MediaCodec for decoded output buffers. It reformats
+ // those buffers into VideoFrames and delivers them to the callback. Variable is set on decoder
+ // thread and is immutable while the codec is running.
+ @Nullable private Thread outputThread;
+
+ // Checker that ensures work is run on the output thread.
+ private ThreadChecker outputThreadChecker;
+
+ // Checker that ensures work is run on the decoder thread. The decoder thread is owned by the
+ // caller and must be used to call initDecode, decode, and release.
+ private ThreadChecker decoderThreadChecker;
+
+ private volatile boolean running;
+ @Nullable private volatile Exception shutdownException;
+
+ // Dimensions (width, height, stride, and sliceHeight) may be accessed by either the decode thread
+ // or the output thread. Accesses should be protected with this lock.
+ private final Object dimensionLock = new Object();
+ private int width;
+ private int height;
+ private int stride;
+ private int sliceHeight;
+
+ // Whether the decoder has finished the first frame. The codec may not change output dimensions
+ // after delivering the first frame. Only accessed on the output thread while the decoder is
+ // running.
+ private boolean hasDecodedFirstFrame;
+ // Whether the decoder has seen a key frame. The first frame must be a key frame. Only accessed
+ // on the decoder thread.
+ private boolean keyFrameRequired;
+
+ private final @Nullable EglBase.Context sharedContext;
+ // Valid and immutable while the decoder is running.
+ @Nullable private SurfaceTextureHelper surfaceTextureHelper;
+ @Nullable private Surface surface;
+
+ private static class DecodedTextureMetadata {
+ final long presentationTimestampUs;
+ final Integer decodeTimeMs;
+
+ DecodedTextureMetadata(long presentationTimestampUs, Integer decodeTimeMs) {
+ this.presentationTimestampUs = presentationTimestampUs;
+ this.decodeTimeMs = decodeTimeMs;
+ }
+ }
+
+ // Metadata for the last frame rendered to the texture.
+ private final Object renderedTextureMetadataLock = new Object();
+ @Nullable private DecodedTextureMetadata renderedTextureMetadata;
+
+ // Decoding proceeds asynchronously. This callback returns decoded frames to the caller. Valid
+ // and immutable while the decoder is running.
+ @Nullable private Callback callback;
+
+ // Valid and immutable while the decoder is running.
+ @Nullable private MediaCodecWrapper codec;
+
+ AndroidVideoDecoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName,
+ VideoCodecMimeType codecType, int colorFormat, @Nullable EglBase.Context sharedContext) {
+ if (!isSupportedColorFormat(colorFormat)) {
+ throw new IllegalArgumentException("Unsupported color format: " + colorFormat);
+ }
+ Logging.d(TAG,
+ "ctor name: " + codecName + " type: " + codecType + " color format: " + colorFormat
+ + " context: " + sharedContext);
+ this.mediaCodecWrapperFactory = mediaCodecWrapperFactory;
+ this.codecName = codecName;
+ this.codecType = codecType;
+ this.colorFormat = colorFormat;
+ this.sharedContext = sharedContext;
+ this.frameInfos = new LinkedBlockingDeque<>();
+ }
+
+ @Override
+ public VideoCodecStatus initDecode(Settings settings, Callback callback) {
+ this.decoderThreadChecker = new ThreadChecker();
+
+ this.callback = callback;
+ if (sharedContext != null) {
+ surfaceTextureHelper = createSurfaceTextureHelper();
+ surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
+ surfaceTextureHelper.startListening(this);
+ }
+ return initDecodeInternal(settings.width, settings.height);
+ }
+
+ // Internal variant is used when restarting the codec due to reconfiguration.
+ private VideoCodecStatus initDecodeInternal(int width, int height) {
+ decoderThreadChecker.checkIsOnValidThread();
+ Logging.d(TAG,
+ "initDecodeInternal name: " + codecName + " type: " + codecType + " width: " + width
+ + " height: " + height + " color format: " + colorFormat);
+ if (outputThread != null) {
+ Logging.e(TAG, "initDecodeInternal called while the codec is already running");
+ return VideoCodecStatus.FALLBACK_SOFTWARE;
+ }
+
+ // Note: it is not necessary to initialize dimensions under the lock, since the output thread
+ // is not running.
+ this.width = width;
+ this.height = height;
+
+ stride = width;
+ sliceHeight = height;
+ hasDecodedFirstFrame = false;
+ keyFrameRequired = true;
+
+ try {
+ codec = mediaCodecWrapperFactory.createByCodecName(codecName);
+ } catch (IOException | IllegalArgumentException | IllegalStateException e) {
+ Logging.e(TAG, "Cannot create media decoder " + codecName);
+ return VideoCodecStatus.FALLBACK_SOFTWARE;
+ }
+ try {
+ MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height);
+ if (sharedContext == null) {
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
+ }
+ codec.configure(format, surface, null, 0);
+ codec.start();
+ } catch (IllegalStateException | IllegalArgumentException e) {
+ Logging.e(TAG, "initDecode failed", e);
+ release();
+ return VideoCodecStatus.FALLBACK_SOFTWARE;
+ }
+ running = true;
+ outputThread = createOutputThread();
+ outputThread.start();
+
+ Logging.d(TAG, "initDecodeInternal done");
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public VideoCodecStatus decode(EncodedImage frame, DecodeInfo info) {
+ decoderThreadChecker.checkIsOnValidThread();
+ if (codec == null || callback == null) {
+ Logging.d(TAG, "decode uninitalized, codec: " + (codec != null) + ", callback: " + callback);
+ return VideoCodecStatus.UNINITIALIZED;
+ }
+
+ if (frame.buffer == null) {
+ Logging.e(TAG, "decode() - no input data");
+ return VideoCodecStatus.ERR_PARAMETER;
+ }
+
+ int size = frame.buffer.remaining();
+ if (size == 0) {
+ Logging.e(TAG, "decode() - input buffer empty");
+ return VideoCodecStatus.ERR_PARAMETER;
+ }
+
+ // Load dimensions from shared memory under the dimension lock.
+ final int width;
+ final int height;
+ synchronized (dimensionLock) {
+ width = this.width;
+ height = this.height;
+ }
+
+ // Check if the resolution changed and reset the codec if necessary.
+ if (frame.encodedWidth * frame.encodedHeight > 0
+ && (frame.encodedWidth != width || frame.encodedHeight != height)) {
+ VideoCodecStatus status = reinitDecode(frame.encodedWidth, frame.encodedHeight);
+ if (status != VideoCodecStatus.OK) {
+ return status;
+ }
+ }
+
+ if (keyFrameRequired) {
+ // Need to process a key frame first.
+ if (frame.frameType != EncodedImage.FrameType.VideoFrameKey) {
+ Logging.e(TAG, "decode() - key frame required first");
+ return VideoCodecStatus.NO_OUTPUT;
+ }
+ }
+
+ int index;
+ try {
+ index = codec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT_US);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "dequeueInputBuffer failed", e);
+ return VideoCodecStatus.ERROR;
+ }
+ if (index < 0) {
+ // Decoder is falling behind. No input buffers available.
+ // The decoder can't simply drop frames; it might lose a key frame.
+ Logging.e(TAG, "decode() - no HW buffers available; decoder falling behind");
+ return VideoCodecStatus.ERROR;
+ }
+
+ ByteBuffer buffer;
+ try {
+ buffer = codec.getInputBuffer(index);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "getInputBuffer with index=" + index + " failed", e);
+ return VideoCodecStatus.ERROR;
+ }
+
+ if (buffer.capacity() < size) {
+ Logging.e(TAG, "decode() - HW buffer too small");
+ return VideoCodecStatus.ERROR;
+ }
+ buffer.put(frame.buffer);
+
+ frameInfos.offer(new FrameInfo(SystemClock.elapsedRealtime(), frame.rotation));
+ try {
+ codec.queueInputBuffer(index, 0 /* offset */, size,
+ TimeUnit.NANOSECONDS.toMicros(frame.captureTimeNs), 0 /* flags */);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "queueInputBuffer failed", e);
+ frameInfos.pollLast();
+ return VideoCodecStatus.ERROR;
+ }
+ if (keyFrameRequired) {
+ keyFrameRequired = false;
+ }
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public String getImplementationName() {
+ return codecName;
+ }
+
+ @Override
+ public VideoCodecStatus release() {
+ // TODO(sakal): This is not called on the correct thread but is still called synchronously.
+ // Re-enable the check once this is called on the correct thread.
+ // decoderThreadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "release");
+ VideoCodecStatus status = releaseInternal();
+ if (surface != null) {
+ releaseSurface();
+ surface = null;
+ surfaceTextureHelper.stopListening();
+ surfaceTextureHelper.dispose();
+ surfaceTextureHelper = null;
+ }
+ synchronized (renderedTextureMetadataLock) {
+ renderedTextureMetadata = null;
+ }
+ callback = null;
+ frameInfos.clear();
+ return status;
+ }
+
+ // Internal variant is used when restarting the codec due to reconfiguration.
+ private VideoCodecStatus releaseInternal() {
+ if (!running) {
+ Logging.d(TAG, "release: Decoder is not running.");
+ return VideoCodecStatus.OK;
+ }
+ try {
+ // The outputThread actually stops and releases the codec once running is false.
+ running = false;
+ if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
+ // Log an exception to capture the stack trace and turn it into a TIMEOUT error.
+ Logging.e(TAG, "Media decoder release timeout", new RuntimeException());
+ return VideoCodecStatus.TIMEOUT;
+ }
+ if (shutdownException != null) {
+ // Log the exception and turn it into an error. Wrap the exception in a new exception to
+ // capture both the output thread's stack trace and this thread's stack trace.
+ Logging.e(TAG, "Media decoder release error", new RuntimeException(shutdownException));
+ shutdownException = null;
+ return VideoCodecStatus.ERROR;
+ }
+ } finally {
+ codec = null;
+ outputThread = null;
+ }
+ return VideoCodecStatus.OK;
+ }
+
+ private VideoCodecStatus reinitDecode(int newWidth, int newHeight) {
+ decoderThreadChecker.checkIsOnValidThread();
+ VideoCodecStatus status = releaseInternal();
+ if (status != VideoCodecStatus.OK) {
+ return status;
+ }
+ return initDecodeInternal(newWidth, newHeight);
+ }
+
+ private Thread createOutputThread() {
+ return new Thread("AndroidVideoDecoder.outputThread") {
+ @Override
+ public void run() {
+ outputThreadChecker = new ThreadChecker();
+ while (running) {
+ deliverDecodedFrame();
+ }
+ releaseCodecOnOutputThread();
+ }
+ };
+ }
+
+ // Visible for testing.
+ protected void deliverDecodedFrame() {
+ outputThreadChecker.checkIsOnValidThread();
+ try {
+ MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+ // Block until an output buffer is available (up to 100 milliseconds). If the timeout is
+ // exceeded, deliverDecodedFrame() will be called again on the next iteration of the output
+ // thread's loop. Blocking here prevents the output thread from busy-waiting while the codec
+ // is idle.
+ int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
+ if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+ reformat(codec.getOutputFormat());
+ return;
+ }
+
+ if (index < 0) {
+ Logging.v(TAG, "dequeueOutputBuffer returned " + index);
+ return;
+ }
+
+ FrameInfo frameInfo = frameInfos.poll();
+ Integer decodeTimeMs = null;
+ int rotation = 0;
+ if (frameInfo != null) {
+ decodeTimeMs = (int) (SystemClock.elapsedRealtime() - frameInfo.decodeStartTimeMs);
+ rotation = frameInfo.rotation;
+ }
+
+ hasDecodedFirstFrame = true;
+
+ if (surfaceTextureHelper != null) {
+ deliverTextureFrame(index, info, rotation, decodeTimeMs);
+ } else {
+ deliverByteFrame(index, info, rotation, decodeTimeMs);
+ }
+
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "deliverDecodedFrame failed", e);
+ }
+ }
+
+ private void deliverTextureFrame(final int index, final MediaCodec.BufferInfo info,
+ final int rotation, final Integer decodeTimeMs) {
+ // Load dimensions from shared memory under the dimension lock.
+ final int width;
+ final int height;
+ synchronized (dimensionLock) {
+ width = this.width;
+ height = this.height;
+ }
+
+ synchronized (renderedTextureMetadataLock) {
+ if (renderedTextureMetadata != null) {
+ codec.releaseOutputBuffer(index, false);
+ return; // We are still waiting for texture for the previous frame, drop this one.
+ }
+ surfaceTextureHelper.setTextureSize(width, height);
+ surfaceTextureHelper.setFrameRotation(rotation);
+ renderedTextureMetadata = new DecodedTextureMetadata(info.presentationTimeUs, decodeTimeMs);
+ codec.releaseOutputBuffer(index, /* render= */ true);
+ }
+ }
+
+ @Override
+ public void onFrame(VideoFrame frame) {
+ final VideoFrame newFrame;
+ final Integer decodeTimeMs;
+ final long timestampNs;
+ synchronized (renderedTextureMetadataLock) {
+ if (renderedTextureMetadata == null) {
+ throw new IllegalStateException(
+ "Rendered texture metadata was null in onTextureFrameAvailable.");
+ }
+ timestampNs = renderedTextureMetadata.presentationTimestampUs * 1000;
+ decodeTimeMs = renderedTextureMetadata.decodeTimeMs;
+ renderedTextureMetadata = null;
+ }
+ // Change timestamp of frame.
+ final VideoFrame frameWithModifiedTimeStamp =
+ new VideoFrame(frame.getBuffer(), frame.getRotation(), timestampNs);
+ callback.onDecodedFrame(frameWithModifiedTimeStamp, decodeTimeMs, null /* qp */);
+ }
+
+ private void deliverByteFrame(
+ int index, MediaCodec.BufferInfo info, int rotation, Integer decodeTimeMs) {
+ // Load dimensions from shared memory under the dimension lock.
+ int width;
+ int height;
+ int stride;
+ int sliceHeight;
+ synchronized (dimensionLock) {
+ width = this.width;
+ height = this.height;
+ stride = this.stride;
+ sliceHeight = this.sliceHeight;
+ }
+
+ // Output must be at least width * height bytes for Y channel, plus (width / 2) * (height / 2)
+ // bytes for each of the U and V channels.
+ if (info.size < width * height * 3 / 2) {
+ Logging.e(TAG, "Insufficient output buffer size: " + info.size);
+ return;
+ }
+
+ if (info.size < stride * height * 3 / 2 && sliceHeight == height && stride > width) {
+ // Some codecs (Exynos) report an incorrect stride. Correct it here.
+ // Expected size == stride * height * 3 / 2. A bit of algebra gives the correct stride as
+ // 2 * size / (3 * height).
+ stride = info.size * 2 / (height * 3);
+ }
+
+ ByteBuffer buffer = codec.getOutputBuffer(index);
+ buffer.position(info.offset);
+ buffer.limit(info.offset + info.size);
+ buffer = buffer.slice();
+
+ final VideoFrame.Buffer frameBuffer;
+ if (colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) {
+ frameBuffer = copyI420Buffer(buffer, stride, sliceHeight, width, height);
+ } else {
+ // All other supported color formats are NV12.
+ frameBuffer = copyNV12ToI420Buffer(buffer, stride, sliceHeight, width, height);
+ }
+ codec.releaseOutputBuffer(index, /* render= */ false);
+
+ long presentationTimeNs = info.presentationTimeUs * 1000;
+ VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs);
+
+ // Note that qp is parsed on the C++ side.
+ callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */);
+ frame.release();
+ }
+
+ private VideoFrame.Buffer copyNV12ToI420Buffer(
+ ByteBuffer buffer, int stride, int sliceHeight, int width, int height) {
+ // toI420 copies the buffer.
+ return new NV12Buffer(width, height, stride, sliceHeight, buffer, null /* releaseCallback */)
+ .toI420();
+ }
+
+ private VideoFrame.Buffer copyI420Buffer(
+ ByteBuffer buffer, int stride, int sliceHeight, int width, int height) {
+ if (stride % 2 != 0) {
+ throw new AssertionError("Stride is not divisible by two: " + stride);
+ }
+
+ // Note that the case with odd `sliceHeight` is handled in a special way.
+ // The chroma height contained in the payload is rounded down instead of
+ // up, making it one row less than what we expect in WebRTC. Therefore, we
+ // have to duplicate the last chroma rows for this case. Also, the offset
+ // between the Y plane and the U plane is unintuitive for this case. See
+ // http://bugs.webrtc.org/6651 for more info.
+ final int chromaWidth = (width + 1) / 2;
+ final int chromaHeight = (sliceHeight % 2 == 0) ? (height + 1) / 2 : height / 2;
+
+ final int uvStride = stride / 2;
+
+ final int yPos = 0;
+ final int yEnd = yPos + stride * height;
+ final int uPos = yPos + stride * sliceHeight;
+ final int uEnd = uPos + uvStride * chromaHeight;
+ final int vPos = uPos + uvStride * sliceHeight / 2;
+ final int vEnd = vPos + uvStride * chromaHeight;
+
+ VideoFrame.I420Buffer frameBuffer = allocateI420Buffer(width, height);
+
+ buffer.limit(yEnd);
+ buffer.position(yPos);
+ copyPlane(
+ buffer.slice(), stride, frameBuffer.getDataY(), frameBuffer.getStrideY(), width, height);
+
+ buffer.limit(uEnd);
+ buffer.position(uPos);
+ copyPlane(buffer.slice(), uvStride, frameBuffer.getDataU(), frameBuffer.getStrideU(),
+ chromaWidth, chromaHeight);
+ if (sliceHeight % 2 == 1) {
+ buffer.position(uPos + uvStride * (chromaHeight - 1)); // Seek to beginning of last full row.
+
+ ByteBuffer dataU = frameBuffer.getDataU();
+ dataU.position(frameBuffer.getStrideU() * chromaHeight); // Seek to beginning of last row.
+ dataU.put(buffer); // Copy the last row.
+ }
+
+ buffer.limit(vEnd);
+ buffer.position(vPos);
+ copyPlane(buffer.slice(), uvStride, frameBuffer.getDataV(), frameBuffer.getStrideV(),
+ chromaWidth, chromaHeight);
+ if (sliceHeight % 2 == 1) {
+ buffer.position(vPos + uvStride * (chromaHeight - 1)); // Seek to beginning of last full row.
+
+ ByteBuffer dataV = frameBuffer.getDataV();
+ dataV.position(frameBuffer.getStrideV() * chromaHeight); // Seek to beginning of last row.
+ dataV.put(buffer); // Copy the last row.
+ }
+
+ return frameBuffer;
+ }
+
+ private void reformat(MediaFormat format) {
+ outputThreadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "Decoder format changed: " + format);
+ final int newWidth;
+ final int newHeight;
+ if (format.containsKey(MediaFormat.KEY_CROP_LEFT)
+ && format.containsKey(MediaFormat.KEY_CROP_RIGHT)
+ && format.containsKey(MediaFormat.KEY_CROP_BOTTOM)
+ && format.containsKey(MediaFormat.KEY_CROP_TOP)) {
+ newWidth = 1 + format.getInteger(MediaFormat.KEY_CROP_RIGHT)
+ - format.getInteger(MediaFormat.KEY_CROP_LEFT);
+ newHeight = 1 + format.getInteger(MediaFormat.KEY_CROP_BOTTOM)
+ - format.getInteger(MediaFormat.KEY_CROP_TOP);
+ } else {
+ newWidth = format.getInteger(MediaFormat.KEY_WIDTH);
+ newHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
+ }
+ // Compare to existing width, height, and save values under the dimension lock.
+ synchronized (dimensionLock) {
+ if (newWidth != width || newHeight != height) {
+ if (hasDecodedFirstFrame) {
+ stopOnOutputThread(new RuntimeException("Unexpected size change. "
+ + "Configured " + width + "*" + height + ". "
+ + "New " + newWidth + "*" + newHeight));
+ return;
+ } else if (newWidth <= 0 || newHeight <= 0) {
+ Logging.w(TAG,
+ "Unexpected format dimensions. Configured " + width + "*" + height + ". "
+ + "New " + newWidth + "*" + newHeight + ". Skip it");
+ return;
+ }
+ width = newWidth;
+ height = newHeight;
+ }
+ }
+
+ // Note: texture mode ignores colorFormat. Hence, if the texture helper is non-null, skip
+ // color format updates.
+ if (surfaceTextureHelper == null && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
+ colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
+ Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
+ if (!isSupportedColorFormat(colorFormat)) {
+ stopOnOutputThread(new IllegalStateException("Unsupported color format: " + colorFormat));
+ return;
+ }
+ }
+
+ // Save stride and sliceHeight under the dimension lock.
+ synchronized (dimensionLock) {
+ if (format.containsKey(MediaFormat.KEY_STRIDE)) {
+ stride = format.getInteger(MediaFormat.KEY_STRIDE);
+ }
+ if (format.containsKey(MediaFormat.KEY_SLICE_HEIGHT)) {
+ sliceHeight = format.getInteger(MediaFormat.KEY_SLICE_HEIGHT);
+ }
+ Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
+ stride = Math.max(width, stride);
+ sliceHeight = Math.max(height, sliceHeight);
+ }
+ }
+
+ private void releaseCodecOnOutputThread() {
+ outputThreadChecker.checkIsOnValidThread();
+ Logging.d(TAG, "Releasing MediaCodec on output thread");
+ try {
+ codec.stop();
+ } catch (Exception e) {
+ Logging.e(TAG, "Media decoder stop failed", e);
+ }
+ try {
+ codec.release();
+ } catch (Exception e) {
+ Logging.e(TAG, "Media decoder release failed", e);
+ // Propagate exceptions caught during release back to the main thread.
+ shutdownException = e;
+ }
+ Logging.d(TAG, "Release on output thread done");
+ }
+
+ private void stopOnOutputThread(Exception e) {
+ outputThreadChecker.checkIsOnValidThread();
+ running = false;
+ shutdownException = e;
+ }
+
+ private boolean isSupportedColorFormat(int colorFormat) {
+ for (int supported : MediaCodecUtils.DECODER_COLOR_FORMATS) {
+ if (supported == colorFormat) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ // Visible for testing.
+ protected SurfaceTextureHelper createSurfaceTextureHelper() {
+ return SurfaceTextureHelper.create("decoder-texture-thread", sharedContext);
+ }
+
+ // Visible for testing.
+ // TODO(sakal): Remove once Robolectric commit fa991a0 has been rolled to WebRTC.
+ protected void releaseSurface() {
+ surface.release();
+ }
+
+ // Visible for testing.
+ protected VideoFrame.I420Buffer allocateI420Buffer(int width, int height) {
+ return JavaI420Buffer.allocate(width, height);
+ }
+
+ // Visible for testing.
+ protected void copyPlane(
+ ByteBuffer src, int srcStride, ByteBuffer dst, int dstStride, int width, int height) {
+ YuvHelper.copyPlane(src, srcStride, dst, dstStride, width, height);
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/ApplicationContextProvider.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/ApplicationContextProvider.java
new file mode 100644
index 00000000..6400a047
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/ApplicationContextProvider.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+
+public class ApplicationContextProvider {
+ @CalledByNative
+ public static Context getApplicationContext() {
+ return ContextUtils.getApplicationContext();
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioDecoderFactoryFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioDecoderFactoryFactory.java
new file mode 100644
index 00000000..dd3e2628
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioDecoderFactoryFactory.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Implementations of this interface can create a native {@code webrtc::AudioDecoderFactory}.
+ */
+public interface AudioDecoderFactoryFactory {
+ /**
+ * Returns a pointer to a {@code webrtc::AudioDecoderFactory}. The caller takes ownership.
+ */
+ long createNativeAudioDecoderFactory();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioEncoderFactoryFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioEncoderFactoryFactory.java
new file mode 100644
index 00000000..814b71ab
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioEncoderFactoryFactory.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Implementations of this interface can create a native {@code webrtc::AudioEncoderFactory}.
+ */
+public interface AudioEncoderFactoryFactory {
+ /**
+ * Returns a pointer to a {@code webrtc::AudioEncoderFactory}. The caller takes ownership.
+ */
+ long createNativeAudioEncoderFactory();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioProcessingFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioProcessingFactory.java
new file mode 100644
index 00000000..bd8fdb89
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioProcessingFactory.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Factory for creating webrtc::AudioProcessing instances. */
+public interface AudioProcessingFactory {
+ /**
+ * Dynamically allocates a webrtc::AudioProcessing instance and returns a pointer to it.
+ * The caller takes ownership of the object.
+ */
+ public long createNative();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioSource.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioSource.java
new file mode 100644
index 00000000..f8104e59
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioSource.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Java wrapper for a C++ AudioSourceInterface. Used as the source for one or
+ * more {@code AudioTrack} objects.
+ */
+public class AudioSource extends MediaSource {
+ public AudioSource(long nativeSource) {
+ super(nativeSource);
+ }
+
+ /** Returns a pointer to webrtc::AudioSourceInterface. */
+ long getNativeAudioSource() {
+ return getNativeMediaSource();
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioTrack.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioTrack.java
new file mode 100644
index 00000000..ca745db6
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioTrack.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ AudioTrackInterface */
+public class AudioTrack extends MediaStreamTrack {
+ public AudioTrack(long nativeTrack) {
+ super(nativeTrack);
+ }
+
+ /** Sets the volume for the underlying MediaSource. Volume is a gain value in the range
+ * 0 to 10.
+ */
+ public void setVolume(double volume) {
+ nativeSetVolume(getNativeAudioTrack(), volume);
+ }
+
+ /** Returns a pointer to webrtc::AudioTrackInterface. */
+ long getNativeAudioTrack() {
+ return getNativeMediaStreamTrack();
+ }
+
+ private static native void nativeSetVolume(long track, double volume);
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/BaseBitrateAdjuster.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/BaseBitrateAdjuster.java
new file mode 100644
index 00000000..3b5f5d29
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/BaseBitrateAdjuster.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** BitrateAdjuster that tracks bitrate and framerate but does not adjust them. */
+class BaseBitrateAdjuster implements BitrateAdjuster {
+ protected int targetBitrateBps;
+ protected double targetFramerateFps;
+
+ @Override
+ public void setTargets(int targetBitrateBps, double targetFramerateFps) {
+ this.targetBitrateBps = targetBitrateBps;
+ this.targetFramerateFps = targetFramerateFps;
+ }
+
+ @Override
+ public void reportEncodedFrame(int size) {
+ // No op.
+ }
+
+ @Override
+ public int getAdjustedBitrateBps() {
+ return targetBitrateBps;
+ }
+
+ @Override
+ public double getAdjustedFramerateFps() {
+ return targetFramerateFps;
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/BitrateAdjuster.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/BitrateAdjuster.java
new file mode 100644
index 00000000..bfa08bad
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/BitrateAdjuster.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Object that adjusts the bitrate of a hardware codec. */
+interface BitrateAdjuster {
+ /**
+ * Sets the target bitrate in bits per second and framerate in frames per second.
+ */
+ void setTargets(int targetBitrateBps, double targetFramerateFps);
+
+ /**
+ * Should be used to report the size of an encoded frame to the bitrate adjuster. Use
+ * getAdjustedBitrateBps to get the updated bitrate after calling this method.
+ */
+ void reportEncodedFrame(int size);
+
+ /** Gets the current bitrate. */
+ int getAdjustedBitrateBps();
+
+ /** Gets the current framerate. */
+ double getAdjustedFramerateFps();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/BuiltinAudioDecoderFactoryFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/BuiltinAudioDecoderFactoryFactory.java
new file mode 100644
index 00000000..5ebc19f2
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/BuiltinAudioDecoderFactoryFactory.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Creates a native {@code webrtc::AudioDecoderFactory} with the builtin audio decoders.
+ */
+public class BuiltinAudioDecoderFactoryFactory implements AudioDecoderFactoryFactory {
+ @Override
+ public long createNativeAudioDecoderFactory() {
+ return nativeCreateBuiltinAudioDecoderFactory();
+ }
+
+ private static native long nativeCreateBuiltinAudioDecoderFactory();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/BuiltinAudioEncoderFactoryFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/BuiltinAudioEncoderFactoryFactory.java
new file mode 100644
index 00000000..e884d4c3
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/BuiltinAudioEncoderFactoryFactory.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * This class creates a native {@code webrtc::AudioEncoderFactory} with the builtin audio encoders.
+ */
+public class BuiltinAudioEncoderFactoryFactory implements AudioEncoderFactoryFactory {
+ @Override
+ public long createNativeAudioEncoderFactory() {
+ return nativeCreateBuiltinAudioEncoderFactory();
+ }
+
+ private static native long nativeCreateBuiltinAudioEncoderFactory();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/CallSessionFileRotatingLogSink.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/CallSessionFileRotatingLogSink.java
new file mode 100644
index 00000000..f4edb588
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/CallSessionFileRotatingLogSink.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class CallSessionFileRotatingLogSink {
+ private long nativeSink;
+
+ public static byte[] getLogData(String dirPath) {
+ if (dirPath == null) {
+ throw new IllegalArgumentException("dirPath may not be null.");
+ }
+ return nativeGetLogData(dirPath);
+ }
+
+ public CallSessionFileRotatingLogSink(
+ String dirPath, int maxFileSize, Logging.Severity severity) {
+ if (dirPath == null) {
+ throw new IllegalArgumentException("dirPath may not be null.");
+ }
+ nativeSink = nativeAddSink(dirPath, maxFileSize, severity.ordinal());
+ }
+
+ public void dispose() {
+ if (nativeSink != 0) {
+ nativeDeleteSink(nativeSink);
+ nativeSink = 0;
+ }
+ }
+
+ private static native long nativeAddSink(String dirPath, int maxFileSize, int severity);
+ private static native void nativeDeleteSink(long sink);
+ private static native byte[] nativeGetLogData(String dirPath);
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/CalledByNative.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/CalledByNative.java
new file mode 100644
index 00000000..9b410cea
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/CalledByNative.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * @CalledByNative is used by the JNI generator to create the necessary JNI
+ * bindings and expose this method to native code.
+ */
+@Target({ElementType.CONSTRUCTOR, ElementType.METHOD})
+@Retention(RetentionPolicy.CLASS)
+public @interface CalledByNative {
+ /*
+ * If present, tells which inner class the method belongs to.
+ */
+ public String value() default "";
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/CalledByNativeUnchecked.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/CalledByNativeUnchecked.java
new file mode 100644
index 00000000..8a00a7fa
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/CalledByNativeUnchecked.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * @CalledByNativeUnchecked is used to generate JNI bindings that do not check for exceptions.
+ * It only makes sense to use this annotation on methods that declare a throws... spec.
+ * However, note that the exception received native side maybe an 'unchecked' (RuntimeExpception)
+ * such as NullPointerException, so the native code should differentiate these cases.
+ * Usage of this should be very rare; where possible handle exceptions in the Java side and use a
+ * return value to indicate success / failure.
+ */
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.CLASS)
+public @interface CalledByNativeUnchecked {
+ /*
+ * If present, tells which inner class the method belongs to.
+ */
+ public String value() default "";
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera1Capturer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera1Capturer.java
new file mode 100644
index 00000000..de172aa1
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera1Capturer.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+
+public class Camera1Capturer extends CameraCapturer {
+ private final boolean captureToTexture;
+
+ public Camera1Capturer(
+ String cameraName, CameraEventsHandler eventsHandler, boolean captureToTexture) {
+ super(cameraName, eventsHandler, new Camera1Enumerator(captureToTexture));
+
+ this.captureToTexture = captureToTexture;
+ }
+
+ @Override
+ protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
+ CameraSession.Events events, Context applicationContext,
+ SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
+ int framerate) {
+ Camera1Session.create(createSessionCallback, events, captureToTexture, applicationContext,
+ surfaceTextureHelper, cameraName, width, height, framerate);
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera1Enumerator.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera1Enumerator.java
new file mode 100644
index 00000000..fb1a21f3
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera1Enumerator.java
@@ -0,0 +1,185 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.os.SystemClock;
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.List;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+@SuppressWarnings("deprecation")
+public class Camera1Enumerator implements CameraEnumerator {
+ private final static String TAG = "Camera1Enumerator";
+ // Each entry contains the supported formats for corresponding camera index. The formats for all
+ // cameras are enumerated on the first call to getSupportedFormats(), and cached for future
+ // reference.
+ private static List<List<CaptureFormat>> cachedSupportedFormats;
+
+ private final boolean captureToTexture;
+
+ public Camera1Enumerator() {
+ this(true /* captureToTexture */);
+ }
+
+ public Camera1Enumerator(boolean captureToTexture) {
+ this.captureToTexture = captureToTexture;
+ }
+
+ // Returns device names that can be used to create a new VideoCapturerAndroid.
+ @Override
+ public String[] getDeviceNames() {
+ ArrayList<String> namesList = new ArrayList<>();
+ for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+ String name = getDeviceName(i);
+ if (name != null) {
+ namesList.add(name);
+ Logging.d(TAG, "Index: " + i + ". " + name);
+ } else {
+ Logging.e(TAG, "Index: " + i + ". Failed to query camera name.");
+ }
+ }
+ String[] namesArray = new String[namesList.size()];
+ return namesList.toArray(namesArray);
+ }
+
+ @Override
+ public boolean isFrontFacing(String deviceName) {
+ android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
+ return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT;
+ }
+
+ @Override
+ public boolean isBackFacing(String deviceName) {
+ android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
+ return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK;
+ }
+
+ @Override
+ public List<CaptureFormat> getSupportedFormats(String deviceName) {
+ return getSupportedFormats(getCameraIndex(deviceName));
+ }
+
+ @Override
+ public CameraVideoCapturer createCapturer(
+ String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
+ return new Camera1Capturer(deviceName, eventsHandler, captureToTexture);
+ }
+
+ private static @Nullable android.hardware.Camera.CameraInfo getCameraInfo(int index) {
+ android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
+ try {
+ android.hardware.Camera.getCameraInfo(index, info);
+ } catch (Exception e) {
+ Logging.e(TAG, "getCameraInfo failed on index " + index, e);
+ return null;
+ }
+ return info;
+ }
+
+ static synchronized List<CaptureFormat> getSupportedFormats(int cameraId) {
+ if (cachedSupportedFormats == null) {
+ cachedSupportedFormats = new ArrayList<List<CaptureFormat>>();
+ for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+ cachedSupportedFormats.add(enumerateFormats(i));
+ }
+ }
+ return cachedSupportedFormats.get(cameraId);
+ }
+
+ private static List<CaptureFormat> enumerateFormats(int cameraId) {
+ Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
+ final long startTimeMs = SystemClock.elapsedRealtime();
+ final android.hardware.Camera.Parameters parameters;
+ android.hardware.Camera camera = null;
+ try {
+ Logging.d(TAG, "Opening camera with index " + cameraId);
+ camera = android.hardware.Camera.open(cameraId);
+ parameters = camera.getParameters();
+ } catch (RuntimeException e) {
+ Logging.e(TAG, "Open camera failed on camera index " + cameraId, e);
+ return new ArrayList<CaptureFormat>();
+ } finally {
+ if (camera != null) {
+ camera.release();
+ }
+ }
+
+ final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
+ try {
+ int minFps = 0;
+ int maxFps = 0;
+ final List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
+ if (listFpsRange != null) {
+ // getSupportedPreviewFpsRange() returns a sorted list. Take the fps range
+ // corresponding to the highest fps.
+ final int[] range = listFpsRange.get(listFpsRange.size() - 1);
+ minFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
+ maxFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
+ }
+ for (android.hardware.Camera.Size size : parameters.getSupportedPreviewSizes()) {
+ formatList.add(new CaptureFormat(size.width, size.height, minFps, maxFps));
+ }
+ } catch (Exception e) {
+ Logging.e(TAG, "getSupportedFormats() failed on camera index " + cameraId, e);
+ }
+
+ final long endTimeMs = SystemClock.elapsedRealtime();
+ Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+ + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
+ return formatList;
+ }
+
+ // Convert from android.hardware.Camera.Size to Size.
+ static List<Size> convertSizes(List<android.hardware.Camera.Size> cameraSizes) {
+ final List<Size> sizes = new ArrayList<Size>();
+ for (android.hardware.Camera.Size size : cameraSizes) {
+ sizes.add(new Size(size.width, size.height));
+ }
+ return sizes;
+ }
+
+ // Convert from int[2] to CaptureFormat.FramerateRange.
+ static List<CaptureFormat.FramerateRange> convertFramerates(List<int[]> arrayRanges) {
+ final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
+ for (int[] range : arrayRanges) {
+ ranges.add(new CaptureFormat.FramerateRange(
+ range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
+ range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]));
+ }
+ return ranges;
+ }
+
+ // Returns the camera index for camera with name `deviceName`, or throws IllegalArgumentException
+ // if no such camera can be found.
+ static int getCameraIndex(String deviceName) {
+ Logging.d(TAG, "getCameraIndex: " + deviceName);
+ for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+ if (deviceName.equals(getDeviceName(i))) {
+ return i;
+ }
+ }
+ throw new IllegalArgumentException("No such camera: " + deviceName);
+ }
+
+ // Returns the name of the camera with camera index. Returns null if the
+ // camera can not be used.
+ static @Nullable String getDeviceName(int index) {
+ android.hardware.Camera.CameraInfo info = getCameraInfo(index);
+ if (info == null) {
+ return null;
+ }
+
+ String facing =
+ (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
+ return "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation;
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera1Session.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera1Session.java
new file mode 100644
index 00000000..a54f7201
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera1Session.java
@@ -0,0 +1,340 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.hardware.Camera;
+import android.os.Handler;
+import android.os.SystemClock;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+@SuppressWarnings("deprecation")
+class Camera1Session implements CameraSession {
+ private static final String TAG = "Camera1Session";
+ private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
+
+ private static final Histogram camera1StartTimeMsHistogram =
+ Histogram.createCounts("WebRTC.Android.Camera1.StartTimeMs", 1, 10000, 50);
+ private static final Histogram camera1StopTimeMsHistogram =
+ Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50);
+ private static final Histogram camera1ResolutionHistogram = Histogram.createEnumeration(
+ "WebRTC.Android.Camera1.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());
+
+ private static enum SessionState { RUNNING, STOPPED }
+
+ private final Handler cameraThreadHandler;
+ private final Events events;
+ private final boolean captureToTexture;
+ private final Context applicationContext;
+ private final SurfaceTextureHelper surfaceTextureHelper;
+ private final int cameraId;
+ private final Camera camera;
+ private final Camera.CameraInfo info;
+ private final CaptureFormat captureFormat;
+ // Used only for stats. Only used on the camera thread.
+ private final long constructionTimeNs; // Construction time of this class.
+
+ private SessionState state;
+ private boolean firstFrameReported;
+
+ // TODO(titovartem) make correct fix during webrtc:9175
+ @SuppressWarnings("ByteBufferBackingArray")
+ public static void create(final CreateSessionCallback callback, final Events events,
+ final boolean captureToTexture, final Context applicationContext,
+ final SurfaceTextureHelper surfaceTextureHelper, final String cameraName,
+ final int width, final int height, final int framerate) {
+ final long constructionTimeNs = System.nanoTime();
+ Logging.d(TAG, "Open camera " + cameraName);
+ events.onCameraOpening();
+
+ final int cameraId;
+ try {
+ cameraId = Camera1Enumerator.getCameraIndex(cameraName);
+ } catch (IllegalArgumentException e) {
+ callback.onFailure(FailureType.ERROR, e.getMessage());
+ return;
+ }
+
+ final Camera camera;
+ try {
+ camera = Camera.open(cameraId);
+ } catch (RuntimeException e) {
+ callback.onFailure(FailureType.ERROR, e.getMessage());
+ return;
+ }
+
+ if (camera == null) {
+ callback.onFailure(
+ FailureType.ERROR, "Camera.open returned null for camera id = " + cameraId);
+ return;
+ }
+
+ try {
+ camera.setPreviewTexture(surfaceTextureHelper.getSurfaceTexture());
+ } catch (IOException | RuntimeException e) {
+ camera.release();
+ callback.onFailure(FailureType.ERROR, e.getMessage());
+ return;
+ }
+
+ final Camera.CameraInfo info = new Camera.CameraInfo();
+ Camera.getCameraInfo(cameraId, info);
+
+ final CaptureFormat captureFormat;
+ try {
+ final Camera.Parameters parameters = camera.getParameters();
+ captureFormat = findClosestCaptureFormat(parameters, width, height, framerate);
+ final Size pictureSize = findClosestPictureSize(parameters, width, height);
+ updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture);
+ } catch (RuntimeException e) {
+ camera.release();
+ callback.onFailure(FailureType.ERROR, e.getMessage());
+ return;
+ }
+
+ if (!captureToTexture) {
+ final int frameSize = captureFormat.frameSize();
+ for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
+ final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
+ camera.addCallbackBuffer(buffer.array());
+ }
+ }
+
+ // Calculate orientation manually and send it as CVO instead.
+ try {
+ camera.setDisplayOrientation(0 /* degrees */);
+ } catch (RuntimeException e) {
+ camera.release();
+ callback.onFailure(FailureType.ERROR, e.getMessage());
+ return;
+ }
+
+ callback.onDone(new Camera1Session(events, captureToTexture, applicationContext,
+ surfaceTextureHelper, cameraId, camera, info, captureFormat, constructionTimeNs));
+ }
+
+ private static void updateCameraParameters(Camera camera, Camera.Parameters parameters,
+ CaptureFormat captureFormat, Size pictureSize, boolean captureToTexture) {
+ final List<String> focusModes = parameters.getSupportedFocusModes();
+
+ parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
+ parameters.setPreviewSize(captureFormat.width, captureFormat.height);
+ parameters.setPictureSize(pictureSize.width, pictureSize.height);
+ if (!captureToTexture) {
+ parameters.setPreviewFormat(captureFormat.imageFormat);
+ }
+
+ if (parameters.isVideoStabilizationSupported()) {
+ parameters.setVideoStabilization(true);
+ }
+ if (focusModes != null && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
+ parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
+ }
+ camera.setParameters(parameters);
+ }
+
+ private static CaptureFormat findClosestCaptureFormat(
+ Camera.Parameters parameters, int width, int height, int framerate) {
+ // Find closest supported format for `width` x `height` @ `framerate`.
+ final List<CaptureFormat.FramerateRange> supportedFramerates =
+ Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
+ Logging.d(TAG, "Available fps ranges: " + supportedFramerates);
+
+ final CaptureFormat.FramerateRange fpsRange =
+ CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate);
+
+ final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
+ Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height);
+ CameraEnumerationAndroid.reportCameraResolution(camera1ResolutionHistogram, previewSize);
+
+ return new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
+ }
+
+ private static Size findClosestPictureSize(Camera.Parameters parameters, int width, int height) {
+ return CameraEnumerationAndroid.getClosestSupportedSize(
+ Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height);
+ }
+
+ private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
+ SurfaceTextureHelper surfaceTextureHelper, int cameraId, Camera camera,
+ Camera.CameraInfo info, CaptureFormat captureFormat, long constructionTimeNs) {
+ Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
+
+ this.cameraThreadHandler = new Handler();
+ this.events = events;
+ this.captureToTexture = captureToTexture;
+ this.applicationContext = applicationContext;
+ this.surfaceTextureHelper = surfaceTextureHelper;
+ this.cameraId = cameraId;
+ this.camera = camera;
+ this.info = info;
+ this.captureFormat = captureFormat;
+ this.constructionTimeNs = constructionTimeNs;
+
+ surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
+
+ startCapturing();
+ }
+
+ @Override
+ public void stop() {
+ Logging.d(TAG, "Stop camera1 session on camera " + cameraId);
+ checkIsOnCameraThread();
+ if (state != SessionState.STOPPED) {
+ final long stopStartTime = System.nanoTime();
+ stopInternal();
+ final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
+ camera1StopTimeMsHistogram.addSample(stopTimeMs);
+ }
+ }
+
+ private void startCapturing() {
+ Logging.d(TAG, "Start capturing");
+ checkIsOnCameraThread();
+
+ state = SessionState.RUNNING;
+
+ camera.setErrorCallback(new Camera.ErrorCallback() {
+ @Override
+ public void onError(int error, Camera camera) {
+ String errorMessage;
+ if (error == Camera.CAMERA_ERROR_SERVER_DIED) {
+ errorMessage = "Camera server died!";
+ } else {
+ errorMessage = "Camera error: " + error;
+ }
+ Logging.e(TAG, errorMessage);
+ stopInternal();
+ if (error == Camera.CAMERA_ERROR_EVICTED) {
+ events.onCameraDisconnected(Camera1Session.this);
+ } else {
+ events.onCameraError(Camera1Session.this, errorMessage);
+ }
+ }
+ });
+
+ if (captureToTexture) {
+ listenForTextureFrames();
+ } else {
+ listenForBytebufferFrames();
+ }
+ try {
+ camera.startPreview();
+ } catch (RuntimeException e) {
+ stopInternal();
+ events.onCameraError(this, e.getMessage());
+ }
+ }
+
+ private void stopInternal() {
+ Logging.d(TAG, "Stop internal");
+ checkIsOnCameraThread();
+ if (state == SessionState.STOPPED) {
+ Logging.d(TAG, "Camera is already stopped");
+ return;
+ }
+
+ state = SessionState.STOPPED;
+ surfaceTextureHelper.stopListening();
+ // Note: stopPreview or other driver code might deadlock. Deadlock in
+ // Camera._stopPreview(Native Method) has been observed on
+ // Nexus 5 (hammerhead), OS version LMY48I.
+ camera.stopPreview();
+ camera.release();
+ events.onCameraClosed(this);
+ Logging.d(TAG, "Stop done");
+ }
+
+ private void listenForTextureFrames() {
+ surfaceTextureHelper.startListening((VideoFrame frame) -> {
+ checkIsOnCameraThread();
+
+ if (state != SessionState.RUNNING) {
+ Logging.d(TAG, "Texture frame captured but camera is no longer running.");
+ return;
+ }
+
+ if (!firstFrameReported) {
+ final int startTimeMs =
+ (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
+ camera1StartTimeMsHistogram.addSample(startTimeMs);
+ firstFrameReported = true;
+ }
+
+ // Undo the mirror that the OS "helps" us with.
+ // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
+ final VideoFrame modifiedFrame =
+ new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix(
+ (TextureBufferImpl) frame.getBuffer(),
+ /* mirror= */ info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT,
+ /* rotation= */ 0),
+ /* rotation= */ getFrameOrientation(), frame.getTimestampNs());
+ events.onFrameCaptured(Camera1Session.this, modifiedFrame);
+ modifiedFrame.release();
+ });
+ }
+
+ private void listenForBytebufferFrames() {
+ camera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
+ @Override
+ public void onPreviewFrame(final byte[] data, Camera callbackCamera) {
+ checkIsOnCameraThread();
+
+ if (callbackCamera != camera) {
+ Logging.e(TAG, "Callback from a different camera. This should never happen.");
+ return;
+ }
+
+ if (state != SessionState.RUNNING) {
+ Logging.d(TAG, "Bytebuffer frame captured but camera is no longer running.");
+ return;
+ }
+
+ final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
+
+ if (!firstFrameReported) {
+ final int startTimeMs =
+ (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
+ camera1StartTimeMsHistogram.addSample(startTimeMs);
+ firstFrameReported = true;
+ }
+
+ VideoFrame.Buffer frameBuffer = new NV21Buffer(
+ data, captureFormat.width, captureFormat.height, () -> cameraThreadHandler.post(() -> {
+ if (state == SessionState.RUNNING) {
+ camera.addCallbackBuffer(data);
+ }
+ }));
+ final VideoFrame frame = new VideoFrame(frameBuffer, getFrameOrientation(), captureTimeNs);
+ events.onFrameCaptured(Camera1Session.this, frame);
+ frame.release();
+ }
+ });
+ }
+
+ private int getFrameOrientation() {
+ int rotation = CameraSession.getDeviceOrientation(applicationContext);
+ if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
+ rotation = 360 - rotation;
+ }
+ return (info.orientation + rotation) % 360;
+ }
+
+ private void checkIsOnCameraThread() {
+ if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
+ throw new IllegalStateException("Wrong thread");
+ }
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera2Capturer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera2Capturer.java
new file mode 100644
index 00000000..c4becf48
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera2Capturer.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.hardware.camera2.CameraManager;
+import androidx.annotation.Nullable;
+
+public class Camera2Capturer extends CameraCapturer { // CameraVideoCapturer backed by the android.hardware.camera2 API.
+ private final Context context;
+ @Nullable private final CameraManager cameraManager; // @Nullable: getSystemService() may return null.
+
+ public Camera2Capturer(Context context, String cameraName, CameraEventsHandler eventsHandler) {
+ super(cameraName, eventsHandler, new Camera2Enumerator(context));
+
+ this.context = context;
+ cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+ }
+
+ @Override
+ protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
+ CameraSession.Events events, Context applicationContext,
+ SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
+ int framerate) { // Delegates to Camera2Session; creation is asynchronous, reported via the callback.
+ Camera2Session.create(createSessionCallback, events, applicationContext, cameraManager,
+ surfaceTextureHelper, cameraName, width, height, framerate);
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera2Enumerator.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera2Enumerator.java
new file mode 100644
index 00000000..456d8cd0
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera2Enumerator.java
@@ -0,0 +1,239 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.graphics.Rect;
+import android.graphics.SurfaceTexture;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.os.Build;
+import android.os.SystemClock;
+import android.util.Range;
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+public class Camera2Enumerator implements CameraEnumerator { // Enumerates camera2 devices and their capture formats.
+  private final static String TAG = "Camera2Enumerator";
+  private final static double NANO_SECONDS_PER_SECOND = 1.0e9;
+
+  // Each entry contains the supported formats for a given camera index. The formats are enumerated
+  // lazily in getSupportedFormats(), and cached for future reference.
+  private static final Map<String, List<CaptureFormat>> cachedSupportedFormats =
+      new HashMap<String, List<CaptureFormat>>();
+
+  final Context context;
+  @Nullable final CameraManager cameraManager;
+
+  public Camera2Enumerator(Context context) {
+    this.context = context;
+    this.cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+  }
+
+  @Override
+  public String[] getDeviceNames() {
+    try {
+      return cameraManager.getCameraIdList();
+    } catch (CameraAccessException e) {
+      Logging.e(TAG, "Camera access exception", e);
+      return new String[] {};
+    }
+  }
+
+  @Override
+  public boolean isFrontFacing(String deviceName) {
+    CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
+
+    return characteristics != null
+        && characteristics.get(CameraCharacteristics.LENS_FACING)
+        == CameraMetadata.LENS_FACING_FRONT;
+  }
+
+  @Override
+  public boolean isBackFacing(String deviceName) {
+    CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
+
+    return characteristics != null
+        && characteristics.get(CameraCharacteristics.LENS_FACING)
+        == CameraMetadata.LENS_FACING_BACK;
+  }
+
+  @Nullable
+  @Override
+  public List<CaptureFormat> getSupportedFormats(String deviceName) {
+    return getSupportedFormats(context, deviceName);
+  }
+
+  @Override
+  public CameraVideoCapturer createCapturer(
+      String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
+    return new Camera2Capturer(context, deviceName, eventsHandler);
+  }
+
+  private @Nullable CameraCharacteristics getCameraCharacteristics(String deviceName) {
+    try {
+      return cameraManager.getCameraCharacteristics(deviceName);
+    } catch (CameraAccessException | RuntimeException e) {
+      Logging.e(TAG, "Camera access exception", e);
+      return null;
+    }
+  }
+
+  /**
+   * Checks if API is supported and all cameras have better than legacy support.
+   */
+  public static boolean isSupported(Context context) {
+    CameraManager cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+    try {
+      String[] cameraIds = cameraManager.getCameraIdList();
+      for (String id : cameraIds) {
+        CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(id);
+        if (characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)
+            == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+          return false;
+        }
+      }
+    } catch (CameraAccessException | RuntimeException e) {
+      Logging.e(TAG, "Failed to check if camera2 is supported", e);
+      return false;
+    }
+    return true;
+  }
+
+  static int getFpsUnitFactor(Range<Integer>[] fpsRanges) { // Returns 1000 if ranges are in plain fps (to scale to fps*1000), else 1.
+    if (fpsRanges.length == 0) {
+      return 1000;
+    }
+    return fpsRanges[0].getUpper() < 1000 ? 1000 : 1;
+  }
+
+  static List<Size> getSupportedSizes(CameraCharacteristics cameraCharacteristics) {
+    final StreamConfigurationMap streamMap =
+        cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+    final int supportLevel =
+        cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
+
+    final android.util.Size[] nativeSizes = streamMap.getOutputSizes(SurfaceTexture.class);
+    final List<Size> sizes = convertSizes(nativeSizes);
+
+    // Video may be stretched pre LMR1 on legacy implementations.
+    // Filter out formats that have different aspect ratio than the sensor array.
+    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1
+        && supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+      final Rect activeArraySize =
+          cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+      final ArrayList<Size> filteredSizes = new ArrayList<Size>();
+
+      for (Size size : sizes) {
+        if (activeArraySize.width() * size.height == activeArraySize.height() * size.width) {
+          filteredSizes.add(size);
+        }
+      }
+
+      return filteredSizes;
+    } else {
+      return sizes;
+    }
+  }
+
+  @Nullable
+  static List<CaptureFormat> getSupportedFormats(Context context, String cameraId) {
+    return getSupportedFormats(
+        (CameraManager) context.getSystemService(Context.CAMERA_SERVICE), cameraId);
+  }
+
+  @Nullable
+  static List<CaptureFormat> getSupportedFormats(CameraManager cameraManager, String cameraId) {
+    synchronized (cachedSupportedFormats) {
+      if (cachedSupportedFormats.containsKey(cameraId)) {
+        return cachedSupportedFormats.get(cameraId);
+      }
+
+      Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
+      final long startTimeMs = SystemClock.elapsedRealtime();
+
+      final CameraCharacteristics cameraCharacteristics;
+      try {
+        cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
+      } catch (Exception ex) {
+        Logging.e(TAG, "getCameraCharacteristics()", ex);
+        return new ArrayList<CaptureFormat>();
+      }
+
+      final StreamConfigurationMap streamMap =
+          cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+
+      Range<Integer>[] fpsRanges =
+          cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
+      List<CaptureFormat.FramerateRange> framerateRanges =
+          convertFramerates(fpsRanges, getFpsUnitFactor(fpsRanges));
+      List<Size> sizes = getSupportedSizes(cameraCharacteristics);
+
+      int defaultMaxFps = 0;
+      for (CaptureFormat.FramerateRange framerateRange : framerateRanges) {
+        defaultMaxFps = Math.max(defaultMaxFps, framerateRange.max);
+      }
+
+      final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
+      for (Size size : sizes) {
+        long minFrameDurationNs = 0;
+        try {
+          minFrameDurationNs = streamMap.getOutputMinFrameDuration(
+              SurfaceTexture.class, new android.util.Size(size.width, size.height));
+        } catch (Exception e) {
+          // getOutputMinFrameDuration() is not supported on all devices. Ignore silently.
+        }
+        final int maxFps = (minFrameDurationNs == 0)
+            ? defaultMaxFps
+            : (int) Math.round(NANO_SECONDS_PER_SECOND / minFrameDurationNs) * 1000; // fps*1000.
+        formatList.add(new CaptureFormat(size.width, size.height, 0, maxFps));
+        Logging.d(TAG, "Format: " + size.width + "x" + size.height + "@" + maxFps);
+      }
+
+      cachedSupportedFormats.put(cameraId, formatList);
+      final long endTimeMs = SystemClock.elapsedRealtime();
+      Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+              + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
+      return formatList;
+    }
+  }
+
+  // Convert from android.util.Size to Size.
+  private static List<Size> convertSizes(android.util.Size[] cameraSizes) {
+    if (cameraSizes == null || cameraSizes.length == 0) {
+      return Collections.emptyList();
+    }
+    final List<Size> sizes = new ArrayList<>(cameraSizes.length);
+    for (android.util.Size size : cameraSizes) {
+      sizes.add(new Size(size.getWidth(), size.getHeight()));
+    }
+    return sizes;
+  }
+
+  // Convert from android.util.Range<Integer> to CaptureFormat.FramerateRange.
+  static List<CaptureFormat.FramerateRange> convertFramerates(
+      Range<Integer>[] arrayRanges, int unitFactor) {
+    final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
+    for (Range<Integer> range : arrayRanges) {
+      ranges.add(new CaptureFormat.FramerateRange(
+          range.getLower() * unitFactor, range.getUpper() * unitFactor));
+    }
+    return ranges;
+  }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera2Session.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera2Session.java
new file mode 100644
index 00000000..dec97a2c
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera2Session.java
@@ -0,0 +1,426 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureFailure;
+import android.hardware.camera2.CaptureRequest;
+import android.os.Handler;
+import android.util.Range;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+class Camera2Session implements CameraSession { // One open camera2 capture session; all methods run on the camera thread.
+  private static final String TAG = "Camera2Session";
+
+  private static final Histogram camera2StartTimeMsHistogram =
+      Histogram.createCounts("WebRTC.Android.Camera2.StartTimeMs", 1, 10000, 50);
+  private static final Histogram camera2StopTimeMsHistogram =
+      Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50);
+  private static final Histogram camera2ResolutionHistogram = Histogram.createEnumeration(
+      "WebRTC.Android.Camera2.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());
+
+  private static enum SessionState { RUNNING, STOPPED }
+
+  private final Handler cameraThreadHandler;
+  private final CreateSessionCallback callback;
+  private final Events events;
+  private final Context applicationContext;
+  private final CameraManager cameraManager;
+  private final SurfaceTextureHelper surfaceTextureHelper;
+  private final String cameraId;
+  private final int width;
+  private final int height;
+  private final int framerate;
+
+  // Initialized at start
+  private CameraCharacteristics cameraCharacteristics;
+  private int cameraOrientation;
+  private boolean isCameraFrontFacing;
+  private int fpsUnitFactor;
+  private CaptureFormat captureFormat;
+
+  // Initialized when camera opens
+  @Nullable private CameraDevice cameraDevice;
+  @Nullable private Surface surface;
+
+  // Initialized when capture session is created
+  @Nullable private CameraCaptureSession captureSession;
+
+  // State
+  private SessionState state = SessionState.RUNNING; // Starts RUNNING; stop()/errors move it to STOPPED.
+  private boolean firstFrameReported;
+
+  // Used only for stats. Only used on the camera thread.
+  private final long constructionTimeNs; // Construction time of this class.
+
+  private class CameraStateCallback extends CameraDevice.StateCallback {
+    private String getErrorDescription(int errorCode) {
+      switch (errorCode) {
+        case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
+          return "Camera device has encountered a fatal error.";
+        case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
+          return "Camera device could not be opened due to a device policy.";
+        case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
+          return "Camera device is in use already.";
+        case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
+          return "Camera service has encountered a fatal error.";
+        case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
+          return "Camera device could not be opened because"
+              + " there are too many other open camera devices.";
+        default:
+          return "Unknown camera error: " + errorCode;
+      }
+    }
+
+    @Override
+    public void onDisconnected(CameraDevice camera) {
+      checkIsOnCameraThread();
+      final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
+      state = SessionState.STOPPED;
+      stopInternal();
+      if (startFailure) { // Disconnected before the session configured: report as a create failure.
+        callback.onFailure(FailureType.DISCONNECTED, "Camera disconnected / evicted.");
+      } else {
+        events.onCameraDisconnected(Camera2Session.this);
+      }
+    }
+
+    @Override
+    public void onError(CameraDevice camera, int errorCode) {
+      checkIsOnCameraThread();
+      reportError(getErrorDescription(errorCode));
+    }
+
+    @Override
+    public void onOpened(CameraDevice camera) {
+      checkIsOnCameraThread();
+
+      Logging.d(TAG, "Camera opened.");
+      cameraDevice = camera;
+
+      surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
+      surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
+      try {
+        camera.createCaptureSession(
+            Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler);
+      } catch (CameraAccessException e) {
+        reportError("Failed to create capture session. " + e);
+        return;
+      }
+    }
+
+    @Override
+    public void onClosed(CameraDevice camera) {
+      checkIsOnCameraThread();
+
+      Logging.d(TAG, "Camera device closed.");
+      events.onCameraClosed(Camera2Session.this);
+    }
+  }
+
+  private class CaptureSessionCallback extends CameraCaptureSession.StateCallback {
+    @Override
+    public void onConfigureFailed(CameraCaptureSession session) {
+      checkIsOnCameraThread();
+      session.close();
+      reportError("Failed to configure capture session.");
+    }
+
+    @Override
+    public void onConfigured(CameraCaptureSession session) {
+      checkIsOnCameraThread();
+      Logging.d(TAG, "Camera capture session configured.");
+      captureSession = session;
+      try {
+        /*
+         * The viable options for video capture requests are:
+         * TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality
+         * post-processing.
+         * TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording
+         * quality.
+         */
+        final CaptureRequest.Builder captureRequestBuilder =
+            cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
+        // Set auto exposure fps range.
+        captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
+            new Range<Integer>(captureFormat.framerate.min / fpsUnitFactor,
+                captureFormat.framerate.max / fpsUnitFactor));
+        captureRequestBuilder.set(
+            CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
+        captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
+        chooseStabilizationMode(captureRequestBuilder);
+        chooseFocusMode(captureRequestBuilder);
+
+        captureRequestBuilder.addTarget(surface);
+        session.setRepeatingRequest(
+            captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
+      } catch (CameraAccessException e) {
+        reportError("Failed to start capture request. " + e);
+        return;
+      }
+
+      surfaceTextureHelper.startListening((VideoFrame frame) -> {
+        checkIsOnCameraThread();
+
+        if (state != SessionState.RUNNING) {
+          Logging.d(TAG, "Texture frame captured but camera is no longer running.");
+          return;
+        }
+
+        if (!firstFrameReported) { // Record time-to-first-frame once, for the startup histogram.
+          firstFrameReported = true;
+          final int startTimeMs =
+              (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
+          camera2StartTimeMsHistogram.addSample(startTimeMs);
+        }
+
+        // Undo the mirror that the OS "helps" us with.
+        // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
+        // Also, undo camera orientation, we report it as rotation instead.
+        final VideoFrame modifiedFrame =
+            new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix(
+                               (TextureBufferImpl) frame.getBuffer(),
+                               /* mirror= */ isCameraFrontFacing,
+                               /* rotation= */ -cameraOrientation),
+                /* rotation= */ getFrameOrientation(), frame.getTimestampNs());
+        events.onFrameCaptured(Camera2Session.this, modifiedFrame);
+        modifiedFrame.release();
+      });
+      Logging.d(TAG, "Camera device successfully started.");
+      callback.onDone(Camera2Session.this);
+    }
+
+    // Prefers optical stabilization over software stabilization if available. Only enables one of
+    // the stabilization modes at a time because having both enabled can cause strange results.
+    private void chooseStabilizationMode(CaptureRequest.Builder captureRequestBuilder) {
+      final int[] availableOpticalStabilization = cameraCharacteristics.get(
+          CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION);
+      if (availableOpticalStabilization != null) {
+        for (int mode : availableOpticalStabilization) {
+          if (mode == CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) {
+            captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
+                CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON);
+            captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
+                CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
+            Logging.d(TAG, "Using optical stabilization.");
+            return;
+          }
+        }
+      }
+      // If no optical mode is available, try software.
+      final int[] availableVideoStabilization = cameraCharacteristics.get(
+          CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
+      if (availableVideoStabilization != null) {
+        for (int mode : availableVideoStabilization) {
+          if (mode == CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON) {
+            captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
+                CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON);
+            captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
+                CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF);
+            Logging.d(TAG, "Using video stabilization.");
+            return;
+          }
+        }
+      }
+      Logging.d(TAG, "Stabilization not available.");
+    }
+
+    private void chooseFocusMode(CaptureRequest.Builder captureRequestBuilder) {
+      final int[] availableFocusModes =
+          cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
+      for (int mode : availableFocusModes) {
+        if (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) {
+          captureRequestBuilder.set(
+              CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
+          Logging.d(TAG, "Using continuous video auto-focus.");
+          return;
+        }
+      }
+      Logging.d(TAG, "Auto-focus is not available.");
+    }
+  }
+
+  private static class CameraCaptureCallback extends CameraCaptureSession.CaptureCallback {
+    @Override
+    public void onCaptureFailed(
+        CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
+      Logging.d(TAG, "Capture failed: " + failure);
+    }
+  }
+
+  public static void create(CreateSessionCallback callback, Events events,
+      Context applicationContext, CameraManager cameraManager,
+      SurfaceTextureHelper surfaceTextureHelper, String cameraId, int width, int height,
+      int framerate) {
+    new Camera2Session(callback, events, applicationContext, cameraManager, surfaceTextureHelper,
+        cameraId, width, height, framerate);
+  }
+
+  private Camera2Session(CreateSessionCallback callback, Events events, Context applicationContext,
+      CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper, String cameraId,
+      int width, int height, int framerate) {
+    Logging.d(TAG, "Create new camera2 session on camera " + cameraId);
+
+    constructionTimeNs = System.nanoTime();
+
+    this.cameraThreadHandler = new Handler(); // Bound to the current (camera) thread's Looper.
+    this.callback = callback;
+    this.events = events;
+    this.applicationContext = applicationContext;
+    this.cameraManager = cameraManager;
+    this.surfaceTextureHelper = surfaceTextureHelper;
+    this.cameraId = cameraId;
+    this.width = width;
+    this.height = height;
+    this.framerate = framerate;
+
+    start();
+  }
+
+  private void start() {
+    checkIsOnCameraThread();
+    Logging.d(TAG, "start");
+
+    try {
+      cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
+    } catch (CameraAccessException | IllegalArgumentException e) {
+      reportError("getCameraCharacteristics(): " + e.getMessage());
+      return;
+    }
+    cameraOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
+    isCameraFrontFacing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)
+        == CameraMetadata.LENS_FACING_FRONT;
+
+    findCaptureFormat();
+
+    if (captureFormat == null) {
+      // findCaptureFormat reports an error already.
+      return;
+    }
+
+    openCamera();
+  }
+
+  private void findCaptureFormat() {
+    checkIsOnCameraThread();
+
+    Range<Integer>[] fpsRanges =
+        cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
+    fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
+    List<CaptureFormat.FramerateRange> framerateRanges =
+        Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
+    List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
+    Logging.d(TAG, "Available preview sizes: " + sizes);
+    Logging.d(TAG, "Available fps ranges: " + framerateRanges);
+
+    if (framerateRanges.isEmpty() || sizes.isEmpty()) {
+      reportError("No supported capture formats.");
+      return;
+    }
+
+    final CaptureFormat.FramerateRange bestFpsRange =
+        CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);
+
+    final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
+    CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize);
+
+    captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
+    Logging.d(TAG, "Using capture format: " + captureFormat);
+  }
+
+  private void openCamera() {
+    checkIsOnCameraThread();
+
+    Logging.d(TAG, "Opening camera " + cameraId);
+    events.onCameraOpening();
+
+    try {
+      cameraManager.openCamera(cameraId, new CameraStateCallback(), cameraThreadHandler);
+    } catch (CameraAccessException | IllegalArgumentException | SecurityException e) {
+      reportError("Failed to open camera: " + e);
+      return;
+    }
+  }
+
+  @Override
+  public void stop() {
+    Logging.d(TAG, "Stop camera2 session on camera " + cameraId);
+    checkIsOnCameraThread();
+    if (state != SessionState.STOPPED) {
+      final long stopStartTime = System.nanoTime();
+      state = SessionState.STOPPED;
+      stopInternal();
+      final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
+      camera2StopTimeMsHistogram.addSample(stopTimeMs);
+    }
+  }
+
+  private void stopInternal() {
+    Logging.d(TAG, "Stop internal");
+    checkIsOnCameraThread();
+
+    surfaceTextureHelper.stopListening();
+
+    if (captureSession != null) {
+      captureSession.close();
+      captureSession = null;
+    }
+    if (surface != null) {
+      surface.release();
+      surface = null;
+    }
+    if (cameraDevice != null) {
+      cameraDevice.close();
+      cameraDevice = null;
+    }
+
+    Logging.d(TAG, "Stop done");
+  }
+
+  private void reportError(String error) {
+    checkIsOnCameraThread();
+    Logging.e(TAG, "Error: " + error);
+
+    final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
+    state = SessionState.STOPPED;
+    stopInternal();
+    if (startFailure) { // Failed before the session configured: report as a create failure.
+      callback.onFailure(FailureType.ERROR, error);
+    } else {
+      events.onCameraError(this, error);
+    }
+  }
+
+  private int getFrameOrientation() {
+    int rotation = CameraSession.getDeviceOrientation(applicationContext);
+    if (!isCameraFrontFacing) { // Back-facing sensors rotate opposite to the display.
+      rotation = 360 - rotation;
+    }
+    return (cameraOrientation + rotation) % 360;
+  }
+
+  private void checkIsOnCameraThread() {
+    if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
+      throw new IllegalStateException("Wrong thread");
+    }
+  }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraCapturer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraCapturer.java
new file mode 100644
index 00000000..1922a529
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraCapturer.java
@@ -0,0 +1,458 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.os.Handler;
+import android.os.Looper;
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+import java.util.List;
+
+@SuppressWarnings("deprecation")
+abstract class CameraCapturer implements CameraVideoCapturer {
+ enum SwitchState {
+ IDLE, // No switch requested.
+ PENDING, // Waiting for previous capture session to open.
+ IN_PROGRESS, // Waiting for new switched capture session to start.
+ }
+
+ private static final String TAG = "CameraCapturer";
+ private final static int MAX_OPEN_CAMERA_ATTEMPTS = 3;
+ private final static int OPEN_CAMERA_DELAY_MS = 500;
+ private final static int OPEN_CAMERA_TIMEOUT = 10000;
+
+ private final CameraEnumerator cameraEnumerator;
+ private final CameraEventsHandler eventsHandler;
+ private final Handler uiThreadHandler;
+
+ @Nullable
+ private final CameraSession.CreateSessionCallback createSessionCallback =
+ new CameraSession.CreateSessionCallback() {
+ @Override
+ public void onDone(CameraSession session) {
+ checkIsOnCameraThread();
+ Logging.d(TAG, "Create session done. Switch state: " + switchState);
+ uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
+ synchronized (stateLock) {
+ capturerObserver.onCapturerStarted(true /* success */);
+ sessionOpening = false;
+ currentSession = session;
+ cameraStatistics = new CameraStatistics(surfaceHelper, eventsHandler);
+ firstFrameObserved = false;
+ stateLock.notifyAll();
+
+ if (switchState == SwitchState.IN_PROGRESS) {
+ switchState = SwitchState.IDLE;
+ if (switchEventsHandler != null) {
+ switchEventsHandler.onCameraSwitchDone(cameraEnumerator.isFrontFacing(cameraName));
+ switchEventsHandler = null;
+ }
+ } else if (switchState == SwitchState.PENDING) {
+ String selectedCameraName = pendingCameraName;
+ pendingCameraName = null;
+ switchState = SwitchState.IDLE;
+ switchCameraInternal(switchEventsHandler, selectedCameraName);
+ }
+ }
+ }
+
+ @Override
+ public void onFailure(CameraSession.FailureType failureType, String error) {
+ checkIsOnCameraThread();
+ uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
+ synchronized (stateLock) {
+ capturerObserver.onCapturerStarted(false /* success */);
+ openAttemptsRemaining--;
+
+ if (openAttemptsRemaining <= 0) {
+ Logging.w(TAG, "Opening camera failed, passing: " + error);
+ sessionOpening = false;
+ stateLock.notifyAll();
+
+ if (switchState != SwitchState.IDLE) {
+ if (switchEventsHandler != null) {
+ switchEventsHandler.onCameraSwitchError(error);
+ switchEventsHandler = null;
+ }
+ switchState = SwitchState.IDLE;
+ }
+
+ if (failureType == CameraSession.FailureType.DISCONNECTED) {
+ eventsHandler.onCameraDisconnected();
+ } else {
+ eventsHandler.onCameraError(error);
+ }
+ } else {
+ Logging.w(TAG, "Opening camera failed, retry: " + error);
+ createSessionInternal(OPEN_CAMERA_DELAY_MS);
+ }
+ }
+ }
+ };
+
+ @Nullable
+ private final CameraSession.Events cameraSessionEventsHandler = new CameraSession.Events() {
+ @Override
+ public void onCameraOpening() {
+ checkIsOnCameraThread();
+ synchronized (stateLock) {
+ if (currentSession != null) {
+ Logging.w(TAG, "onCameraOpening while session was open.");
+ return;
+ }
+ eventsHandler.onCameraOpening(cameraName);
+ }
+ }
+
+ @Override
+ public void onCameraError(CameraSession session, String error) {
+ checkIsOnCameraThread();
+ synchronized (stateLock) {
+ if (session != currentSession) {
+ Logging.w(TAG, "onCameraError from another session: " + error);
+ return;
+ }
+ eventsHandler.onCameraError(error);
+ stopCapture();
+ }
+ }
+
+ @Override
+ public void onCameraDisconnected(CameraSession session) {
+ checkIsOnCameraThread();
+ synchronized (stateLock) {
+ if (session != currentSession) {
+ Logging.w(TAG, "onCameraDisconnected from another session.");
+ return;
+ }
+ eventsHandler.onCameraDisconnected();
+ stopCapture();
+ }
+ }
+
+ @Override
+ public void onCameraClosed(CameraSession session) {
+ checkIsOnCameraThread();
+ synchronized (stateLock) {
+ if (session != currentSession && currentSession != null) {
+ Logging.d(TAG, "onCameraClosed from another session.");
+ return;
+ }
+ eventsHandler.onCameraClosed();
+ }
+ }
+
+ @Override
+ public void onFrameCaptured(CameraSession session, VideoFrame frame) {
+ checkIsOnCameraThread();
+ synchronized (stateLock) {
+ if (session != currentSession) {
+ Logging.w(TAG, "onFrameCaptured from another session.");
+ return;
+ }
+ if (!firstFrameObserved) {
+ eventsHandler.onFirstFrameAvailable();
+ firstFrameObserved = true;
+ }
+ cameraStatistics.addFrame();
+ capturerObserver.onFrameCaptured(frame);
+ }
+ }
+ };
+
+ private final Runnable openCameraTimeoutRunnable = new Runnable() {
+ @Override
+ public void run() {
+ eventsHandler.onCameraError("Camera failed to start within timeout.");
+ }
+ };
+
+ // Initialized on initialize
+ // -------------------------
+ private Handler cameraThreadHandler;
+ private Context applicationContext;
+ private org.webrtc.CapturerObserver capturerObserver;
+ private SurfaceTextureHelper surfaceHelper;
+
+ private final Object stateLock = new Object();
+ private boolean sessionOpening; /* guarded by stateLock */
+ @Nullable private CameraSession currentSession; /* guarded by stateLock */
+ private String cameraName; /* guarded by stateLock */
+ private String pendingCameraName; /* guarded by stateLock */
+ private int width; /* guarded by stateLock */
+ private int height; /* guarded by stateLock */
+ private int framerate; /* guarded by stateLock */
+ private int openAttemptsRemaining; /* guarded by stateLock */
+ private SwitchState switchState = SwitchState.IDLE; /* guarded by stateLock */
+ @Nullable private CameraSwitchHandler switchEventsHandler; /* guarded by stateLock */
+ // Valid from onDone call until stopCapture, otherwise null.
+ @Nullable private CameraStatistics cameraStatistics; /* guarded by stateLock */
+ private boolean firstFrameObserved; /* guarded by stateLock */
+
+ public CameraCapturer(String cameraName, @Nullable CameraEventsHandler eventsHandler,
+ CameraEnumerator cameraEnumerator) {
+ if (eventsHandler == null) {
+ eventsHandler = new CameraEventsHandler() {
+ @Override
+ public void onCameraError(String errorDescription) {}
+ @Override
+ public void onCameraDisconnected() {}
+ @Override
+ public void onCameraFreezed(String errorDescription) {}
+ @Override
+ public void onCameraOpening(String cameraName) {}
+ @Override
+ public void onFirstFrameAvailable() {}
+ @Override
+ public void onCameraClosed() {}
+ };
+ }
+
+ this.eventsHandler = eventsHandler;
+ this.cameraEnumerator = cameraEnumerator;
+ this.cameraName = cameraName;
+ List<String> deviceNames = Arrays.asList(cameraEnumerator.getDeviceNames());
+ uiThreadHandler = new Handler(Looper.getMainLooper());
+
+ if (deviceNames.isEmpty()) {
+ throw new RuntimeException("No cameras attached.");
+ }
+ if (!deviceNames.contains(this.cameraName)) {
+ throw new IllegalArgumentException(
+ "Camera name " + this.cameraName + " does not match any known camera device.");
+ }
+ }
+
+ @Override
+ public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
+ org.webrtc.CapturerObserver capturerObserver) {
+ this.applicationContext = applicationContext;
+ this.capturerObserver = capturerObserver;
+ this.surfaceHelper = surfaceTextureHelper;
+ this.cameraThreadHandler = surfaceTextureHelper.getHandler();
+ }
+
+ @Override
+ public void startCapture(int width, int height, int framerate) {
+ Logging.d(TAG, "startCapture: " + width + "x" + height + "@" + framerate);
+ if (applicationContext == null) {
+ throw new RuntimeException("CameraCapturer must be initialized before calling startCapture.");
+ }
+
+ synchronized (stateLock) {
+ if (sessionOpening || currentSession != null) {
+ Logging.w(TAG, "Session already open");
+ return;
+ }
+
+ this.width = width;
+ this.height = height;
+ this.framerate = framerate;
+
+ sessionOpening = true;
+ openAttemptsRemaining = MAX_OPEN_CAMERA_ATTEMPTS;
+ createSessionInternal(0);
+ }
+ }
+
+ private void createSessionInternal(int delayMs) {
+ uiThreadHandler.postDelayed(openCameraTimeoutRunnable, delayMs + OPEN_CAMERA_TIMEOUT);
+ cameraThreadHandler.postDelayed(new Runnable() {
+ @Override
+ public void run() {
+ createCameraSession(createSessionCallback, cameraSessionEventsHandler, applicationContext,
+ surfaceHelper, cameraName, width, height, framerate);
+ }
+ }, delayMs);
+ }
+
+ @Override
+ public void stopCapture() {
+ Logging.d(TAG, "Stop capture");
+
+ synchronized (stateLock) {
+ while (sessionOpening) {
+ Logging.d(TAG, "Stop capture: Waiting for session to open");
+ try {
+ stateLock.wait();
+ } catch (InterruptedException e) {
+ Logging.w(TAG, "Stop capture interrupted while waiting for the session to open.");
+ Thread.currentThread().interrupt();
+ return;
+ }
+ }
+
+ if (currentSession != null) {
+ Logging.d(TAG, "Stop capture: Nulling session");
+ cameraStatistics.release();
+ cameraStatistics = null;
+ final CameraSession oldSession = currentSession;
+ cameraThreadHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ oldSession.stop();
+ }
+ });
+ currentSession = null;
+ capturerObserver.onCapturerStopped();
+ } else {
+ Logging.d(TAG, "Stop capture: No session open");
+ }
+ }
+
+ Logging.d(TAG, "Stop capture done");
+ }
+
+ @Override
+ public void changeCaptureFormat(int width, int height, int framerate) {
+ Logging.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
+ synchronized (stateLock) {
+ stopCapture();
+ startCapture(width, height, framerate);
+ }
+ }
+
+ @Override
+ public void dispose() {
+ Logging.d(TAG, "dispose");
+ stopCapture();
+ }
+
+ @Override
+ public void switchCamera(final CameraSwitchHandler switchEventsHandler) {
+ Logging.d(TAG, "switchCamera");
+ cameraThreadHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ List<String> deviceNames = Arrays.asList(cameraEnumerator.getDeviceNames());
+
+ if (deviceNames.size() < 2) {
+ reportCameraSwitchError("No camera to switch to.", switchEventsHandler);
+ return;
+ }
+
+ int cameraNameIndex = deviceNames.indexOf(cameraName);
+ String cameraName = deviceNames.get((cameraNameIndex + 1) % deviceNames.size());
+ switchCameraInternal(switchEventsHandler, cameraName);
+ }
+ });
+ }
+
+ @Override
+ public void switchCamera(final CameraSwitchHandler switchEventsHandler, final String cameraName) {
+ Logging.d(TAG, "switchCamera");
+ cameraThreadHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ switchCameraInternal(switchEventsHandler, cameraName);
+ }
+ });
+ }
+
+ @Override
+ public boolean isScreencast() {
+ return false;
+ }
+
+ public void printStackTrace() {
+ Thread cameraThread = null;
+ if (cameraThreadHandler != null) {
+ cameraThread = cameraThreadHandler.getLooper().getThread();
+ }
+ if (cameraThread != null) {
+ StackTraceElement[] cameraStackTrace = cameraThread.getStackTrace();
+ if (cameraStackTrace.length > 0) {
+ Logging.d(TAG, "CameraCapturer stack trace:");
+ for (StackTraceElement traceElem : cameraStackTrace) {
+ Logging.d(TAG, traceElem.toString());
+ }
+ }
+ }
+ }
+
+ private void reportCameraSwitchError(
+ String error, @Nullable CameraSwitchHandler switchEventsHandler) {
+ Logging.e(TAG, error);
+ if (switchEventsHandler != null) {
+ switchEventsHandler.onCameraSwitchError(error);
+ }
+ }
+
+ private void switchCameraInternal(
+ @Nullable final CameraSwitchHandler switchEventsHandler, final String selectedCameraName) {
+ Logging.d(TAG, "switchCamera internal");
+ List<String> deviceNames = Arrays.asList(cameraEnumerator.getDeviceNames());
+
+ if (!deviceNames.contains(selectedCameraName)) {
+ reportCameraSwitchError("Attempted to switch to unknown camera device " + selectedCameraName,
+ switchEventsHandler);
+ return;
+ }
+
+ synchronized (stateLock) {
+ if (switchState != SwitchState.IDLE) {
+ reportCameraSwitchError("Camera switch already in progress.", switchEventsHandler);
+ return;
+ }
+ if (!sessionOpening && currentSession == null) {
+ reportCameraSwitchError("switchCamera: camera is not running.", switchEventsHandler);
+ return;
+ }
+
+ this.switchEventsHandler = switchEventsHandler;
+ if (sessionOpening) {
+ switchState = SwitchState.PENDING;
+ pendingCameraName = selectedCameraName;
+ return;
+ } else {
+ switchState = SwitchState.IN_PROGRESS;
+ }
+
+ Logging.d(TAG, "switchCamera: Stopping session");
+ cameraStatistics.release();
+ cameraStatistics = null;
+ final CameraSession oldSession = currentSession;
+ cameraThreadHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ oldSession.stop();
+ }
+ });
+ currentSession = null;
+
+ cameraName = selectedCameraName;
+
+ sessionOpening = true;
+ openAttemptsRemaining = 1;
+ createSessionInternal(0);
+ }
+ Logging.d(TAG, "switchCamera done");
+ }
+
+ private void checkIsOnCameraThread() {
+ if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
+ Logging.e(TAG, "Check is on camera thread failed.");
+ throw new RuntimeException("Not on camera thread.");
+ }
+ }
+
+ protected String getCameraName() {
+ synchronized (stateLock) {
+ return cameraName;
+ }
+ }
+
+ abstract protected void createCameraSession(
+ CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
+ Context applicationContext, SurfaceTextureHelper surfaceTextureHelper, String cameraName,
+ int width, int height, int framerate);
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraEnumerationAndroid.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraEnumerationAndroid.java
new file mode 100644
index 00000000..0c3188ff
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraEnumerationAndroid.java
@@ -0,0 +1,206 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static java.lang.Math.abs;
+
+import android.graphics.ImageFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+@SuppressWarnings("deprecation")
+public class CameraEnumerationAndroid {
+ private final static String TAG = "CameraEnumerationAndroid";
+
+ static final ArrayList<Size> COMMON_RESOLUTIONS = new ArrayList<Size>(Arrays.asList(
+ // 0, Unknown resolution
+ new Size(160, 120), // 1, QQVGA
+ new Size(240, 160), // 2, HQVGA
+ new Size(320, 240), // 3, QVGA
+ new Size(400, 240), // 4, WQVGA
+ new Size(480, 320), // 5, HVGA
+ new Size(640, 360), // 6, nHD
+ new Size(640, 480), // 7, VGA
+ new Size(768, 480), // 8, WVGA
+ new Size(854, 480), // 9, FWVGA
+ new Size(800, 600), // 10, SVGA
+ new Size(960, 540), // 11, qHD
+ new Size(960, 640), // 12, DVGA
+ new Size(1024, 576), // 13, WSVGA
+ new Size(1024, 600), // 14, WVSGA
+ new Size(1280, 720), // 15, HD
+ new Size(1280, 1024), // 16, SXGA
+ new Size(1920, 1080), // 17, Full HD
+ new Size(1920, 1440), // 18, Full HD 4:3
+ new Size(2560, 1440), // 19, QHD
+ new Size(3840, 2160) // 20, UHD
+ ));
+
+ public static class CaptureFormat {
+ // Class to represent a framerate range. The framerate varies because of lighting conditions.
+ // The values are multiplied by 1000, so 1000 represents one frame per second.
+ public static class FramerateRange {
+ public int min;
+ public int max;
+
+ public FramerateRange(int min, int max) {
+ this.min = min;
+ this.max = max;
+ }
+
+ @Override
+ public String toString() {
+ return "[" + (min / 1000.0f) + ":" + (max / 1000.0f) + "]";
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (!(other instanceof FramerateRange)) {
+ return false;
+ }
+ final FramerateRange otherFramerate = (FramerateRange) other;
+ return min == otherFramerate.min && max == otherFramerate.max;
+ }
+
+ @Override
+ public int hashCode() {
+ // Use prime close to 2^16 to avoid collisions for normal values less than 2^16.
+ return 1 + 65537 * min + max;
+ }
+ }
+
+ public final int width;
+ public final int height;
+ public final FramerateRange framerate;
+
+ // TODO(hbos): If VideoCapturer.startCapture is updated to support other image formats then this
+ // needs to be updated and VideoCapturer.getSupportedFormats need to return CaptureFormats of
+ // all imageFormats.
+ public final int imageFormat = ImageFormat.NV21;
+
+ public CaptureFormat(int width, int height, int minFramerate, int maxFramerate) {
+ this.width = width;
+ this.height = height;
+ this.framerate = new FramerateRange(minFramerate, maxFramerate);
+ }
+
+ public CaptureFormat(int width, int height, FramerateRange framerate) {
+ this.width = width;
+ this.height = height;
+ this.framerate = framerate;
+ }
+
+ // Calculates the frame size of this capture format.
+ public int frameSize() {
+ return frameSize(width, height, imageFormat);
+ }
+
+ // Calculates the frame size of the specified image format. Currently only
+ // supporting ImageFormat.NV21.
+ // The size is width * height * number of bytes per pixel.
+ // http://developer.android.com/reference/android/hardware/Camera.html#addCallbackBuffer(byte[])
+ public static int frameSize(int width, int height, int imageFormat) {
+ if (imageFormat != ImageFormat.NV21) {
+ throw new UnsupportedOperationException("Don't know how to calculate "
+ + "the frame size of non-NV21 image formats.");
+ }
+ return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
+ }
+
+ @Override
+ public String toString() {
+ return width + "x" + height + "@" + framerate;
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (!(other instanceof CaptureFormat)) {
+ return false;
+ }
+ final CaptureFormat otherFormat = (CaptureFormat) other;
+ return width == otherFormat.width && height == otherFormat.height
+ && framerate.equals(otherFormat.framerate);
+ }
+
+ @Override
+ public int hashCode() {
+ return 1 + (width * 65497 + height) * 251 + framerate.hashCode();
+ }
+ }
+
+ // Helper class for finding the closest supported format for the two functions below. It creates a
+ // comparator based on the difference to some requested parameters, where the element with the
+ // minimum difference is the element that is closest to the requested parameters.
+ private static abstract class ClosestComparator<T> implements Comparator<T> {
+ // Difference between supported and requested parameter.
+ abstract int diff(T supportedParameter);
+
+ @Override
+ public int compare(T t1, T t2) {
+ return diff(t1) - diff(t2);
+ }
+ }
+
+ // Prefer a fps range with an upper bound close to `framerate`. Also prefer a fps range with a low
+ // lower bound, to allow the framerate to fluctuate based on lighting conditions.
+ public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
+ List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
+ return Collections.min(
+ supportedFramerates, new ClosestComparator<CaptureFormat.FramerateRange>() {
+ // Progressive penalty if the upper bound is further away than `MAX_FPS_DIFF_THRESHOLD`
+ // from requested.
+ private static final int MAX_FPS_DIFF_THRESHOLD = 5000;
+ private static final int MAX_FPS_LOW_DIFF_WEIGHT = 1;
+ private static final int MAX_FPS_HIGH_DIFF_WEIGHT = 3;
+
+ // Progressive penalty if the lower bound is bigger than `MIN_FPS_THRESHOLD`.
+ private static final int MIN_FPS_THRESHOLD = 8000;
+ private static final int MIN_FPS_LOW_VALUE_WEIGHT = 1;
+ private static final int MIN_FPS_HIGH_VALUE_WEIGHT = 4;
+
+ // Use one weight for small `value` less than `threshold`, and another weight above.
+ private int progressivePenalty(int value, int threshold, int lowWeight, int highWeight) {
+ return (value < threshold) ? value * lowWeight
+ : threshold * lowWeight + (value - threshold) * highWeight;
+ }
+
+ @Override
+ int diff(CaptureFormat.FramerateRange range) {
+ final int minFpsError = progressivePenalty(
+ range.min, MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
+ final int maxFpsError = progressivePenalty(Math.abs(requestedFps * 1000 - range.max),
+ MAX_FPS_DIFF_THRESHOLD, MAX_FPS_LOW_DIFF_WEIGHT, MAX_FPS_HIGH_DIFF_WEIGHT);
+ return minFpsError + maxFpsError;
+ }
+ });
+ }
+
+ public static Size getClosestSupportedSize(
+ List<Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
+ return Collections.min(supportedSizes, new ClosestComparator<Size>() {
+ @Override
+ int diff(Size size) {
+ return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
+ }
+ });
+ }
+
+ // Helper method for camera classes.
+ static void reportCameraResolution(Histogram histogram, Size resolution) {
+ int index = COMMON_RESOLUTIONS.indexOf(resolution);
+ // 0 is reserved for unknown resolution, so add 1.
+ // indexOf returns -1 for unknown resolutions so it becomes 0 automatically.
+ histogram.addSample(index + 1);
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraEnumerator.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraEnumerator.java
new file mode 100644
index 00000000..dc954b62
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraEnumerator.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+import java.util.List;
+
+public interface CameraEnumerator {
+ public String[] getDeviceNames();
+ public boolean isFrontFacing(String deviceName);
+ public boolean isBackFacing(String deviceName);
+ public List<CaptureFormat> getSupportedFormats(String deviceName);
+
+ public CameraVideoCapturer createCapturer(
+ String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler);
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraSession.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraSession.java
new file mode 100644
index 00000000..8d137854
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraSession.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.graphics.Matrix;
+import android.view.WindowManager;
+import android.view.Surface;
+
+interface CameraSession {
+ enum FailureType { ERROR, DISCONNECTED }
+
+ // Callbacks are fired on the camera thread.
+ interface CreateSessionCallback {
+ void onDone(CameraSession session);
+ void onFailure(FailureType failureType, String error);
+ }
+
+ // Events are fired on the camera thread.
+ interface Events {
+ void onCameraOpening();
+ void onCameraError(CameraSession session, String error);
+ void onCameraDisconnected(CameraSession session);
+ void onCameraClosed(CameraSession session);
+ void onFrameCaptured(CameraSession session, VideoFrame frame);
+ }
+
+ /**
+ * Stops the capture. Waits until no more calls to capture observer will be made.
+ * If waitCameraStop is true, also waits for the camera to stop.
+ */
+ void stop();
+
+ static int getDeviceOrientation(Context context) {
+ final WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
+ switch (wm.getDefaultDisplay().getRotation()) {
+ case Surface.ROTATION_90:
+ return 90;
+ case Surface.ROTATION_180:
+ return 180;
+ case Surface.ROTATION_270:
+ return 270;
+ case Surface.ROTATION_0:
+ default:
+ return 0;
+ }
+ }
+
+ static VideoFrame.TextureBuffer createTextureBufferWithModifiedTransformMatrix(
+ TextureBufferImpl buffer, boolean mirror, int rotation) {
+ final Matrix transformMatrix = new Matrix();
+ // Perform mirror and rotation around (0.5, 0.5) since that is the center of the texture.
+ transformMatrix.preTranslate(/* dx= */ 0.5f, /* dy= */ 0.5f);
+ if (mirror) {
+ transformMatrix.preScale(/* sx= */ -1f, /* sy= */ 1f);
+ }
+ transformMatrix.preRotate(rotation);
+ transformMatrix.preTranslate(/* dx= */ -0.5f, /* dy= */ -0.5f);
+
+ // The width and height are not affected by rotation since Camera2Session has set them to the
+ // value they should be after undoing the rotation.
+ return buffer.applyTransformMatrix(transformMatrix, buffer.getWidth(), buffer.getHeight());
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraVideoCapturer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraVideoCapturer.java
new file mode 100644
index 00000000..ec26868b
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraVideoCapturer.java
@@ -0,0 +1,172 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaRecorder;
+
+/**
+ * Base interface for camera1 and camera2 implementations. Extends VideoCapturer with a
+ * switchCamera() function. Also provides subinterfaces for handling camera events, and a helper
+ * class for detecting camera freezes.
+ */
+public interface CameraVideoCapturer extends VideoCapturer {
+ /**
+ * Camera events handler - can be used to be notified about camera events. The callbacks are
+ * executed from an arbitrary thread.
+ */
+ public interface CameraEventsHandler {
+ // Camera error handler - invoked when camera can not be opened
+ // or any camera exception happens on camera thread.
+ void onCameraError(String errorDescription);
+
+ // Called when camera is disconnected.
+ void onCameraDisconnected();
+
+ // Invoked when camera stops receiving frames.
+ void onCameraFreezed(String errorDescription);
+
+ // Callback invoked when camera is opening.
+ void onCameraOpening(String cameraName);
+
+ // Callback invoked when first camera frame is available after camera is started.
+ void onFirstFrameAvailable();
+
+ // Callback invoked when camera is closed.
+ void onCameraClosed();
+ }
+
+ /**
+ * Camera switch handler - one of these functions are invoked with the result of switchCamera().
+ * The callback may be called on an arbitrary thread.
+ */
+ public interface CameraSwitchHandler {
+ // Invoked on success. `isFrontCamera` is true if the new camera is front facing.
+ void onCameraSwitchDone(boolean isFrontCamera);
+
+ // Invoked on failure, e.g. camera is stopped or only one camera available.
+ void onCameraSwitchError(String errorDescription);
+ }
+
+ /**
+ * Switch camera to the next valid camera id. This can only be called while the camera is running.
+ * This function can be called from any thread.
+ */
+ void switchCamera(CameraSwitchHandler switchEventsHandler);
+
+ /**
+ * Switch camera to the specified camera id. This can only be called while the camera is running.
+ * This function can be called from any thread.
+ */
+ void switchCamera(CameraSwitchHandler switchEventsHandler, String cameraName);
+
+ /**
+ * MediaRecorder add/remove handler - one of these functions are invoked with the result of
+ * addMediaRecorderToCamera() or removeMediaRecorderFromCamera calls.
+ * The callback may be called on an arbitrary thread.
+ */
+ @Deprecated
+ public interface MediaRecorderHandler {
+ // Invoked on success.
+ void onMediaRecorderSuccess();
+
+ // Invoked on failure, e.g. camera is stopped or any exception happens.
+ void onMediaRecorderError(String errorDescription);
+ }
+
+ /**
+ * Add MediaRecorder to camera pipeline. This can only be called while the camera is running.
+ * Once MediaRecorder is added to camera pipeline camera switch is not allowed.
+ * This function can be called from any thread.
+ */
+ @Deprecated
+ default void addMediaRecorderToCamera(
+ MediaRecorder mediaRecorder, MediaRecorderHandler resultHandler) {
+ throw new UnsupportedOperationException("Deprecated and not implemented.");
+ }
+
+ /**
+ * Remove MediaRecorder from camera pipeline. This can only be called while the camera is running.
+ * This function can be called from any thread.
+ */
+ @Deprecated
+ default void removeMediaRecorderFromCamera(MediaRecorderHandler resultHandler) {
+ throw new UnsupportedOperationException("Deprecated and not implemented.");
+ }
+
+ /**
+ * Helper class to log framerate and detect if the camera freezes. It will run periodic callbacks
+ * on the SurfaceTextureHelper thread passed in the ctor, and should only be operated from that
+ * thread.
+ */
+ public static class CameraStatistics {
+ private final static String TAG = "CameraStatistics";
+ private final static int CAMERA_OBSERVER_PERIOD_MS = 2000;
+ private final static int CAMERA_FREEZE_REPORT_TIMOUT_MS = 4000;
+
+ private final SurfaceTextureHelper surfaceTextureHelper;
+ private final CameraEventsHandler eventsHandler;
+ private int frameCount;
+ private int freezePeriodCount;
+ // Camera observer - monitors camera framerate. Observer is executed on camera thread.
+ private final Runnable cameraObserver = new Runnable() {
+ @Override
+ public void run() {
+ final int cameraFps = Math.round(frameCount * 1000.0f / CAMERA_OBSERVER_PERIOD_MS);
+ Logging.d(TAG, "Camera fps: " + cameraFps + ".");
+ if (frameCount == 0) {
+ ++freezePeriodCount;
+ if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount >= CAMERA_FREEZE_REPORT_TIMOUT_MS
+ && eventsHandler != null) {
+ Logging.e(TAG, "Camera freezed.");
+ if (surfaceTextureHelper.isTextureInUse()) {
+ // This can only happen if we are capturing to textures.
+ eventsHandler.onCameraFreezed("Camera failure. Client must return video buffers.");
+ } else {
+ eventsHandler.onCameraFreezed("Camera failure.");
+ }
+ return;
+ }
+ } else {
+ freezePeriodCount = 0;
+ }
+ frameCount = 0;
+ surfaceTextureHelper.getHandler().postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
+ }
+ };
+
+ public CameraStatistics(
+ SurfaceTextureHelper surfaceTextureHelper, CameraEventsHandler eventsHandler) {
+ if (surfaceTextureHelper == null) {
+ throw new IllegalArgumentException("SurfaceTextureHelper is null");
+ }
+ this.surfaceTextureHelper = surfaceTextureHelper;
+ this.eventsHandler = eventsHandler;
+ this.frameCount = 0;
+ this.freezePeriodCount = 0;
+ surfaceTextureHelper.getHandler().postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS);
+ }
+
+ private void checkThread() {
+ if (Thread.currentThread() != surfaceTextureHelper.getHandler().getLooper().getThread()) {
+ throw new IllegalStateException("Wrong thread");
+ }
+ }
+
+ public void addFrame() {
+ checkThread();
+ ++frameCount;
+ }
+
+ public void release() {
+ surfaceTextureHelper.getHandler().removeCallbacks(cameraObserver);
+ }
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/CandidatePairChangeEvent.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/CandidatePairChangeEvent.java
new file mode 100644
index 00000000..b8e6685a
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/CandidatePairChangeEvent.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Representation of a change in selected ICE candidate pair.
+ * {@code CandidatePairChangeEvent} in the C++ API.
+ */
+public final class CandidatePairChangeEvent {
+ public final IceCandidate local;
+ public final IceCandidate remote;
+ public final int lastDataReceivedMs;
+ public final String reason;
+
+ /**
+ * An estimate from the ICE stack on how long it was disconnected before
+ * changing to the new candidate pair in this event.
+ * The first time an candidate pair is signaled the value will be 0.
+ */
+ public final int estimatedDisconnectedTimeMs;
+
+ @CalledByNative
+ CandidatePairChangeEvent(IceCandidate local, IceCandidate remote, int lastDataReceivedMs,
+ String reason, int estimatedDisconnectedTimeMs) {
+ this.local = local;
+ this.remote = remote;
+ this.lastDataReceivedMs = lastDataReceivedMs;
+ this.reason = reason;
+ this.estimatedDisconnectedTimeMs = estimatedDisconnectedTimeMs;
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/CapturerObserver.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/CapturerObserver.java
new file mode 100644
index 00000000..382dc15b
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/CapturerObserver.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Interface for observing a capturer. Passed to {@link VideoCapturer#initialize}. Provided by
+ * {@link VideoSource#getCapturerObserver}.
+ *
+ * All callbacks must be executed on a single thread.
+ */
+public interface CapturerObserver {
+ /** Notify if the capturer have been started successfully or not. */
+ void onCapturerStarted(boolean success);
+ /** Notify that the capturer has been stopped. */
+ void onCapturerStopped();
+
+ /** Delivers a captured frame. */
+ void onFrameCaptured(VideoFrame frame);
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/ContextUtils.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/ContextUtils.java
new file mode 100644
index 00000000..e36ab728
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/ContextUtils.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+
+/**
+ * Class for storing the application context and retrieving it in a static context. Similar to
+ * org.chromium.base.ContextUtils.
+ */
+public class ContextUtils {
+ private static final String TAG = "ContextUtils";
+ private static Context applicationContext;
+
+ /**
+ * Stores the application context that will be returned by getApplicationContext. This is called
+ * by PeerConnectionFactory.initialize. The application context must be set before creating
+ * a PeerConnectionFactory and must not be modified while it is alive.
+ */
+ public static void initialize(Context applicationContext) {
+ if (applicationContext == null) {
+ throw new IllegalArgumentException(
+ "Application context cannot be null for ContextUtils.initialize.");
+ }
+ ContextUtils.applicationContext = applicationContext;
+ }
+
+ /**
+ * Returns the stored application context.
+ *
+ * @deprecated crbug.com/webrtc/8937
+ */
+ @Deprecated
+ public static Context getApplicationContext() {
+ return applicationContext;
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/CryptoOptions.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/CryptoOptions.java
new file mode 100644
index 00000000..6e06bc64
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/CryptoOptions.java
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * CryptoOptions defines advanced cryptographic settings for native WebRTC.
+ * These settings must be passed into RTCConfiguration. WebRTC is secure by
+ * default and you should not need to set any of these options unless you are
+ * specifically looking for an additional crypto feature such as AES_GCM
+ * support. This class is the Java binding of native api/crypto/cryptooptions.h
+ */
+public final class CryptoOptions {
+ /**
+ * SRTP Related Peer Connection Options.
+ */
+ public final class Srtp {
+ /**
+ * Enable GCM crypto suites from RFC 7714 for SRTP. GCM will only be used
+ * if both sides enable it
+ */
+ private final boolean enableGcmCryptoSuites;
+ /**
+ * If set to true, the (potentially insecure) crypto cipher
+ * kSrtpAes128CmSha1_32 will be included in the list of supported ciphers
+ * during negotiation. It will only be used if both peers support it and no
+ * other ciphers get preferred.
+ */
+ private final boolean enableAes128Sha1_32CryptoCipher;
+ /**
+ * If set to true, encrypted RTP header extensions as defined in RFC 6904
+ * will be negotiated. They will only be used if both peers support them.
+ */
+ private final boolean enableEncryptedRtpHeaderExtensions;
+
+ private Srtp(boolean enableGcmCryptoSuites, boolean enableAes128Sha1_32CryptoCipher,
+ boolean enableEncryptedRtpHeaderExtensions) {
+ this.enableGcmCryptoSuites = enableGcmCryptoSuites;
+ this.enableAes128Sha1_32CryptoCipher = enableAes128Sha1_32CryptoCipher;
+ this.enableEncryptedRtpHeaderExtensions = enableEncryptedRtpHeaderExtensions;
+ }
+
+ @CalledByNative("Srtp")
+ public boolean getEnableGcmCryptoSuites() {
+ return enableGcmCryptoSuites;
+ }
+
+ @CalledByNative("Srtp")
+ public boolean getEnableAes128Sha1_32CryptoCipher() {
+ return enableAes128Sha1_32CryptoCipher;
+ }
+
+ @CalledByNative("Srtp")
+ public boolean getEnableEncryptedRtpHeaderExtensions() {
+ return enableEncryptedRtpHeaderExtensions;
+ }
+ }
+
+ /**
+ * Options to be used when the FrameEncryptor / FrameDecryptor APIs are used.
+ */
+ public final class SFrame {
+ /**
+ * If set all RtpSenders must have an FrameEncryptor attached to them before
+ * they are allowed to send packets. All RtpReceivers must have a
+ * FrameDecryptor attached to them before they are able to receive packets.
+ */
+ private final boolean requireFrameEncryption;
+
+ private SFrame(boolean requireFrameEncryption) {
+ this.requireFrameEncryption = requireFrameEncryption;
+ }
+
+ @CalledByNative("SFrame")
+ public boolean getRequireFrameEncryption() {
+ return requireFrameEncryption;
+ }
+ }
+
+ private final Srtp srtp;
+ private final SFrame sframe;
+
+ private CryptoOptions(boolean enableGcmCryptoSuites, boolean enableAes128Sha1_32CryptoCipher,
+ boolean enableEncryptedRtpHeaderExtensions, boolean requireFrameEncryption) {
+ this.srtp = new Srtp(
+ enableGcmCryptoSuites, enableAes128Sha1_32CryptoCipher, enableEncryptedRtpHeaderExtensions);
+ this.sframe = new SFrame(requireFrameEncryption);
+ }
+
+ public static Builder builder() {
+ return new Builder();
+ }
+
+ @CalledByNative
+ public Srtp getSrtp() {
+ return srtp;
+ }
+
+ @CalledByNative
+ public SFrame getSFrame() {
+ return sframe;
+ }
+
+ public static class Builder {
+ private boolean enableGcmCryptoSuites;
+ private boolean enableAes128Sha1_32CryptoCipher;
+ private boolean enableEncryptedRtpHeaderExtensions;
+ private boolean requireFrameEncryption;
+
+ private Builder() {}
+
+ public Builder setEnableGcmCryptoSuites(boolean enableGcmCryptoSuites) {
+ this.enableGcmCryptoSuites = enableGcmCryptoSuites;
+ return this;
+ }
+
+ public Builder setEnableAes128Sha1_32CryptoCipher(boolean enableAes128Sha1_32CryptoCipher) {
+ this.enableAes128Sha1_32CryptoCipher = enableAes128Sha1_32CryptoCipher;
+ return this;
+ }
+
+ public Builder setEnableEncryptedRtpHeaderExtensions(
+ boolean enableEncryptedRtpHeaderExtensions) {
+ this.enableEncryptedRtpHeaderExtensions = enableEncryptedRtpHeaderExtensions;
+ return this;
+ }
+
+ public Builder setRequireFrameEncryption(boolean requireFrameEncryption) {
+ this.requireFrameEncryption = requireFrameEncryption;
+ return this;
+ }
+
+ public CryptoOptions createCryptoOptions() {
+ return new CryptoOptions(enableGcmCryptoSuites, enableAes128Sha1_32CryptoCipher,
+ enableEncryptedRtpHeaderExtensions, requireFrameEncryption);
+ }
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/DataChannel.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/DataChannel.java
new file mode 100644
index 00000000..b9301f1f
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/DataChannel.java
@@ -0,0 +1,196 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+/** Java wrapper for a C++ DataChannelInterface. */
+public class DataChannel {
+ /** Java wrapper for WebIDL RTCDataChannel. */
+ public static class Init {
+ public boolean ordered = true;
+ // Optional unsigned short in WebIDL, -1 means unspecified.
+ public int maxRetransmitTimeMs = -1;
+ // Optional unsigned short in WebIDL, -1 means unspecified.
+ public int maxRetransmits = -1;
+ public String protocol = "";
+ public boolean negotiated;
+ // Optional unsigned short in WebIDL, -1 means unspecified.
+ public int id = -1;
+
+ @CalledByNative("Init")
+ boolean getOrdered() {
+ return ordered;
+ }
+
+ @CalledByNative("Init")
+ int getMaxRetransmitTimeMs() {
+ return maxRetransmitTimeMs;
+ }
+
+ @CalledByNative("Init")
+ int getMaxRetransmits() {
+ return maxRetransmits;
+ }
+
+ @CalledByNative("Init")
+ String getProtocol() {
+ return protocol;
+ }
+
+ @CalledByNative("Init")
+ boolean getNegotiated() {
+ return negotiated;
+ }
+
+ @CalledByNative("Init")
+ int getId() {
+ return id;
+ }
+ }
+
+ /** Java version of C++ DataBuffer. The atom of data in a DataChannel. */
+ public static class Buffer {
+ /** The underlying data. */
+ public final ByteBuffer data;
+
+ /**
+ * Indicates whether `data` contains UTF-8 text or "binary data"
+ * (i.e. anything else).
+ */
+ public final boolean binary;
+
+ @CalledByNative("Buffer")
+ public Buffer(ByteBuffer data, boolean binary) {
+ this.data = data;
+ this.binary = binary;
+ }
+ }
+
+ /** Java version of C++ DataChannelObserver. */
+ public interface Observer {
+ /** The data channel's bufferedAmount has changed. */
+ @CalledByNative("Observer") public void onBufferedAmountChange(long previousAmount);
+ /** The data channel state has changed. */
+ @CalledByNative("Observer") public void onStateChange();
+ /**
+ * A data buffer was successfully received. NOTE: `buffer.data` will be
+ * freed once this function returns so callers who want to use the data
+ * asynchronously must make sure to copy it first.
+ */
+ @CalledByNative("Observer") public void onMessage(Buffer buffer);
+ }
+
+ /** Keep in sync with DataChannelInterface::DataState. */
+ public enum State {
+ CONNECTING,
+ OPEN,
+ CLOSING,
+ CLOSED;
+
+ @CalledByNative("State")
+ static State fromNativeIndex(int nativeIndex) {
+ return values()[nativeIndex];
+ }
+ }
+
+ private long nativeDataChannel;
+ private long nativeObserver;
+
+ @CalledByNative
+ public DataChannel(long nativeDataChannel) {
+ this.nativeDataChannel = nativeDataChannel;
+ }
+
+ /** Register `observer`, replacing any previously-registered observer. */
+ public void registerObserver(Observer observer) {
+ checkDataChannelExists();
+ if (nativeObserver != 0) {
+ nativeUnregisterObserver(nativeObserver);
+ }
+ nativeObserver = nativeRegisterObserver(observer);
+ }
+
+ /** Unregister the (only) observer. */
+ public void unregisterObserver() {
+ checkDataChannelExists();
+ nativeUnregisterObserver(nativeObserver);
+ nativeObserver = 0;
+ }
+
+ public String label() {
+ checkDataChannelExists();
+ return nativeLabel();
+ }
+
+ public int id() {
+ checkDataChannelExists();
+ return nativeId();
+ }
+
+ public State state() {
+ checkDataChannelExists();
+ return nativeState();
+ }
+
+ /**
+ * Return the number of bytes of application data (UTF-8 text and binary data)
+ * that have been queued using SendBuffer but have not yet been transmitted
+ * to the network.
+ */
+ public long bufferedAmount() {
+ checkDataChannelExists();
+ return nativeBufferedAmount();
+ }
+
+ /** Close the channel. */
+ public void close() {
+ checkDataChannelExists();
+ nativeClose();
+ }
+
+ /** Send `data` to the remote peer; return success. */
+ public boolean send(Buffer buffer) {
+ checkDataChannelExists();
+ // TODO(fischman): this could be cleverer about avoiding copies if the
+ // ByteBuffer is direct and/or is backed by an array.
+ byte[] data = new byte[buffer.data.remaining()];
+ buffer.data.get(data);
+ return nativeSend(data, buffer.binary);
+ }
+
+ /** Dispose of native resources attached to this channel. */
+ public void dispose() {
+ checkDataChannelExists();
+ JniCommon.nativeReleaseRef(nativeDataChannel);
+ nativeDataChannel = 0;
+ }
+
+ @CalledByNative
+ long getNativeDataChannel() {
+ return nativeDataChannel;
+ }
+
+ private void checkDataChannelExists() {
+ if (nativeDataChannel == 0) {
+ throw new IllegalStateException("DataChannel has been disposed.");
+ }
+ }
+
+ private native long nativeRegisterObserver(Observer observer);
+ private native void nativeUnregisterObserver(long observer);
+ private native String nativeLabel();
+ private native int nativeId();
+ private native State nativeState();
+ private native long nativeBufferedAmount();
+ private native void nativeClose();
+ private native boolean nativeSend(byte[] data, boolean binary);
+};
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Dav1dDecoder.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Dav1dDecoder.java
new file mode 100644
index 00000000..ecb16bc3
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Dav1dDecoder.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class Dav1dDecoder extends WrappedNativeVideoDecoder {
+ @Override
+ public long createNativeVideoDecoder() {
+ return nativeCreateDecoder();
+ }
+
+ static native long nativeCreateDecoder();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/DefaultVideoDecoderFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/DefaultVideoDecoderFactory.java
new file mode 100644
index 00000000..d7a8694d
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/DefaultVideoDecoderFactory.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+import java.util.LinkedHashSet;
+
+/**
+ * Helper class that combines HW and SW decoders.
+ */
+public class DefaultVideoDecoderFactory implements VideoDecoderFactory {
+ private final VideoDecoderFactory hardwareVideoDecoderFactory;
+ private final VideoDecoderFactory softwareVideoDecoderFactory = new SoftwareVideoDecoderFactory();
+ private final @Nullable VideoDecoderFactory platformSoftwareVideoDecoderFactory;
+
+ /**
+ * Create decoder factory using default hardware decoder factory.
+ */
+ public DefaultVideoDecoderFactory(@Nullable EglBase.Context eglContext) {
+ this.hardwareVideoDecoderFactory = new HardwareVideoDecoderFactory(eglContext);
+ this.platformSoftwareVideoDecoderFactory = new PlatformSoftwareVideoDecoderFactory(eglContext);
+ }
+
+ /**
+ * Create decoder factory using explicit hardware decoder factory.
+ */
+ DefaultVideoDecoderFactory(VideoDecoderFactory hardwareVideoDecoderFactory) {
+ this.hardwareVideoDecoderFactory = hardwareVideoDecoderFactory;
+ this.platformSoftwareVideoDecoderFactory = null;
+ }
+
+ @Override
+ public @Nullable VideoDecoder createDecoder(VideoCodecInfo codecType) {
+ VideoDecoder softwareDecoder = softwareVideoDecoderFactory.createDecoder(codecType);
+ final VideoDecoder hardwareDecoder = hardwareVideoDecoderFactory.createDecoder(codecType);
+ if (softwareDecoder == null && platformSoftwareVideoDecoderFactory != null) {
+ softwareDecoder = platformSoftwareVideoDecoderFactory.createDecoder(codecType);
+ }
+ if (hardwareDecoder != null && softwareDecoder != null) {
+ // Both hardware and software supported, wrap it in a software fallback
+ return new VideoDecoderFallback(
+ /* fallback= */ softwareDecoder, /* primary= */ hardwareDecoder);
+ }
+ return hardwareDecoder != null ? hardwareDecoder : softwareDecoder;
+ }
+
+ @Override
+ public VideoCodecInfo[] getSupportedCodecs() {
+ LinkedHashSet<VideoCodecInfo> supportedCodecInfos = new LinkedHashSet<>();
+
+ supportedCodecInfos.addAll(Arrays.asList(softwareVideoDecoderFactory.getSupportedCodecs()));
+ supportedCodecInfos.addAll(Arrays.asList(hardwareVideoDecoderFactory.getSupportedCodecs()));
+ if (platformSoftwareVideoDecoderFactory != null) {
+ supportedCodecInfos.addAll(
+ Arrays.asList(platformSoftwareVideoDecoderFactory.getSupportedCodecs()));
+ }
+
+ return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]);
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/DefaultVideoEncoderFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/DefaultVideoEncoderFactory.java
new file mode 100644
index 00000000..76896b6b
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/DefaultVideoEncoderFactory.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+import java.util.LinkedHashSet;
+
+/** Helper class that combines HW and SW encoders. */
+public class DefaultVideoEncoderFactory implements VideoEncoderFactory {
+ private final VideoEncoderFactory hardwareVideoEncoderFactory;
+ private final VideoEncoderFactory softwareVideoEncoderFactory = new SoftwareVideoEncoderFactory();
+
+ /** Create encoder factory using default hardware encoder factory. */
+ public DefaultVideoEncoderFactory(
+ EglBase.Context eglContext, boolean enableIntelVp8Encoder, boolean enableH264HighProfile) {
+ this.hardwareVideoEncoderFactory =
+ new HardwareVideoEncoderFactory(eglContext, enableIntelVp8Encoder, enableH264HighProfile);
+ }
+
+ /** Create encoder factory using explicit hardware encoder factory. */
+ DefaultVideoEncoderFactory(VideoEncoderFactory hardwareVideoEncoderFactory) {
+ this.hardwareVideoEncoderFactory = hardwareVideoEncoderFactory;
+ }
+
+ @Nullable
+ @Override
+ public VideoEncoder createEncoder(VideoCodecInfo info) {
+ final VideoEncoder softwareEncoder = softwareVideoEncoderFactory.createEncoder(info);
+ final VideoEncoder hardwareEncoder = hardwareVideoEncoderFactory.createEncoder(info);
+ if (hardwareEncoder != null && softwareEncoder != null) {
+ // Both hardware and software supported, wrap it in a software fallback
+ return new VideoEncoderFallback(
+ /* fallback= */ softwareEncoder, /* primary= */ hardwareEncoder);
+ }
+ return hardwareEncoder != null ? hardwareEncoder : softwareEncoder;
+ }
+
+ @Override
+ public VideoCodecInfo[] getSupportedCodecs() {
+ LinkedHashSet<VideoCodecInfo> supportedCodecInfos = new LinkedHashSet<>();
+
+ supportedCodecInfos.addAll(Arrays.asList(softwareVideoEncoderFactory.getSupportedCodecs()));
+ supportedCodecInfos.addAll(Arrays.asList(hardwareVideoEncoderFactory.getSupportedCodecs()));
+
+ return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]);
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/DtmfSender.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/DtmfSender.java
new file mode 100644
index 00000000..65498230
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/DtmfSender.java
@@ -0,0 +1,96 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ DtmfSenderInterface. */
+public class DtmfSender {
+ private long nativeDtmfSender;
+
+ public DtmfSender(long nativeDtmfSender) {
+ this.nativeDtmfSender = nativeDtmfSender;
+ }
+
+ /**
+ * @return true if this DtmfSender is capable of sending DTMF. Otherwise false.
+ */
+ public boolean canInsertDtmf() {
+ checkDtmfSenderExists();
+ return nativeCanInsertDtmf(nativeDtmfSender);
+ }
+
+ /**
+ * Queues a task that sends the provided DTMF tones.
+ *
+ * If insertDtmf is called on the same object while an existing task for this
+ * object to generate DTMF is still running, the previous task is canceled.
+ *
+ * @param tones This parameter is treated as a series of characters. The characters 0
+ * through 9, A through D, #, and * generate the associated DTMF tones. The
+ * characters a to d are equivalent to A to D. The character ',' indicates a
+ * delay of 2 seconds before processing the next character in the tones
+ * parameter. Unrecognized characters are ignored.
+ * @param duration Indicates the duration in ms to use for each character passed in the tones
+ * parameter. The duration cannot be more than 6000 or less than 70.
+ * @param interToneGap Indicates the gap between tones in ms. Must be at least 50 ms but should be
+ * as short as possible.
+ * @return true on success and false on failure.
+ */
+ public boolean insertDtmf(String tones, int duration, int interToneGap) {
+ checkDtmfSenderExists();
+ return nativeInsertDtmf(nativeDtmfSender, tones, duration, interToneGap);
+ }
+
+ /**
+ * @return The tones remaining to be played out
+ */
+ public String tones() {
+ checkDtmfSenderExists();
+ return nativeTones(nativeDtmfSender);
+ }
+
+ /**
+ * @return The current tone duration value in ms. This value will be the value last set via the
+ * insertDtmf() method, or the default value of 100 ms if insertDtmf() was never called.
+ */
+ public int duration() {
+ checkDtmfSenderExists();
+ return nativeDuration(nativeDtmfSender);
+ }
+
+ /**
+ * @return The current value of the between-tone gap in ms. This value will be the value last set
+ * via the insertDtmf() method, or the default value of 50 ms if insertDtmf() was never
+ * called.
+ */
+ public int interToneGap() {
+ checkDtmfSenderExists();
+ return nativeInterToneGap(nativeDtmfSender);
+ }
+
+ public void dispose() {
+ checkDtmfSenderExists();
+ JniCommon.nativeReleaseRef(nativeDtmfSender);
+ nativeDtmfSender = 0;
+ }
+
+ private void checkDtmfSenderExists() {
+ if (nativeDtmfSender == 0) {
+ throw new IllegalStateException("DtmfSender has been disposed.");
+ }
+ }
+
+ private static native boolean nativeCanInsertDtmf(long dtmfSender);
+ private static native boolean nativeInsertDtmf(
+ long dtmfSender, String tones, int duration, int interToneGap);
+ private static native String nativeTones(long dtmfSender);
+ private static native int nativeDuration(long dtmfSender);
+ private static native int nativeInterToneGap(long dtmfSender);
+};
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/DynamicBitrateAdjuster.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/DynamicBitrateAdjuster.java
new file mode 100644
index 00000000..96a15bbf
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/DynamicBitrateAdjuster.java
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * BitrateAdjuster that tracks the bandwidth produced by an encoder and dynamically adjusts the
+ * bitrate. Used for hardware codecs that pay attention to framerate but still deviate from the
+ * target bitrate by unacceptable margins.
+ */
+class DynamicBitrateAdjuster extends BaseBitrateAdjuster {
+ // Change the bitrate at most once every three seconds.
+ private static final double BITRATE_ADJUSTMENT_SEC = 3.0;
+ // Maximum bitrate adjustment scale - no more than 4 times.
+ private static final double BITRATE_ADJUSTMENT_MAX_SCALE = 4;
+ // Amount of adjustment steps to reach maximum scale.
+ private static final int BITRATE_ADJUSTMENT_STEPS = 20;
+
+ private static final double BITS_PER_BYTE = 8.0;
+
+ // How far the codec has deviated above (or below) the target bitrate (tracked in bytes).
+ private double deviationBytes;
+ private double timeSinceLastAdjustmentMs;
+ private int bitrateAdjustmentScaleExp;
+
+ @Override
+ public void setTargets(int targetBitrateBps, double targetFramerateFps) {
+ if (this.targetBitrateBps > 0 && targetBitrateBps < this.targetBitrateBps) {
+ // Rescale the accumulator level if the accumulator max decreases
+ deviationBytes = deviationBytes * targetBitrateBps / this.targetBitrateBps;
+ }
+ super.setTargets(targetBitrateBps, targetFramerateFps);
+ }
+
+ @Override
+ public void reportEncodedFrame(int size) {
+ if (targetFramerateFps == 0) {
+ return;
+ }
+
+ // Accumulate the difference between actual and expected frame sizes.
+ double expectedBytesPerFrame = (targetBitrateBps / BITS_PER_BYTE) / targetFramerateFps;
+ deviationBytes += (size - expectedBytesPerFrame);
+ timeSinceLastAdjustmentMs += 1000.0 / targetFramerateFps;
+
+ // Adjust the bitrate when the encoder accumulates one second's worth of data in excess or
+ // shortfall of the target.
+ double deviationThresholdBytes = targetBitrateBps / BITS_PER_BYTE;
+
+ // Cap the deviation, i.e., don't let it grow beyond some level to avoid using too old data for
+ // bitrate adjustment. This also prevents taking more than 3 "steps" in a given 3-second cycle.
+ double deviationCap = BITRATE_ADJUSTMENT_SEC * deviationThresholdBytes;
+ deviationBytes = Math.min(deviationBytes, deviationCap);
+ deviationBytes = Math.max(deviationBytes, -deviationCap);
+
+ // Do bitrate adjustment every 3 seconds if actual encoder bitrate deviates too much
+ // from the target value.
+ if (timeSinceLastAdjustmentMs <= 1000 * BITRATE_ADJUSTMENT_SEC) {
+ return;
+ }
+
+ if (deviationBytes > deviationThresholdBytes) {
+ // Encoder generates too high bitrate - need to reduce the scale.
+ int bitrateAdjustmentInc = (int) (deviationBytes / deviationThresholdBytes + 0.5);
+ bitrateAdjustmentScaleExp -= bitrateAdjustmentInc;
+ // Don't let the adjustment scale drop below -BITRATE_ADJUSTMENT_STEPS.
+ // This sets a minimum exponent of -1 (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS).
+ bitrateAdjustmentScaleExp = Math.max(bitrateAdjustmentScaleExp, -BITRATE_ADJUSTMENT_STEPS);
+ deviationBytes = deviationThresholdBytes;
+ } else if (deviationBytes < -deviationThresholdBytes) {
+ // Encoder generates too low bitrate - need to increase the scale.
+ int bitrateAdjustmentInc = (int) (-deviationBytes / deviationThresholdBytes + 0.5);
+ bitrateAdjustmentScaleExp += bitrateAdjustmentInc;
+ // Don't let the adjustment scale exceed BITRATE_ADJUSTMENT_STEPS.
+ // This sets a maximum exponent of 1 (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS).
+ bitrateAdjustmentScaleExp = Math.min(bitrateAdjustmentScaleExp, BITRATE_ADJUSTMENT_STEPS);
+ deviationBytes = -deviationThresholdBytes;
+ }
+ timeSinceLastAdjustmentMs = 0;
+ }
+
+ private double getBitrateAdjustmentScale() {
+ return Math.pow(BITRATE_ADJUSTMENT_MAX_SCALE,
+ (double) bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS);
+ }
+
+ @Override
+ public int getAdjustedBitrateBps() {
+ return (int) (targetBitrateBps * getBitrateAdjustmentScale());
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase.java
new file mode 100644
index 00000000..3b45e357
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase.java
@@ -0,0 +1,305 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import javax.microedition.khronos.egl.EGL10;
+
+/**
+ * Holds EGL state and utility methods for handling an egl 1.0 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+public interface EglBase {
+ // EGL wrapper for an actual EGLContext.
+ public interface Context {
+ public final static long NO_CONTEXT = 0;
+
+ /**
+ * Returns an EGL context that can be used by native code. Returns NO_CONTEXT if the method is
+ * unsupported.
+ *
+ * @note This is currently only supported for EGL 1.4 and not for EGL 1.0.
+ */
+ long getNativeEglContext();
+ }
+
+ /**
+ * Wraps the objects needed to interact with EGL that are independent of a particular EGLSurface.
+ * In practice this means EGLContext, EGLDisplay and EGLConfig objects. Separating them out in a
+ * standalone object allows for multiple EglBase instances to use the same underlying EGLContext,
+ * while still operating on their own EGLSurface.
+ */
+ public interface EglConnection extends RefCounted {
+ /** Analogous to corresponding EglBase#create below. */
+ public static EglConnection create(@Nullable Context sharedContext, int[] configAttributes) {
+ if (sharedContext == null) {
+ return EglConnection.createEgl14(configAttributes);
+ } else if (sharedContext instanceof EglBase14.Context) {
+ return new EglBase14Impl.EglConnection(
+ ((EglBase14.Context) sharedContext).getRawContext(), configAttributes);
+ } else if (sharedContext instanceof EglBase10.Context) {
+ return new EglBase10Impl.EglConnection(
+ ((EglBase10.Context) sharedContext).getRawContext(), configAttributes);
+ }
+ throw new IllegalArgumentException("Unrecognized Context");
+ }
+
+ /** Analogous to corresponding EglBase#createEgl10 below. */
+ public static EglConnection createEgl10(int[] configAttributes) {
+ return new EglBase10Impl.EglConnection(/* sharedContext= */ null, configAttributes);
+ }
+
+ /** Analogous to corresponding EglBase#createEgl14 below. */
+ public static EglConnection createEgl14(int[] configAttributes) {
+ return new EglBase14Impl.EglConnection(/* sharedContext= */ null, configAttributes);
+ }
+ }
+
+ // According to the documentation, EGL can be used from multiple threads at the same time if each
+ // thread has its own EGLContext, but in practice it deadlocks on some devices when doing this.
+ // Therefore, synchronize on this global lock before calling dangerous EGL functions that might
+ // deadlock. See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
+ public static final Object lock = new Object();
+
+ // These constants are taken from EGL14.EGL_OPENGL_ES2_BIT and EGL14.EGL_CONTEXT_CLIENT_VERSION.
+ // https://android.googlesource.com/platform/frameworks/base/+/master/opengl/java/android/opengl/EGL14.java
+ // This is similar to how GlSurfaceView does:
+ // http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/opengl/GLSurfaceView.java#760
+ public static final int EGL_OPENGL_ES2_BIT = 4;
+ public static final int EGL_OPENGL_ES3_BIT = 0x40;
+ // Android-specific extension.
+ public static final int EGL_RECORDABLE_ANDROID = 0x3142;
+
+ public static ConfigBuilder configBuilder() {
+ return new ConfigBuilder();
+ }
+
+ public static class ConfigBuilder {
+ private int openGlesVersion = 2;
+ private boolean hasAlphaChannel;
+ private boolean supportsPixelBuffer;
+ private boolean isRecordable;
+
+ public ConfigBuilder setOpenGlesVersion(int version) {
+ if (version < 1 || version > 3) {
+ throw new IllegalArgumentException("OpenGL ES version " + version + " not supported");
+ }
+ this.openGlesVersion = version;
+ return this;
+ }
+
+ public ConfigBuilder setHasAlphaChannel(boolean hasAlphaChannel) {
+ this.hasAlphaChannel = hasAlphaChannel;
+ return this;
+ }
+
+ public ConfigBuilder setSupportsPixelBuffer(boolean supportsPixelBuffer) {
+ this.supportsPixelBuffer = supportsPixelBuffer;
+ return this;
+ }
+
+ public ConfigBuilder setIsRecordable(boolean isRecordable) {
+ this.isRecordable = isRecordable;
+ return this;
+ }
+
+ public int[] createConfigAttributes() {
+ ArrayList list = new ArrayList<>();
+ list.add(EGL10.EGL_RED_SIZE);
+ list.add(8);
+ list.add(EGL10.EGL_GREEN_SIZE);
+ list.add(8);
+ list.add(EGL10.EGL_BLUE_SIZE);
+ list.add(8);
+ if (hasAlphaChannel) {
+ list.add(EGL10.EGL_ALPHA_SIZE);
+ list.add(8);
+ }
+ if (openGlesVersion == 2 || openGlesVersion == 3) {
+ list.add(EGL10.EGL_RENDERABLE_TYPE);
+ list.add(openGlesVersion == 3 ? EGL_OPENGL_ES3_BIT : EGL_OPENGL_ES2_BIT);
+ }
+ if (supportsPixelBuffer) {
+ list.add(EGL10.EGL_SURFACE_TYPE);
+ list.add(EGL10.EGL_PBUFFER_BIT);
+ }
+ if (isRecordable) {
+ list.add(EGL_RECORDABLE_ANDROID);
+ list.add(1);
+ }
+ list.add(EGL10.EGL_NONE);
+
+ final int[] res = new int[list.size()];
+ for (int i = 0; i < list.size(); ++i) {
+ res[i] = list.get(i);
+ }
+ return res;
+ }
+ }
+
+ public static final int[] CONFIG_PLAIN = configBuilder().createConfigAttributes();
+ public static final int[] CONFIG_RGBA =
+ configBuilder().setHasAlphaChannel(true).createConfigAttributes();
+ public static final int[] CONFIG_PIXEL_BUFFER =
+ configBuilder().setSupportsPixelBuffer(true).createConfigAttributes();
+ public static final int[] CONFIG_PIXEL_RGBA_BUFFER = configBuilder()
+ .setHasAlphaChannel(true)
+ .setSupportsPixelBuffer(true)
+ .createConfigAttributes();
+ public static final int[] CONFIG_RECORDABLE =
+ configBuilder().setIsRecordable(true).createConfigAttributes();
+
+ static int getOpenGlesVersionFromConfig(int[] configAttributes) {
+ for (int i = 0; i < configAttributes.length - 1; ++i) {
+ if (configAttributes[i] == EGL10.EGL_RENDERABLE_TYPE) {
+ switch (configAttributes[i + 1]) {
+ case EGL_OPENGL_ES2_BIT:
+ return 2;
+ case EGL_OPENGL_ES3_BIT:
+ return 3;
+ default:
+ return 1;
+ }
+ }
+ }
+ // Default to V1 if no renderable type is specified.
+ return 1;
+ }
+
+ /**
+ * Creates a new EglBase with a shared EglConnection. EglBase instances sharing the same
+ * EglConnection should be used on the same thread to avoid the underlying EGLContext being made
+ * current on multiple threads. It is up to the client of EglBase to ensure that instances with a
+ * shared EglConnection are current on that thread before each use since other EglBase instances
+ * may have used the same EGLContext since the last interaction.
+ */
+ public static EglBase create(EglConnection eglConnection) {
+ if (eglConnection == null) {
+ return create();
+ } else if (eglConnection instanceof EglBase14Impl.EglConnection) {
+ return new EglBase14Impl((EglBase14Impl.EglConnection) eglConnection);
+ } else if (eglConnection instanceof EglBase10Impl.EglConnection) {
+ return new EglBase10Impl((EglBase10Impl.EglConnection) eglConnection);
+ }
+ throw new IllegalArgumentException("Unrecognized EglConnection");
+ }
+
+ /**
+ * Create a new context with the specified config attributes, sharing data with `sharedContext`.
+ * If `sharedContext` is null, a root EGL 1.4 context is created.
+ */
+ public static EglBase create(@Nullable Context sharedContext, int[] configAttributes) {
+ if (sharedContext == null) {
+ return createEgl14(configAttributes);
+ } else if (sharedContext instanceof EglBase14.Context) {
+ return createEgl14((EglBase14.Context) sharedContext, configAttributes);
+ } else if (sharedContext instanceof EglBase10.Context) {
+ return createEgl10((EglBase10.Context) sharedContext, configAttributes);
+ }
+ throw new IllegalArgumentException("Unrecognized Context");
+ }
+
+ /**
+ * Helper function for creating a plain root context. This function will try to create an EGL 1.4
+ * context if possible, and an EGL 1.0 context otherwise.
+ */
+ public static EglBase create() {
+ return create(null /* sharedContext */, CONFIG_PLAIN);
+ }
+
+ /**
+ * Helper function for creating a plain context, sharing data with `sharedContext`. This function
+ * will try to create an EGL 1.4 context if possible, and an EGL 1.0 context otherwise.
+ */
+ public static EglBase create(Context sharedContext) {
+ return create(sharedContext, CONFIG_PLAIN);
+ }
+
+ /** Explicitly create a root EGL 1.0 context with the specified config attributes. */
+ public static EglBase10 createEgl10(int[] configAttributes) {
+ return new EglBase10Impl(/* sharedContext= */ null, configAttributes);
+ }
+
+ /**
+ * Explicitly create a root EGL 1.0 context with the specified config attributes and shared
+ * context.
+ */
+ public static EglBase10 createEgl10(EglBase10.Context sharedContext, int[] configAttributes) {
+ return new EglBase10Impl(
+ sharedContext == null ? null : sharedContext.getRawContext(), configAttributes);
+ }
+
+ /**
+ * Explicitly create a root EGL 1.0 context with the specified config attributes
+ * and shared context.
+ */
+ public static EglBase10 createEgl10(
+ javax.microedition.khronos.egl.EGLContext sharedContext, int[] configAttributes) {
+ return new EglBase10Impl(sharedContext, configAttributes);
+ }
+
+ /** Explicitly create a root EGL 1.4 context with the specified config attributes. */
+ public static EglBase14 createEgl14(int[] configAttributes) {
+ return new EglBase14Impl(/* sharedContext= */ null, configAttributes);
+ }
+
+ /**
+ * Explicitly create a root EGL 1.4 context with the specified config attributes and shared
+ * context.
+ */
+ public static EglBase14 createEgl14(EglBase14.Context sharedContext, int[] configAttributes) {
+ return new EglBase14Impl(
+ sharedContext == null ? null : sharedContext.getRawContext(), configAttributes);
+ }
+
+ /**
+ * Explicitly create a root EGL 1.4 context with the specified config attributes
+ * and shared context.
+ */
+ public static EglBase14 createEgl14(
+ android.opengl.EGLContext sharedContext, int[] configAttributes) {
+ return new EglBase14Impl(sharedContext, configAttributes);
+ }
+
+ void createSurface(Surface surface);
+
+ // Create EGLSurface from the Android SurfaceTexture.
+ void createSurface(SurfaceTexture surfaceTexture);
+
+ // Create dummy 1x1 pixel buffer surface so the context can be made current.
+ void createDummyPbufferSurface();
+
+ void createPbufferSurface(int width, int height);
+
+ Context getEglBaseContext();
+
+ boolean hasSurface();
+
+ int surfaceWidth();
+
+ int surfaceHeight();
+
+ void releaseSurface();
+
+ void release();
+
+ void makeCurrent();
+
+ // Detach the current EGL context, so that it can be made current on another thread.
+ void detachCurrent();
+
+ void swapBuffers();
+
+ void swapBuffers(long presentationTimeStampNs);
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase10.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase10.java
new file mode 100644
index 00000000..ad2eb1c0
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase10.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+
+/** EGL 1.0 implementation of EglBase. */
+public interface EglBase10 extends EglBase {
+ interface Context extends EglBase.Context {
+ EGLContext getRawContext();
+ }
+
+ interface EglConnection extends EglBase.EglConnection {
+ EGL10 getEgl();
+
+ EGLContext getContext();
+
+ EGLDisplay getDisplay();
+
+ EGLConfig getConfig();
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase10Impl.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase10Impl.java
new file mode 100644
index 00000000..caa10e7e
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase10Impl.java
@@ -0,0 +1,448 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Canvas;
+import android.graphics.Rect;
+import android.graphics.SurfaceTexture;
+import android.opengl.GLException;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+import androidx.annotation.Nullable;
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.egl.EGLSurface;
+
+/**
+ * Holds EGL state and utility methods for handling an egl 1.0 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+class EglBase10Impl implements EglBase10 {
+ private static final String TAG = "EglBase10Impl";
+ // This constant is taken from EGL14.EGL_CONTEXT_CLIENT_VERSION.
+ private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
+
+ private static final EglConnection EGL_NO_CONNECTION = new EglConnection();
+
+ private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;
+ private EglConnection eglConnection;
+
+ // EGL wrapper for an actual EGLContext.
+ private static class Context implements EglBase10.Context {
+ private final EGL10 egl;
+ private final EGLContext eglContext;
+ private final EGLConfig eglContextConfig;
+
+ @Override
+ public EGLContext getRawContext() {
+ return eglContext;
+ }
+
+ @Override
+ public long getNativeEglContext() {
+ EGLContext previousContext = egl.eglGetCurrentContext();
+ EGLDisplay currentDisplay = egl.eglGetCurrentDisplay();
+ EGLSurface previousDrawSurface = egl.eglGetCurrentSurface(EGL10.EGL_DRAW);
+ EGLSurface previousReadSurface = egl.eglGetCurrentSurface(EGL10.EGL_READ);
+ EGLSurface tempEglSurface = null;
+
+ if (currentDisplay == EGL10.EGL_NO_DISPLAY) {
+ currentDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
+ }
+
+ try {
+ if (previousContext != eglContext) {
+ int[] surfaceAttribs = {EGL10.EGL_WIDTH, 1, EGL10.EGL_HEIGHT, 1, EGL10.EGL_NONE};
+ tempEglSurface =
+ egl.eglCreatePbufferSurface(currentDisplay, eglContextConfig, surfaceAttribs);
+ if (!egl.eglMakeCurrent(currentDisplay, tempEglSurface, tempEglSurface, eglContext)) {
+ throw new GLException(egl.eglGetError(),
+ "Failed to make temporary EGL surface active: " + egl.eglGetError());
+ }
+ }
+
+ return nativeGetCurrentNativeEGLContext();
+ } finally {
+ if (tempEglSurface != null) {
+ egl.eglMakeCurrent(
+ currentDisplay, previousDrawSurface, previousReadSurface, previousContext);
+ egl.eglDestroySurface(currentDisplay, tempEglSurface);
+ }
+ }
+ }
+
+ public Context(EGL10 egl, EGLContext eglContext, EGLConfig eglContextConfig) {
+ this.egl = egl;
+ this.eglContext = eglContext;
+ this.eglContextConfig = eglContextConfig;
+ }
+ }
+
+ public static class EglConnection implements EglBase10.EglConnection {
+ private final EGL10 egl;
+ private final EGLContext eglContext;
+ private final EGLDisplay eglDisplay;
+ private final EGLConfig eglConfig;
+ private final RefCountDelegate refCountDelegate;
+ private EGLSurface currentSurface = EGL10.EGL_NO_SURFACE;
+
+ public EglConnection(EGLContext sharedContext, int[] configAttributes) {
+ egl = (EGL10) EGLContext.getEGL();
+ eglDisplay = getEglDisplay(egl);
+ eglConfig = getEglConfig(egl, eglDisplay, configAttributes);
+ final int openGlesVersion = EglBase.getOpenGlesVersionFromConfig(configAttributes);
+ Logging.d(TAG, "Using OpenGL ES version " + openGlesVersion);
+ eglContext = createEglContext(egl, sharedContext, eglDisplay, eglConfig, openGlesVersion);
+
+ // Ref count delegate with release callback.
+ refCountDelegate = new RefCountDelegate(() -> {
+ synchronized (EglBase.lock) {
+ egl.eglMakeCurrent(
+ eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
+ }
+ egl.eglDestroyContext(eglDisplay, eglContext);
+ egl.eglTerminate(eglDisplay);
+ currentSurface = EGL10.EGL_NO_SURFACE;
+ });
+ }
+
+ // Returns a "null" EglConnection. Useful to represent a released instance with default values.
+ private EglConnection() {
+ egl = (EGL10) EGLContext.getEGL();
+ eglContext = EGL10.EGL_NO_CONTEXT;
+ eglDisplay = EGL10.EGL_NO_DISPLAY;
+ eglConfig = null;
+ refCountDelegate = new RefCountDelegate(() -> {});
+ }
+
+ @Override
+ public void retain() {
+ refCountDelegate.retain();
+ }
+
+ @Override
+ public void release() {
+ refCountDelegate.release();
+ }
+
+ @Override
+ public EGL10 getEgl() {
+ return egl;
+ }
+
+ @Override
+ public EGLContext getContext() {
+ return eglContext;
+ }
+
+ @Override
+ public EGLDisplay getDisplay() {
+ return eglDisplay;
+ }
+
+ @Override
+ public EGLConfig getConfig() {
+ return eglConfig;
+ }
+
+ public void makeCurrent(EGLSurface eglSurface) {
+ if (egl.eglGetCurrentContext() == eglContext && currentSurface == eglSurface) {
+ return;
+ }
+
+ synchronized (EglBase.lock) {
+ if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
+ throw new GLException(egl.eglGetError(),
+ "eglMakeCurrent failed: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ }
+ currentSurface = eglSurface;
+ }
+
+ public void detachCurrent() {
+ synchronized (EglBase.lock) {
+ if (!egl.eglMakeCurrent(
+ eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
+ throw new GLException(egl.eglGetError(),
+ "eglDetachCurrent failed: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ }
+ currentSurface = EGL10.EGL_NO_SURFACE;
+ }
+ }
+
+ // Create a new context with the specified config type, sharing data with sharedContext.
+ public EglBase10Impl(EGLContext sharedContext, int[] configAttributes) {
+ this.eglConnection = new EglConnection(sharedContext, configAttributes);
+ }
+
+ public EglBase10Impl(EglConnection eglConnection) {
+ this.eglConnection = eglConnection;
+ this.eglConnection.retain();
+ }
+
+ @Override
+ public void createSurface(Surface surface) {
+ /**
+ * We have to wrap Surface in a SurfaceHolder because for some reason eglCreateWindowSurface
+ * couldn't actually take a Surface object until API 17. Older versions fortunately just call
+ * SurfaceHolder.getSurface(), so we'll do that. No other methods are relevant.
+ */
+ class FakeSurfaceHolder implements SurfaceHolder {
+ private final Surface surface;
+
+ FakeSurfaceHolder(Surface surface) {
+ this.surface = surface;
+ }
+
+ @Override
+ public void addCallback(Callback callback) {}
+
+ @Override
+ public void removeCallback(Callback callback) {}
+
+ @Override
+ public boolean isCreating() {
+ return false;
+ }
+
+ @Deprecated
+ @Override
+ public void setType(int i) {}
+
+ @Override
+ public void setFixedSize(int i, int i2) {}
+
+ @Override
+ public void setSizeFromLayout() {}
+
+ @Override
+ public void setFormat(int i) {}
+
+ @Override
+ public void setKeepScreenOn(boolean b) {}
+
+ @Nullable
+ @Override
+ public Canvas lockCanvas() {
+ return null;
+ }
+
+ @Nullable
+ @Override
+ public Canvas lockCanvas(Rect rect) {
+ return null;
+ }
+
+ @Override
+ public void unlockCanvasAndPost(Canvas canvas) {}
+
+ @Nullable
+ @Override
+ public Rect getSurfaceFrame() {
+ return null;
+ }
+
+ @Override
+ public Surface getSurface() {
+ return surface;
+ }
+ }
+
+ createSurfaceInternal(new FakeSurfaceHolder(surface));
+ }
+
+ // Create EGLSurface from the Android SurfaceTexture.
+ @Override
+ public void createSurface(SurfaceTexture surfaceTexture) {
+ createSurfaceInternal(surfaceTexture);
+ }
+
+ // Create EGLSurface from either a SurfaceHolder or a SurfaceTexture.
+ private void createSurfaceInternal(Object nativeWindow) {
+ if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) {
+ throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture");
+ }
+ checkIsNotReleased();
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+
+ EGL10 egl = eglConnection.getEgl();
+ int[] surfaceAttribs = {EGL10.EGL_NONE};
+ eglSurface = egl.eglCreateWindowSurface(
+ eglConnection.getDisplay(), eglConnection.getConfig(), nativeWindow, surfaceAttribs);
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new GLException(egl.eglGetError(),
+ "Failed to create window surface: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ }
+
+ // Create dummy 1x1 pixel buffer surface so the context can be made current.
+ @Override
+ public void createDummyPbufferSurface() {
+ createPbufferSurface(1, 1);
+ }
+
+ @Override
+ public void createPbufferSurface(int width, int height) {
+ checkIsNotReleased();
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ EGL10 egl = eglConnection.getEgl();
+ int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
+ eglSurface = egl.eglCreatePbufferSurface(
+ eglConnection.getDisplay(), eglConnection.getConfig(), surfaceAttribs);
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new GLException(egl.eglGetError(),
+ "Failed to create pixel buffer surface with size " + width + "x" + height + ": 0x"
+ + Integer.toHexString(egl.eglGetError()));
+ }
+ }
+
+ @Override
+ public org.webrtc.EglBase.Context getEglBaseContext() {
+ return new Context(
+ eglConnection.getEgl(), eglConnection.getContext(), eglConnection.getConfig());
+ }
+
+ @Override
+ public boolean hasSurface() {
+ return eglSurface != EGL10.EGL_NO_SURFACE;
+ }
+
+ @Override
+ public int surfaceWidth() {
+ final int widthArray[] = new int[1];
+ eglConnection.getEgl().eglQuerySurface(
+ eglConnection.getDisplay(), eglSurface, EGL10.EGL_WIDTH, widthArray);
+ return widthArray[0];
+ }
+
+ @Override
+ public int surfaceHeight() {
+ final int heightArray[] = new int[1];
+ eglConnection.getEgl().eglQuerySurface(
+ eglConnection.getDisplay(), eglSurface, EGL10.EGL_HEIGHT, heightArray);
+ return heightArray[0];
+ }
+
+ @Override
+ public void releaseSurface() {
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ eglConnection.getEgl().eglDestroySurface(eglConnection.getDisplay(), eglSurface);
+ eglSurface = EGL10.EGL_NO_SURFACE;
+ }
+ }
+
+ private void checkIsNotReleased() {
+ if (eglConnection == EGL_NO_CONNECTION) {
+ throw new RuntimeException("This object has been released");
+ }
+ }
+
+ @Override
+ public void release() {
+ checkIsNotReleased();
+ releaseSurface();
+ eglConnection.release();
+ eglConnection = EGL_NO_CONNECTION;
+ }
+
+ @Override
+ public void makeCurrent() {
+ checkIsNotReleased();
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't make current");
+ }
+ eglConnection.makeCurrent(eglSurface);
+ }
+
+ // Detach the current EGL context, so that it can be made current on another thread.
+ @Override
+ public void detachCurrent() {
+ eglConnection.detachCurrent();
+ }
+
+ @Override
+ public void swapBuffers() {
+ checkIsNotReleased();
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ synchronized (EglBase.lock) {
+ eglConnection.getEgl().eglSwapBuffers(eglConnection.getDisplay(), eglSurface);
+ }
+ }
+
+ @Override
+ public void swapBuffers(long timeStampNs) {
+ // Setting presentation time is not supported for EGL 1.0.
+ swapBuffers();
+ }
+
+ // Return an EGLDisplay, or die trying.
+ private static EGLDisplay getEglDisplay(EGL10 egl) {
+ EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
+ if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
+ throw new GLException(egl.eglGetError(),
+ "Unable to get EGL10 display: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ int[] version = new int[2];
+ if (!egl.eglInitialize(eglDisplay, version)) {
+ throw new GLException(egl.eglGetError(),
+ "Unable to initialize EGL10: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ return eglDisplay;
+ }
+
+ // Return an EGLConfig, or die trying.
+ private static EGLConfig getEglConfig(EGL10 egl, EGLDisplay eglDisplay, int[] configAttributes) {
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!egl.eglChooseConfig(eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
+ throw new GLException(
+ egl.eglGetError(), "eglChooseConfig failed: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ if (numConfigs[0] <= 0) {
+ throw new RuntimeException("Unable to find any matching EGL config");
+ }
+ final EGLConfig eglConfig = configs[0];
+ if (eglConfig == null) {
+ throw new RuntimeException("eglChooseConfig returned null");
+ }
+ return eglConfig;
+ }
+
+ // Return an EGLContext, or die trying.
+ private static EGLContext createEglContext(EGL10 egl, @Nullable EGLContext sharedContext,
+ EGLDisplay eglDisplay, EGLConfig eglConfig, int openGlesVersion) {
+ if (sharedContext != null && sharedContext == EGL10.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Invalid sharedContext");
+ }
+ int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, openGlesVersion, EGL10.EGL_NONE};
+ EGLContext rootContext = sharedContext == null ? EGL10.EGL_NO_CONTEXT : sharedContext;
+ final EGLContext eglContext;
+ synchronized (EglBase.lock) {
+ eglContext = egl.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes);
+ }
+ if (eglContext == EGL10.EGL_NO_CONTEXT) {
+ throw new GLException(egl.eglGetError(),
+ "Failed to create EGL context: 0x" + Integer.toHexString(egl.eglGetError()));
+ }
+ return eglContext;
+ }
+
+ private static native long nativeGetCurrentNativeEGLContext();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase14.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase14.java
new file mode 100644
index 00000000..74553625
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase14.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+
+/** EGL 1.4 implementation of EglBase. */
+public interface EglBase14 extends EglBase {
+ interface Context extends EglBase.Context {
+ EGLContext getRawContext();
+ }
+
+ interface EglConnection extends EglBase.EglConnection {
+ EGLContext getContext();
+
+ EGLDisplay getDisplay();
+
+ EGLConfig getConfig();
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase14Impl.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase14Impl.java
new file mode 100644
index 00000000..22cee866
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase14Impl.java
@@ -0,0 +1,340 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLExt;
+import android.opengl.EGLSurface;
+import android.opengl.GLException;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+
+/**
+ * Holds EGL state and utility methods for handling an EGL14 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+@SuppressWarnings("ReferenceEquality") // We want to compare to EGL14 constants.
+class EglBase14Impl implements EglBase14 {
+ private static final String TAG = "EglBase14Impl";
+ private static final EglConnection EGL_NO_CONNECTION = new EglConnection();
+
+ private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
+ private EglConnection eglConnection;
+
+ public static class Context implements EglBase14.Context {
+ private final EGLContext egl14Context;
+
+ @Override
+ public EGLContext getRawContext() {
+ return egl14Context;
+ }
+
+ @Override
+ public long getNativeEglContext() {
+ return egl14Context.getNativeHandle();
+ }
+
+ public Context(android.opengl.EGLContext eglContext) {
+ this.egl14Context = eglContext;
+ }
+ }
+
+ public static class EglConnection implements EglBase14.EglConnection {
+ private final EGLContext eglContext;
+ private final EGLDisplay eglDisplay;
+ private final EGLConfig eglConfig;
+ private final RefCountDelegate refCountDelegate;
+ private EGLSurface currentSurface = EGL14.EGL_NO_SURFACE;
+
+ public EglConnection(EGLContext sharedContext, int[] configAttributes) {
+ eglDisplay = getEglDisplay();
+ eglConfig = getEglConfig(eglDisplay, configAttributes);
+ final int openGlesVersion = EglBase.getOpenGlesVersionFromConfig(configAttributes);
+ Logging.d(TAG, "Using OpenGL ES version " + openGlesVersion);
+ eglContext = createEglContext(sharedContext, eglDisplay, eglConfig, openGlesVersion);
+
+ // Ref count delegate with release callback.
+ refCountDelegate = new RefCountDelegate(() -> {
+ synchronized (EglBase.lock) {
+ EGL14.eglMakeCurrent(
+ eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
+ EGL14.eglDestroyContext(eglDisplay, eglContext);
+ }
+ EGL14.eglReleaseThread();
+ EGL14.eglTerminate(eglDisplay);
+ currentSurface = EGL14.EGL_NO_SURFACE;
+ });
+ }
+
+ // Returns a "null" EglConnection. Useful to represent a released instance with default values.
+ private EglConnection() {
+ eglContext = EGL14.EGL_NO_CONTEXT;
+ eglDisplay = EGL14.EGL_NO_DISPLAY;
+ eglConfig = null;
+ refCountDelegate = new RefCountDelegate(() -> {});
+ }
+
+ @Override
+ public void retain() {
+ refCountDelegate.retain();
+ }
+
+ @Override
+ public void release() {
+ refCountDelegate.release();
+ }
+
+ @Override
+ public EGLContext getContext() {
+ return eglContext;
+ }
+
+ @Override
+ public EGLDisplay getDisplay() {
+ return eglDisplay;
+ }
+
+ @Override
+ public EGLConfig getConfig() {
+ return eglConfig;
+ }
+
+ public void makeCurrent(EGLSurface eglSurface) {
+ if (EGL14.eglGetCurrentContext() == eglContext && currentSurface == eglSurface) {
+ return;
+ }
+
+ synchronized (EglBase.lock) {
+ if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
+ throw new GLException(EGL14.eglGetError(),
+ "eglMakeCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ }
+ currentSurface = eglSurface;
+ }
+
+ public void detachCurrent() {
+ synchronized (EglBase.lock) {
+ if (!EGL14.eglMakeCurrent(
+ eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
+ throw new GLException(EGL14.eglGetError(),
+ "eglDetachCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ }
+ currentSurface = EGL14.EGL_NO_SURFACE;
+ }
+ }
+ // Create a new context with the specified config type, sharing data with sharedContext.
+ // `sharedContext` may be null.
+ public EglBase14Impl(EGLContext sharedContext, int[] configAttributes) {
+ this.eglConnection = new EglConnection(sharedContext, configAttributes);
+ }
+
+ // Create a new EglBase using an existing, possibly externally managed, EglConnection.
+ public EglBase14Impl(EglConnection eglConnection) {
+ this.eglConnection = eglConnection;
+ this.eglConnection.retain();
+ }
+
+ // Create EGLSurface from the Android Surface.
+ @Override
+ public void createSurface(Surface surface) {
+ createSurfaceInternal(surface);
+ }
+
+ // Create EGLSurface from the Android SurfaceTexture.
+ @Override
+ public void createSurface(SurfaceTexture surfaceTexture) {
+ createSurfaceInternal(surfaceTexture);
+ }
+
+ // Create EGLSurface from either Surface or SurfaceTexture.
+ private void createSurfaceInternal(Object surface) {
+ if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
+ throw new IllegalStateException("Input must be either a Surface or SurfaceTexture");
+ }
+ checkIsNotReleased();
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL14.EGL_NONE};
+ eglSurface = EGL14.eglCreateWindowSurface(
+ eglConnection.getDisplay(), eglConnection.getConfig(), surface, surfaceAttribs, 0);
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new GLException(EGL14.eglGetError(),
+ "Failed to create window surface: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ }
+
+ @Override
+ public void createDummyPbufferSurface() {
+ createPbufferSurface(1, 1);
+ }
+
+ @Override
+ public void createPbufferSurface(int width, int height) {
+ checkIsNotReleased();
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
+ eglSurface = EGL14.eglCreatePbufferSurface(
+ eglConnection.getDisplay(), eglConnection.getConfig(), surfaceAttribs, 0);
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new GLException(EGL14.eglGetError(),
+ "Failed to create pixel buffer surface with size " + width + "x" + height + ": 0x"
+ + Integer.toHexString(EGL14.eglGetError()));
+ }
+ }
+
+ @Override
+ public Context getEglBaseContext() {
+ return new Context(eglConnection.getContext());
+ }
+
+ @Override
+ public boolean hasSurface() {
+ return eglSurface != EGL14.EGL_NO_SURFACE;
+ }
+
+ @Override
+ public int surfaceWidth() {
+ final int[] widthArray = new int[1];
+ EGL14.eglQuerySurface(eglConnection.getDisplay(), eglSurface, EGL14.EGL_WIDTH, widthArray, 0);
+ return widthArray[0];
+ }
+
+ @Override
+ public int surfaceHeight() {
+ final int[] heightArray = new int[1];
+ EGL14.eglQuerySurface(eglConnection.getDisplay(), eglSurface, EGL14.EGL_HEIGHT, heightArray, 0);
+ return heightArray[0];
+ }
+
+ @Override
+ public void releaseSurface() {
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ EGL14.eglDestroySurface(eglConnection.getDisplay(), eglSurface);
+ eglSurface = EGL14.EGL_NO_SURFACE;
+ }
+ }
+
+ private void checkIsNotReleased() {
+ if (eglConnection == EGL_NO_CONNECTION) {
+ throw new RuntimeException("This object has been released");
+ }
+ }
+
+ @Override
+ public void release() {
+ checkIsNotReleased();
+ releaseSurface();
+ eglConnection.release();
+ eglConnection = EGL_NO_CONNECTION;
+ }
+
+ @Override
+ public void makeCurrent() {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't make current");
+ }
+ eglConnection.makeCurrent(eglSurface);
+ }
+
+ // Detach the current EGL context, so that it can be made current on another thread.
+ @Override
+ public void detachCurrent() {
+ eglConnection.detachCurrent();
+ }
+
+ @Override
+ public void swapBuffers() {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ synchronized (EglBase.lock) {
+ EGL14.eglSwapBuffers(eglConnection.getDisplay(), eglSurface);
+ }
+ }
+
+ @Override
+ public void swapBuffers(long timeStampNs) {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ synchronized (EglBase.lock) {
+ // See
+ // https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
+ EGLExt.eglPresentationTimeANDROID(eglConnection.getDisplay(), eglSurface, timeStampNs);
+ EGL14.eglSwapBuffers(eglConnection.getDisplay(), eglSurface);
+ }
+ }
+
+ // Return an EGLDisplay, or die trying.
+ private static EGLDisplay getEglDisplay() {
+ EGLDisplay eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+ if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
+ throw new GLException(EGL14.eglGetError(),
+ "Unable to get EGL14 display: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ int[] version = new int[2];
+ if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
+ throw new GLException(EGL14.eglGetError(),
+ "Unable to initialize EGL14: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ return eglDisplay;
+ }
+
+ // Return an EGLConfig, or die trying.
+ private static EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!EGL14.eglChooseConfig(
+ eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
+ throw new GLException(EGL14.eglGetError(),
+ "eglChooseConfig failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ if (numConfigs[0] <= 0) {
+ throw new RuntimeException("Unable to find any matching EGL config");
+ }
+ final EGLConfig eglConfig = configs[0];
+ if (eglConfig == null) {
+ throw new RuntimeException("eglChooseConfig returned null");
+ }
+ return eglConfig;
+ }
+
+ // Return an EGLContext, or die trying.
+ private static EGLContext createEglContext(@Nullable EGLContext sharedContext,
+ EGLDisplay eglDisplay, EGLConfig eglConfig, int openGlesVersion) {
+ if (sharedContext != null && sharedContext == EGL14.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Invalid sharedContext");
+ }
+ int[] contextAttributes = {EGL14.EGL_CONTEXT_CLIENT_VERSION, openGlesVersion, EGL14.EGL_NONE};
+ EGLContext rootContext = sharedContext == null ? EGL14.EGL_NO_CONTEXT : sharedContext;
+ final EGLContext eglContext;
+ synchronized (EglBase.lock) {
+ eglContext = EGL14.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes, 0);
+ }
+ if (eglContext == EGL14.EGL_NO_CONTEXT) {
+ throw new GLException(EGL14.eglGetError(),
+ "Failed to create EGL context: 0x" + Integer.toHexString(EGL14.eglGetError()));
+ }
+ return eglContext;
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/EglRenderer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/EglRenderer.java
new file mode 100644
index 00000000..0a0479b3
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/EglRenderer.java
@@ -0,0 +1,776 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Bitmap;
+import android.graphics.Matrix;
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES20;
+import android.view.Surface;
+import androidx.annotation.GuardedBy;
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+import java.text.DecimalFormat;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Implements VideoSink by displaying the video stream on an EGL Surface. This class is intended to
+ * be used as a helper class for rendering on SurfaceViews and TextureViews.
+ */
+public class EglRenderer implements VideoSink {
+ private static final String TAG = "EglRenderer";
+ private static final long LOG_INTERVAL_SEC = 4;
+
+ public interface FrameListener { void onFrame(Bitmap frame); }
+
+ /** Callback for clients to be notified about errors encountered during rendering. */
+ public static interface ErrorCallback {
+ /** Called if GLES20.GL_OUT_OF_MEMORY is encountered during rendering. */
+ void onGlOutOfMemory();
+ }
+
  /** Immutable bundle of a frame listener and the parameters it was registered with. */
  private static class FrameListenerAndParams {
    // Callback invoked (on the render thread) with the captured Bitmap.
    public final FrameListener listener;
    // Scale factor applied to the frame size for the captured Bitmap; 0 means no Bitmap.
    public final float scale;
    // Drawer used to render this listener's Bitmap; may differ from the renderer's own drawer.
    public final RendererCommon.GlDrawer drawer;
    // If true, the listener is skipped for frames dropped by fps reduction.
    public final boolean applyFpsReduction;

    public FrameListenerAndParams(FrameListener listener, float scale,
        RendererCommon.GlDrawer drawer, boolean applyFpsReduction) {
      this.listener = listener;
      this.scale = scale;
      this.drawer = drawer;
      this.applyFpsReduction = applyFpsReduction;
    }
  }
+
  /**
   * Runnable that lazily creates the EGL surface on the render thread once both a surface
   * object and an initialized `eglBase` are available.
   */
  private class EglSurfaceCreation implements Runnable {
    // Either a Surface or a SurfaceTexture; null means "no surface requested".
    private Object surface;

    // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
    @SuppressWarnings("NoSynchronizedMethodCheck")
    public synchronized void setSurface(Object surface) {
      this.surface = surface;
    }

    @Override
    // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
    @SuppressWarnings("NoSynchronizedMethodCheck")
    public synchronized void run() {
      // Only create a surface if one was requested and none exists yet.
      if (surface != null && eglBase != null && !eglBase.hasSurface()) {
        if (surface instanceof Surface) {
          eglBase.createSurface((Surface) surface);
        } else if (surface instanceof SurfaceTexture) {
          eglBase.createSurface((SurfaceTexture) surface);
        } else {
          throw new IllegalStateException("Invalid surface: " + surface);
        }
        eglBase.makeCurrent();
        // Necessary for YUV frames with odd width.
        GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
      }
    }
  }
+
+ protected final String name;
+
+ // `eglThread` is used for rendering, and is synchronized on `threadLock`.
+ private final Object threadLock = new Object();
+ @GuardedBy("threadLock") @Nullable private EglThread eglThread;
+
+ private final Runnable eglExceptionCallback = new Runnable() {
+ @Override
+ public void run() {
+ synchronized (threadLock) {
+ eglThread = null;
+ }
+ }
+ };
+
+ private final ArrayList frameListeners = new ArrayList<>();
+
+ private volatile ErrorCallback errorCallback;
+
+ // Variables for fps reduction.
+ private final Object fpsReductionLock = new Object();
+ // Time for when next frame should be rendered.
+ private long nextFrameTimeNs;
+ // Minimum duration between frames when fps reduction is active, or -1 if video is completely
+ // paused.
+ private long minRenderPeriodNs;
+
+ // EGL and GL resources for drawing YUV/OES textures. After initialization, these are only
+ // accessed from the render thread.
+ @Nullable private EglBase eglBase;
+ private final VideoFrameDrawer frameDrawer;
+ @Nullable private RendererCommon.GlDrawer drawer;
+ private boolean usePresentationTimeStamp;
+ private final Matrix drawMatrix = new Matrix();
+
+ // Pending frame to render. Serves as a queue with size 1. Synchronized on `frameLock`.
+ private final Object frameLock = new Object();
+ @Nullable private VideoFrame pendingFrame;
+
+ // These variables are synchronized on `layoutLock`.
+ private final Object layoutLock = new Object();
+ private float layoutAspectRatio;
+ // If true, mirrors the video stream horizontally.
+ private boolean mirrorHorizontally;
+ // If true, mirrors the video stream vertically.
+ private boolean mirrorVertically;
+
+ // These variables are synchronized on `statisticsLock`.
+ private final Object statisticsLock = new Object();
+ // Total number of video frames received in renderFrame() call.
+ private int framesReceived;
+ // Number of video frames dropped by renderFrame() because previous frame has not been rendered
+ // yet.
+ private int framesDropped;
+ // Number of rendered video frames.
+ private int framesRendered;
+ // Start time for counting these statistics, or 0 if we haven't started measuring yet.
+ private long statisticsStartTimeNs;
+ // Time in ns spent in renderFrameOnRenderThread() function.
+ private long renderTimeNs;
+ // Time in ns spent by the render thread in the swapBuffers() function.
+ private long renderSwapBufferTimeNs;
+
+ // Used for bitmap capturing.
+ private final GlTextureFrameBuffer bitmapTextureFramebuffer =
+ new GlTextureFrameBuffer(GLES20.GL_RGBA);
+
+ private final Runnable logStatisticsRunnable = new Runnable() {
+ @Override
+ public void run() {
+ logStatistics();
+ synchronized (threadLock) {
+ if (eglThread != null) {
+ eglThread.getHandler().removeCallbacks(logStatisticsRunnable);
+ eglThread.getHandler().postDelayed(
+ logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC));
+ }
+ }
+ }
+ };
+
+ private final EglSurfaceCreation eglSurfaceCreationRunnable = new EglSurfaceCreation();
+
+ /**
+ * Standard constructor. The name will be included when logging. In order to render something,
+ * you must first call init() and createEglSurface.
+ */
+ public EglRenderer(String name) {
+ this(name, new VideoFrameDrawer());
+ }
+
+ public EglRenderer(String name, VideoFrameDrawer videoFrameDrawer) {
+ this.name = name;
+ this.frameDrawer = videoFrameDrawer;
+ }
+
  /**
   * Initialize this renderer on an externally managed EglThread. May only be called once per
   * init()/release() cycle; throws IllegalStateException otherwise.
   */
  public void init(
      EglThread eglThread, RendererCommon.GlDrawer drawer, boolean usePresentationTimeStamp) {
    synchronized (threadLock) {
      if (this.eglThread != null) {
        throw new IllegalStateException(name + "Already initialized");
      }

      logD("Initializing EglRenderer");
      this.eglThread = eglThread;
      this.drawer = drawer;
      this.usePresentationTimeStamp = usePresentationTimeStamp;

      // Clears `eglThread` again if the render thread dies with an uncaught exception.
      eglThread.addExceptionCallback(eglExceptionCallback);

      // Share the EGL connection so multiple renderers can use the same context/thread.
      eglBase = eglThread.createEglBaseWithSharedConnection();
      eglThread.getHandler().post(eglSurfaceCreationRunnable);

      final long currentTimeNs = System.nanoTime();
      resetStatistics(currentTimeNs);

      // Periodically log render statistics (see logStatisticsRunnable).
      eglThread.getHandler().postDelayed(
          logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC));
    }
  }
+
+ /**
+ * Initialize this class, sharing resources with `sharedContext`. The custom `drawer` will be used
+ * for drawing frames on the EGLSurface. This class is responsible for calling release() on
+ * `drawer`. It is allowed to call init() to reinitialize the renderer after a previous
+ * init()/release() cycle. If usePresentationTimeStamp is true, eglPresentationTimeANDROID will be
+ * set with the frame timestamps, which specifies desired presentation time and might be useful
+ * for e.g. syncing audio and video.
+ */
+ public void init(@Nullable final EglBase.Context sharedContext, final int[] configAttributes,
+ RendererCommon.GlDrawer drawer, boolean usePresentationTimeStamp) {
+ EglThread thread =
+ EglThread.create(/* releaseMonitor= */ null, sharedContext, configAttributes);
+ init(thread, drawer, usePresentationTimeStamp);
+ }
+
+ /**
+ * Same as above with usePresentationTimeStamp set to false.
+ *
+ * @see #init(EglBase.Context, int[], RendererCommon.GlDrawer, boolean)
+ */
+ public void init(@Nullable final EglBase.Context sharedContext, final int[] configAttributes,
+ RendererCommon.GlDrawer drawer) {
+ init(sharedContext, configAttributes, drawer, /* usePresentationTimeStamp= */ false);
+ }
+
+ public void createEglSurface(Surface surface) {
+ createEglSurfaceInternal(surface);
+ }
+
+ public void createEglSurface(SurfaceTexture surfaceTexture) {
+ createEglSurfaceInternal(surfaceTexture);
+ }
+
+ private void createEglSurfaceInternal(Object surface) {
+ eglSurfaceCreationRunnable.setSurface(surface);
+ postToRenderThread(eglSurfaceCreationRunnable);
+ }
+
  /**
   * Block until any pending frame is returned and all GL resources released, even if an interrupt
   * occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function
   * should be called before the Activity is destroyed and the EGLContext is still valid. If you
   * don't call this function, the GL resources might leak.
   */
  public void release() {
    logD("Releasing.");
    final CountDownLatch eglCleanupBarrier = new CountDownLatch(1);
    synchronized (threadLock) {
      if (eglThread == null) {
        logD("Already released");
        return;
      }
      eglThread.getHandler().removeCallbacks(logStatisticsRunnable);
      eglThread.removeExceptionCallback(eglExceptionCallback);

      // Release EGL and GL resources on render thread.
      eglThread.getHandler().postAtFrontOfQueue(() -> {
        // Detach current shader program.
        synchronized (EglBase.lock) {
          GLES20.glUseProgram(/* program= */ 0);
        }
        if (drawer != null) {
          drawer.release();
          drawer = null;
        }
        frameDrawer.release();
        bitmapTextureFramebuffer.release();

        if (eglBase != null) {
          logD("eglBase detach and release.");
          eglBase.detachCurrent();
          eglBase.release();
          eglBase = null;
        }

        frameListeners.clear();
        // Signal that GL cleanup is complete so release() may return.
        eglCleanupBarrier.countDown();
      });

      // Don't accept any more frames or messages to the render thread.
      eglThread.release();
      eglThread = null;
    }
    // Make sure the EGL/GL cleanup posted above is executed.
    ThreadUtils.awaitUninterruptibly(eglCleanupBarrier);
    // Drop any frame that was queued but never rendered.
    synchronized (frameLock) {
      if (pendingFrame != null) {
        pendingFrame.release();
        pendingFrame = null;
      }
    }
    logD("Releasing done.");
  }
+
+ /**
+ * Reset the statistics logged in logStatistics().
+ */
+ private void resetStatistics(long currentTimeNs) {
+ synchronized (statisticsLock) {
+ statisticsStartTimeNs = currentTimeNs;
+ framesReceived = 0;
+ framesDropped = 0;
+ framesRendered = 0;
+ renderTimeNs = 0;
+ renderSwapBufferTimeNs = 0;
+ }
+ }
+
+ public void printStackTrace() {
+ synchronized (threadLock) {
+ final Thread renderThread =
+ (eglThread == null) ? null : eglThread.getHandler().getLooper().getThread();
+ if (renderThread != null) {
+ final StackTraceElement[] renderStackTrace = renderThread.getStackTrace();
+ if (renderStackTrace.length > 0) {
+ logW("EglRenderer stack trace:");
+ for (StackTraceElement traceElem : renderStackTrace) {
+ logW(traceElem.toString());
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Set if the video stream should be mirrored horizontally or not.
+ */
+ public void setMirror(final boolean mirror) {
+ synchronized (layoutLock) {
+ this.mirrorHorizontally = mirror;
+ }
+ }
+
+ /**
+ * Set if the video stream should be mirrored vertically or not.
+ */
+ public void setMirrorVertically(final boolean mirrorVertically) {
+ synchronized (layoutLock) {
+ this.mirrorVertically = mirrorVertically;
+ }
+ }
+
+ /**
+ * Set layout aspect ratio. This is used to crop frames when rendering to avoid stretched video.
+ * Set this to 0 to disable cropping.
+ */
+ public void setLayoutAspectRatio(float layoutAspectRatio) {
+ synchronized (layoutLock) {
+ this.layoutAspectRatio = layoutAspectRatio;
+ }
+ }
+
+ /**
+ * Limit render framerate.
+ *
+ * @param fps Limit render framerate to this value, or use Float.POSITIVE_INFINITY to disable fps
+ * reduction.
+ */
+ public void setFpsReduction(float fps) {
+ synchronized (fpsReductionLock) {
+ final long previousRenderPeriodNs = minRenderPeriodNs;
+ if (fps <= 0) {
+ minRenderPeriodNs = Long.MAX_VALUE;
+ } else {
+ minRenderPeriodNs = (long) (TimeUnit.SECONDS.toNanos(1) / fps);
+ }
+ if (minRenderPeriodNs != previousRenderPeriodNs) {
+ // Fps reduction changed - reset frame time.
+ nextFrameTimeNs = System.nanoTime();
+ }
+ }
+ }
+
+ public void disableFpsReduction() {
+ setFpsReduction(Float.POSITIVE_INFINITY /* fps */);
+ }
+
+ public void pauseVideo() {
+ setFpsReduction(0 /* fps */);
+ }
+
+ /**
+ * Register a callback to be invoked when a new video frame has been received. This version uses
+ * the drawer of the EglRenderer that was passed in init.
+ *
+ * @param listener The callback to be invoked. The callback will be invoked on the render thread.
+ * It should be lightweight and must not call removeFrameListener.
+ * @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
+ * required.
+ */
+ public void addFrameListener(final FrameListener listener, final float scale) {
+ addFrameListener(listener, scale, null, false /* applyFpsReduction */);
+ }
+
+ /**
+ * Register a callback to be invoked when a new video frame has been received.
+ *
+ * @param listener The callback to be invoked. The callback will be invoked on the render thread.
+ * It should be lightweight and must not call removeFrameListener.
+ * @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
+ * required.
+ * @param drawer Custom drawer to use for this frame listener or null to use the default one.
+ */
+ public void addFrameListener(
+ final FrameListener listener, final float scale, final RendererCommon.GlDrawer drawerParam) {
+ addFrameListener(listener, scale, drawerParam, false /* applyFpsReduction */);
+ }
+
  /**
   * Register a callback to be invoked when a new video frame has been received.
   *
   * @param listener The callback to be invoked. The callback will be invoked on the render thread.
   *                 It should be lightweight and must not call removeFrameListener.
   * @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
   *              required.
   * @param drawerParam Custom drawer to use for this frame listener or null to use the default one.
   * @param applyFpsReduction This callback will not be called for frames that have been dropped by
   *                          FPS reduction.
   */
  public void addFrameListener(final FrameListener listener, final float scale,
      @Nullable final RendererCommon.GlDrawer drawerParam, final boolean applyFpsReduction) {
    postToRenderThread(() -> {
      // Fall back to the renderer's own drawer when the caller did not supply one.
      final RendererCommon.GlDrawer listenerDrawer = drawerParam == null ? drawer : drawerParam;
      frameListeners.add(
          new FrameListenerAndParams(listener, scale, listenerDrawer, applyFpsReduction));
    });
  }
+
+ /**
+ * Remove any pending callback that was added with addFrameListener. If the callback is not in
+ * the queue, nothing happens. It is ensured that callback won't be called after this method
+ * returns.
+ *
+ * @param runnable The callback to remove.
+ */
+ public void removeFrameListener(final FrameListener listener) {
+ final CountDownLatch latch = new CountDownLatch(1);
+ synchronized (threadLock) {
+ if (eglThread == null) {
+ return;
+ }
+ if (Thread.currentThread() == eglThread.getHandler().getLooper().getThread()) {
+ throw new RuntimeException("removeFrameListener must not be called on the render thread.");
+ }
+ postToRenderThread(() -> {
+ latch.countDown();
+ final Iterator iter = frameListeners.iterator();
+ while (iter.hasNext()) {
+ if (iter.next().listener == listener) {
+ iter.remove();
+ }
+ }
+ });
+ }
+ ThreadUtils.awaitUninterruptibly(latch);
+ }
+
+ /** Can be set in order to be notified about errors encountered during rendering. */
+ public void setErrorCallback(ErrorCallback errorCallback) {
+ this.errorCallback = errorCallback;
+ }
+
  // VideoSink interface.
  @Override
  public void onFrame(VideoFrame frame) {
    synchronized (statisticsLock) {
      ++framesReceived;
    }
    final boolean dropOldFrame;
    synchronized (threadLock) {
      if (eglThread == null) {
        logD("Dropping frame - Not initialized or already released.");
        return;
      }
      synchronized (frameLock) {
        // `pendingFrame` acts as a queue of size one: a newer frame replaces an undrawn one.
        dropOldFrame = (pendingFrame != null);
        if (dropOldFrame) {
          pendingFrame.release();
        }
        pendingFrame = frame;
        // Retain since the caller may release `frame` as soon as this method returns.
        pendingFrame.retain();
        eglThread.getHandler().post(this::renderFrameOnRenderThread);
      }
    }
    // Count the drop outside `frameLock` to keep the lock hold time minimal.
    if (dropOldFrame) {
      synchronized (statisticsLock) {
        ++framesDropped;
      }
    }
  }
+
+ /**
+ * Release EGL surface. This function will block until the EGL surface is released.
+ */
+ public void releaseEglSurface(final Runnable completionCallback) {
+ // Ensure that the render thread is no longer touching the Surface before returning from this
+ // function.
+ eglSurfaceCreationRunnable.setSurface(null /* surface */);
+ synchronized (threadLock) {
+ if (eglThread != null) {
+ eglThread.getHandler().removeCallbacks(eglSurfaceCreationRunnable);
+ eglThread.getHandler().postAtFrontOfQueue(() -> {
+ if (eglBase != null) {
+ eglBase.detachCurrent();
+ eglBase.releaseSurface();
+ }
+ completionCallback.run();
+ });
+ return;
+ }
+ }
+ completionCallback.run();
+ }
+
+ /**
+ * Private helper function to post tasks safely.
+ */
+ private void postToRenderThread(Runnable runnable) {
+ synchronized (threadLock) {
+ if (eglThread != null) {
+ eglThread.getHandler().post(runnable);
+ }
+ }
+ }
+
+ private void clearSurfaceOnRenderThread(float r, float g, float b, float a) {
+ if (eglBase != null && eglBase.hasSurface()) {
+ logD("clearSurface");
+ eglBase.makeCurrent();
+ GLES20.glClearColor(r, g, b, a);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ eglBase.swapBuffers();
+ }
+ }
+
+ /**
+ * Post a task to clear the surface to a transparent uniform color.
+ */
+ public void clearImage() {
+ clearImage(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
+ }
+
+ /**
+ * Post a task to clear the surface to a specific color.
+ */
+ public void clearImage(final float r, final float g, final float b, final float a) {
+ synchronized (threadLock) {
+ if (eglThread == null) {
+ return;
+ }
+ eglThread.getHandler().postAtFrontOfQueue(() -> clearSurfaceOnRenderThread(r, g, b, a));
+ }
+ }
+
+ private void swapBuffersOnRenderThread(final VideoFrame frame, long swapBuffersStartTimeNs) {
+ synchronized (threadLock) {
+ if (eglThread != null) {
+ eglThread.scheduleRenderUpdate(
+ runsInline -> {
+ if (!runsInline) {
+ if (eglBase == null || !eglBase.hasSurface()) {
+ return;
+ }
+ eglBase.makeCurrent();
+ }
+
+ if (usePresentationTimeStamp) {
+ eglBase.swapBuffers(frame.getTimestampNs());
+ } else {
+ eglBase.swapBuffers();
+ }
+
+ synchronized (statisticsLock) {
+ renderSwapBufferTimeNs += (System.nanoTime() - swapBuffersStartTimeNs);
+ }
+ });
+ }
+ }
+ }
+
  /**
   * Renders and releases `pendingFrame`. Runs on the render thread.
   */
  private void renderFrameOnRenderThread() {
    // Fetch and render `pendingFrame`.
    final VideoFrame frame;
    synchronized (frameLock) {
      if (pendingFrame == null) {
        return;
      }
      frame = pendingFrame;
      pendingFrame = null;
    }
    if (eglBase == null || !eglBase.hasSurface()) {
      logD("Dropping frame - No surface");
      frame.release();
      return;
    }
    eglBase.makeCurrent();

    // Check if fps reduction is active.
    final boolean shouldRenderFrame;
    synchronized (fpsReductionLock) {
      if (minRenderPeriodNs == Long.MAX_VALUE) {
        // Rendering is paused.
        shouldRenderFrame = false;
      } else if (minRenderPeriodNs <= 0) {
        // FPS reduction is disabled.
        shouldRenderFrame = true;
      } else {
        final long currentTimeNs = System.nanoTime();
        if (currentTimeNs < nextFrameTimeNs) {
          logD("Skipping frame rendering - fps reduction is active.");
          shouldRenderFrame = false;
        } else {
          nextFrameTimeNs += minRenderPeriodNs;
          // The time for the next frame should always be in the future.
          nextFrameTimeNs = Math.max(nextFrameTimeNs, currentTimeNs);
          shouldRenderFrame = true;
        }
      }
    }

    final long startTimeNs = System.nanoTime();

    // Scale down one axis so the frame is cropped (not stretched) to the layout aspect ratio;
    // layoutAspectRatio == 0 disables cropping.
    final float frameAspectRatio = frame.getRotatedWidth() / (float) frame.getRotatedHeight();
    final float drawnAspectRatio;
    synchronized (layoutLock) {
      drawnAspectRatio = layoutAspectRatio != 0f ? layoutAspectRatio : frameAspectRatio;
    }

    final float scaleX;
    final float scaleY;

    if (frameAspectRatio > drawnAspectRatio) {
      scaleX = drawnAspectRatio / frameAspectRatio;
      scaleY = 1f;
    } else {
      scaleX = 1f;
      scaleY = frameAspectRatio / drawnAspectRatio;
    }

    // Build the texture transform: mirror/scale around the frame center (0.5, 0.5).
    drawMatrix.reset();
    drawMatrix.preTranslate(0.5f, 0.5f);
    drawMatrix.preScale(mirrorHorizontally ? -1f : 1f, mirrorVertically ? -1f : 1f);
    drawMatrix.preScale(scaleX, scaleY);
    drawMatrix.preTranslate(-0.5f, -0.5f);

    try {
      if (shouldRenderFrame) {
        GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        frameDrawer.drawFrame(frame, drawer, drawMatrix, 0 /* viewportX */, 0 /* viewportY */,
            eglBase.surfaceWidth(), eglBase.surfaceHeight());

        final long swapBuffersStartTimeNs = System.nanoTime();
        swapBuffersOnRenderThread(frame, swapBuffersStartTimeNs);

        synchronized (statisticsLock) {
          ++framesRendered;
          renderTimeNs += (swapBuffersStartTimeNs - startTimeNs);
        }
      }

      // Frame listeners are notified even for frames skipped by fps reduction; listeners that
      // registered with applyFpsReduction are filtered inside notifyCallbacks.
      notifyCallbacks(frame, shouldRenderFrame);
    } catch (GlUtil.GlOutOfMemoryException e) {
      logE("Error while drawing frame", e);
      final ErrorCallback errorCallback = this.errorCallback;
      if (errorCallback != null) {
        errorCallback.onGlOutOfMemory();
      }
      // Attempt to free up some resources.
      // NOTE(review): unlike the check earlier in this file, `drawer` is dereferenced here
      // without a null check — confirm it cannot be null on this path.
      drawer.release();
      frameDrawer.release();
      bitmapTextureFramebuffer.release();
      // Continue here on purpose and retry again for next frame. In worst case, this is a
      // continuous problem and no more frames will be drawn.
    } finally {
      frame.release();
    }
  }
+
+ private void notifyCallbacks(VideoFrame frame, boolean wasRendered) {
+ if (frameListeners.isEmpty())
+ return;
+
+ drawMatrix.reset();
+ drawMatrix.preTranslate(0.5f, 0.5f);
+ drawMatrix.preScale(mirrorHorizontally ? -1f : 1f, mirrorVertically ? -1f : 1f);
+ drawMatrix.preScale(1f, -1f); // We want the output to be upside down for Bitmap.
+ drawMatrix.preTranslate(-0.5f, -0.5f);
+
+ Iterator it = frameListeners.iterator();
+ while (it.hasNext()) {
+ FrameListenerAndParams listenerAndParams = it.next();
+ if (!wasRendered && listenerAndParams.applyFpsReduction) {
+ continue;
+ }
+ it.remove();
+
+ final int scaledWidth = (int) (listenerAndParams.scale * frame.getRotatedWidth());
+ final int scaledHeight = (int) (listenerAndParams.scale * frame.getRotatedHeight());
+
+ if (scaledWidth == 0 || scaledHeight == 0) {
+ listenerAndParams.listener.onFrame(null);
+ continue;
+ }
+
+ bitmapTextureFramebuffer.setSize(scaledWidth, scaledHeight);
+
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, bitmapTextureFramebuffer.getFrameBufferId());
+ GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
+ GLES20.GL_TEXTURE_2D, bitmapTextureFramebuffer.getTextureId(), 0);
+
+ GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ frameDrawer.drawFrame(frame, listenerAndParams.drawer, drawMatrix, 0 /* viewportX */,
+ 0 /* viewportY */, scaledWidth, scaledHeight);
+
+ final ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(scaledWidth * scaledHeight * 4);
+ GLES20.glViewport(0, 0, scaledWidth, scaledHeight);
+ GLES20.glReadPixels(
+ 0, 0, scaledWidth, scaledHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, bitmapBuffer);
+
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+ GlUtil.checkNoGLES2Error("EglRenderer.notifyCallbacks");
+
+ final Bitmap bitmap = Bitmap.createBitmap(scaledWidth, scaledHeight, Bitmap.Config.ARGB_8888);
+ bitmap.copyPixelsFromBuffer(bitmapBuffer);
+ listenerAndParams.listener.onFrame(bitmap);
+ }
+ }
+
+ private String averageTimeAsString(long sumTimeNs, int count) {
+ return (count <= 0) ? "NA" : TimeUnit.NANOSECONDS.toMicros(sumTimeNs / count) + " us";
+ }
+
  /** Logs the accumulated render statistics for the elapsed window, then resets them. */
  private void logStatistics() {
    final DecimalFormat fpsFormat = new DecimalFormat("#.0");
    final long currentTimeNs = System.nanoTime();
    synchronized (statisticsLock) {
      final long elapsedTimeNs = currentTimeNs - statisticsStartTimeNs;
      // Skip logging when no time has elapsed, or when video is paused and nothing arrived.
      // NOTE(review): minRenderPeriodNs is guarded by fpsReductionLock elsewhere but read here
      // under statisticsLock — likely benign for logging, confirm.
      if (elapsedTimeNs <= 0 || (minRenderPeriodNs == Long.MAX_VALUE && framesReceived == 0)) {
        return;
      }
      final float renderFps = framesRendered * TimeUnit.SECONDS.toNanos(1) / (float) elapsedTimeNs;
      logD("Duration: " + TimeUnit.NANOSECONDS.toMillis(elapsedTimeNs) + " ms."
          + " Frames received: " + framesReceived + "."
          + " Dropped: " + framesDropped + "."
          + " Rendered: " + framesRendered + "."
          + " Render fps: " + fpsFormat.format(renderFps) + "."
          + " Average render time: " + averageTimeAsString(renderTimeNs, framesRendered) + "."
          + " Average swapBuffer time: "
          + averageTimeAsString(renderSwapBufferTimeNs, framesRendered) + ".");
      resetStatistics(currentTimeNs);
    }
  }
+
+ private void logE(String string, Throwable e) {
+ Logging.e(TAG, name + string, e);
+ }
+
+ private void logD(String string) {
+ Logging.d(TAG, name + string);
+ }
+
+ private void logW(String string) {
+ Logging.w(TAG, name + string);
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/EglThread.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/EglThread.java
new file mode 100644
index 00000000..73323d59
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/EglThread.java
@@ -0,0 +1,216 @@
+/*
+ * Copyright 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.Looper;
+import android.os.Message;
+import androidx.annotation.GuardedBy;
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.List;
+import org.webrtc.EglBase.EglConnection;
+
+/** EGL graphics thread that allows multiple clients to share the same underlying EGLContext. */
+public class EglThread implements RenderSynchronizer.Listener {
+ /** Callback for externally managed reference count. */
+ public interface ReleaseMonitor {
+ /**
+ * Called by EglThread when a client releases its reference. Returns true when there are no more
+ * references and resources should be released.
+ */
+ boolean onRelease(EglThread eglThread);
+ }
+
+ /** Interface for clients to schedule rendering updates that will run synchronized. */
+ public interface RenderUpdate {
+
+ /**
+ * Called by EglThread when the rendering window is open. `runsInline` is true when the update
+ * is executed directly while the client schedules the update.
+ */
+ void update(boolean runsInline);
+ }
+
  /**
   * Creates a new EglThread backed by its own HandlerThread and EGL connection.
   *
   * @param releaseMonitor reference-count hook; null means "release on first release() call".
   * @param sharedContext EGL context to share resources with, or null to force EGL 1.0.
   * @param configAttributes EGL config attributes for the new connection.
   * @param renderSynchronizer optional synchronizer gating render updates; may be null.
   */
  public static EglThread create(
      @Nullable ReleaseMonitor releaseMonitor,
      @Nullable final EglBase.Context sharedContext,
      final int[] configAttributes,
      @Nullable RenderSynchronizer renderSynchronizer) {
    final HandlerThread renderThread = new HandlerThread("EglThread");
    renderThread.start();
    HandlerWithExceptionCallbacks handler =
        new HandlerWithExceptionCallbacks(renderThread.getLooper());

    // Not creating the EGLContext on the thread it will be used on seems to cause issues with
    // creating window surfaces on certain devices. So keep the same legacy behavior as EglRenderer
    // and create the context on the render thread.
    EglConnection eglConnection = ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
      // If sharedContext is null, then texture frames are disabled. This is typically for old
      // devices that might not be fully spec compliant, so force EGL 1.0 since EGL 1.4 has
      // caused trouble on some weird devices.
      if (sharedContext == null) {
        return EglConnection.createEgl10(configAttributes);
      } else {
        return EglConnection.create(sharedContext, configAttributes);
      }
    });

    return new EglThread(
        releaseMonitor != null ? releaseMonitor : eglThread -> true,
        handler,
        eglConnection,
        renderSynchronizer);
  }
+
+ public static EglThread create(
+ @Nullable ReleaseMonitor releaseMonitor,
+ @Nullable final EglBase.Context sharedContext,
+ final int[] configAttributes) {
+ return create(releaseMonitor, sharedContext, configAttributes, /* renderSynchronizer= */ null);
+ }
+
+ /**
+ * Handler that triggers callbacks when an uncaught exception happens when handling a message.
+ */
+ private static class HandlerWithExceptionCallbacks extends Handler {
+ private final Object callbackLock = new Object();
+ @GuardedBy("callbackLock") private final List exceptionCallbacks = new ArrayList<>();
+
+ public HandlerWithExceptionCallbacks(Looper looper) {
+ super(looper);
+ }
+
+ @Override
+ public void dispatchMessage(Message msg) {
+ try {
+ super.dispatchMessage(msg);
+ } catch (Exception e) {
+ Logging.e("EglThread", "Exception on EglThread", e);
+ synchronized (callbackLock) {
+ for (Runnable callback : exceptionCallbacks) {
+ callback.run();
+ }
+ }
+ throw e;
+ }
+ }
+
+ public void addExceptionCallback(Runnable callback) {
+ synchronized (callbackLock) {
+ exceptionCallbacks.add(callback);
+ }
+ }
+
+ public void removeExceptionCallback(Runnable callback) {
+ synchronized (callbackLock) {
+ exceptionCallbacks.remove(callback);
+ }
+ }
+ }
+
+ private final ReleaseMonitor releaseMonitor;
+ private final HandlerWithExceptionCallbacks handler;
+ private final EglConnection eglConnection;
+ private final RenderSynchronizer renderSynchronizer;
+ private final List pendingRenderUpdates = new ArrayList<>();
+ private boolean renderWindowOpen = true;
+
+ private EglThread(
+ ReleaseMonitor releaseMonitor,
+ HandlerWithExceptionCallbacks handler,
+ EglConnection eglConnection,
+ RenderSynchronizer renderSynchronizer) {
+ this.releaseMonitor = releaseMonitor;
+ this.handler = handler;
+ this.eglConnection = eglConnection;
+ this.renderSynchronizer = renderSynchronizer;
+ if (renderSynchronizer != null) {
+ renderSynchronizer.registerListener(this);
+ }
+ }
+
+ public void release() {
+ if (!releaseMonitor.onRelease(this)) {
+ // Thread is still in use, do not release yet.
+ return;
+ }
+
+ if (renderSynchronizer != null) {
+ renderSynchronizer.removeListener(this);
+ }
+
+ handler.post(eglConnection::release);
+ handler.getLooper().quitSafely();
+ }
+
+ /**
+ * Creates an EglBase instance with the EglThread's EglConnection. This method can be called on
+ * any thread, but the returned EglBase instance should only be used on this EglThread's Handler.
+ */
+ public EglBase createEglBaseWithSharedConnection() {
+ return EglBase.create(eglConnection);
+ }
+
+ /**
+ * Returns the Handler to interact with Gl/EGL on. Callers need to make sure that their own
+ * EglBase is current on the handler before running any graphics operations since the EglThread
+ * can be shared by multiple clients.
+ */
+ public Handler getHandler() {
+ return handler;
+ }
+
+ /**
+ * Adds a callback that will be called on the EGL thread if there is an exception on the thread.
+ */
+ public void addExceptionCallback(Runnable callback) {
+ handler.addExceptionCallback(callback);
+ }
+
+ /**
+ * Removes a previously added exception callback.
+ */
+ public void removeExceptionCallback(Runnable callback) {
+ handler.removeExceptionCallback(callback);
+ }
+
+ /**
+ * Schedules a render update (like swapBuffers) to be run in sync with other updates on the next
+ * open render window. If the render window is currently open the update will run immediately.
+ * This method must be called on the EglThread during a render pass.
+ */
+ public void scheduleRenderUpdate(RenderUpdate update) {
+ if (renderWindowOpen) {
+ update.update(/* runsInline = */true);
+ } else {
+ pendingRenderUpdates.add(update);
+ }
+ }
+
+ @Override
+ public void onRenderWindowOpen() {
+ handler.post(
+ () -> {
+ renderWindowOpen = true;
+ for (RenderUpdate update : pendingRenderUpdates) {
+ update.update(/* runsInline = */false);
+ }
+ pendingRenderUpdates.clear();
+ });
+ }
+
+ @Override
+ public void onRenderWindowClose() {
+ handler.post(() -> renderWindowOpen = false);
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Empty.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Empty.java
new file mode 100644
index 00000000..fe9481e1
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Empty.java
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Empty class for use in libjingle_peerconnection_java because all targets require at least one
+ * Java file.
+ */
+class Empty {}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/EncodedImage.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/EncodedImage.java
new file mode 100644
index 00000000..a6eef67d
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/EncodedImage.java
@@ -0,0 +1,183 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * An encoded frame from a video stream. Used as an input for decoders and as an output for
+ * encoders.
+ */
+public class EncodedImage implements RefCounted {
+ // Must be kept in sync with common_types.h FrameType.
+ public enum FrameType {
+ EmptyFrame(0),
+ VideoFrameKey(3),
+ VideoFrameDelta(4);
+
+ private final int nativeIndex;
+
+ private FrameType(int nativeIndex) {
+ this.nativeIndex = nativeIndex;
+ }
+
+ public int getNative() {
+ return nativeIndex;
+ }
+
+ // Maps a native enum value back to the Java constant; throws on unknown values.
+ @CalledByNative("FrameType")
+ static FrameType fromNativeIndex(int nativeIndex) {
+ for (FrameType type : FrameType.values()) {
+ if (type.getNative() == nativeIndex) {
+ return type;
+ }
+ }
+ throw new IllegalArgumentException("Unknown native frame type: " + nativeIndex);
+ }
+ }
+
+ // Frame metadata is immutable; the underlying buffer's lifetime is managed
+ // through refCountDelegate (releaseCallback fires when the count hits zero).
+ private final RefCountDelegate refCountDelegate;
+ public final ByteBuffer buffer;
+ public final int encodedWidth;
+ public final int encodedHeight;
+ public final long captureTimeMs; // Deprecated
+ public final long captureTimeNs;
+ public final FrameType frameType;
+ public final int rotation;
+ public final @Nullable Integer qp;
+
+ // TODO(bugs.webrtc.org/9378): Use retain and release from jni code.
+ @Override
+ public void retain() {
+ refCountDelegate.retain();
+ }
+
+ @Override
+ public void release() {
+ refCountDelegate.release();
+ }
+
+ @CalledByNative
+ private EncodedImage(ByteBuffer buffer, @Nullable Runnable releaseCallback, int encodedWidth,
+ int encodedHeight, long captureTimeNs, FrameType frameType, int rotation,
+ @Nullable Integer qp) {
+ this.buffer = buffer;
+ this.encodedWidth = encodedWidth;
+ this.encodedHeight = encodedHeight;
+ // captureTimeMs is kept for backwards compatibility and derived from captureTimeNs.
+ this.captureTimeMs = TimeUnit.NANOSECONDS.toMillis(captureTimeNs);
+ this.captureTimeNs = captureTimeNs;
+ this.frameType = frameType;
+ this.rotation = rotation;
+ this.qp = qp;
+ this.refCountDelegate = new RefCountDelegate(releaseCallback);
+ }
+
+ @CalledByNative
+ private ByteBuffer getBuffer() {
+ return buffer;
+ }
+
+ @CalledByNative
+ private int getEncodedWidth() {
+ return encodedWidth;
+ }
+
+ @CalledByNative
+ private int getEncodedHeight() {
+ return encodedHeight;
+ }
+
+ @CalledByNative
+ private long getCaptureTimeNs() {
+ return captureTimeNs;
+ }
+
+ @CalledByNative
+ private int getFrameType() {
+ return frameType.getNative();
+ }
+
+ @CalledByNative
+ private int getRotation() {
+ return rotation;
+ }
+
+ @CalledByNative
+ private @Nullable Integer getQp() {
+ return qp;
+ }
+
+ public static Builder builder() {
+ return new Builder();
+ }
+
+ // Builder for EncodedImage; obtain via EncodedImage.builder().
+ public static class Builder {
+ private ByteBuffer buffer;
+ private @Nullable Runnable releaseCallback;
+ private int encodedWidth;
+ private int encodedHeight;
+ private long captureTimeNs;
+ private EncodedImage.FrameType frameType;
+ private int rotation;
+ private @Nullable Integer qp;
+
+ private Builder() {}
+
+ public Builder setBuffer(ByteBuffer buffer, @Nullable Runnable releaseCallback) {
+ this.buffer = buffer;
+ this.releaseCallback = releaseCallback;
+ return this;
+ }
+
+ public Builder setEncodedWidth(int encodedWidth) {
+ this.encodedWidth = encodedWidth;
+ return this;
+ }
+
+ public Builder setEncodedHeight(int encodedHeight) {
+ this.encodedHeight = encodedHeight;
+ return this;
+ }
+
+ @Deprecated
+ public Builder setCaptureTimeMs(long captureTimeMs) {
+ this.captureTimeNs = TimeUnit.MILLISECONDS.toNanos(captureTimeMs);
+ return this;
+ }
+
+ public Builder setCaptureTimeNs(long captureTimeNs) {
+ this.captureTimeNs = captureTimeNs;
+ return this;
+ }
+
+ public Builder setFrameType(EncodedImage.FrameType frameType) {
+ this.frameType = frameType;
+ return this;
+ }
+
+ public Builder setRotation(int rotation) {
+ this.rotation = rotation;
+ return this;
+ }
+
+ public Builder setQp(@Nullable Integer qp) {
+ this.qp = qp;
+ return this;
+ }
+
+ // NOTE(review): no validation that buffer/frameType were set — a misuse surfaces
+ // later as an NPE in the native layer or getFrameType(); presumably callers always
+ // set them, but confirm before relying on this builder in new code.
+ public EncodedImage createEncodedImage() {
+ return new EncodedImage(buffer, releaseCallback, encodedWidth, encodedHeight, captureTimeNs,
+ frameType, rotation, qp);
+ }
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/FecControllerFactoryFactoryInterface.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/FecControllerFactoryFactoryInterface.java
new file mode 100644
index 00000000..6d39390f
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/FecControllerFactoryFactoryInterface.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Factory for creating webrtc::FecControllerFactory instances.
+ */
+public interface FecControllerFactoryFactoryInterface {
+ /**
+ * Dynamically allocates a webrtc::FecControllerFactory instance and returns a pointer to it.
+ * The caller takes ownership of the object.
+ */
+ // Returned long is a raw native pointer; it must be handed to native code that frees it.
+ public long createNative();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/FileVideoCapturer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/FileVideoCapturer.java
new file mode 100644
index 00000000..a00a5e5f
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/FileVideoCapturer.java
@@ -0,0 +1,201 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.os.SystemClock;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
+import java.nio.charset.Charset;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.concurrent.TimeUnit;
+
+public class FileVideoCapturer implements VideoCapturer {
+ // Abstraction over a file-backed frame source; close() releases the underlying file.
+ private interface VideoReader {
+ VideoFrame getNextFrame();
+ void close();
+ }
+
+ /**
+ * Read video data from file for the .y4m container.
+ */
+ @SuppressWarnings("StringSplitter")
+ private static class VideoReaderY4M implements VideoReader {
+ private static final String TAG = "VideoReaderY4M";
+ private static final String Y4M_FRAME_DELIMETER = "FRAME";
+ // Delimiter plus the trailing newline.
+ private static final int FRAME_DELIMETER_LENGTH = Y4M_FRAME_DELIMETER.length() + 1;
+
+ private final int frameWidth;
+ private final int frameHeight;
+ // First char after header
+ private final long videoStart;
+ private final RandomAccessFile mediaFile;
+ private final FileChannel mediaFileChannel;
+
+ // Parses the Y4M stream header (terminated by '\n') for width, height and color space.
+ // Only I420 ("420"/"420mpeg2") with even dimensions is supported.
+ public VideoReaderY4M(String file) throws IOException {
+ mediaFile = new RandomAccessFile(file, "r");
+ mediaFileChannel = mediaFile.getChannel();
+ StringBuilder builder = new StringBuilder();
+ for (;;) {
+ int c = mediaFile.read();
+ if (c == -1) {
+ // End of file reached.
+ throw new RuntimeException("Found end of file before end of header for file: " + file);
+ }
+ if (c == '\n') {
+ // End of header found.
+ break;
+ }
+ builder.append((char) c);
+ }
+ videoStart = mediaFileChannel.position();
+ String header = builder.toString();
+ String[] headerTokens = header.split("[ ]");
+ int w = 0;
+ int h = 0;
+ String colorSpace = "420";
+ for (String tok : headerTokens) {
+ char c = tok.charAt(0);
+ switch (c) {
+ case 'W':
+ w = Integer.parseInt(tok.substring(1));
+ break;
+ case 'H':
+ h = Integer.parseInt(tok.substring(1));
+ break;
+ case 'C':
+ colorSpace = tok.substring(1);
+ break;
+ }
+ }
+ Logging.d(TAG, "Color space: " + colorSpace);
+ if (!colorSpace.equals("420") && !colorSpace.equals("420mpeg2")) {
+ throw new IllegalArgumentException(
+ "Does not support any other color space than I420 or I420mpeg2");
+ }
+ if ((w % 2) == 1 || (h % 2) == 1) {
+ throw new IllegalArgumentException("Does not support odd width or height");
+ }
+ frameWidth = w;
+ frameHeight = h;
+ Logging.d(TAG, "frame dim: (" + w + ", " + h + ")");
+ }
+
+ // Reads the next I420 frame; wraps around to the first frame at end of file.
+ @Override
+ public VideoFrame getNextFrame() {
+ final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
+ final JavaI420Buffer buffer = JavaI420Buffer.allocate(frameWidth, frameHeight);
+ final ByteBuffer dataY = buffer.getDataY();
+ final ByteBuffer dataU = buffer.getDataU();
+ final ByteBuffer dataV = buffer.getDataV();
+ final int chromaHeight = (frameHeight + 1) / 2;
+ final int sizeY = frameHeight * buffer.getStrideY();
+ final int sizeU = chromaHeight * buffer.getStrideU();
+ final int sizeV = chromaHeight * buffer.getStrideV();
+
+ try {
+ ByteBuffer frameDelim = ByteBuffer.allocate(FRAME_DELIMETER_LENGTH);
+ if (mediaFileChannel.read(frameDelim) < FRAME_DELIMETER_LENGTH) {
+ // We reach end of file, loop
+ mediaFileChannel.position(videoStart);
+ if (mediaFileChannel.read(frameDelim) < FRAME_DELIMETER_LENGTH) {
+ throw new RuntimeException("Error looping video");
+ }
+ }
+ String frameDelimStr = new String(frameDelim.array(), Charset.forName("US-ASCII"));
+ if (!frameDelimStr.equals(Y4M_FRAME_DELIMETER + "\n")) {
+ throw new RuntimeException(
+ "Frames should be delimited by FRAME plus newline, found delimter was: '"
+ + frameDelimStr + "'");
+ }
+
+ // Fills the three planes directly from the file into the buffer's plane views.
+ mediaFileChannel.read(dataY);
+ mediaFileChannel.read(dataU);
+ mediaFileChannel.read(dataV);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+
+ return new VideoFrame(buffer, 0 /* rotation */, captureTimeNs);
+ }
+
+ @Override
+ public void close() {
+ try {
+ // Closing a file also closes the channel.
+ mediaFile.close();
+ } catch (IOException e) {
+ Logging.e(TAG, "Problem closing file", e);
+ }
+ }
+ }
+
+ private final static String TAG = "FileVideoCapturer";
+ private final VideoReader videoReader;
+ private CapturerObserver capturerObserver;
+ private final Timer timer = new Timer();
+
+ private final TimerTask tickTask = new TimerTask() {
+ @Override
+ public void run() {
+ tick();
+ }
+ };
+
+ public FileVideoCapturer(String inputFile) throws IOException {
+ try {
+ videoReader = new VideoReaderY4M(inputFile);
+ } catch (IOException e) {
+ Logging.d(TAG, "Could not open video file: " + inputFile);
+ throw e;
+ }
+ }
+
+ // NOTE(review): NPEs if startCapture() is called before initialize() has set
+ // capturerObserver — presumably the VideoCapturer contract forbids that; confirm.
+ public void tick() {
+ VideoFrame videoFrame = videoReader.getNextFrame();
+ capturerObserver.onFrameCaptured(videoFrame);
+ videoFrame.release();
+ }
+
+ @Override
+ public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
+ CapturerObserver capturerObserver) {
+ this.capturerObserver = capturerObserver;
+ }
+
+ // NOTE(review): integer division — framerate == 0 throws ArithmeticException and
+ // framerate > 1000 yields period 0 (IllegalArgumentException from Timer.schedule);
+ // assumes 1 <= framerate <= 1000. width/height are ignored (the file dictates them).
+ @Override
+ public void startCapture(int width, int height, int framerate) {
+ timer.schedule(tickTask, 0, 1000 / framerate);
+ }
+
+ // NOTE(review): Timer.cancel() is terminal — capture cannot be restarted on this
+ // instance after stopCapture().
+ @Override
+ public void stopCapture() throws InterruptedException {
+ timer.cancel();
+ }
+
+ @Override
+ public void changeCaptureFormat(int width, int height, int framerate) {
+ // Empty on purpose
+ }
+
+ @Override
+ public void dispose() {
+ videoReader.close();
+ }
+
+ @Override
+ public boolean isScreencast() {
+ return false;
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/FrameDecryptor.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/FrameDecryptor.java
new file mode 100644
index 00000000..2932f3d9
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/FrameDecryptor.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * The FrameDecryptor interface allows Java API users to provide a
+ * pointer to their native implementation of the FrameDecryptorInterface.
+ * FrameDecryptors are extremely performance sensitive as they must process all
+ * incoming video and audio frames. Due to this reason they should always be
+ * backed by a native implementation.
+ * @note Not ready for production use.
+ */
+public interface FrameDecryptor {
+ /**
+ * @return A FrameDecryptorInterface pointer.
+ */
+ // Raw native pointer; ownership semantics are defined by the native side.
+ long getNativeFrameDecryptor();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/FrameEncryptor.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/FrameEncryptor.java
new file mode 100644
index 00000000..bc81223f
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/FrameEncryptor.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * The FrameEncryptor interface allows Java API users to provide a pointer to
+ * their native implementation of the FrameEncryptorInterface.
+ * FrameEncryptors are extremely performance sensitive as they must process all
+ * outgoing video and audio frames. Due to this reason they should always be
+ * backed by a native implementation.
+ * @note Not ready for production use.
+ */
+public interface FrameEncryptor {
+ /**
+ * @return A FrameEncryptorInterface pointer.
+ */
+ // Raw native pointer; ownership semantics are defined by the native side.
+ long getNativeFrameEncryptor();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/FramerateBitrateAdjuster.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/FramerateBitrateAdjuster.java
new file mode 100644
index 00000000..e28b7b5a
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/FramerateBitrateAdjuster.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * BitrateAdjuster that adjusts the bitrate to compensate for changes in the framerate. Used with
+ * hardware codecs that assume the framerate never changes.
+ */
+class FramerateBitrateAdjuster extends BaseBitrateAdjuster {
+ private static final int DEFAULT_FRAMERATE_FPS = 30;
+
+ // Pins the reported framerate to 30 fps and scales the bitrate proportionally so
+ // total bits per second is preserved for codecs that assume a fixed framerate.
+ // NOTE(review): targetFramerateFps == 0 divides by zero (NaN -> int 0 bitrate);
+ // presumably callers never pass 0 — confirm.
+ @Override
+ public void setTargets(int targetBitrateBps, double targetFramerateFps) {
+ // Keep frame rate unchanged and adjust bit rate.
+ this.targetFramerateFps = DEFAULT_FRAMERATE_FPS;
+ this.targetBitrateBps = (int) (targetBitrateBps * DEFAULT_FRAMERATE_FPS / targetFramerateFps);
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/GlGenericDrawer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/GlGenericDrawer.java
new file mode 100644
index 00000000..b70a3728
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/GlGenericDrawer.java
@@ -0,0 +1,284 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import androidx.annotation.Nullable;
+import java.nio.FloatBuffer;
+import org.webrtc.GlShader;
+import org.webrtc.GlUtil;
+import org.webrtc.RendererCommon;
+
+/**
+ * Helper class to implement an instance of RendererCommon.GlDrawer that can accept multiple input
+ * sources (OES, RGB, or YUV) using a generic fragment shader as input. The generic fragment shader
+ * should sample pixel values from the function "sample" that will be provided by this class and
+ * provides an abstraction for the input source type (OES, RGB, or YUV). The texture coordinate
+ * variable name will be "tc" and the texture matrix in the vertex shader will be "tex_mat". The
+ * simplest possible generic shader that just draws pixel from the frame unmodified looks like:
+ * void main() {
+ * gl_FragColor = sample(tc);
+ * }
+ * This class covers the cases for most simple shaders and generates the necessary boiler plate.
+ * Advanced shaders can always implement RendererCommon.GlDrawer directly.
+ */
+class GlGenericDrawer implements RendererCommon.GlDrawer {
+ /**
+ * The different shader types representing different input sources. YUV here represents three
+ * separate Y, U, V textures.
+ */
+ public static enum ShaderType { OES, RGB, YUV }
+
+ /**
+ * The shader callbacks is used to customize behavior for a GlDrawer. It provides a hook to set
+ * uniform variables in the shader before a frame is drawn.
+ */
+ public static interface ShaderCallbacks {
+ /**
+ * This callback is called when a new shader has been compiled and created. It will be called
+ * for the first frame as well as when the shader type is changed. This callback can be used to
+ * do custom initialization of the shader that only needs to happen once.
+ */
+ void onNewShader(GlShader shader);
+
+ /**
+ * This callback is called before rendering a frame. It can be used to do custom preparation of
+ * the shader that needs to happen every frame.
+ */
+ void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportWidth, int viewportHeight);
+ }
+
+ private static final String INPUT_VERTEX_COORDINATE_NAME = "in_pos";
+ private static final String INPUT_TEXTURE_COORDINATE_NAME = "in_tc";
+ private static final String TEXTURE_MATRIX_NAME = "tex_mat";
+ private static final String DEFAULT_VERTEX_SHADER_STRING = "varying vec2 tc;\n"
+ + "attribute vec4 in_pos;\n"
+ + "attribute vec4 in_tc;\n"
+ + "uniform mat4 tex_mat;\n"
+ + "void main() {\n"
+ + " gl_Position = in_pos;\n"
+ + " tc = (tex_mat * in_tc).xy;\n"
+ + "}\n";
+
+ // Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1)
+ // is top-right.
+ private static final FloatBuffer FULL_RECTANGLE_BUFFER = GlUtil.createFloatBuffer(new float[] {
+ -1.0f, -1.0f, // Bottom left.
+ 1.0f, -1.0f, // Bottom right.
+ -1.0f, 1.0f, // Top left.
+ 1.0f, 1.0f, // Top right.
+ });
+
+ // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
+ private static final FloatBuffer FULL_RECTANGLE_TEXTURE_BUFFER =
+ GlUtil.createFloatBuffer(new float[] {
+ 0.0f, 0.0f, // Bottom left.
+ 1.0f, 0.0f, // Bottom right.
+ 0.0f, 1.0f, // Top left.
+ 1.0f, 1.0f, // Top right.
+ });
+
+ // Builds the full fragment shader for the given input type by injecting a "sample"
+ // implementation (YUV) or textually rewriting "sample(" into a texture2D call (OES/RGB).
+ // NOTE(review): the plain String.replace of "sample(" assumes the generic fragment
+ // source never contains that token in another context (identifier, comment) — confirm
+ // for any new shader source fed through here.
+ static String createFragmentShaderString(String genericFragmentSource, ShaderType shaderType) {
+ final StringBuilder stringBuilder = new StringBuilder();
+ if (shaderType == ShaderType.OES) {
+ stringBuilder.append("#extension GL_OES_EGL_image_external : require\n");
+ }
+ stringBuilder.append("precision mediump float;\n");
+ stringBuilder.append("varying vec2 tc;\n");
+
+ if (shaderType == ShaderType.YUV) {
+ stringBuilder.append("uniform sampler2D y_tex;\n");
+ stringBuilder.append("uniform sampler2D u_tex;\n");
+ stringBuilder.append("uniform sampler2D v_tex;\n");
+
+ // Add separate function for sampling texture.
+ // yuv_to_rgb_mat is inverse of the matrix defined in YuvConverter.
+ stringBuilder.append("vec4 sample(vec2 p) {\n");
+ stringBuilder.append(" float y = texture2D(y_tex, p).r * 1.16438;\n");
+ stringBuilder.append(" float u = texture2D(u_tex, p).r;\n");
+ stringBuilder.append(" float v = texture2D(v_tex, p).r;\n");
+ stringBuilder.append(" return vec4(y + 1.59603 * v - 0.874202,\n");
+ stringBuilder.append(" y - 0.391762 * u - 0.812968 * v + 0.531668,\n");
+ stringBuilder.append(" y + 2.01723 * u - 1.08563, 1);\n");
+ stringBuilder.append("}\n");
+ stringBuilder.append(genericFragmentSource);
+ } else {
+ final String samplerName = shaderType == ShaderType.OES ? "samplerExternalOES" : "sampler2D";
+ stringBuilder.append("uniform ").append(samplerName).append(" tex;\n");
+
+ // Update the sampling function in-place.
+ stringBuilder.append(genericFragmentSource.replace("sample(", "texture2D(tex, "));
+ }
+
+ return stringBuilder.toString();
+ }
+
+ private final String genericFragmentSource;
+ private final String vertexShader;
+ private final ShaderCallbacks shaderCallbacks;
+ // Lazily-created shader, rebuilt whenever the input ShaderType changes.
+ @Nullable private ShaderType currentShaderType;
+ @Nullable private GlShader currentShader;
+ private int inPosLocation;
+ private int inTcLocation;
+ private int texMatrixLocation;
+
+ public GlGenericDrawer(String genericFragmentSource, ShaderCallbacks shaderCallbacks) {
+ this(DEFAULT_VERTEX_SHADER_STRING, genericFragmentSource, shaderCallbacks);
+ }
+
+ public GlGenericDrawer(
+ String vertexShader, String genericFragmentSource, ShaderCallbacks shaderCallbacks) {
+ this.vertexShader = vertexShader;
+ this.genericFragmentSource = genericFragmentSource;
+ this.shaderCallbacks = shaderCallbacks;
+ }
+
+ // Visible for testing.
+ GlShader createShader(ShaderType shaderType) {
+ return new GlShader(
+ vertexShader, createFragmentShaderString(genericFragmentSource, shaderType));
+ }
+
+ /**
+ * Draw an OES texture frame with specified texture transformation matrix. Required resources are
+ * allocated at the first call to this function.
+ */
+ @Override
+ public void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
+ prepareShader(
+ ShaderType.OES, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
+ // Bind the texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId);
+ // Draw the texture.
+ GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ // Unbind the texture as a precaution.
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
+ }
+
+ /**
+ * Draw a RGB(A) texture frame with specified texture transformation matrix. Required resources
+ * are allocated at the first call to this function.
+ */
+ @Override
+ public void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
+ prepareShader(
+ ShaderType.RGB, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
+ // Bind the texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
+ // Draw the texture.
+ GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ // Unbind the texture as a precaution.
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ }
+
+ /**
+ * Draw a YUV frame with specified texture transformation matrix. Required resources are allocated
+ * at the first call to this function.
+ */
+ @Override
+ public void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
+ prepareShader(
+ ShaderType.YUV, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
+ // Bind the textures.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
+ }
+ // Draw the textures.
+ GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ // Unbind the textures as a precaution.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ }
+ }
+
+ // Compiles (or reuses) the shader for shaderType, uploads vertex/texture coordinates
+ // and the texture matrix, then gives shaderCallbacks a chance to set custom uniforms.
+ private void prepareShader(ShaderType shaderType, float[] texMatrix, int frameWidth,
+ int frameHeight, int viewportWidth, int viewportHeight) {
+ final GlShader shader;
+ if (shaderType.equals(currentShaderType)) {
+ // Same shader type as before, reuse existing shader.
+ shader = currentShader;
+ } else {
+ // Allocate new shader.
+ currentShaderType = null;
+ if (currentShader != null) {
+ currentShader.release();
+ currentShader = null;
+ }
+
+ shader = createShader(shaderType);
+ currentShaderType = shaderType;
+ currentShader = shader;
+
+ shader.useProgram();
+ // Set input texture units.
+ if (shaderType == ShaderType.YUV) {
+ GLES20.glUniform1i(shader.getUniformLocation("y_tex"), 0);
+ GLES20.glUniform1i(shader.getUniformLocation("u_tex"), 1);
+ GLES20.glUniform1i(shader.getUniformLocation("v_tex"), 2);
+ } else {
+ GLES20.glUniform1i(shader.getUniformLocation("tex"), 0);
+ }
+
+ GlUtil.checkNoGLES2Error("Create shader");
+ shaderCallbacks.onNewShader(shader);
+ texMatrixLocation = shader.getUniformLocation(TEXTURE_MATRIX_NAME);
+ inPosLocation = shader.getAttribLocation(INPUT_VERTEX_COORDINATE_NAME);
+ inTcLocation = shader.getAttribLocation(INPUT_TEXTURE_COORDINATE_NAME);
+ }
+
+ shader.useProgram();
+
+ // Upload the vertex coordinates.
+ GLES20.glEnableVertexAttribArray(inPosLocation);
+ GLES20.glVertexAttribPointer(inPosLocation, /* size= */ 2,
+ /* type= */ GLES20.GL_FLOAT, /* normalized= */ false, /* stride= */ 0,
+ FULL_RECTANGLE_BUFFER);
+
+ // Upload the texture coordinates.
+ GLES20.glEnableVertexAttribArray(inTcLocation);
+ GLES20.glVertexAttribPointer(inTcLocation, /* size= */ 2,
+ /* type= */ GLES20.GL_FLOAT, /* normalized= */ false, /* stride= */ 0,
+ FULL_RECTANGLE_TEXTURE_BUFFER);
+
+ // Upload the texture transformation matrix.
+ GLES20.glUniformMatrix4fv(
+ texMatrixLocation, 1 /* count= */, false /* transpose= */, texMatrix, 0 /* offset= */);
+
+ // Do custom per-frame shader preparation.
+ shaderCallbacks.onPrepareShader(
+ shader, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
+ GlUtil.checkNoGLES2Error("Prepare shader");
+ }
+
+ /**
+ * Release all GLES resources. This needs to be done manually, otherwise the resources are leaked.
+ */
+ @Override
+ public void release() {
+ if (currentShader != null) {
+ currentShader.release();
+ currentShader = null;
+ currentShaderType = null;
+ }
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/GlRectDrawer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/GlRectDrawer.java
new file mode 100644
index 00000000..d1fbd1b7
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/GlRectDrawer.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Simplest possible GL shader that just draws frames as opaque quads. */
+public class GlRectDrawer extends GlGenericDrawer {
+ // Pass-through fragment shader: output each sampled pixel unchanged.
+ private static final String FRAGMENT_SHADER = "void main() {\n"
+ + " gl_FragColor = sample(tc);\n"
+ + "}\n";
+
+ // No custom uniforms are needed for a plain copy, so both hooks are no-ops.
+ private static class ShaderCallbacks implements GlGenericDrawer.ShaderCallbacks {
+ @Override
+ public void onNewShader(GlShader shader) {}
+
+ @Override
+ public void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportWidth, int viewportHeight) {}
+ }
+
+ public GlRectDrawer() {
+ super(FRAGMENT_SHADER, new ShaderCallbacks());
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/GlShader.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/GlShader.java
new file mode 100644
index 00000000..7efd8d3a
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/GlShader.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+
+import java.nio.FloatBuffer;
+
+// Helper class for handling OpenGL shaders and shader programs.
+public class GlShader {
+ private static final String TAG = "GlShader";
+
+ private static int compileShader(int shaderType, String source) {
+ final int shader = GLES20.glCreateShader(shaderType);
+ if (shader == 0) {
+ throw new RuntimeException("glCreateShader() failed. GLES20 error: " + GLES20.glGetError());
+ }
+ GLES20.glShaderSource(shader, source);
+ GLES20.glCompileShader(shader);
+ int[] compileStatus = new int[] {GLES20.GL_FALSE};
+ GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
+ if (compileStatus[0] != GLES20.GL_TRUE) {
+ Logging.e(
+ TAG, "Compile error " + GLES20.glGetShaderInfoLog(shader) + " in shader:\n" + source);
+ throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
+ }
+ GlUtil.checkNoGLES2Error("compileShader");
+ return shader;
+ }
+
+ private int program;
+
+ public GlShader(String vertexSource, String fragmentSource) {
+ final int vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+ final int fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+ program = GLES20.glCreateProgram();
+ if (program == 0) {
+ throw new RuntimeException("glCreateProgram() failed. GLES20 error: " + GLES20.glGetError());
+ }
+ GLES20.glAttachShader(program, vertexShader);
+ GLES20.glAttachShader(program, fragmentShader);
+ GLES20.glLinkProgram(program);
+ int[] linkStatus = new int[] {GLES20.GL_FALSE};
+ GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+ if (linkStatus[0] != GLES20.GL_TRUE) {
+ Logging.e(TAG, "Could not link program: " + GLES20.glGetProgramInfoLog(program));
+ throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
+ }
+ // According to the documentation of glLinkProgram():
+ // "After the link operation, applications are free to modify attached shader objects, compile
+ // attached shader objects, detach shader objects, delete shader objects, and attach additional
+ // shader objects. None of these operations affects the information log or the program that is
+ // part of the program object."
+ // But in practice, detaching shaders from the program seems to break some devices. Deleting the
+ // shaders are fine however - it will delete them when they are no longer attached to a program.
+ GLES20.glDeleteShader(vertexShader);
+ GLES20.glDeleteShader(fragmentShader);
+ GlUtil.checkNoGLES2Error("Creating GlShader");
+ }
+
+ public int getAttribLocation(String label) {
+ if (program == -1) {
+ throw new RuntimeException("The program has been released");
+ }
+ int location = GLES20.glGetAttribLocation(program, label);
+ if (location < 0) {
+ throw new RuntimeException("Could not locate '" + label + "' in program");
+ }
+ return location;
+ }
+
+ /**
+ * Enable and upload a vertex array for attribute `label`. The vertex data is specified in
+ * `buffer` with `dimension` number of components per vertex.
+ */
+ public void setVertexAttribArray(String label, int dimension, FloatBuffer buffer) {
+ setVertexAttribArray(label, dimension, 0 /* stride */, buffer);
+ }
+
+ /**
+ * Enable and upload a vertex array for attribute `label`. The vertex data is specified in
+ * `buffer` with `dimension` number of components per vertex and specified `stride`.
+ */
+ public void setVertexAttribArray(String label, int dimension, int stride, FloatBuffer buffer) {
+ if (program == -1) {
+ throw new RuntimeException("The program has been released");
+ }
+ int location = getAttribLocation(label);
+ GLES20.glEnableVertexAttribArray(location);
+ GLES20.glVertexAttribPointer(location, dimension, GLES20.GL_FLOAT, false, stride, buffer);
+ GlUtil.checkNoGLES2Error("setVertexAttribArray");
+ }
+
+ public int getUniformLocation(String label) {
+ if (program == -1) {
+ throw new RuntimeException("The program has been released");
+ }
+ int location = GLES20.glGetUniformLocation(program, label);
+ if (location < 0) {
+ throw new RuntimeException("Could not locate uniform '" + label + "' in program");
+ }
+ return location;
+ }
+
+ public void useProgram() {
+ if (program == -1) {
+ throw new RuntimeException("The program has been released");
+ }
+ synchronized (EglBase.lock) {
+ GLES20.glUseProgram(program);
+ }
+ GlUtil.checkNoGLES2Error("glUseProgram");
+ }
+
+ public void release() {
+ Logging.d(TAG, "Deleting shader.");
+ // Delete program, automatically detaching any shaders from it.
+ if (program != -1) {
+ GLES20.glDeleteProgram(program);
+ program = -1;
+ }
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/GlTextureFrameBuffer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/GlTextureFrameBuffer.java
new file mode 100644
index 00000000..b906fe56
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/GlTextureFrameBuffer.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+
+/**
+ * Helper class for handling OpenGL framebuffer with only color attachment and no depth or stencil
+ * buffer. Intended for simple tasks such as texture copy, texture downscaling, and texture color
+ * conversion. This class is not thread safe and must be used by a thread with an active GL context.
+ */
+// TODO(magjed): Add unittests for this class.
+public class GlTextureFrameBuffer {
+ private final int pixelFormat;
+ private int frameBufferId;
+ private int textureId;
+ private int width;
+ private int height;
+
+ /**
+ * Generate texture and framebuffer resources. An EGLContext must be bound on the current thread
+ * when calling this function. The framebuffer is not complete until setSize() is called.
+ */
+ public GlTextureFrameBuffer(int pixelFormat) {
+ switch (pixelFormat) {
+ case GLES20.GL_LUMINANCE:
+ case GLES20.GL_RGB:
+ case GLES20.GL_RGBA:
+ this.pixelFormat = pixelFormat;
+ break;
+ default:
+ throw new IllegalArgumentException("Invalid pixel format: " + pixelFormat);
+ }
+ this.width = 0;
+ this.height = 0;
+ }
+
+ /**
+ * (Re)allocate texture. Will do nothing if the requested size equals the current size. An
+ * EGLContext must be bound on the current thread when calling this function. Must be called at
+ * least once before using the framebuffer. May be called multiple times to change size.
+ */
+ public void setSize(int width, int height) {
+ if (width <= 0 || height <= 0) {
+ throw new IllegalArgumentException("Invalid size: " + width + "x" + height);
+ }
+ if (width == this.width && height == this.height) {
+ return;
+ }
+ this.width = width;
+ this.height = height;
+ // Lazy allocation the first time setSize() is called.
+ if (textureId == 0) {
+ textureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+ }
+ if (frameBufferId == 0) {
+ final int frameBuffers[] = new int[1];
+ GLES20.glGenFramebuffers(1, frameBuffers, 0);
+ frameBufferId = frameBuffers[0];
+ }
+
+ // Allocate texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, pixelFormat, width, height, 0, pixelFormat,
+ GLES20.GL_UNSIGNED_BYTE, null);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ GlUtil.checkNoGLES2Error("GlTextureFrameBuffer setSize");
+
+ // Attach the texture to the framebuffer as color attachment.
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
+ GLES20.glFramebufferTexture2D(
+ GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureId, 0);
+
+ // Check that the framebuffer is in a good state.
+ final int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
+ if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
+ throw new IllegalStateException("Framebuffer not complete, status: " + status);
+ }
+
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+ }
+
+ public int getWidth() {
+ return width;
+ }
+
+ public int getHeight() {
+ return height;
+ }
+
+ /** Gets the OpenGL frame buffer id. This value is only valid after setSize() has been called. */
+ public int getFrameBufferId() {
+ return frameBufferId;
+ }
+
+ /** Gets the OpenGL texture id. This value is only valid after setSize() has been called. */
+ public int getTextureId() {
+ return textureId;
+ }
+
+ /**
+ * Release texture and framebuffer. An EGLContext must be bound on the current thread when calling
+ * this function. This object should not be used after this call.
+ */
+ public void release() {
+ GLES20.glDeleteTextures(1, new int[] {textureId}, 0);
+ textureId = 0;
+ GLES20.glDeleteFramebuffers(1, new int[] {frameBufferId}, 0);
+ frameBufferId = 0;
+ width = 0;
+ height = 0;
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/GlUtil.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/GlUtil.java
new file mode 100644
index 00000000..e2dd0c56
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/GlUtil.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+import android.opengl.GLException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+/**
+ * Some OpenGL static utility functions.
+ */
+public class GlUtil {
+ private GlUtil() {}
+
+ public static class GlOutOfMemoryException extends GLException {
+ public GlOutOfMemoryException(int error, String msg) {
+ super(error, msg);
+ }
+ }
+
+ // Assert that no OpenGL ES 2.0 error has been raised.
+ public static void checkNoGLES2Error(String msg) {
+ int error = GLES20.glGetError();
+ if (error != GLES20.GL_NO_ERROR) {
+ throw error == GLES20.GL_OUT_OF_MEMORY
+ ? new GlOutOfMemoryException(error, msg)
+ : new GLException(error, msg + ": GLES20 error: " + error);
+ }
+ }
+
+ public static FloatBuffer createFloatBuffer(float[] coords) {
+ // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
+ ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * 4);
+ bb.order(ByteOrder.nativeOrder());
+ FloatBuffer fb = bb.asFloatBuffer();
+ fb.put(coords);
+ fb.position(0);
+ return fb;
+ }
+
+ /**
+ * Generate texture with standard parameters.
+ */
+ public static int generateTexture(int target) {
+ final int textureArray[] = new int[1];
+ GLES20.glGenTextures(1, textureArray, 0);
+ final int textureId = textureArray[0];
+ GLES20.glBindTexture(target, textureId);
+ GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
+ GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+ GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+ checkNoGLES2Error("generateTexture");
+ return textureId;
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/H264Utils.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/H264Utils.java
new file mode 100644
index 00000000..abb79c65
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/H264Utils.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.Map;
+import java.util.HashMap;
+
+/** Container for static helper functions related to dealing with H264 codecs. */
+class H264Utils {
+ public static final String H264_FMTP_PROFILE_LEVEL_ID = "profile-level-id";
+ public static final String H264_FMTP_LEVEL_ASYMMETRY_ALLOWED = "level-asymmetry-allowed";
+ public static final String H264_FMTP_PACKETIZATION_MODE = "packetization-mode";
+
+ public static final String H264_PROFILE_CONSTRAINED_BASELINE = "42e0";
+ public static final String H264_PROFILE_CONSTRAINED_HIGH = "640c";
+ public static final String H264_LEVEL_3_1 = "1f"; // 31 in hex.
+ public static final String H264_CONSTRAINED_HIGH_3_1 =
+ H264_PROFILE_CONSTRAINED_HIGH + H264_LEVEL_3_1;
+ public static final String H264_CONSTRAINED_BASELINE_3_1 =
+ H264_PROFILE_CONSTRAINED_BASELINE + H264_LEVEL_3_1;
+
+ public static Map getDefaultH264Params(boolean isHighProfile) {
+ final Map params = new HashMap<>();
+ params.put(VideoCodecInfo.H264_FMTP_LEVEL_ASYMMETRY_ALLOWED, "1");
+ params.put(VideoCodecInfo.H264_FMTP_PACKETIZATION_MODE, "1");
+ params.put(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID,
+ isHighProfile ? VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1
+ : VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1);
+ return params;
+ }
+
+ public static VideoCodecInfo DEFAULT_H264_BASELINE_PROFILE_CODEC =
+ new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ false));
+ public static VideoCodecInfo DEFAULT_H264_HIGH_PROFILE_CODEC =
+ new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ true));
+
+ public static boolean isSameH264Profile(
+ Map params1, Map params2) {
+ return nativeIsSameH264Profile(params1, params2);
+ }
+
+ private static native boolean nativeIsSameH264Profile(
+ Map params1, Map params2);
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/HardwareVideoDecoderFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/HardwareVideoDecoderFactory.java
new file mode 100644
index 00000000..215598a8
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/HardwareVideoDecoderFactory.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodecInfo;
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+
+/** Factory for Android hardware VideoDecoders. */
+public class HardwareVideoDecoderFactory extends MediaCodecVideoDecoderFactory {
+ private final static Predicate defaultAllowedPredicate =
+ new Predicate() {
+ @Override
+ public boolean test(MediaCodecInfo arg) {
+ return MediaCodecUtils.isHardwareAccelerated(arg);
+ }
+ };
+
+ /** Creates a HardwareVideoDecoderFactory that does not use surface textures. */
+ @Deprecated // Not removed yet to avoid breaking callers.
+ public HardwareVideoDecoderFactory() {
+ this(null);
+ }
+
+ /**
+ * Creates a HardwareVideoDecoderFactory that supports surface texture rendering.
+ *
+ * @param sharedContext The textures generated will be accessible from this context. May be null,
+ * this disables texture support.
+ */
+ public HardwareVideoDecoderFactory(@Nullable EglBase.Context sharedContext) {
+ this(sharedContext, /* codecAllowedPredicate= */ null);
+ }
+
+ /**
+ * Creates a HardwareVideoDecoderFactory that supports surface texture rendering.
+ *
+ * @param sharedContext The textures generated will be accessible from this context. May be null,
+ * this disables texture support.
+ * @param codecAllowedPredicate predicate to filter codecs. It is combined with the default
+ * predicate that only allows hardware codecs.
+ */
+ public HardwareVideoDecoderFactory(@Nullable EglBase.Context sharedContext,
+ @Nullable Predicate codecAllowedPredicate) {
+ super(sharedContext,
+ (codecAllowedPredicate == null ? defaultAllowedPredicate
+ : codecAllowedPredicate.and(defaultAllowedPredicate)));
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/HardwareVideoEncoder.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/HardwareVideoEncoder.java
new file mode 100644
index 00000000..ad2d1925
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/HardwareVideoEncoder.java
@@ -0,0 +1,810 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static android.media.MediaCodecInfo.CodecProfileLevel.AVCLevel3;
+import static android.media.MediaCodecInfo.CodecProfileLevel.AVCProfileHigh;
+import static android.media.MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaFormat;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.os.Bundle;
+import android.view.Surface;
+
+import androidx.annotation.Nullable;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Map;
+import java.util.concurrent.BlockingDeque;
+import java.util.concurrent.LinkedBlockingDeque;
+import java.util.concurrent.TimeUnit;
+
+import org.webrtc.ThreadUtils.ThreadChecker;
+
+/**
+ * Android hardware video encoder.
+ */
+class HardwareVideoEncoder implements VideoEncoder {
  // Log tag.
  private static final String TAG = "HardwareVideoEncoder";

  // NOTE(review): usage not visible in this chunk — presumably an upper bound applied to the
  // framerate fed to the codec; confirm against the rest of the class.
  private static final int MAX_VIDEO_FRAMERATE = 30;

  // See MAX_ENCODER_Q_SIZE in androidmediaencoder.cc.
  private static final int MAX_ENCODER_Q_SIZE = 2;

  // How long release() waits for the output thread to join before reporting TIMEOUT.
  private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
  // NOTE(review): usage not visible in this chunk — the name suggests a microsecond timeout for
  // dequeueOutputBuffer polling on the output thread; confirm.
  private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;

  // Size of the input frames should be multiple of 16 for the H/W encoder.
  private static final int REQUIRED_RESOLUTION_ALIGNMENT = 16;
+
  /**
   * Keeps track of the number of output buffers that have been passed down the pipeline and not yet
   * released. We need to wait for this to go down to zero before operations invalidating the output
   * buffers, i.e., stop() and getOutputBuffer().
   */
  private static class BusyCount {
    private final Object countLock = new Object();
    // Number of outstanding (not yet released) output buffers. Guarded by countLock.
    private int count;

    // Marks one more output buffer as in flight.
    public void increment() {
      synchronized (countLock) {
        count++;
      }
    }

    // This method may be called on an arbitrary thread.
    public void decrement() {
      synchronized (countLock) {
        count--;
        if (count == 0) {
          // Wake any thread blocked in waitForZero().
          countLock.notifyAll();
        }
      }
    }

    // The increment and waitForZero methods are called on the same thread (deliverEncodedImage,
    // running on the output thread). Hence, after waitForZero returns, the count will stay zero
    // until the same thread calls increment.
    public void waitForZero() {
      boolean wasInterrupted = false;
      synchronized (countLock) {
        while (count > 0) {
          try {
            countLock.wait();
          } catch (InterruptedException e) {
            Logging.e(TAG, "Interrupted while waiting on busy count", e);
            wasInterrupted = true;
          }
        }
      }

      if (wasInterrupted) {
        // Restore the interrupt flag that was swallowed while waiting.
        Thread.currentThread().interrupt();
      }
    }
  }
+
+ // --- Initialized on construction.
+ private final MediaCodecWrapperFactory mediaCodecWrapperFactory;
+ private final String codecName;
+ private final VideoCodecMimeType codecType;
+ private final Integer surfaceColorFormat;
+ private final Integer yuvColorFormat;
+ private final Map params;
+ private final int keyFrameIntervalSec; // Base interval for generating key frames.
+ // Interval at which to force a key frame. Used to reduce color distortions caused by some
+ // Qualcomm video encoders.
+ private final long forcedKeyFrameNs;
+ private final BitrateAdjuster bitrateAdjuster;
+ // EGL context shared with the application. Used to access texture inputs.
+ private final EglBase14.Context sharedContext;
+
+ // Drawer used to draw input textures onto the codec's input surface.
+ private final GlRectDrawer textureDrawer = new GlRectDrawer();
+ private final VideoFrameDrawer videoFrameDrawer = new VideoFrameDrawer();
+ // A queue of EncodedImage.Builders that correspond to frames in the codec. These builders are
+ // pre-populated with all the information that can't be sent through MediaCodec.
+ private final BlockingDeque outputBuilders = new LinkedBlockingDeque<>();
+
+ private final ThreadChecker encodeThreadChecker = new ThreadChecker();
+ private final ThreadChecker outputThreadChecker = new ThreadChecker();
+ private final BusyCount outputBuffersBusyCount = new BusyCount();
+
+ // --- Set on initialize and immutable until release.
+ private Callback callback;
+ private boolean automaticResizeOn;
+
+ // --- Valid and immutable while an encoding session is running.
+ @Nullable
+ private MediaCodecWrapper codec;
+ // Thread that delivers encoded frames to the user callback.
+ @Nullable
+ private Thread outputThread;
+
+ // EGL base wrapping the shared texture context. Holds hooks to both the shared context and the
+ // input surface. Making this base current allows textures from the context to be drawn onto the
+ // surface.
+ @Nullable
+ private EglBase14 textureEglBase;
+ // Input surface for the codec. The encoder will draw input textures onto this surface.
+ @Nullable
+ private Surface textureInputSurface;
+
+ private int width;
+ private int height;
+ // Y-plane strides in the encoder's input
+ private int stride;
+ // Y-plane slice-height in the encoder's input
+ private int sliceHeight;
+ // True if encoder input color format is semi-planar (NV12).
+ private boolean isSemiPlanar;
+ // Size of frame for current color format and stride, in bytes.
+ private int frameSizeBytes;
+ private boolean useSurfaceMode;
+
+ // --- Only accessed from the encoding thread.
+ // Presentation timestamp of next frame to encode.
+ private long nextPresentationTimestampUs;
+ // Presentation timestamp of the last requested (or forced) key frame.
+ private long lastKeyFrameNs;
+
+ // --- Only accessed on the output thread.
+ // Contents of the last observed config frame output by the MediaCodec. Used by H.264.
+ @Nullable
+ private ByteBuffer configBuffer;
+ private int adjustedBitrate;
+
+ // Whether the encoder is running. Volatile so that the output thread can watch this value and
+ // exit when the encoder stops.
+ private volatile boolean running;
+ // Any exception thrown during shutdown. The output thread releases the MediaCodec and uses this
+ // value to send exceptions thrown during release back to the encoder thread.
+ @Nullable
+ private volatile Exception shutdownException;
+
+ // True if collection of encoding statistics is enabled.
+ private boolean isEncodingStatisticsEnabled;
+
+ /**
+ * Creates a new HardwareVideoEncoder with the given codecName, codecType, colorFormat, key frame
+ * intervals, and bitrateAdjuster.
+ *
+ * @param codecName the hardware codec implementation to use
+ * @param codecType the type of the given video codec (eg. VP8, VP9, H264, H265, AV1)
+ * @param surfaceColorFormat color format for surface mode or null if not available
+ * @param yuvColorFormat color format for bytebuffer mode
+ * @param keyFrameIntervalSec interval in seconds between key frames; used to initialize the codec
+ * @param forceKeyFrameIntervalMs interval at which to force a key frame if one is not requested;
+ * used to reduce distortion caused by some codec implementations
+ * @param bitrateAdjuster algorithm used to correct codec implementations that do not produce the
+ * desired bitrates
+ * @throws IllegalArgumentException if colorFormat is unsupported
+ */
+ public HardwareVideoEncoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName,
+ VideoCodecMimeType codecType, Integer surfaceColorFormat, Integer yuvColorFormat,
+ Map params, int keyFrameIntervalSec, int forceKeyFrameIntervalMs,
+ BitrateAdjuster bitrateAdjuster, EglBase14.Context sharedContext) {
+ this.mediaCodecWrapperFactory = mediaCodecWrapperFactory;
+ this.codecName = codecName;
+ this.codecType = codecType;
+ this.surfaceColorFormat = surfaceColorFormat;
+ this.yuvColorFormat = yuvColorFormat;
+ this.params = params;
+ this.keyFrameIntervalSec = keyFrameIntervalSec;
+ this.forcedKeyFrameNs = TimeUnit.MILLISECONDS.toNanos(forceKeyFrameIntervalMs);
+ this.bitrateAdjuster = bitrateAdjuster;
+ this.sharedContext = sharedContext;
+
+ // Allow construction on a different thread.
+ encodeThreadChecker.detachThread();
+ }
+
+ @Override
+ public VideoCodecStatus initEncode(Settings settings, Callback callback) {
+ encodeThreadChecker.checkIsOnValidThread();
+
+ this.callback = callback;
+ automaticResizeOn = settings.automaticResizeOn;
+
+ this.width = settings.width;
+ this.height = settings.height;
+ useSurfaceMode = canUseSurface();
+
+ if (settings.startBitrate != 0 && settings.maxFramerate != 0) {
+ bitrateAdjuster.setTargets(settings.startBitrate * 1000, settings.maxFramerate);
+ }
+ adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();
+
+ Logging.d(TAG,
+ "initEncode name: " + codecName + " type: " + codecType + " width: " + width
+ + " height: " + height + " framerate_fps: " + settings.maxFramerate
+ + " bitrate_kbps: " + settings.startBitrate + " surface mode: " + useSurfaceMode);
+ return initEncodeInternal();
+ }
+
  /**
   * Creates, configures and starts the MediaCodec for the current width/height/surface-mode
   * state, then spawns the output-delivery thread. Returns FALLBACK_SOFTWARE on codec failures.
   */
  private VideoCodecStatus initEncodeInternal() {
    encodeThreadChecker.checkIsOnValidThread();

    // Reset per-session timestamp state.
    nextPresentationTimestampUs = 0;
    lastKeyFrameNs = -1;

    isEncodingStatisticsEnabled = false;

    try {
      codec = mediaCodecWrapperFactory.createByCodecName(codecName);
    } catch (IOException | IllegalArgumentException e) {
      Logging.e(TAG, "Cannot create media encoder " + codecName);
      return VideoCodecStatus.FALLBACK_SOFTWARE;
    }

    // Surface mode uses the surface color format; bytebuffer mode uses the YUV one.
    final int colorFormat = useSurfaceMode ? surfaceColorFormat : yuvColorFormat;
    try {
      MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height);
      format.setInteger(MediaFormat.KEY_BIT_RATE, adjustedBitrate);
      format.setInteger(MediaFormat.KEY_BITRATE_MODE, BITRATE_MODE_CBR);
      format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
      format.setFloat(
          MediaFormat.KEY_FRAME_RATE, (float) bitrateAdjuster.getAdjustedFramerateFps());
      format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
      if (codecType == VideoCodecMimeType.H264) {
        // Map the SDP profile-level-id onto MediaFormat profile/level keys.
        String profileLevelId = params.get(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID);
        if (profileLevelId == null) {
          profileLevelId = VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1;
        }
        switch (profileLevelId) {
          case VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1:
            format.setInteger("profile", AVCProfileHigh);
            format.setInteger("level", AVCLevel3);
            break;
          case VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1:
            // Baseline is the codec default; no explicit profile/level keys needed.
            break;
          default:
            Logging.w(TAG, "Unknown profile level id: " + profileLevelId);
        }
      }

      if (codecName.equals("c2.google.av1.encoder")) {
        // Enable RTC mode in AV1 HW encoder.
        format.setInteger("vendor.google-av1enc.encoding-preset.int32.value", 1);
      }

      if (isEncodingStatisticsSupported()) {
        format.setInteger(MediaFormat.KEY_VIDEO_ENCODING_STATISTICS_LEVEL,
            MediaFormat.VIDEO_ENCODING_STATISTICS_LEVEL_1);
        isEncodingStatisticsEnabled = true;
      }

      Logging.d(TAG, "Format: " + format);
      codec.configure(
          format, null /* surface */, null /* crypto */, MediaCodec.CONFIGURE_FLAG_ENCODE);

      if (useSurfaceMode) {
        // Wrap the codec's input surface in an EGL surface so input textures can be drawn onto it.
        textureEglBase = EglBase.createEgl14(sharedContext, EglBase.CONFIG_RECORDABLE);
        textureInputSurface = codec.createInputSurface();
        textureEglBase.createSurface(textureInputSurface);
        textureEglBase.makeCurrent();
      }

      updateInputFormat(codec.getInputFormat());

      codec.start();
    } catch (IllegalArgumentException | IllegalStateException e) {
      // Clean up the partially-initialized session before falling back.
      Logging.e(TAG, "initEncodeInternal failed", e);
      release();
      return VideoCodecStatus.FALLBACK_SOFTWARE;
    }

    running = true;
    outputThreadChecker.detachThread();
    outputThread = createOutputThread();
    outputThread.start();

    return VideoCodecStatus.OK;
  }
+
  /**
   * Stops the output thread and releases codec and GL resources. Safe to call when no session was
   * started. Returns TIMEOUT/ERROR if the output thread failed to shut down cleanly.
   */
  @Override
  public VideoCodecStatus release() {
    encodeThreadChecker.checkIsOnValidThread();

    final VideoCodecStatus returnValue;
    if (outputThread == null) {
      // Never started (or already released): nothing to stop.
      returnValue = VideoCodecStatus.OK;
    } else {
      // The outputThread actually stops and releases the codec once running is false.
      running = false;
      if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
        Logging.e(TAG, "Media encoder release timeout");
        returnValue = VideoCodecStatus.TIMEOUT;
      } else if (shutdownException != null) {
        // Log the exception and turn it into an error.
        Logging.e(TAG, "Media encoder release exception", shutdownException);
        returnValue = VideoCodecStatus.ERROR;
      } else {
        returnValue = VideoCodecStatus.OK;
      }
    }

    // Release GL resources regardless of how the output thread shut down.
    textureDrawer.release();
    videoFrameDrawer.release();
    if (textureEglBase != null) {
      textureEglBase.release();
      textureEglBase = null;
    }
    if (textureInputSurface != null) {
      textureInputSurface.release();
      textureInputSurface = null;
    }
    outputBuilders.clear();

    codec = null;
    outputThread = null;

    // Allow changing thread after release.
    encodeThreadChecker.detachThread();

    return returnValue;
  }
+
  /**
   * Encodes one frame. Restarts the codec if the resolution or surface mode changed, drops the
   * frame (and resets the codec) when the encoder queue is full, and routes the frame through
   * surface or bytebuffer input depending on the current mode.
   */
  @Override
  public VideoCodecStatus encode(VideoFrame videoFrame, EncodeInfo encodeInfo) {
    encodeThreadChecker.checkIsOnValidThread();
    if (codec == null) {
      return VideoCodecStatus.UNINITIALIZED;
    }

    final boolean isTextureBuffer = videoFrame.getBuffer() instanceof VideoFrame.TextureBuffer;

    // If input resolution changed, restart the codec with the new resolution.
    final int frameWidth = videoFrame.getBuffer().getWidth();
    final int frameHeight = videoFrame.getBuffer().getHeight();
    final boolean shouldUseSurfaceMode = canUseSurface() && isTextureBuffer;
    if (frameWidth != width || frameHeight != height || shouldUseSurfaceMode != useSurfaceMode) {
      VideoCodecStatus status = resetCodec(frameWidth, frameHeight, shouldUseSurfaceMode);
      if (status != VideoCodecStatus.OK) {
        return status;
      }
    }

    if (outputBuilders.size() > MAX_ENCODER_Q_SIZE) {
      // Too many frames in the encoder. Drop this frame.
      Logging.e(TAG, "Dropped frame, encoder queue full");
      // NOTE(review): resetting the codec on a full queue appears to be a local workaround (see
      // the linked issue) rather than standard behavior — confirm before relying on it.
      //https://github.com/open-webrtc-toolkit/owt-deps-webrtc/issues/117
      VideoCodecStatus status = resetCodec(frameWidth, frameHeight, shouldUseSurfaceMode);
      if (status != VideoCodecStatus.OK) {
        return status;
      }
      return VideoCodecStatus.NO_OUTPUT; // See webrtc bug 2887.
    }

    boolean requestedKeyFrame = false;
    for (EncodedImage.FrameType frameType : encodeInfo.frameTypes) {
      if (frameType == EncodedImage.FrameType.VideoFrameKey) {
        requestedKeyFrame = true;
      }
    }

    if (requestedKeyFrame || shouldForceKeyFrame(videoFrame.getTimestampNs())) {
      requestKeyFrame(videoFrame.getTimestampNs());
    }

    // Pre-populate a builder with the metadata MediaCodec cannot carry through its buffers.
    EncodedImage.Builder builder = EncodedImage.builder()
        .setCaptureTimeNs(videoFrame.getTimestampNs())
        .setEncodedWidth(videoFrame.getBuffer().getWidth())
        .setEncodedHeight(videoFrame.getBuffer().getHeight())
        .setRotation(videoFrame.getRotation());
    outputBuilders.offer(builder);

    long presentationTimestampUs = nextPresentationTimestampUs;
    // Round frame duration down to avoid bitrate overshoot.
    long frameDurationUs =
        (long) (TimeUnit.SECONDS.toMicros(1) / bitrateAdjuster.getAdjustedFramerateFps());
    nextPresentationTimestampUs += frameDurationUs;

    final VideoCodecStatus returnValue;
    if (useSurfaceMode) {
      returnValue = encodeTextureBuffer(videoFrame, presentationTimestampUs);
    } else {
      returnValue = encodeByteBuffer(videoFrame, presentationTimestampUs);
    }

    // Check if the queue was successful.
    if (returnValue != VideoCodecStatus.OK) {
      // Keep the output builders in sync with buffers in the codec.
      outputBuilders.pollLast();
    }

    return returnValue;
  }
+
/**
 * Feeds one frame to the encoder through its input Surface by drawing the texture with GL.
 * Must be called on the encode thread.
 */
private VideoCodecStatus encodeTextureBuffer(
    VideoFrame videoFrame, long presentationTimestampUs) {
  encodeThreadChecker.checkIsOnValidThread();
  try {
    // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
    // but it's a workaround for bug webrtc:5147.
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    // It is not necessary to release this frame because it doesn't own the buffer.
    VideoFrame derotatedFrame =
        new VideoFrame(videoFrame.getBuffer(), 0 /* rotation */, videoFrame.getTimestampNs());
    videoFrameDrawer.drawFrame(derotatedFrame, textureDrawer, null /* additionalRenderMatrix */);
    // swapBuffers() submits the rendered frame (with its presentation timestamp) to MediaCodec.
    textureEglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
  } catch (RuntimeException e) {
    Logging.e(TAG, "encodeTexture failed", e);
    return VideoCodecStatus.ERROR;
  }
  return VideoCodecStatus.OK;
}
+
/**
 * Feeds one frame to the encoder by copying YUV data into a MediaCodec input buffer.
 * Must be called on the encode thread. Drops the frame (NO_OUTPUT) when the encoder has
 * no free input buffer.
 */
private VideoCodecStatus encodeByteBuffer(VideoFrame videoFrame, long presentationTimestampUs) {
  encodeThreadChecker.checkIsOnValidThread();
  // No timeout. Don't block for an input buffer, drop frames if the encoder falls behind.
  int index;
  try {
    index = codec.dequeueInputBuffer(0 /* timeout */);
  } catch (IllegalStateException e) {
    Logging.e(TAG, "dequeueInputBuffer failed", e);
    return VideoCodecStatus.ERROR;
  }

  if (index == -1) {
    // Encoder is falling behind. No input buffers available. Drop the frame.
    Logging.d(TAG, "Dropped frame, no input buffers available");
    return VideoCodecStatus.NO_OUTPUT; // See webrtc bug 2887.
  }

  ByteBuffer buffer;
  try {
    buffer = codec.getInputBuffer(index);
  } catch (IllegalStateException e) {
    Logging.e(TAG, "getInputBuffer with index=" + index + " failed", e);
    return VideoCodecStatus.ERROR;
  }

  // frameSizeBytes was computed in updateInputFormat() from the codec's stride/slice height.
  if (buffer.capacity() < frameSizeBytes) {
    Logging.e(TAG,
        "Input buffer size: " + buffer.capacity()
            + " is smaller than frame size: " + frameSizeBytes);
    return VideoCodecStatus.ERROR;
  }

  // Convert/copy the frame's pixel data into the codec's expected layout.
  fillInputBuffer(buffer, videoFrame.getBuffer());

  try {
    codec.queueInputBuffer(
        index, 0 /* offset */, frameSizeBytes, presentationTimestampUs, 0 /* flags */);
  } catch (IllegalStateException e) {
    Logging.e(TAG, "queueInputBuffer failed", e);
    // IllegalStateException thrown when the codec is in the wrong state.
    return VideoCodecStatus.ERROR;
  }
  return VideoCodecStatus.OK;
}
+
+ @Override
+ public VideoCodecStatus setRateAllocation(BitrateAllocation bitrateAllocation, int framerate) {
+ encodeThreadChecker.checkIsOnValidThread();
+ if (framerate > MAX_VIDEO_FRAMERATE) {
+ framerate = MAX_VIDEO_FRAMERATE;
+ }
+ bitrateAdjuster.setTargets(bitrateAllocation.getSum(), framerate);
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public VideoCodecStatus setRates(RateControlParameters rcParameters) {
+ encodeThreadChecker.checkIsOnValidThread();
+ bitrateAdjuster.setTargets(rcParameters.bitrate.getSum(), rcParameters.framerateFps);
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public ScalingSettings getScalingSettings() {
+ if (automaticResizeOn) {
+ if (codecType == VideoCodecMimeType.VP8) {
+ final int kLowVp8QpThreshold = 29;
+ final int kHighVp8QpThreshold = 95;
+ return new ScalingSettings(kLowVp8QpThreshold, kHighVp8QpThreshold);
+ } else if (codecType == VideoCodecMimeType.H264) {
+ final int kLowH264QpThreshold = 24;
+ final int kHighH264QpThreshold = 37;
+ return new ScalingSettings(kLowH264QpThreshold, kHighH264QpThreshold);
+ }
+ }
+ return ScalingSettings.OFF;
+ }
+
/** Returns the name of the underlying codec this encoder wraps. */
@Override
public String getImplementationName() {
  return codecName;
}
+
/** Returns static encoder capabilities: resolution alignment and simulcast behavior. */
@Override
public EncoderInfo getEncoderInfo() {
  // Since our MediaCodec is guaranteed to encode 16-pixel-aligned frames only, we set alignment
  // value to be 16. Additionally, this encoder produces a single stream. So it should not require
  // alignment for all layers.
  return new EncoderInfo(
      /* requestedResolutionAlignment= */ REQUIRED_RESOLUTION_ALIGNMENT,
      /* applyAlignmentToAllSimulcastLayers= */ false);
}
+
/**
 * Fully releases the current codec and re-initializes it with the given resolution and
 * input mode. Must be called on the encode thread. The new dimensions are recorded only
 * after a successful release so a failed release leaves state untouched.
 */
private VideoCodecStatus resetCodec(int newWidth, int newHeight, boolean newUseSurfaceMode) {
  encodeThreadChecker.checkIsOnValidThread();
  VideoCodecStatus status = release();
  if (status != VideoCodecStatus.OK) {
    return status;
  }
  width = newWidth;
  height = newHeight;
  useSurfaceMode = newUseSurfaceMode;
  return initEncodeInternal();
}
+
+ private boolean shouldForceKeyFrame(long presentationTimestampNs) {
+ encodeThreadChecker.checkIsOnValidThread();
+ return forcedKeyFrameNs > 0 && presentationTimestampNs > lastKeyFrameNs + forcedKeyFrameNs;
+ }
+
/**
 * Asks MediaCodec to emit a sync frame "soon" and records the request time.
 * Must be called on the encode thread.
 */
private void requestKeyFrame(long presentationTimestampNs) {
  encodeThreadChecker.checkIsOnValidThread();
  // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
  // indicate this in queueInputBuffer() below and guarantee _this_ frame
  // be encoded as a key frame, but sadly that flag is ignored. Instead,
  // we request a key frame "soon".
  try {
    Bundle b = new Bundle();
    b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
    codec.setParameters(b);
  } catch (IllegalStateException e) {
    Logging.e(TAG, "requestKeyFrame failed", e);
    return;
  }
  // Only updated on success, so a failed request will be retried on the next frame.
  lastKeyFrameNs = presentationTimestampNs;
}
+
+ private Thread createOutputThread() {
+ return new Thread() {
+ @Override
+ public void run() {
+ while (running) {
+ deliverEncodedImage();
+ }
+ releaseCodecOnOutputThread();
+ }
+ };
+ }
+
// Visible for testing.
/**
 * Pulls one encoded buffer from MediaCodec and forwards it to the registered callback.
 * Runs repeatedly on the output thread. Non-fatal conditions (no output ready, codec
 * config buffers) simply return so the drain loop can try again.
 */
protected void deliverEncodedImage() {
  outputThreadChecker.checkIsOnValidThread();
  try {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
    if (index < 0) {
      if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        // Old output buffers are invalidated; wait until no consumer still holds one.
        outputBuffersBusyCount.waitForZero();
      }
      return;
    }

    ByteBuffer outputBuffer = codec.getOutputBuffer(index);
    outputBuffer.position(info.offset);
    outputBuffer.limit(info.offset + info.size);

    if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
      Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size);
      if (info.size > 0
          && (codecType == VideoCodecMimeType.H264 || codecType == VideoCodecMimeType.H265)) {
        // In case of H264 and H265 config buffer contains SPS and PPS headers. Presence of these
        // headers makes IDR frame a truly keyframe. Some encoders issue IDR frames without SPS
        // and PPS. We save config buffer here to prepend it to all IDR frames encoder delivers.
        configBuffer = ByteBuffer.allocateDirect(info.size);
        configBuffer.put(outputBuffer);
      }
      return;
    }

    bitrateAdjuster.reportEncodedFrame(info.size);
    if (adjustedBitrate != bitrateAdjuster.getAdjustedBitrateBps()) {
      updateBitrate();
    }

    final boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
    if (isKeyFrame) {
      Logging.d(TAG, "Sync frame generated");
    }

    // Extract QP before releasing output buffer.
    Integer qp = null;
    if (isEncodingStatisticsEnabled) {
      MediaFormat format = codec.getOutputFormat(index);
      if (format != null && format.containsKey(MediaFormat.KEY_VIDEO_QP_AVERAGE)) {
        qp = format.getInteger(MediaFormat.KEY_VIDEO_QP_AVERAGE);
      }
    }

    final ByteBuffer frameBuffer;
    final Runnable releaseCallback;
    if (isKeyFrame && configBuffer != null) {
      // Key frames get the cached SPS/PPS prepended. The data is copied, so the codec
      // buffer can be released immediately and no release callback is needed.
      Logging.d(TAG,
          "Prepending config buffer of size " + configBuffer.capacity()
              + " to output buffer with offset " + info.offset + ", size " + info.size);
      frameBuffer = ByteBuffer.allocateDirect(info.size + configBuffer.capacity());
      configBuffer.rewind();
      frameBuffer.put(configBuffer);
      frameBuffer.put(outputBuffer);
      frameBuffer.rewind();
      codec.releaseOutputBuffer(index, /* render= */ false);
      releaseCallback = null;
    } else {
      // Other frames are delivered zero-copy; the codec buffer is only released when the
      // consumer runs the release callback.
      frameBuffer = outputBuffer.slice();
      outputBuffersBusyCount.increment();
      releaseCallback = () -> {
        // This callback should not throw any exceptions since
        // it may be called on an arbitrary thread.
        // Check bug webrtc:11230 for more details.
        try {
          codec.releaseOutputBuffer(index, /* render= */ false);
        } catch (Exception e) {
          Logging.e(TAG, "releaseOutputBuffer failed", e);
        }
        outputBuffersBusyCount.decrement();
      };
    }

    final EncodedImage.FrameType frameType = isKeyFrame ? EncodedImage.FrameType.VideoFrameKey
                                                        : EncodedImage.FrameType.VideoFrameDelta;

    // Pair this output buffer with the metadata builder queued by encode().
    EncodedImage.Builder builder = outputBuilders.poll();
    builder.setBuffer(frameBuffer, releaseCallback);
    builder.setFrameType(frameType);
    builder.setQp(qp);

    EncodedImage encodedImage = builder.createEncodedImage();
    // TODO(mellem): Set codec-specific info.
    callback.onEncodedFrame(encodedImage, new CodecSpecificInfo());
    // Note that the callback may have retained the image.
    encodedImage.release();
  } catch (IllegalStateException e) {
    Logging.e(TAG, "deliverOutput failed", e);
  }
}
+
/**
 * Stops and releases the MediaCodec. Runs on the output thread after the drain loop
 * exits; waits until no delivered buffer is still held by a consumer.
 */
private void releaseCodecOnOutputThread() {
  outputThreadChecker.checkIsOnValidThread();
  Logging.d(TAG, "Releasing MediaCodec on output thread");
  outputBuffersBusyCount.waitForZero();
  try {
    codec.stop();
  } catch (Exception e) {
    // Stop failures are logged but do not abort the release.
    Logging.e(TAG, "Media encoder stop failed", e);
  }
  try {
    codec.release();
  } catch (Exception e) {
    Logging.e(TAG, "Media encoder release failed", e);
    // Propagate exceptions caught during release back to the main thread.
    shutdownException = e;
  }
  configBuffer = null;
  Logging.d(TAG, "Release on output thread done");
}
+
/**
 * Pushes the bitrate adjuster's current adjusted bitrate into the running MediaCodec.
 * Runs on the output thread.
 */
private VideoCodecStatus updateBitrate() {
  outputThreadChecker.checkIsOnValidThread();
  adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();
  try {
    Bundle params = new Bundle();
    params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, adjustedBitrate);
    codec.setParameters(params);
    return VideoCodecStatus.OK;
  } catch (IllegalStateException e) {
    Logging.e(TAG, "updateBitrate failed", e);
    return VideoCodecStatus.ERROR;
  }
}
+
+ private boolean canUseSurface() {
+ return sharedContext != null && surfaceColorFormat != null;
+ }
+
/**
 * Fetches stride and slice height from the input media format and recomputes the expected
 * input frame size in bytes. Falls back to width/height when the format carries no
 * stride/slice-height keys, and never lets them shrink below the frame dimensions.
 */
private void updateInputFormat(MediaFormat format) {
  stride = width;
  sliceHeight = height;

  if (format != null) {
    if (format.containsKey(MediaFormat.KEY_STRIDE)) {
      stride = format.getInteger(MediaFormat.KEY_STRIDE);
      stride = Math.max(stride, width);
    }

    if (format.containsKey(MediaFormat.KEY_SLICE_HEIGHT)) {
      sliceHeight = format.getInteger(MediaFormat.KEY_SLICE_HEIGHT);
      sliceHeight = Math.max(sliceHeight, height);
    }
  }

  isSemiPlanar = isSemiPlanar(yuvColorFormat);
  if (isSemiPlanar) {
    // NV12: full-size Y plane plus an interleaved UV plane of half the rows at full stride.
    int chromaHeight = (height + 1) / 2;
    frameSizeBytes = sliceHeight * stride + chromaHeight * stride;
  } else {
    // I420: full-size Y plane plus two chroma planes at half stride and half slice height.
    int chromaStride = (stride + 1) / 2;
    int chromaSliceHeight = (sliceHeight + 1) / 2;
    frameSizeBytes = sliceHeight * stride + chromaSliceHeight * chromaStride * 2;
  }

  Logging.d(TAG,
      "updateInputFormat format: " + format + " stride: " + stride
          + " sliceHeight: " + sliceHeight + " isSemiPlanar: " + isSemiPlanar
          + " frameSizeBytes: " + frameSizeBytes);
}
+
/**
 * Returns whether the underlying codec can report usable per-frame encoding statistics
 * (average QP). VP8/VP9 are excluded because their reported QP range does not match the
 * native bitstream range WebRTC expects.
 */
protected boolean isEncodingStatisticsSupported() {
  // WebRTC quality scaler, which adjusts resolution and/or frame rate based on encoded QP,
  // expects QP to be in native bitstream range for given codec. Native QP range for VP8 is
  // [0, 127] and for VP9 is [0, 255]. MediaCodec VP8 and VP9 encoders (perhaps not all)
  // return QP in range [0, 64], which is libvpx API specific range. Due to this mismatch we
  // can't use QP feedback from these codecs.
  if (codecType == VideoCodecMimeType.VP8 || codecType == VideoCodecMimeType.VP9) {
    return false;
  }

  MediaCodecInfo codecInfo = codec.getCodecInfo();
  if (codecInfo == null) {
    return false;
  }

  CodecCapabilities codecCaps = codecInfo.getCapabilitiesForType(codecType.mimeType());
  if (codecCaps == null) {
    return false;
  }

  return codecCaps.isFeatureSupported(CodecCapabilities.FEATURE_EncodingStatistics);
}
+
// Visible for testing.
/**
 * Converts the frame to I420 and copies it into the codec input buffer, as NV12 when the
 * codec uses a semi-planar color format and as I420 otherwise, honoring the codec's
 * stride and slice height.
 */
protected void fillInputBuffer(ByteBuffer buffer, VideoFrame.Buffer frame) {
  VideoFrame.I420Buffer i420 = frame.toI420();
  if (isSemiPlanar) {
    YuvHelper.I420ToNV12(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(),
        i420.getDataV(), i420.getStrideV(), buffer, i420.getWidth(), i420.getHeight(), stride,
        sliceHeight);
  } else {
    YuvHelper.I420Copy(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(),
        i420.getDataV(), i420.getStrideV(), buffer, i420.getWidth(), i420.getHeight(), stride,
        sliceHeight);
  }
  // toI420() may have allocated a copy; release our reference.
  i420.release();
}
+
+ protected boolean isSemiPlanar(int colorFormat) {
+ switch (colorFormat) {
+ case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
+ return false;
+ case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
+ case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar:
+ case MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
+ return true;
+ default:
+ throw new IllegalArgumentException("Unsupported colorFormat: " + colorFormat);
+ }
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java
new file mode 100644
index 00000000..4d103ff9
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java
@@ -0,0 +1,280 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.webrtc.MediaCodecUtils.EXYNOS_PREFIX;
+import static org.webrtc.MediaCodecUtils.HISI_PREFIX;
+import static org.webrtc.MediaCodecUtils.INTEL_PREFIX;
+import static org.webrtc.MediaCodecUtils.MTK_PREFIX;
+import static org.webrtc.MediaCodecUtils.QCOM_PREFIX;
+
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecList;
+import android.os.Build;
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/** Factory for android hardware video encoders. */
+@SuppressWarnings("deprecation") // API 16 requires the use of deprecated methods.
+public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
+ private static final String TAG = "HardwareVideoEncoderFactory";
+
+ // We don't need periodic keyframes. But some HW encoders, Exynos in particular, fails to
+ // initialize with value -1 which should disable periodic keyframes according to the spec. Set it
+ // to 1 hour.
+ private static final int PERIODIC_KEY_FRAME_INTERVAL_S = 3600;
+
+ // Forced key frame interval - used to reduce color distortions on Qualcomm platforms.
+ private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS = 15000;
+ private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS = 20000;
+ private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS = 15000;
+
+ // List of devices with poor H.264 encoder quality.
+ // HW H.264 encoder on below devices has poor bitrate control - actual
+ // bitrates deviates a lot from the target value.
+ private static final List H264_HW_EXCEPTION_MODELS =
+ Arrays.asList("SAMSUNG-SGH-I337", "Nexus 7", "Nexus 4");
+
+ @Nullable private final EglBase14.Context sharedContext;
+ private final boolean enableIntelVp8Encoder;
+ private final boolean enableH264HighProfile;
+ @Nullable private final Predicate codecAllowedPredicate;
+
+ /**
+ * Creates a HardwareVideoEncoderFactory that supports surface texture encoding.
+ *
+ * @param sharedContext The textures generated will be accessible from this context. May be null,
+ * this disables texture support.
+ * @param enableIntelVp8Encoder true if Intel's VP8 encoder enabled.
+ * @param enableH264HighProfile true if H264 High Profile enabled.
+ */
+ public HardwareVideoEncoderFactory(
+ EglBase.Context sharedContext, boolean enableIntelVp8Encoder, boolean enableH264HighProfile) {
+ this(sharedContext, enableIntelVp8Encoder, enableH264HighProfile,
+ /* codecAllowedPredicate= */ null);
+ }
+
+ /**
+ * Creates a HardwareVideoEncoderFactory that supports surface texture encoding.
+ *
+ * @param sharedContext The textures generated will be accessible from this context. May be null,
+ * this disables texture support.
+ * @param enableIntelVp8Encoder true if Intel's VP8 encoder enabled.
+ * @param enableH264HighProfile true if H264 High Profile enabled.
+ * @param codecAllowedPredicate optional predicate to filter codecs. All codecs are allowed
+ * when predicate is not provided.
+ */
+ public HardwareVideoEncoderFactory(EglBase.Context sharedContext, boolean enableIntelVp8Encoder,
+ boolean enableH264HighProfile, @Nullable Predicate codecAllowedPredicate) {
+ // Texture mode requires EglBase14.
+ if (sharedContext instanceof EglBase14.Context) {
+ this.sharedContext = (EglBase14.Context) sharedContext;
+ } else {
+ Logging.w(TAG, "No shared EglBase.Context. Encoders will not use texture mode.");
+ this.sharedContext = null;
+ }
+ this.enableIntelVp8Encoder = enableIntelVp8Encoder;
+ this.enableH264HighProfile = enableH264HighProfile;
+ this.codecAllowedPredicate = codecAllowedPredicate;
+ }
+
+ @Deprecated
+ public HardwareVideoEncoderFactory(boolean enableIntelVp8Encoder, boolean enableH264HighProfile) {
+ this(null, enableIntelVp8Encoder, enableH264HighProfile);
+ }
+
+ @Nullable
+ @Override
+ public VideoEncoder createEncoder(VideoCodecInfo input) {
+ VideoCodecMimeType type = VideoCodecMimeType.valueOf(input.getName());
+ MediaCodecInfo info = findCodecForType(type);
+
+ if (info == null) {
+ return null;
+ }
+
+ String codecName = info.getName();
+ String mime = type.mimeType();
+ Integer surfaceColorFormat = MediaCodecUtils.selectColorFormat(
+ MediaCodecUtils.TEXTURE_COLOR_FORMATS, info.getCapabilitiesForType(mime));
+ Integer yuvColorFormat = MediaCodecUtils.selectColorFormat(
+ MediaCodecUtils.ENCODER_COLOR_FORMATS, info.getCapabilitiesForType(mime));
+
+ if (type == VideoCodecMimeType.H264) {
+ boolean isHighProfile = H264Utils.isSameH264Profile(
+ input.params, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true));
+ boolean isBaselineProfile = H264Utils.isSameH264Profile(
+ input.params, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false));
+
+ if (!isHighProfile && !isBaselineProfile) {
+ return null;
+ }
+ if (isHighProfile && !isH264HighProfileSupported(info)) {
+ return null;
+ }
+ }
+
+ return new HardwareVideoEncoder(new MediaCodecWrapperFactoryImpl(), codecName, type,
+ surfaceColorFormat, yuvColorFormat, input.params, PERIODIC_KEY_FRAME_INTERVAL_S,
+ getForcedKeyFrameIntervalMs(type, codecName), createBitrateAdjuster(type, codecName),
+ sharedContext);
+ }
+
+ @Override
+ public VideoCodecInfo[] getSupportedCodecs() {
+ List supportedCodecInfos = new ArrayList();
+ // Generate a list of supported codecs in order of preference:
+ // VP8, VP9, H264 (high profile), H264 (baseline profile), AV1 and H265.
+ for (VideoCodecMimeType type :
+ new VideoCodecMimeType[] {VideoCodecMimeType.VP8, VideoCodecMimeType.VP9,
+ VideoCodecMimeType.H264, VideoCodecMimeType.AV1, VideoCodecMimeType.H265}) {
+ MediaCodecInfo codec = findCodecForType(type);
+ if (codec != null) {
+ String name = type.name();
+ // TODO(sakal): Always add H264 HP once WebRTC correctly removes codecs that are not
+ // supported by the decoder.
+ if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) {
+ supportedCodecInfos.add(new VideoCodecInfo(
+ name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true)));
+ }
+
+ supportedCodecInfos.add(new VideoCodecInfo(
+ name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false)));
+ }
+ }
+
+ return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]);
+ }
+
+ private @Nullable MediaCodecInfo findCodecForType(VideoCodecMimeType type) {
+ for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
+ MediaCodecInfo info = null;
+ try {
+ info = MediaCodecList.getCodecInfoAt(i);
+ } catch (IllegalArgumentException e) {
+ Logging.e(TAG, "Cannot retrieve encoder codec info", e);
+ }
+
+ if (info == null || !info.isEncoder()) {
+ continue;
+ }
+
+ if (isSupportedCodec(info, type)) {
+ return info;
+ }
+ }
+ return null; // No support for this type.
+ }
+
+ // Returns true if the given MediaCodecInfo indicates a supported encoder for the given type.
+ private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecMimeType type) {
+ if (!MediaCodecUtils.codecSupportsType(info, type)) {
+ return false;
+ }
+ // Check for a supported color format.
+ if (MediaCodecUtils.selectColorFormat(
+ MediaCodecUtils.ENCODER_COLOR_FORMATS, info.getCapabilitiesForType(type.mimeType()))
+ == null) {
+ return false;
+ }
+ return isHardwareSupportedInCurrentSdk(info, type) && isMediaCodecAllowed(info);
+ }
+
+ // Returns true if the given MediaCodecInfo indicates a hardware module that is supported on the
+ // current SDK.
+ private boolean isHardwareSupportedInCurrentSdk(MediaCodecInfo info, VideoCodecMimeType type) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+ return info.isHardwareAccelerated();
+ }
+
+ switch (type) {
+ case VP8:
+ return isHardwareSupportedInCurrentSdkVp8(info);
+ case VP9:
+ return isHardwareSupportedInCurrentSdkVp9(info);
+ case H264:
+ return isHardwareSupportedInCurrentSdkH264(info);
+ case H265:
+ case AV1:
+ return false;
+ }
+ return false;
+ }
+
+ private boolean isHardwareSupportedInCurrentSdkVp8(MediaCodecInfo info) {
+ String name = info.getName();
+ // QCOM Vp8 encoder is always supported.
+ return name.startsWith(QCOM_PREFIX)
+ // Exynos VP8 encoder is supported in M or later.
+ || (name.startsWith(EXYNOS_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M)
+ // Intel Vp8 encoder is always supported, with the intel encoder enabled.
+ || (name.startsWith(INTEL_PREFIX) && enableIntelVp8Encoder);
+ }
+
+ private boolean isHardwareSupportedInCurrentSdkVp9(MediaCodecInfo info) {
+ String name = info.getName();
+ return (name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX))
+ // Both QCOM and Exynos VP9 encoders are supported in N or later.
+ && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N;
+ }
+
+ private boolean isHardwareSupportedInCurrentSdkH264(MediaCodecInfo info) {
+ // First, H264 hardware might perform poorly on this model.
+ if (H264_HW_EXCEPTION_MODELS.contains(Build.MODEL)) {
+ return false;
+ }
+ String name = info.getName();
+ // QCOM and Exynos H264 encoders are always supported.
+ return name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX) || name.startsWith(HISI_PREFIX)|| name.startsWith(MTK_PREFIX);
+ }
+
+ private boolean isMediaCodecAllowed(MediaCodecInfo info) {
+ if (codecAllowedPredicate == null) {
+ return true;
+ }
+ return codecAllowedPredicate.test(info);
+ }
+
+ private int getForcedKeyFrameIntervalMs(VideoCodecMimeType type, String codecName) {
+ if (type == VideoCodecMimeType.VP8 && codecName.startsWith(QCOM_PREFIX)) {
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
+ return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS;
+ }
+ if (Build.VERSION.SDK_INT == Build.VERSION_CODES.M) {
+ return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS;
+ }
+ return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS;
+ }
+ // Other codecs don't need key frame forcing.
+ return 0;
+ }
+
+ private BitrateAdjuster createBitrateAdjuster(VideoCodecMimeType type, String codecName) {
+ if (codecName.startsWith(EXYNOS_PREFIX)) {
+ if (type == VideoCodecMimeType.VP8) {
+ // Exynos VP8 encoders need dynamic bitrate adjustment.
+ return new DynamicBitrateAdjuster();
+ } else {
+ // Exynos VP9 and H264 encoders need framerate-based bitrate adjustment.
+ return new FramerateBitrateAdjuster();
+ }
+ }
+ // Other codecs don't need bitrate adjustment.
+ return new BaseBitrateAdjuster();
+ }
+
+ private boolean isH264HighProfileSupported(MediaCodecInfo info) {
+ return enableH264HighProfile && Build.VERSION.SDK_INT > Build.VERSION_CODES.M
+ && info.getName().startsWith(EXYNOS_PREFIX);
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Histogram.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Histogram.java
new file mode 100644
index 00000000..87798613
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Histogram.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Class for holding the native pointer of a histogram. Since there is no way to destroy a
+ * histogram, please don't create unnecessary instances of this object. This class is thread safe.
+ *
+ * Usage example:
+ * private static final Histogram someMetricHistogram =
+ * Histogram.createCounts("WebRTC.Video.SomeMetric", 1, 10000, 50);
+ * someMetricHistogram.addSample(someVariable);
+ */
class Histogram {
  // Native pointer to the underlying histogram. Histograms are never destroyed, so this
  // handle stays valid for the lifetime of the process.
  private final long handle;

  private Histogram(long handle) {
    this.handle = handle;
  }

  /** Creates a counts histogram covering [min, max] with the given number of buckets. */
  public static Histogram createCounts(String name, int min, int max, int bucketCount) {
    return new Histogram(nativeCreateCounts(name, min, max, bucketCount));
  }

  /** Creates an enumeration histogram with the given maximum value. */
  public static Histogram createEnumeration(String name, int max) {
    return new Histogram(nativeCreateEnumeration(name, max));
  }

  /** Records one sample into this histogram. */
  public void addSample(int sample) {
    nativeAddSample(handle, sample);
  }

  private static native long nativeCreateCounts(String name, int min, int max, int bucketCount);
  private static native long nativeCreateEnumeration(String name, int max);
  private static native void nativeAddSample(long handle, int sample);
}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/IceCandidate.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/IceCandidate.java
new file mode 100644
index 00000000..5f00b2a5
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/IceCandidate.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+import org.webrtc.PeerConnection;
+
+/**
+ * Representation of a single ICE Candidate, mirroring
+ * {@code IceCandidateInterface} in the C++ API.
+ */
+public class IceCandidate {
+ public final String sdpMid;
+ public final int sdpMLineIndex;
+ public final String sdp;
+ public final String serverUrl;
+ public final PeerConnection.AdapterType adapterType;
+
+ public IceCandidate(String sdpMid, int sdpMLineIndex, String sdp) {
+ this.sdpMid = sdpMid;
+ this.sdpMLineIndex = sdpMLineIndex;
+ this.sdp = sdp;
+ this.serverUrl = "";
+ this.adapterType = PeerConnection.AdapterType.UNKNOWN;
+ }
+
+ @CalledByNative
+ IceCandidate(String sdpMid, int sdpMLineIndex, String sdp, String serverUrl,
+ PeerConnection.AdapterType adapterType) {
+ this.sdpMid = sdpMid;
+ this.sdpMLineIndex = sdpMLineIndex;
+ this.sdp = sdp;
+ this.serverUrl = serverUrl;
+ this.adapterType = adapterType;
+ }
+
+ @Override
+ public String toString() {
+ return sdpMid + ":" + sdpMLineIndex + ":" + sdp + ":" + serverUrl + ":"
+ + adapterType.toString();
+ }
+
+ @CalledByNative
+ String getSdpMid() {
+ return sdpMid;
+ }
+
+ @CalledByNative
+ String getSdp() {
+ return sdp;
+ }
+
+ /** equals() checks sdpMid, sdpMLineIndex, and sdp for equality. */
+ @Override
+ public boolean equals(@Nullable Object object) {
+ if (!(object instanceof IceCandidate)) {
+ return false;
+ }
+
+ IceCandidate that = (IceCandidate) object;
+ return objectEquals(this.sdpMid, that.sdpMid) && this.sdpMLineIndex == that.sdpMLineIndex
+ && objectEquals(this.sdp, that.sdp);
+ }
+
+ @Override
+ public int hashCode() {
+ Object[] values = {sdpMid, sdpMLineIndex, sdp};
+ return Arrays.hashCode(values);
+ }
+
+ private static boolean objectEquals(Object o1, Object o2) {
+ if (o1 == null) {
+ return o2 == null;
+ }
+ return o1.equals(o2);
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/IceCandidateErrorEvent.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/IceCandidateErrorEvent.java
new file mode 100644
index 00000000..aae9da70
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/IceCandidateErrorEvent.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
/** Immutable description of a failure to gather an ICE candidate from a STUN or TURN server. */
public final class IceCandidateErrorEvent {
  /** The local IP address used to communicate with the STUN or TURN server. */
  public final String address;
  /** The port used to communicate with the STUN or TURN server. */
  public final int port;
  /**
   * The STUN or TURN URL that identifies the STUN or TURN server for which the failure occurred.
   */
  public final String url;
  /**
   * The numeric STUN error code returned by the STUN or TURN server. If no host candidate can reach
   * the server, errorCode will be set to the value 701 which is outside the STUN error code range.
   * This error is only fired once per server URL while in the RTCIceGatheringState of "gathering".
   */
  public final int errorCode;
  /**
   * The STUN reason text returned by the STUN or TURN server. If the server could not be reached,
   * errorText will be set to an implementation-specific value providing details about the error.
   */
  public final String errorText;

  @CalledByNative
  public IceCandidateErrorEvent(
      String address, int port, String url, int errorCode, String errorText) {
    this.address = address;
    this.port = port;
    this.url = url;
    this.errorCode = errorCode;
    this.errorText = errorText;
  }
}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/JNILogging.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/JNILogging.java
new file mode 100644
index 00000000..f391db61
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/JNILogging.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.CalledByNative;
+import org.webrtc.Loggable;
+import org.webrtc.Logging.Severity;
+
+class JNILogging {
+ private final Loggable loggable;
+
+ public JNILogging(Loggable loggable) {
+ this.loggable = loggable;
+ }
+
+ @CalledByNative
+ public void logToInjectable(String message, Integer severity, String tag) {
+ loggable.onLogMessage(message, Severity.values()[severity], tag);
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/JavaI420Buffer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/JavaI420Buffer.java
new file mode 100644
index 00000000..322b8f38
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/JavaI420Buffer.java
@@ -0,0 +1,200 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+import org.webrtc.VideoFrame.I420Buffer;
+
/** Implementation of VideoFrame.I420Buffer backed by Java direct byte buffers. */
public class JavaI420Buffer implements VideoFrame.I420Buffer {
  private final int width;
  private final int height;
  // The three planes: Y at full resolution, U and V chroma planes at half
  // resolution in each dimension. All buffers are direct and pre-sliced.
  private final ByteBuffer dataY;
  private final ByteBuffer dataU;
  private final ByteBuffer dataV;
  private final int strideY;
  private final int strideU;
  private final int strideV;
  // Shared retain/release bookkeeping; runs the release callback (if any)
  // when the reference count drops to zero.
  private final RefCountDelegate refCountDelegate;

  private JavaI420Buffer(int width, int height, ByteBuffer dataY, int strideY, ByteBuffer dataU,
      int strideU, ByteBuffer dataV, int strideV, @Nullable Runnable releaseCallback) {
    this.width = width;
    this.height = height;
    this.dataY = dataY;
    this.dataU = dataU;
    this.dataV = dataV;
    this.strideY = strideY;
    this.strideU = strideU;
    this.strideV = strideV;
    this.refCountDelegate = new RefCountDelegate(releaseCallback);
  }

  // Throws IllegalArgumentException when `data` cannot hold a plane of the
  // given dimensions at the given stride.
  private static void checkCapacity(ByteBuffer data, int width, int height, int stride) {
    // The last row does not necessarily need padding.
    final int minCapacity = stride * (height - 1) + width;
    if (data.capacity() < minCapacity) {
      throw new IllegalArgumentException(
          "Buffer must be at least " + minCapacity + " bytes, but was " + data.capacity());
    }
  }

  /** Wraps existing ByteBuffers into JavaI420Buffer object without copying the contents. */
  public static JavaI420Buffer wrap(int width, int height, ByteBuffer dataY, int strideY,
      ByteBuffer dataU, int strideU, ByteBuffer dataV, int strideV,
      @Nullable Runnable releaseCallback) {
    if (dataY == null || dataU == null || dataV == null) {
      throw new IllegalArgumentException("Data buffers cannot be null.");
    }
    if (!dataY.isDirect() || !dataU.isDirect() || !dataV.isDirect()) {
      throw new IllegalArgumentException("Data buffers must be direct byte buffers.");
    }

    // Slice the buffers to prevent external modifications to the position / limit of the buffer.
    // Note that this doesn't protect the contents of the buffers from modifications.
    dataY = dataY.slice();
    dataU = dataU.slice();
    dataV = dataV.slice();

    // Chroma planes are subsampled by 2 in each dimension, rounded up.
    final int chromaWidth = (width + 1) / 2;
    final int chromaHeight = (height + 1) / 2;
    checkCapacity(dataY, width, height, strideY);
    checkCapacity(dataU, chromaWidth, chromaHeight, strideU);
    checkCapacity(dataV, chromaWidth, chromaHeight, strideV);

    return new JavaI420Buffer(
        width, height, dataY, strideY, dataU, strideU, dataV, strideV, releaseCallback);
  }

  /** Allocates an empty I420Buffer suitable for an image of the given dimensions. */
  public static JavaI420Buffer allocate(int width, int height) {
    int chromaHeight = (height + 1) / 2;
    int strideUV = (width + 1) / 2;
    // Plane offsets inside one contiguous native allocation: Y, then U, then V,
    // each tightly packed (stride == width for Y, strideUV for U/V).
    int yPos = 0;
    int uPos = yPos + width * height;
    int vPos = uPos + strideUV * chromaHeight;

    ByteBuffer buffer =
        JniCommon.nativeAllocateByteBuffer(width * height + 2 * strideUV * chromaHeight);

    // Carve the single allocation into three independent plane views.
    buffer.position(yPos);
    buffer.limit(uPos);
    ByteBuffer dataY = buffer.slice();

    buffer.position(uPos);
    buffer.limit(vPos);
    ByteBuffer dataU = buffer.slice();

    buffer.position(vPos);
    buffer.limit(vPos + strideUV * chromaHeight);
    ByteBuffer dataV = buffer.slice();

    // Free the native allocation once the last reference is released.
    return new JavaI420Buffer(width, height, dataY, width, dataU, strideUV, dataV, strideUV,
        () -> { JniCommon.nativeFreeByteBuffer(buffer); });
  }

  @Override
  public int getWidth() {
    return width;
  }

  @Override
  public int getHeight() {
    return height;
  }

  @Override
  public ByteBuffer getDataY() {
    // Return a slice to prevent relative reads from changing the position.
    return dataY.slice();
  }

  @Override
  public ByteBuffer getDataU() {
    // Return a slice to prevent relative reads from changing the position.
    return dataU.slice();
  }

  @Override
  public ByteBuffer getDataV() {
    // Return a slice to prevent relative reads from changing the position.
    return dataV.slice();
  }

  @Override
  public int getStrideY() {
    return strideY;
  }

  @Override
  public int getStrideU() {
    return strideU;
  }

  @Override
  public int getStrideV() {
    return strideV;
  }

  @Override
  public I420Buffer toI420() {
    // Already I420; just add a reference on behalf of the caller.
    retain();
    return this;
  }

  @Override
  public void retain() {
    refCountDelegate.retain();
  }

  @Override
  public void release() {
    refCountDelegate.release();
  }

  @Override
  public VideoFrame.Buffer cropAndScale(
      int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
    return cropAndScaleI420(this, cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
  }

  // Crops `buffer` to the given rectangle and, when the output size differs,
  // scales it via the native helper.
  public static VideoFrame.Buffer cropAndScaleI420(final I420Buffer buffer, int cropX, int cropY,
      int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
    if (cropWidth == scaleWidth && cropHeight == scaleHeight) {
      // No scaling.
      ByteBuffer dataY = buffer.getDataY();
      ByteBuffer dataU = buffer.getDataU();
      ByteBuffer dataV = buffer.getDataV();

      // Advance each plane to the crop origin; the wrapped view reuses the
      // source strides, so no pixel data is copied.
      dataY.position(cropX + cropY * buffer.getStrideY());
      dataU.position(cropX / 2 + cropY / 2 * buffer.getStrideU());
      dataV.position(cropX / 2 + cropY / 2 * buffer.getStrideV());

      // Keep the source buffer alive for as long as the cropped view exists.
      buffer.retain();
      return JavaI420Buffer.wrap(scaleWidth, scaleHeight, dataY.slice(), buffer.getStrideY(),
          dataU.slice(), buffer.getStrideU(), dataV.slice(), buffer.getStrideV(), buffer::release);
    }

    // Scaling required: allocate a destination and crop+scale natively.
    JavaI420Buffer newBuffer = JavaI420Buffer.allocate(scaleWidth, scaleHeight);
    nativeCropAndScaleI420(buffer.getDataY(), buffer.getStrideY(), buffer.getDataU(),
        buffer.getStrideU(), buffer.getDataV(), buffer.getStrideV(), cropX, cropY, cropWidth,
        cropHeight, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(),
        newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV(), scaleWidth,
        scaleHeight);
    return newBuffer;
  }

  private static native void nativeCropAndScaleI420(ByteBuffer srcY, int srcStrideY,
      ByteBuffer srcU, int srcStrideU, ByteBuffer srcV, int srcStrideV, int cropX, int cropY,
      int cropWidth, int cropHeight, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU,
      int dstStrideU, ByteBuffer dstV, int dstStrideV, int scaleWidth, int scaleHeight);
}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/JniCommon.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/JniCommon.java
new file mode 100644
index 00000000..e1b2e513
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/JniCommon.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
/** Class with static JNI helper functions that are used in many places. */
public class JniCommon {
  /** Functions to increment/decrement an rtc::RefCountInterface pointer. */
  public static native void nativeAddRef(long refCountedPointer);
  public static native void nativeReleaseRef(long refCountedPointer);

  // Allocates a direct ByteBuffer backed by native memory; buffers obtained
  // from nativeAllocateByteBuffer must be returned via nativeFreeByteBuffer
  // (see JavaI420Buffer.allocate for the usage pattern).
  public static native ByteBuffer nativeAllocateByteBuffer(int size);
  public static native void nativeFreeByteBuffer(ByteBuffer buffer);
}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/JniHelper.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/JniHelper.java
new file mode 100644
index 00000000..0d56d5d9
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/JniHelper.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.io.UnsupportedEncodingException;
+import java.util.Map;
+
/**
 * This class is only used from jni_helper.cc to give some Java functionality that were not possible
 * to generate in other ways due to bugs.webrtc.org/8606 and bugs.webrtc.org/8632.
 */
class JniHelper {
  // Returns the bytes of `s` encoded as ISO-8859-1 for the native side.
  // TODO(bugs.webrtc.org/8632): Remove.
  @CalledByNative
  static byte[] getStringBytes(String s) {
    try {
      return s.getBytes("ISO-8859-1");
    } catch (UnsupportedEncodingException e) {
      // Unreachable in practice: ISO-8859-1 is a standard charset that every
      // JVM is required to support.
      throw new RuntimeException("ISO-8859-1 is unsupported");
    }
  }

  // Hands the String class object to native code.
  // TODO(bugs.webrtc.org/8632): Remove.
  @CalledByNative
  static Object getStringClass() {
    return String.class;
  }

  // Accessor shims so native code can read Map.Entry contents.
  // TODO(bugs.webrtc.org/8606): Remove.
  @CalledByNative
  static Object getKey(Map.Entry entry) {
    return entry.getKey();
  }

  // TODO(bugs.webrtc.org/8606): Remove.
  @CalledByNative
  static Object getValue(Map.Entry entry) {
    return entry.getValue();
  }
}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/LibaomAv1Encoder.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/LibaomAv1Encoder.java
new file mode 100644
index 00000000..569a719f
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/LibaomAv1Encoder.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class LibaomAv1Encoder extends WrappedNativeVideoEncoder {
+ @Override
+ public long createNativeVideoEncoder() {
+ return nativeCreateEncoder();
+ }
+
+ static native long nativeCreateEncoder();
+
+ @Override
+ public boolean isHardwareEncoder() {
+ return false;
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp8Decoder.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp8Decoder.java
new file mode 100644
index 00000000..54ad0aa1
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp8Decoder.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class LibvpxVp8Decoder extends WrappedNativeVideoDecoder {
+ @Override
+ public long createNativeVideoDecoder() {
+ return nativeCreateDecoder();
+ }
+
+ static native long nativeCreateDecoder();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp8Encoder.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp8Encoder.java
new file mode 100644
index 00000000..4be9e52c
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp8Encoder.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class LibvpxVp8Encoder extends WrappedNativeVideoEncoder {
+ @Override
+ public long createNativeVideoEncoder() {
+ return nativeCreateEncoder();
+ }
+
+ static native long nativeCreateEncoder();
+
+ @Override
+ public boolean isHardwareEncoder() {
+ return false;
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp9Decoder.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp9Decoder.java
new file mode 100644
index 00000000..90a24433
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp9Decoder.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class LibvpxVp9Decoder extends WrappedNativeVideoDecoder {
+ @Override
+ public long createNativeVideoDecoder() {
+ return nativeCreateDecoder();
+ }
+
+ static native long nativeCreateDecoder();
+
+ static native boolean nativeIsSupported();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp9Encoder.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp9Encoder.java
new file mode 100644
index 00000000..1211ae93
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp9Encoder.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class LibvpxVp9Encoder extends WrappedNativeVideoEncoder {
+ @Override
+ public long createNativeVideoEncoder() {
+ return nativeCreateEncoder();
+ }
+
+ static native long nativeCreateEncoder();
+
+ @Override
+ public boolean isHardwareEncoder() {
+ return false;
+ }
+
+ static native boolean nativeIsSupported();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Loggable.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Loggable.java
new file mode 100644
index 00000000..cd66aa12
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Loggable.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.Logging.Severity;
+
+/**
+ * Java interface for WebRTC logging. The default implementation uses webrtc.Logging.
+ *
+ * When injected, the Loggable will receive logging from both Java and native.
+ */
+public interface Loggable {
+ public void onLogMessage(String message, Severity severity, String tag);
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Logging.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Logging.java
new file mode 100644
index 00000000..e7a9921f
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Logging.java
@@ -0,0 +1,201 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.util.EnumSet;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import org.webrtc.Loggable;
+
/**
 * Java wrapper for WebRTC logging. Logging defaults to java.util.logging.Logger, but a custom
 * logger implementing the Loggable interface can be injected along with a Severity. All subsequent
 * log messages will then be redirected to the injected Loggable, except those with a severity lower
 * than the specified severity, which will be discarded.
 *
 * It is also possible to switch to native logging (rtc::LogMessage) if one of the following static
 * functions are called from the app:
 * - Logging.enableLogThreads
 * - Logging.enableLogTimeStamps
 * - Logging.enableLogToDebugOutput
 *
 * The priority goes:
 * 1. Injected loggable
 * 2. Native logging
 * 3. Fallback logging.
 * Only one method will be used at a time.
 *
 * Injecting a Loggable or using any of the enable... methods requires that the native library is
 * loaded, using PeerConnectionFactory.initialize.
 */
public class Logging {
  // Sink used when no Loggable is injected and native logging is disabled.
  private static final Logger fallbackLogger = createFallbackLogger();
  // Set once enableLogToDebugOutput() has routed messages to native logging.
  private static volatile boolean loggingEnabled;
  @Nullable private static Loggable loggable;
  // Minimum severity forwarded to `loggable`; only meaningful while a
  // Loggable is injected. NOTE(review): not volatile — assumed to be set
  // before concurrent logging begins; confirm if injection can race log().
  private static Severity loggableSeverity;

  private static Logger createFallbackLogger() {
    final Logger fallbackLogger = Logger.getLogger("org.webrtc.Logging");
    fallbackLogger.setLevel(Level.ALL);
    return fallbackLogger;
  }

  // Installs `injectedLoggable` as the log sink; silently ignores null.
  static void injectLoggable(Loggable injectedLoggable, Severity severity) {
    if (injectedLoggable != null) {
      loggable = injectedLoggable;
      loggableSeverity = severity;
    }
  }

  // Removes any injected Loggable, restoring native/fallback logging.
  static void deleteInjectedLoggable() {
    loggable = null;
  }

  // TODO(solenberg): Remove once dependent projects updated.
  @Deprecated
  public enum TraceLevel {
    TRACE_NONE(0x0000),
    TRACE_STATEINFO(0x0001),
    TRACE_WARNING(0x0002),
    TRACE_ERROR(0x0004),
    TRACE_CRITICAL(0x0008),
    TRACE_APICALL(0x0010),
    TRACE_DEFAULT(0x00ff),
    TRACE_MODULECALL(0x0020),
    TRACE_MEMORY(0x0100),
    TRACE_TIMER(0x0200),
    TRACE_STREAM(0x0400),
    TRACE_DEBUG(0x0800),
    TRACE_INFO(0x1000),
    TRACE_TERSEINFO(0x2000),
    TRACE_ALL(0xffff);

    // Bitmask value of this trace level.
    public final int level;
    TraceLevel(int level) {
      this.level = level;
    }
  }

  // Keep in sync with webrtc/rtc_base/logging.h:LoggingSeverity.
  public enum Severity { LS_VERBOSE, LS_INFO, LS_WARNING, LS_ERROR, LS_NONE }

  public static void enableLogThreads() {
    nativeEnableLogThreads();
  }

  public static void enableLogTimeStamps() {
    nativeEnableLogTimeStamps();
  }

  // Deprecated no-op kept for source compatibility with old callers.
  // TODO(solenberg): Remove once dependent projects updated.
  @Deprecated
  public static void enableTracing(String path, EnumSet levels) {}

  // Enable diagnostic logging for messages of `severity` to the platform debug
  // output. On Android, the output will be directed to Logcat.
  // Note: this function starts collecting the output of the RTC_LOG() macros.
  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
  @SuppressWarnings("NoSynchronizedMethodCheck")
  public static synchronized void enableLogToDebugOutput(Severity severity) {
    if (loggable != null) {
      throw new IllegalStateException(
          "Logging to native debug output not supported while Loggable is injected. "
          + "Delete the Loggable before calling this method.");
    }
    nativeEnableLogToDebugOutput(severity.ordinal());
    loggingEnabled = true;
  }

  /**
   * Routes one log message through the first available sink, in priority
   * order: injected Loggable, native logging, java.util.logging fallback.
   */
  public static void log(Severity severity, String tag, String message) {
    if (tag == null || message == null) {
      throw new IllegalArgumentException("Logging tag or message may not be null.");
    }
    if (loggable != null) {
      // Filter log messages below loggableSeverity.
      if (severity.ordinal() < loggableSeverity.ordinal()) {
        return;
      }
      loggable.onLogMessage(message, severity, tag);
      return;
    }

    // Try native logging if no loggable is injected.
    if (loggingEnabled) {
      nativeLog(severity.ordinal(), tag, message);
      return;
    }

    // Fallback to system log.
    Level level;
    switch (severity) {
      case LS_ERROR:
        level = Level.SEVERE;
        break;
      case LS_WARNING:
        level = Level.WARNING;
        break;
      case LS_INFO:
        level = Level.INFO;
        break;
      default:
        level = Level.FINE;
        break;
    }
    fallbackLogger.log(level, tag + ": " + message);
  }

  // Convenience wrappers mirroring android.util.Log naming (d/e/w/v).
  public static void d(String tag, String message) {
    log(Severity.LS_INFO, tag, message);
  }

  public static void e(String tag, String message) {
    log(Severity.LS_ERROR, tag, message);
  }

  public static void w(String tag, String message) {
    log(Severity.LS_WARNING, tag, message);
  }

  // Logs the message, then the throwable and its stack trace as separate lines.
  public static void e(String tag, String message, Throwable e) {
    log(Severity.LS_ERROR, tag, message);
    log(Severity.LS_ERROR, tag, e.toString());
    log(Severity.LS_ERROR, tag, getStackTraceString(e));
  }

  public static void w(String tag, String message, Throwable e) {
    log(Severity.LS_WARNING, tag, message);
    log(Severity.LS_WARNING, tag, e.toString());
    log(Severity.LS_WARNING, tag, getStackTraceString(e));
  }

  public static void v(String tag, String message) {
    log(Severity.LS_VERBOSE, tag, message);
  }

  // Renders a throwable's stack trace to a String ("" for null input).
  private static String getStackTraceString(Throwable e) {
    if (e == null) {
      return "";
    }

    StringWriter sw = new StringWriter();
    PrintWriter pw = new PrintWriter(sw);
    e.printStackTrace(pw);
    return sw.toString();
  }

  private static native void nativeEnableLogToDebugOutput(int nativeSeverity);
  private static native void nativeEnableLogThreads();
  private static native void nativeEnableLogTimeStamps();
  private static native void nativeLog(int severity, String tag, String message);
}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecUtils.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecUtils.java
new file mode 100644
index 00000000..8e7616bc
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecUtils.java
@@ -0,0 +1,135 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.os.Build;
+import androidx.annotation.Nullable;
+import java.util.HashMap;
+import java.util.Map;
+
+/** Container class for static constants and helpers used with MediaCodec. */
+// We are forced to use the old API because we want to support API level < 21.
+@SuppressWarnings("deprecation")
+class MediaCodecUtils {
+ private static final String TAG = "MediaCodecUtils";
+
+ // Prefixes for supported hardware encoder/decoder component names.
+ static final String EXYNOS_PREFIX = "OMX.Exynos.";
+ static final String INTEL_PREFIX = "OMX.Intel.";
+ static final String NVIDIA_PREFIX = "OMX.Nvidia.";
+ static final String QCOM_PREFIX = "OMX.qcom.";
+ static final String HISI_PREFIX = "OMX.hisi.";
+
+ static final String MTK_PREFIX = "OMX.mtk.";
+
+
+ static final String[] SOFTWARE_IMPLEMENTATION_PREFIXES = {
+ "OMX.google.", "OMX.SEC.", "c2.android"};
+
+ // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
+ // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
+ static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka = 0x7FA30C01;
+ static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka = 0x7FA30C02;
+ static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka = 0x7FA30C03;
+ static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
+
+ // Color formats supported by hardware decoder - in order of preference.
+ static final int[] DECODER_COLOR_FORMATS = new int[] {CodecCapabilities.COLOR_FormatYUV420Planar,
+ CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
+ CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
+ MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka,
+ MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka,
+ MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka,
+ MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};
+
+ // Color formats supported by hardware encoder - in order of preference.
+ static final int[] ENCODER_COLOR_FORMATS = {
+ MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
+ MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
+ MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
+ MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};
+
+ // Color formats supported by texture mode encoding - in order of preference.
+ static final int[] TEXTURE_COLOR_FORMATS =
+ new int[] {MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface};
+
+ static @Nullable Integer selectColorFormat(
+ int[] supportedColorFormats, CodecCapabilities capabilities) {
+ for (int supportedColorFormat : supportedColorFormats) {
+ for (int codecColorFormat : capabilities.colorFormats) {
+ if (codecColorFormat == supportedColorFormat) {
+ return codecColorFormat;
+ }
+ }
+ }
+ return null;
+ }
+
+ static boolean codecSupportsType(MediaCodecInfo info, VideoCodecMimeType type) {
+ for (String mimeType : info.getSupportedTypes()) {
+ if (type.mimeType().equals(mimeType)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ static Map getCodecProperties(VideoCodecMimeType type, boolean highProfile) {
+ switch (type) {
+ case VP8:
+ case VP9:
+ case AV1:
+ case H265:
+ return new HashMap();
+ case H264:
+ return H264Utils.getDefaultH264Params(highProfile);
+ default:
+ throw new IllegalArgumentException("Unsupported codec: " + type);
+ }
+ }
+
+ static boolean isHardwareAccelerated(MediaCodecInfo info) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+ return isHardwareAcceleratedQOrHigher(info);
+ }
+ return !isSoftwareOnly(info);
+ }
+
+ @TargetApi(29)
+ private static boolean isHardwareAcceleratedQOrHigher(android.media.MediaCodecInfo codecInfo) {
+ return codecInfo.isHardwareAccelerated();
+ }
+
+ static boolean isSoftwareOnly(android.media.MediaCodecInfo codecInfo) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+ return isSoftwareOnlyQOrHigher(codecInfo);
+ }
+ String name = codecInfo.getName();
+ for (String prefix : SOFTWARE_IMPLEMENTATION_PREFIXES) {
+ if (name.startsWith(prefix)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ @TargetApi(29)
+ private static boolean isSoftwareOnlyQOrHigher(android.media.MediaCodecInfo codecInfo) {
+ return codecInfo.isSoftwareOnly();
+ }
+
+ private MediaCodecUtils() {
+ // This class should not be instantiated.
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecVideoDecoderFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecVideoDecoderFactory.java
new file mode 100644
index 00000000..6ef0eb5e
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecVideoDecoderFactory.java
@@ -0,0 +1,141 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.webrtc.MediaCodecUtils.EXYNOS_PREFIX;
+import static org.webrtc.MediaCodecUtils.HISI_PREFIX;
+import static org.webrtc.MediaCodecUtils.QCOM_PREFIX;
+
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaCodecList;
+import android.os.Build;
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.List;
+
+/** Factory for decoders backed by Android MediaCodec API. */
+@SuppressWarnings("deprecation") // API level 16 requires use of deprecated methods.
+class MediaCodecVideoDecoderFactory implements VideoDecoderFactory {
+ private static final String TAG = "MediaCodecVideoDecoderFactory";
+
+ private final @Nullable EglBase.Context sharedContext;
+ private final @Nullable Predicate codecAllowedPredicate;
+
+ /**
+ * MediaCodecVideoDecoderFactory with support of codecs filtering.
+ *
+ * @param sharedContext The textures generated will be accessible from this context. May be null,
+ * this disables texture support.
+ * @param codecAllowedPredicate optional predicate to test if codec allowed. All codecs are
+ * allowed when predicate is not provided.
+ */
+ public MediaCodecVideoDecoderFactory(@Nullable EglBase.Context sharedContext,
+ @Nullable Predicate codecAllowedPredicate) {
+ this.sharedContext = sharedContext;
+ this.codecAllowedPredicate = codecAllowedPredicate;
+ }
+
+ @Nullable
+ @Override
+ public VideoDecoder createDecoder(VideoCodecInfo codecType) {
+ VideoCodecMimeType type = VideoCodecMimeType.valueOf(codecType.getName());
+ MediaCodecInfo info = findCodecForType(type);
+
+ if (info == null) {
+ return null;
+ }
+
+ CodecCapabilities capabilities = info.getCapabilitiesForType(type.mimeType());
+ return new AndroidVideoDecoder(new MediaCodecWrapperFactoryImpl(), info.getName(), type,
+ MediaCodecUtils.selectColorFormat(MediaCodecUtils.DECODER_COLOR_FORMATS, capabilities),
+ sharedContext);
+ }
+
+ @Override
+ public VideoCodecInfo[] getSupportedCodecs() {
+ List supportedCodecInfos = new ArrayList();
+ // Generate a list of supported codecs in order of preference:
+ // VP8, VP9, H264 (high profile), H264 (baseline profile), AV1 and H265.
+ for (VideoCodecMimeType type :
+ new VideoCodecMimeType[] {VideoCodecMimeType.VP8, VideoCodecMimeType.VP9,
+ VideoCodecMimeType.H264, VideoCodecMimeType.AV1, VideoCodecMimeType.H265}) {
+ MediaCodecInfo codec = findCodecForType(type);
+ if (codec != null) {
+ String name = type.name();
+ if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) {
+ supportedCodecInfos.add(new VideoCodecInfo(
+ name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true)));
+ }
+
+ supportedCodecInfos.add(new VideoCodecInfo(
+ name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false)));
+ }
+ }
+
+ return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]);
+ }
+
+ private @Nullable MediaCodecInfo findCodecForType(VideoCodecMimeType type) {
+ for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
+ MediaCodecInfo info = null;
+ try {
+ info = MediaCodecList.getCodecInfoAt(i);
+ } catch (IllegalArgumentException e) {
+ Logging.e(TAG, "Cannot retrieve decoder codec info", e);
+ }
+
+ if (info == null || info.isEncoder()) {
+ continue;
+ }
+
+ if (isSupportedCodec(info, type)) {
+ return info;
+ }
+ }
+
+ return null; // No support for this type.
+ }
+
+ // Returns true if the given MediaCodecInfo indicates a supported encoder for the given type.
+ private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecMimeType type) {
+ if (!MediaCodecUtils.codecSupportsType(info, type)) {
+ return false;
+ }
+ // Check for a supported color format.
+ if (MediaCodecUtils.selectColorFormat(
+ MediaCodecUtils.DECODER_COLOR_FORMATS, info.getCapabilitiesForType(type.mimeType()))
+ == null) {
+ return false;
+ }
+ return isCodecAllowed(info);
+ }
+
+ private boolean isCodecAllowed(MediaCodecInfo info) {
+ if (codecAllowedPredicate == null) {
+ return true;
+ }
+ return codecAllowedPredicate.test(info);
+ }
+
+ private boolean isH264HighProfileSupported(MediaCodecInfo info) {
+ String name = info.getName();
+ // Support H.264 HP decoding on QCOM chips.
+ if (name.startsWith(QCOM_PREFIX)) {
+ return true;
+ }
+ // Support H.264 HP decoding on Exynos chips for Android M and above.
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && name.startsWith(EXYNOS_PREFIX)) {
+ return true;
+ }
+ return false;
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecWrapper.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecWrapper.java
new file mode 100644
index 00000000..11e0f58d
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecWrapper.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCrypto;
+import android.media.MediaFormat;
+import android.os.Bundle;
+import android.view.Surface;
+import java.nio.ByteBuffer;
+
+/**
+ * Subset of methods defined in {@link android.media.MediaCodec} needed by
+ * {@link HardwareVideoEncoder} and {@link AndroidVideoDecoder}. This interface
+ * exists to allow mocking and using a fake implementation in tests.
+ */
+interface MediaCodecWrapper {
+ // See MediaCodec#configure.
+ void configure(MediaFormat format, Surface surface, MediaCrypto crypto, int flags);
+
+ // See MediaCodec#start.
+ void start();
+
+ // See MediaCodec#flush.
+ void flush();
+
+ // See MediaCodec#stop.
+ void stop();
+
+ // See MediaCodec#release.
+ void release();
+
+ // See MediaCodec#dequeueInputBuffer. Returns a buffer index or a negative status code.
+ int dequeueInputBuffer(long timeoutUs);
+
+ // See MediaCodec#queueInputBuffer.
+ void queueInputBuffer(int index, int offset, int size, long presentationTimeUs, int flags);
+
+ // See MediaCodec#dequeueOutputBuffer. Returns a buffer index or a negative status code.
+ int dequeueOutputBuffer(MediaCodec.BufferInfo info, long timeoutUs);
+
+ // See MediaCodec#releaseOutputBuffer.
+ void releaseOutputBuffer(int index, boolean render);
+
+ // See MediaCodec#getInputFormat.
+ MediaFormat getInputFormat();
+
+ // See MediaCodec#getOutputFormat.
+ MediaFormat getOutputFormat();
+
+ // See MediaCodec#getOutputFormat(int).
+ MediaFormat getOutputFormat(int index);
+
+ // See MediaCodec#getInputBuffer.
+ ByteBuffer getInputBuffer(int index);
+
+ // See MediaCodec#getOutputBuffer.
+ ByteBuffer getOutputBuffer(int index);
+
+ // See MediaCodec#createInputSurface.
+ Surface createInputSurface();
+
+ // See MediaCodec#setParameters.
+ void setParameters(Bundle params);
+
+ // Returns the MediaCodecInfo describing the wrapped codec.
+ MediaCodecInfo getCodecInfo();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecWrapperFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecWrapperFactory.java
new file mode 100644
index 00000000..2962cb62
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecWrapperFactory.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.io.IOException;
+
+interface MediaCodecWrapperFactory {
+ /**
+ * Creates a new {@link MediaCodecWrapper} by codec name.
+ *
+ * For additional information see {@link android.media.MediaCodec#createByCodecName}.
+ *
+ * @throws IOException if the underlying codec cannot be created for {@code name}.
+ */
+ MediaCodecWrapper createByCodecName(String name) throws IOException;
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecWrapperFactoryImpl.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecWrapperFactoryImpl.java
new file mode 100644
index 00000000..207492f3
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecWrapperFactoryImpl.java
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodec;
+import android.media.MediaCodec.BufferInfo;
+import android.media.MediaCodecInfo;
+import android.media.MediaCrypto;
+import android.media.MediaFormat;
+import android.os.Bundle;
+import android.view.Surface;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+/**
+ * Implementation of MediaCodecWrapperFactory that returns MediaCodecWrappers
+ * delegating to real {@link android.media.MediaCodec} objects.
+ */
+class MediaCodecWrapperFactoryImpl implements MediaCodecWrapperFactory {
+  // Thin adapter: every method is a straight pass-through to the wrapped codec.
+  private static class MediaCodecWrapperImpl implements MediaCodecWrapper {
+    private final MediaCodec codec;
+
+    public MediaCodecWrapperImpl(MediaCodec mediaCodec) {
+      this.codec = mediaCodec;
+    }
+
+    @Override
+    public void configure(MediaFormat format, Surface surface, MediaCrypto crypto, int flags) {
+      codec.configure(format, surface, crypto, flags);
+    }
+
+    @Override
+    public void start() {
+      codec.start();
+    }
+
+    @Override
+    public void flush() {
+      codec.flush();
+    }
+
+    @Override
+    public void stop() {
+      codec.stop();
+    }
+
+    @Override
+    public void release() {
+      codec.release();
+    }
+
+    @Override
+    public int dequeueInputBuffer(long timeoutUs) {
+      return codec.dequeueInputBuffer(timeoutUs);
+    }
+
+    @Override
+    public void queueInputBuffer(
+        int index, int offset, int size, long presentationTimeUs, int flags) {
+      codec.queueInputBuffer(index, offset, size, presentationTimeUs, flags);
+    }
+
+    @Override
+    public int dequeueOutputBuffer(BufferInfo info, long timeoutUs) {
+      return codec.dequeueOutputBuffer(info, timeoutUs);
+    }
+
+    @Override
+    public void releaseOutputBuffer(int index, boolean render) {
+      codec.releaseOutputBuffer(index, render);
+    }
+
+    @Override
+    public MediaFormat getInputFormat() {
+      return codec.getInputFormat();
+    }
+
+    @Override
+    public MediaFormat getOutputFormat() {
+      return codec.getOutputFormat();
+    }
+
+    @Override
+    public MediaFormat getOutputFormat(int index) {
+      return codec.getOutputFormat(index);
+    }
+
+    @Override
+    public ByteBuffer getInputBuffer(int index) {
+      return codec.getInputBuffer(index);
+    }
+
+    @Override
+    public ByteBuffer getOutputBuffer(int index) {
+      return codec.getOutputBuffer(index);
+    }
+
+    @Override
+    public Surface createInputSurface() {
+      return codec.createInputSurface();
+    }
+
+    @Override
+    public void setParameters(Bundle params) {
+      codec.setParameters(params);
+    }
+
+    @Override
+    public MediaCodecInfo getCodecInfo() {
+      return codec.getCodecInfo();
+    }
+  }
+
+  // Wraps MediaCodec.createByCodecName; the IOException propagates to the caller.
+  @Override
+  public MediaCodecWrapper createByCodecName(String name) throws IOException {
+    return new MediaCodecWrapperImpl(MediaCodec.createByCodecName(name));
+  }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaConstraints.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaConstraints.java
new file mode 100644
index 00000000..bae04e53
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaConstraints.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Description of media constraints for {@code MediaStream} and
+ * {@code PeerConnection}.
+ */
+public class MediaConstraints {
+  /** Simple String key/value pair. */
+  public static class KeyValuePair {
+    private final String key;
+    private final String value;
+
+    public KeyValuePair(String key, String value) {
+      this.key = key;
+      this.value = value;
+    }
+
+    @CalledByNative("KeyValuePair")
+    public String getKey() {
+      return key;
+    }
+
+    @CalledByNative("KeyValuePair")
+    public String getValue() {
+      return value;
+    }
+
+    @Override
+    public String toString() {
+      return key + ": " + value;
+    }
+
+    @Override
+    public boolean equals(@Nullable Object other) {
+      if (this == other) {
+        return true;
+      }
+      if (other == null || getClass() != other.getClass()) {
+        return false;
+      }
+      KeyValuePair that = (KeyValuePair) other;
+      return key.equals(that.key) && value.equals(that.value);
+    }
+
+    @Override
+    public int hashCode() {
+      return key.hashCode() + value.hashCode();
+    }
+  }
+
+  // Restored generic type parameters (List<KeyValuePair>); they had been
+  // stripped to raw types, which loses compile-time checking for callers.
+  public final List<KeyValuePair> mandatory;
+  public final List<KeyValuePair> optional;
+
+  public MediaConstraints() {
+    mandatory = new ArrayList<>();
+    optional = new ArrayList<>();
+  }
+
+  /** Renders a constraint list as "[k1: v1, k2: v2]". */
+  private static String stringifyKeyValuePairList(List<KeyValuePair> list) {
+    StringBuilder builder = new StringBuilder("[");
+    for (KeyValuePair pair : list) {
+      if (builder.length() > 1) {
+        builder.append(", ");
+      }
+      builder.append(pair.toString());
+    }
+    return builder.append("]").toString();
+  }
+
+  @Override
+  public String toString() {
+    return "mandatory: " + stringifyKeyValuePairList(mandatory) + ", optional: "
+        + stringifyKeyValuePairList(optional);
+  }
+
+  @CalledByNative
+  List<KeyValuePair> getMandatory() {
+    return mandatory;
+  }
+
+  @CalledByNative
+  List<KeyValuePair> getOptional() {
+    return optional;
+  }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaSource.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaSource.java
new file mode 100644
index 00000000..9245e3e2
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaSource.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ MediaSourceInterface. */
+public class MediaSource {
+  /** Mirrors MediaSourceInterface.SourceState. */
+  public enum State {
+    INITIALIZING,
+    LIVE,
+    ENDED,
+    MUTED;
+
+    @CalledByNative("State")
+    static State fromNativeIndex(int nativeIndex) {
+      return values()[nativeIndex];
+    }
+  }
+
+  // Releases the native object once the last reference is dropped.
+  private final RefCountDelegate refs;
+  private long nativeSource;
+
+  public MediaSource(long nativeSource) {
+    refs = new RefCountDelegate(() -> JniCommon.nativeReleaseRef(nativeSource));
+    this.nativeSource = nativeSource;
+  }
+
+  public State state() {
+    checkMediaSourceExists();
+    return nativeGetState(nativeSource);
+  }
+
+  public void dispose() {
+    checkMediaSourceExists();
+    refs.release();
+    nativeSource = 0;
+  }
+
+  /** Returns a pointer to webrtc::MediaSourceInterface. */
+  protected long getNativeMediaSource() {
+    checkMediaSourceExists();
+    return nativeSource;
+  }
+
+  /**
+   * Runs code in {@code runnable} holding a reference to the media source. If the object has
+   * already been released, does nothing.
+   */
+  void runWithReference(Runnable runnable) {
+    if (!refs.safeRetain()) {
+      return; // Already released; nothing to do.
+    }
+    try {
+      runnable.run();
+    } finally {
+      refs.release();
+    }
+  }
+
+  private void checkMediaSourceExists() {
+    if (nativeSource == 0) {
+      throw new IllegalStateException("MediaSource has been disposed.");
+    }
+  }
+
+  private static native State nativeGetState(long pointer);
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaStream.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaStream.java
new file mode 100644
index 00000000..e530fe5f
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaStream.java
@@ -0,0 +1,159 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/** Java wrapper for a C++ MediaStreamInterface. */
+public class MediaStream {
+  private static final String TAG = "MediaStream";
+
+  // Java wrappers for the tracks currently attached to the native stream.
+  // Restored the generic type parameters that had been stripped to raw types.
+  public final List<AudioTrack> audioTracks = new ArrayList<>();
+  public final List<VideoTrack> videoTracks = new ArrayList<>();
+  public final List<VideoTrack> preservedVideoTracks = new ArrayList<>();
+  private long nativeStream;
+
+  @CalledByNative
+  public MediaStream(long nativeStream) {
+    this.nativeStream = nativeStream;
+  }
+
+  /** Adds an audio track to the native stream; returns true on success. */
+  public boolean addTrack(AudioTrack track) {
+    checkMediaStreamExists();
+    if (nativeAddAudioTrackToNativeStream(nativeStream, track.getNativeAudioTrack())) {
+      audioTracks.add(track);
+      return true;
+    }
+    return false;
+  }
+
+  /** Adds a video track to the native stream; returns true on success. */
+  public boolean addTrack(VideoTrack track) {
+    checkMediaStreamExists();
+    if (nativeAddVideoTrackToNativeStream(nativeStream, track.getNativeVideoTrack())) {
+      videoTracks.add(track);
+      return true;
+    }
+    return false;
+  }
+
+  // Tracks added in addTrack() call will be auto released once MediaStream.dispose()
+  // is called. If video track need to be preserved after MediaStream is destroyed it
+  // should be added to MediaStream using addPreservedTrack() call.
+  public boolean addPreservedTrack(VideoTrack track) {
+    checkMediaStreamExists();
+    if (nativeAddVideoTrackToNativeStream(nativeStream, track.getNativeVideoTrack())) {
+      preservedVideoTracks.add(track);
+      return true;
+    }
+    return false;
+  }
+
+  public boolean removeTrack(AudioTrack track) {
+    checkMediaStreamExists();
+    audioTracks.remove(track);
+    return nativeRemoveAudioTrack(nativeStream, track.getNativeAudioTrack());
+  }
+
+  public boolean removeTrack(VideoTrack track) {
+    checkMediaStreamExists();
+    videoTracks.remove(track);
+    preservedVideoTracks.remove(track);
+    return nativeRemoveVideoTrack(nativeStream, track.getNativeVideoTrack());
+  }
+
+  @CalledByNative
+  public void dispose() {
+    checkMediaStreamExists();
+    // Remove and release previously added audio and video tracks.
+    while (!audioTracks.isEmpty()) {
+      AudioTrack track = audioTracks.get(0 /* index */);
+      removeTrack(track);
+      track.dispose();
+    }
+    while (!videoTracks.isEmpty()) {
+      VideoTrack track = videoTracks.get(0 /* index */);
+      removeTrack(track);
+      track.dispose();
+    }
+    // Remove, but do not release preserved video tracks.
+    while (!preservedVideoTracks.isEmpty()) {
+      removeTrack(preservedVideoTracks.get(0 /* index */));
+    }
+    JniCommon.nativeReleaseRef(nativeStream);
+    nativeStream = 0;
+  }
+
+  public String getId() {
+    checkMediaStreamExists();
+    return nativeGetId(nativeStream);
+  }
+
+  @Override
+  public String toString() {
+    return "[" + getId() + ":A=" + audioTracks.size() + ":V=" + videoTracks.size() + "]";
+  }
+
+  @CalledByNative
+  void addNativeAudioTrack(long nativeTrack) {
+    audioTracks.add(new AudioTrack(nativeTrack));
+  }
+
+  @CalledByNative
+  void addNativeVideoTrack(long nativeTrack) {
+    videoTracks.add(new VideoTrack(nativeTrack));
+  }
+
+  @CalledByNative
+  void removeAudioTrack(long nativeTrack) {
+    removeMediaStreamTrack(audioTracks, nativeTrack);
+  }
+
+  @CalledByNative
+  void removeVideoTrack(long nativeTrack) {
+    removeMediaStreamTrack(videoTracks, nativeTrack);
+  }
+
+  /** Returns a pointer to webrtc::MediaStreamInterface. */
+  long getNativeMediaStream() {
+    checkMediaStreamExists();
+    return nativeStream;
+  }
+
+  private void checkMediaStreamExists() {
+    if (nativeStream == 0) {
+      throw new IllegalStateException("MediaStream has been disposed.");
+    }
+  }
+
+  // Disposes and removes the wrapper whose native pointer matches nativeTrack.
+  // Signature repaired: the wildcard bound "List<? extends MediaStreamTrack>"
+  // had been corrupted into invalid syntax ("List extends MediaStreamTrack>").
+  private static void removeMediaStreamTrack(
+      List<? extends MediaStreamTrack> tracks, long nativeTrack) {
+    final Iterator<? extends MediaStreamTrack> it = tracks.iterator();
+    while (it.hasNext()) {
+      MediaStreamTrack track = it.next();
+      if (track.getNativeMediaStreamTrack() == nativeTrack) {
+        track.dispose();
+        it.remove();
+        return;
+      }
+    }
+    // Fixed double-negative log message ("Couldn't not find track").
+    Logging.e(TAG, "Could not find track");
+  }
+
+  private static native boolean nativeAddAudioTrackToNativeStream(
+      long stream, long nativeAudioTrack);
+  private static native boolean nativeAddVideoTrackToNativeStream(
+      long stream, long nativeVideoTrack);
+  private static native boolean nativeRemoveAudioTrack(long stream, long nativeAudioTrack);
+  private static native boolean nativeRemoveVideoTrack(long stream, long nativeVideoTrack);
+  private static native String nativeGetId(long stream);
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaStreamTrack.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaStreamTrack.java
new file mode 100644
index 00000000..2e4c3e18
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaStreamTrack.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+
+/** Java wrapper for a C++ MediaStreamTrackInterface. */
+public class MediaStreamTrack {
+  public static final String AUDIO_TRACK_KIND = "audio";
+  public static final String VIDEO_TRACK_KIND = "video";
+
+  /** Mirrors MediaStreamTrackInterface.TrackState. */
+  public enum State {
+    LIVE,
+    ENDED;
+
+    @CalledByNative("State")
+    static State fromNativeIndex(int nativeIndex) {
+      return values()[nativeIndex];
+    }
+  }
+
+  // Must be kept in sync with cricket::MediaType.
+  public enum MediaType {
+    MEDIA_TYPE_AUDIO(0),
+    MEDIA_TYPE_VIDEO(1);
+
+    private final int nativeIndex;
+
+    private MediaType(int nativeIndex) {
+      this.nativeIndex = nativeIndex;
+    }
+
+    @CalledByNative("MediaType")
+    int getNative() {
+      return nativeIndex;
+    }
+
+    @CalledByNative("MediaType")
+    static MediaType fromNativeIndex(int nativeIndex) {
+      for (MediaType candidate : values()) {
+        if (candidate.nativeIndex == nativeIndex) {
+          return candidate;
+        }
+      }
+      throw new IllegalArgumentException("Unknown native media type: " + nativeIndex);
+    }
+  }
+
+  /** Factory method to create an AudioTrack or VideoTrack subclass. */
+  static @Nullable MediaStreamTrack createMediaStreamTrack(long nativeTrack) {
+    if (nativeTrack == 0) {
+      return null;
+    }
+    final String kind = nativeGetKind(nativeTrack);
+    if (kind.equals(AUDIO_TRACK_KIND)) {
+      return new AudioTrack(nativeTrack);
+    }
+    if (kind.equals(VIDEO_TRACK_KIND)) {
+      return new VideoTrack(nativeTrack);
+    }
+    return null; // Unknown kind.
+  }
+
+  // Pointer to the native MediaStreamTrackInterface; zeroed by dispose().
+  private long nativeTrack;
+
+  public MediaStreamTrack(long nativeTrack) {
+    if (nativeTrack == 0) {
+      throw new IllegalArgumentException("nativeTrack may not be null");
+    }
+    this.nativeTrack = nativeTrack;
+  }
+
+  public String id() {
+    checkMediaStreamTrackExists();
+    return nativeGetId(nativeTrack);
+  }
+
+  public String kind() {
+    checkMediaStreamTrackExists();
+    return nativeGetKind(nativeTrack);
+  }
+
+  public boolean enabled() {
+    checkMediaStreamTrackExists();
+    return nativeGetEnabled(nativeTrack);
+  }
+
+  public boolean setEnabled(boolean enable) {
+    checkMediaStreamTrackExists();
+    return nativeSetEnabled(nativeTrack, enable);
+  }
+
+  public State state() {
+    checkMediaStreamTrackExists();
+    return nativeGetState(nativeTrack);
+  }
+
+  /** Releases the native reference; the object must not be used afterwards. */
+  public void dispose() {
+    checkMediaStreamTrackExists();
+    JniCommon.nativeReleaseRef(nativeTrack);
+    nativeTrack = 0;
+  }
+
+  long getNativeMediaStreamTrack() {
+    checkMediaStreamTrackExists();
+    return nativeTrack;
+  }
+
+  private void checkMediaStreamTrackExists() {
+    if (nativeTrack == 0) {
+      throw new IllegalStateException("MediaStreamTrack has been disposed.");
+    }
+  }
+
+  private static native String nativeGetId(long track);
+  private static native String nativeGetKind(long track);
+  private static native boolean nativeGetEnabled(long track);
+  private static native boolean nativeSetEnabled(long track, boolean enabled);
+  private static native State nativeGetState(long track);
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Metrics.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Metrics.java
new file mode 100644
index 00000000..25337683
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Metrics.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.HashMap;
+import java.util.Map;
+
+// Java-side of androidmetrics.cc
+//
+// Rtc histograms can be queried through the API, getAndReset().
+// The returned map holds the name of a histogram and its samples.
+//
+// Example of `map` with one histogram:
+// `name`: "WebRTC.Video.InputFramesPerSecond"
+// `min`: 1
+// `max`: 100
+// `bucketCount`: 50
+// `samples`: [30]:1
+//
+// Most histograms are not updated frequently (e.g. most video metrics are an
+// average over the call and recorded when a stream is removed).
+// The metrics can for example be retrieved when a peer connection is closed.
+public class Metrics {
+  private static final String TAG = "Metrics";
+
+  // Histogram name -> recorded histogram. Restored the generic parameters
+  // (Map<String, HistogramInfo>); they had been stripped to a raw Map, leaving
+  // a dangling "//" artifact behind.
+  public final Map<String, HistogramInfo> map = new HashMap<>();
+
+  @CalledByNative
+  Metrics() {}
+
+  /**
+   * Class holding histogram information.
+   */
+  public static class HistogramInfo {
+    public final int min;
+    public final int max;
+    public final int bucketCount;
+    // Sample value -> number of events recorded for that value.
+    public final Map<Integer, Integer> samples = new HashMap<>();
+
+    @CalledByNative("HistogramInfo")
+    public HistogramInfo(int min, int max, int bucketCount) {
+      this.min = min;
+      this.max = max;
+      this.bucketCount = bucketCount;
+    }
+
+    @CalledByNative("HistogramInfo")
+    public void addSample(int value, int numEvents) {
+      samples.put(value, numEvents);
+    }
+  }
+
+  @CalledByNative
+  private void add(String name, HistogramInfo info) {
+    map.put(name, info);
+  }
+
+  // Enables gathering of metrics (which can be fetched with getAndReset()).
+  // Must be called before PeerConnectionFactory is created.
+  public static void enable() {
+    nativeEnable();
+  }
+
+  // Gets and clears native histograms.
+  public static Metrics getAndReset() {
+    return nativeGetAndReset();
+  }
+
+  private static native void nativeEnable();
+  private static native Metrics nativeGetAndReset();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NV12Buffer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NV12Buffer.java
new file mode 100644
index 00000000..fe0221d8
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NV12Buffer.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+
+/** VideoFrame.Buffer backed by an NV12-formatted ByteBuffer. */
+public class NV12Buffer implements VideoFrame.Buffer {
+  private final int width;
+  private final int height;
+  private final int stride;
+  private final int sliceHeight;
+  private final ByteBuffer buffer;
+  private final RefCountDelegate refs;
+
+  public NV12Buffer(int width, int height, int stride, int sliceHeight, ByteBuffer buffer,
+      @Nullable Runnable releaseCallback) {
+    this.width = width;
+    this.height = height;
+    this.stride = stride;
+    this.sliceHeight = sliceHeight;
+    this.buffer = buffer;
+    this.refs = new RefCountDelegate(releaseCallback);
+  }
+
+  @Override
+  public int getWidth() {
+    return width;
+  }
+
+  @Override
+  public int getHeight() {
+    return height;
+  }
+
+  @Override
+  public VideoFrame.I420Buffer toI420() {
+    // A full-size crop-and-scale performs the NV12 -> I420 conversion.
+    return (VideoFrame.I420Buffer) cropAndScale(0, 0, width, height, width, height);
+  }
+
+  @Override
+  public void retain() {
+    refs.retain();
+  }
+
+  @Override
+  public void release() {
+    refs.release();
+  }
+
+  @Override
+  public VideoFrame.Buffer cropAndScale(
+      int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+    final JavaI420Buffer dst = JavaI420Buffer.allocate(scaleWidth, scaleHeight);
+    nativeCropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, buffer, width,
+        height, stride, sliceHeight, dst.getDataY(), dst.getStrideY(), dst.getDataU(),
+        dst.getStrideU(), dst.getDataV(), dst.getStrideV());
+    return dst;
+  }
+
+  private static native void nativeCropAndScale(int cropX, int cropY, int cropWidth, int cropHeight,
+      int scaleWidth, int scaleHeight, ByteBuffer src, int srcWidth, int srcHeight, int srcStride,
+      int srcSliceHeight, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU, int dstStrideU,
+      ByteBuffer dstV, int dstStrideV);
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NV21Buffer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NV21Buffer.java
new file mode 100644
index 00000000..0fb1afe7
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NV21Buffer.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+
+/** VideoFrame.Buffer backed by an NV21-formatted byte array. */
+public class NV21Buffer implements VideoFrame.Buffer {
+  private final byte[] data;
+  private final int width;
+  private final int height;
+  private final RefCountDelegate refs;
+
+  public NV21Buffer(byte[] data, int width, int height, @Nullable Runnable releaseCallback) {
+    this.data = data;
+    this.width = width;
+    this.height = height;
+    this.refs = new RefCountDelegate(releaseCallback);
+  }
+
+  @Override
+  public int getWidth() {
+    return width;
+  }
+
+  @Override
+  public int getHeight() {
+    return height;
+  }
+
+  @Override
+  public VideoFrame.I420Buffer toI420() {
+    // Cropping converts the frame to I420. Just crop and scale to the whole image.
+    return (VideoFrame.I420Buffer) cropAndScale(0 /* cropX */, 0 /* cropY */, width /* cropWidth */,
+        height /* cropHeight */, width /* scaleWidth */, height /* scaleHeight */);
+  }
+
+  @Override
+  public void retain() {
+    refs.retain();
+  }
+
+  @Override
+  public void release() {
+    refs.release();
+  }
+
+  @Override
+  public VideoFrame.Buffer cropAndScale(
+      int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+    final JavaI420Buffer dst = JavaI420Buffer.allocate(scaleWidth, scaleHeight);
+    nativeCropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, data, width,
+        height, dst.getDataY(), dst.getStrideY(), dst.getDataU(), dst.getStrideU(),
+        dst.getDataV(), dst.getStrideV());
+    return dst;
+  }
+
+  private static native void nativeCropAndScale(int cropX, int cropY, int cropWidth, int cropHeight,
+      int scaleWidth, int scaleHeight, byte[] src, int srcWidth, int srcHeight, ByteBuffer dstY,
+      int dstStrideY, ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV);
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeAndroidVideoTrackSource.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeAndroidVideoTrackSource.java
new file mode 100644
index 00000000..d4fba481
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeAndroidVideoTrackSource.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import org.webrtc.VideoFrame;
+import org.webrtc.VideoProcessor;
+
+/**
+ * This class is meant to be a simple layer that only handles the JNI wrapping of a C++
+ * AndroidVideoTrackSource, that can easily be mocked out in Java unit tests. Refrain from adding
+ * any unnecessary logic to this class.
+ * This class is thread-safe and methods can be called from any thread, but if frames A, B, ..., are
+ * sent to adaptFrame(), the adapted frames adaptedA, adaptedB, ..., need to be passed in the same
+ * order to onFrameCaptured().
+ */
+class NativeAndroidVideoTrackSource {
+ // Pointer to webrtc::jni::AndroidVideoTrackSource.
+ private final long nativeAndroidVideoTrackSource;
+
+ public NativeAndroidVideoTrackSource(long nativeAndroidVideoTrackSource) {
+ this.nativeAndroidVideoTrackSource = nativeAndroidVideoTrackSource;
+ }
+
+ /**
+ * Set the state for the native MediaSourceInterface. Maps boolean to either
+ * SourceState::kLive or SourceState::kEnded.
+ */
+ public void setState(boolean isLive) {
+ nativeSetState(nativeAndroidVideoTrackSource, isLive);
+ }
+
+ /**
+ * This function should be called before delivering any frame to determine if the frame should be
+ * dropped or what the cropping and scaling parameters should be. If the return value is null, the
+ * frame should be dropped, otherwise the frame should be adapted in accordance to the frame
+ * adaptation parameters before calling onFrameCaptured().
+ */
+ @Nullable
+ public VideoProcessor.FrameAdaptationParameters adaptFrame(VideoFrame frame) {
+ return nativeAdaptFrame(nativeAndroidVideoTrackSource, frame.getBuffer().getWidth(),
+ frame.getBuffer().getHeight(), frame.getRotation(), frame.getTimestampNs());
+ }
+
+ /**
+ * Pass an adapted frame to the native AndroidVideoTrackSource. Note that adaptFrame() is
+ * expected to be called first and that the passed frame conforms to those parameters.
+ */
+ public void onFrameCaptured(VideoFrame frame) {
+ nativeOnFrameCaptured(nativeAndroidVideoTrackSource, frame.getRotation(),
+ frame.getTimestampNs(), frame.getBuffer());
+ }
+
+ /**
+ * Calling this function will cause frames to be scaled down to the requested resolution. Also,
+ * frames will be cropped to match the requested aspect ratio, and frames will be dropped to match
+ * the requested fps.
+ */
+ public void adaptOutputFormat(VideoSource.AspectRatio targetLandscapeAspectRatio,
+ @Nullable Integer maxLandscapePixelCount, VideoSource.AspectRatio targetPortraitAspectRatio,
+ @Nullable Integer maxPortraitPixelCount, @Nullable Integer maxFps) {
+ nativeAdaptOutputFormat(nativeAndroidVideoTrackSource, targetLandscapeAspectRatio.width,
+ targetLandscapeAspectRatio.height, maxLandscapePixelCount, targetPortraitAspectRatio.width,
+ targetPortraitAspectRatio.height, maxPortraitPixelCount, maxFps);
+ }
+
+ // Marks the source as screencast content (affects native-side degradation preference).
+ public void setIsScreencast(boolean isScreencast) {
+ nativeSetIsScreencast(nativeAndroidVideoTrackSource, isScreencast);
+ }
+
+ // Invoked from native code to build the Java-side adaptation parameters object.
+ @CalledByNative
+ static VideoProcessor.FrameAdaptationParameters createFrameAdaptationParameters(int cropX,
+ int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight, long timestampNs,
+ boolean drop) {
+ return new VideoProcessor.FrameAdaptationParameters(
+ cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, timestampNs, drop);
+ }
+
+ private static native void nativeSetIsScreencast(
+ long nativeAndroidVideoTrackSource, boolean isScreencast);
+ private static native void nativeSetState(long nativeAndroidVideoTrackSource, boolean isLive);
+ private static native void nativeAdaptOutputFormat(long nativeAndroidVideoTrackSource,
+ int landscapeWidth, int landscapeHeight, @Nullable Integer maxLandscapePixelCount,
+ int portraitWidth, int portraitHeight, @Nullable Integer maxPortraitPixelCount,
+ @Nullable Integer maxFps);
+ @Nullable
+ private static native VideoProcessor.FrameAdaptationParameters nativeAdaptFrame(
+ long nativeAndroidVideoTrackSource, int width, int height, int rotation, long timestampNs);
+ private static native void nativeOnFrameCaptured(
+ long nativeAndroidVideoTrackSource, int rotation, long timestampNs, VideoFrame.Buffer buffer);
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeCapturerObserver.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeCapturerObserver.java
new file mode 100644
index 00000000..c195fb3a
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeCapturerObserver.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.VideoFrame;
+
+/**
+ * Used from native api and implements a simple VideoCapturer.CapturerObserver that feeds frames to
+ * a webrtc::jni::AndroidVideoTrackSource.
+ */
+class NativeCapturerObserver implements CapturerObserver {
+ private final NativeAndroidVideoTrackSource nativeAndroidVideoTrackSource;
+
+ @CalledByNative
+ public NativeCapturerObserver(long nativeSource) {
+ this.nativeAndroidVideoTrackSource = new NativeAndroidVideoTrackSource(nativeSource);
+ }
+
+ @Override
+ public void onCapturerStarted(boolean success) {
+ nativeAndroidVideoTrackSource.setState(success);
+ }
+
+ @Override
+ public void onCapturerStopped() {
+ nativeAndroidVideoTrackSource.setState(/* isLive= */ false);
+ }
+
+ @Override
+ public void onFrameCaptured(VideoFrame frame) {
+ final VideoProcessor.FrameAdaptationParameters parameters =
+ nativeAndroidVideoTrackSource.adaptFrame(frame);
+ if (parameters == null) {
+ // Drop frame.
+ return;
+ }
+
+ final VideoFrame.Buffer adaptedBuffer =
+ frame.getBuffer().cropAndScale(parameters.cropX, parameters.cropY, parameters.cropWidth,
+ parameters.cropHeight, parameters.scaleWidth, parameters.scaleHeight);
+ nativeAndroidVideoTrackSource.onFrameCaptured(
+ new VideoFrame(adaptedBuffer, frame.getRotation(), parameters.timestampNs));
+ adaptedBuffer.release();
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeLibrary.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeLibrary.java
new file mode 100644
index 00000000..531c2163
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeLibrary.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+class NativeLibrary {
+ private static String TAG = "NativeLibrary";
+
+ static class DefaultLoader implements NativeLibraryLoader {
+ @Override
+ public boolean load(String name) {
+ Logging.d(TAG, "Loading library: " + name);
+ System.loadLibrary(name);
+
+ // Not relevant, but kept for API compatibility.
+ return true;
+ }
+ }
+
+ private static Object lock = new Object();
+ private static boolean libraryLoaded;
+
+ /**
+ * Loads the native library. Clients should call PeerConnectionFactory.initialize. It will call
+ * this method for them.
+ */
+ static void initialize(NativeLibraryLoader loader, String libraryName) {
+ synchronized (lock) {
+ if (libraryLoaded) {
+ Logging.d(TAG, "Native library has already been loaded.");
+ return;
+ }
+ Logging.d(TAG, "Loading native library: " + libraryName);
+ libraryLoaded = loader.load(libraryName);
+ }
+ }
+
+ /** Returns true if the library has been loaded successfully. */
+ static boolean isLoaded() {
+ synchronized (lock) {
+ return libraryLoaded;
+ }
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeLibraryLoader.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeLibraryLoader.java
new file mode 100644
index 00000000..8bd7b3b2
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeLibraryLoader.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Interface for loading native libraries. A custom loader can be passed to
+ * PeerConnectionFactory.initialize.
+ */
+public interface NativeLibraryLoader {
+ /**
+ * Loads a native library with the given name.
+ *
+ * @return True on success
+ */
+ boolean load(String name);
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NativePeerConnectionFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NativePeerConnectionFactory.java
new file mode 100644
index 00000000..aeb91e17
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NativePeerConnectionFactory.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Factory for creating webrtc::jni::OwnedPeerConnection instances. */
+public interface NativePeerConnectionFactory {
+ /**
+ * Create a new webrtc::jni::OwnedPeerConnection instance and returns a pointer to it.
+ * The caller takes ownership of the object.
+ */
+ long createNativePeerConnection();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NetEqFactoryFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetEqFactoryFactory.java
new file mode 100644
index 00000000..8464324c
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetEqFactoryFactory.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Implementations of this interface can create a native {@code webrtc::NetEqFactory}.
+ */
+public interface NetEqFactoryFactory {
+ /**
+ * Returns a pointer to a {@code webrtc::NetEqFactory}. The caller takes ownership.
+ */
+ long createNativeNetEqFactory();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkChangeDetector.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkChangeDetector.java
new file mode 100644
index 00000000..ed3210e0
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkChangeDetector.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.List;
+
+/** Interface for detecting network changes */
+public interface NetworkChangeDetector {
+ // java equivalent of c++ android_network_monitor.h / NetworkType.
+ public static enum ConnectionType {
+ CONNECTION_UNKNOWN,
+ CONNECTION_ETHERNET,
+ CONNECTION_WIFI,
+ CONNECTION_5G,
+ CONNECTION_4G,
+ CONNECTION_3G,
+ CONNECTION_2G,
+ CONNECTION_UNKNOWN_CELLULAR,
+ CONNECTION_BLUETOOTH,
+ CONNECTION_VPN,
+ CONNECTION_NONE
+ }
+
+ public static class IPAddress {
+ public final byte[] address;
+
+ public IPAddress(byte[] address) {
+ this.address = address;
+ }
+
+ @CalledByNative("IPAddress")
+ private byte[] getAddress() {
+ return address;
+ }
+ }
+
+ /** Java version of NetworkMonitor.NetworkInformation */
+ public static class NetworkInformation {
+ public final String name;
+ public final ConnectionType type;
+ // Used to specify the underlying network type if the type is CONNECTION_VPN.
+ public final ConnectionType underlyingTypeForVpn;
+ public final long handle;
+ public final IPAddress[] ipAddresses;
+
+ public NetworkInformation(String name, ConnectionType type, ConnectionType underlyingTypeForVpn,
+ long handle, IPAddress[] addresses) {
+ this.name = name;
+ this.type = type;
+ this.underlyingTypeForVpn = underlyingTypeForVpn;
+ this.handle = handle;
+ this.ipAddresses = addresses;
+ }
+
+ @CalledByNative("NetworkInformation")
+ private IPAddress[] getIpAddresses() {
+ return ipAddresses;
+ }
+
+ @CalledByNative("NetworkInformation")
+ private ConnectionType getConnectionType() {
+ return type;
+ }
+
+ @CalledByNative("NetworkInformation")
+ private ConnectionType getUnderlyingConnectionTypeForVpn() {
+ return underlyingTypeForVpn;
+ }
+
+ @CalledByNative("NetworkInformation")
+ private long getHandle() {
+ return handle;
+ }
+
+ @CalledByNative("NetworkInformation")
+ private String getName() {
+ return name;
+ }
+ };
+
+ /** Observer interface by which observer is notified of network changes. */
+ public static abstract class Observer {
+ /** Called when default network changes. */
+ public abstract void onConnectionTypeChanged(ConnectionType newConnectionType);
+
+ public abstract void onNetworkConnect(NetworkInformation networkInfo);
+
+ public abstract void onNetworkDisconnect(long networkHandle);
+
+ /**
+ * Called when network preference change for a (list of) connection type(s). (e.g WIFI) is
+ * `NOT_PREFERRED` or `NEUTRAL`.
+ *
+ * note: `types` is a list of ConnectionTypes, so that all cellular types can be modified in
+ * one call.
+ */
+ public abstract void onNetworkPreference(
+ List<ConnectionType> types, @NetworkPreference int preference);
+
+ // Add default impl. for down-stream tests.
+ public String getFieldTrialsString() {
+ return "";
+ }
+ }
+
+ public ConnectionType getCurrentConnectionType();
+
+ public boolean supportNetworkCallback();
+
+ @Nullable public List<NetworkInformation> getActiveNetworkList();
+
+ public void destroy();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkChangeDetectorFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkChangeDetectorFactory.java
new file mode 100644
index 00000000..14e98b23
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkChangeDetectorFactory.java
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+
+public interface NetworkChangeDetectorFactory {
+ public NetworkChangeDetector create(NetworkChangeDetector.Observer observer, Context context);
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkControllerFactoryFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkControllerFactoryFactory.java
new file mode 100644
index 00000000..75e8fcaa
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkControllerFactoryFactory.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Factory for creating webrtc::NetworkControllerFactory instances. */
+public interface NetworkControllerFactoryFactory {
+ /**
+ * Dynamically allocates a webrtc::NetworkControllerFactory instance and returns a pointer to
+ * it. The caller takes ownership of the object.
+ */
+ public long createNativeNetworkControllerFactory();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkMonitor.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkMonitor.java
new file mode 100644
index 00000000..0bc461df
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkMonitor.java
@@ -0,0 +1,367 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.os.Build;
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.List;
+import org.webrtc.NetworkChangeDetector;
+
+/**
+ * Borrowed from Chromium's
+ * src/net/android/java/src/org/chromium/net/NetworkChangeNotifier.java
+ *
+ * Triggers updates to the underlying network state from OS networking events.
+ *
+ *
+ * <p>This class is thread-safe.
+ */
+public class NetworkMonitor {
+ /**
+ * Alerted when the connection type of the network changes. The alert is fired on the UI thread.
+ */
+ public interface NetworkObserver {
+ public void onConnectionTypeChanged(NetworkChangeDetector.ConnectionType connectionType);
+ }
+
+ private static final String TAG = "NetworkMonitor";
+
+ // Lazy initialization holder class idiom for static fields.
+ private static class InstanceHolder {
+ // We are storing application context so it is okay.
+ static final NetworkMonitor instance = new NetworkMonitor();
+ }
+
+ // Factory for creating NetworkChangeDetector.
+ private NetworkChangeDetectorFactory networkChangeDetectorFactory =
+ new NetworkChangeDetectorFactory() {
+ @Override
+ public NetworkChangeDetector create(
+ NetworkChangeDetector.Observer observer, Context context) {
+ return new NetworkMonitorAutoDetect(observer, context);
+ }
+ };
+
+ // Native observers of the connection type changes.
+ private final ArrayList<Long> nativeNetworkObservers;
+ // Java observers of the connection type changes.
+ private final ArrayList<NetworkObserver> networkObservers;
+
+ private final Object networkChangeDetectorLock = new Object();
+ // Object that detects the connection type changes and brings up mobile networks.
+ @Nullable private NetworkChangeDetector networkChangeDetector;
+ // Also guarded by autoDetectLock.
+ private int numObservers;
+
+ private volatile NetworkChangeDetector.ConnectionType currentConnectionType;
+
+ private NetworkMonitor() {
+ nativeNetworkObservers = new ArrayList<>();
+ networkObservers = new ArrayList<>();
+ numObservers = 0;
+ currentConnectionType = NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN;
+ }
+
+ /**
+ * Set the factory that will be used to create the network change detector.
+ * Needs to be called before the monitoring is starts.
+ */
+ public void setNetworkChangeDetectorFactory(NetworkChangeDetectorFactory factory) {
+ assertIsTrue(numObservers == 0);
+ this.networkChangeDetectorFactory = factory;
+ }
+
+ // TODO(sakal): Remove once downstream dependencies have been updated.
+ @Deprecated
+ public static void init(Context context) {}
+
+ /** Returns the singleton instance. This may be called from native or from Java code. */
+ @CalledByNative
+ public static NetworkMonitor getInstance() {
+ return InstanceHolder.instance;
+ }
+
+ private static void assertIsTrue(boolean condition) {
+ if (!condition) {
+ throw new AssertionError("Expected to be true");
+ }
+ }
+
+ /**
+ * Enables auto detection of the network state change and brings up mobile networks for using
+ * multi-networking. This requires the embedding app have the platform ACCESS_NETWORK_STATE and
+ * CHANGE_NETWORK_STATE permission.
+ */
+ public void startMonitoring(Context applicationContext, String fieldTrialsString) {
+ synchronized (networkChangeDetectorLock) {
+ ++numObservers;
+ if (networkChangeDetector == null) {
+ networkChangeDetector = createNetworkChangeDetector(applicationContext, fieldTrialsString);
+ }
+ currentConnectionType = networkChangeDetector.getCurrentConnectionType();
+ }
+ }
+
+ /** Deprecated, use startMonitoring with fieldTrialsStringString argument. */
+ @Deprecated
+ public void startMonitoring(Context applicationContext) {
+ startMonitoring(applicationContext, "");
+ }
+
+ /** Deprecated, pass in application context in startMonitoring instead. */
+ @Deprecated
+ public void startMonitoring() {
+ startMonitoring(ContextUtils.getApplicationContext(), "");
+ }
+
+ /**
+ * Enables auto detection of the network state change and brings up mobile networks for using
+ * multi-networking. This requires the embedding app have the platform ACCESS_NETWORK_STATE and
+ * CHANGE_NETWORK_STATE permission.
+ */
+ @CalledByNative
+ private void startMonitoring(
+ @Nullable Context applicationContext, long nativeObserver, String fieldTrialsString) {
+ Logging.d(TAG,
+ "Start monitoring with native observer " + nativeObserver
+ + " fieldTrialsString: " + fieldTrialsString);
+
+ startMonitoring(
+ applicationContext != null ? applicationContext : ContextUtils.getApplicationContext(),
+ fieldTrialsString);
+
+ synchronized (nativeNetworkObservers) {
+ nativeNetworkObservers.add(nativeObserver);
+ }
+ // The native observer expects a network list update after startMonitoring.
+ updateObserverActiveNetworkList(nativeObserver);
+ // currentConnectionType was updated in startMonitoring().
+ // Need to notify the native observers here.
+ notifyObserversOfConnectionTypeChange(currentConnectionType);
+ }
+
+ /**
+ * Stop network monitoring. If no one is monitoring networks, destroy and reset
+ * networkChangeDetector.
+ */
+ public void stopMonitoring() {
+ synchronized (networkChangeDetectorLock) {
+ if (--numObservers == 0) {
+ networkChangeDetector.destroy();
+ networkChangeDetector = null;
+ }
+ }
+ }
+
+ @CalledByNative
+ private void stopMonitoring(long nativeObserver) {
+ Logging.d(TAG, "Stop monitoring with native observer " + nativeObserver);
+ stopMonitoring();
+ synchronized (nativeNetworkObservers) {
+ nativeNetworkObservers.remove(nativeObserver);
+ }
+ }
+
+ // Returns true if network binding is supported on this platform.
+ @CalledByNative
+ private boolean networkBindingSupported() {
+ synchronized (networkChangeDetectorLock) {
+ return networkChangeDetector != null && networkChangeDetector.supportNetworkCallback();
+ }
+ }
+
+ @CalledByNative
+ private static int androidSdkInt() {
+ return Build.VERSION.SDK_INT;
+ }
+
+ private NetworkChangeDetector.ConnectionType getCurrentConnectionType() {
+ return currentConnectionType;
+ }
+
+ private NetworkChangeDetector createNetworkChangeDetector(
+ Context appContext, String fieldTrialsString) {
+ return networkChangeDetectorFactory.create(new NetworkChangeDetector.Observer() {
+ @Override
+ public void onConnectionTypeChanged(NetworkChangeDetector.ConnectionType newConnectionType) {
+ updateCurrentConnectionType(newConnectionType);
+ }
+
+ @Override
+ public void onNetworkConnect(NetworkChangeDetector.NetworkInformation networkInfo) {
+ notifyObserversOfNetworkConnect(networkInfo);
+ }
+
+ @Override
+ public void onNetworkDisconnect(long networkHandle) {
+ notifyObserversOfNetworkDisconnect(networkHandle);
+ }
+
+ @Override
+ public void onNetworkPreference(
+ List<NetworkChangeDetector.ConnectionType> types, int preference) {
+ notifyObserversOfNetworkPreference(types, preference);
+ }
+
+ @Override
+ public String getFieldTrialsString() {
+ return fieldTrialsString;
+ }
+ }, appContext);
+ }
+
+ private void updateCurrentConnectionType(NetworkChangeDetector.ConnectionType newConnectionType) {
+ currentConnectionType = newConnectionType;
+ notifyObserversOfConnectionTypeChange(newConnectionType);
+ }
+
+ /** Alerts all observers of a connection change. */
+ private void notifyObserversOfConnectionTypeChange(
+ NetworkChangeDetector.ConnectionType newConnectionType) {
+ List<Long> nativeObservers = getNativeNetworkObserversSync();
+ for (Long nativeObserver : nativeObservers) {
+ nativeNotifyConnectionTypeChanged(nativeObserver);
+ }
+ // This avoids calling external methods while locking on an object.
+ List javaObservers;
+ synchronized (networkObservers) {
+ javaObservers = new ArrayList<>(networkObservers);
+ }
+ for (NetworkObserver observer : javaObservers) {
+ observer.onConnectionTypeChanged(newConnectionType);
+ }
+ }
+
+ private void notifyObserversOfNetworkConnect(
+ NetworkChangeDetector.NetworkInformation networkInfo) {
+ List<Long> nativeObservers = getNativeNetworkObserversSync();
+ for (Long nativeObserver : nativeObservers) {
+ nativeNotifyOfNetworkConnect(nativeObserver, networkInfo);
+ }
+ }
+
+ private void notifyObserversOfNetworkDisconnect(long networkHandle) {
+ List<Long> nativeObservers = getNativeNetworkObserversSync();
+ for (Long nativeObserver : nativeObservers) {
+ nativeNotifyOfNetworkDisconnect(nativeObserver, networkHandle);
+ }
+ }
+
+ private void notifyObserversOfNetworkPreference(
+ List<NetworkChangeDetector.ConnectionType> types, int preference) {
+ List<Long> nativeObservers = getNativeNetworkObserversSync();
+ for (NetworkChangeDetector.ConnectionType type : types) {
+ for (Long nativeObserver : nativeObservers) {
+ nativeNotifyOfNetworkPreference(nativeObserver, type, preference);
+ }
+ }
+ }
+
+ private void updateObserverActiveNetworkList(long nativeObserver) {
+ List<NetworkChangeDetector.NetworkInformation> networkInfoList;
+ synchronized (networkChangeDetectorLock) {
+ networkInfoList =
+ (networkChangeDetector == null) ? null : networkChangeDetector.getActiveNetworkList();
+ }
+ if (networkInfoList == null) {
+ return;
+ }
+
+ NetworkChangeDetector.NetworkInformation[] networkInfos =
+ new NetworkChangeDetector.NetworkInformation[networkInfoList.size()];
+ networkInfos = networkInfoList.toArray(networkInfos);
+ nativeNotifyOfActiveNetworkList(nativeObserver, networkInfos);
+ }
+
+ private List<Long> getNativeNetworkObserversSync() {
+ synchronized (nativeNetworkObservers) {
+ return new ArrayList<>(nativeNetworkObservers);
+ }
+ }
+
+ /**
+ * Adds an observer for any connection type changes.
+ *
+ * @deprecated Use getInstance(appContext).addObserver instead.
+ */
+ @Deprecated
+ public static void addNetworkObserver(NetworkObserver observer) {
+ getInstance().addObserver(observer);
+ }
+
+ public void addObserver(NetworkObserver observer) {
+ synchronized (networkObservers) {
+ networkObservers.add(observer);
+ }
+ }
+
+ /**
+ * Removes an observer for any connection type changes.
+ *
+ * @deprecated Use getInstance(appContext).removeObserver instead.
+ */
+ @Deprecated
+ public static void removeNetworkObserver(NetworkObserver observer) {
+ getInstance().removeObserver(observer);
+ }
+
+ public void removeObserver(NetworkObserver observer) {
+ synchronized (networkObservers) {
+ networkObservers.remove(observer);
+ }
+ }
+
+ /** Checks if there currently is connectivity. */
+ public static boolean isOnline() {
+ NetworkChangeDetector.ConnectionType connectionType = getInstance().getCurrentConnectionType();
+ return connectionType != NetworkChangeDetector.ConnectionType.CONNECTION_NONE;
+ }
+
+ private native void nativeNotifyConnectionTypeChanged(long nativeAndroidNetworkMonitor);
+
+ private native void nativeNotifyOfNetworkConnect(
+ long nativeAndroidNetworkMonitor, NetworkChangeDetector.NetworkInformation networkInfo);
+
+ private native void nativeNotifyOfNetworkDisconnect(
+ long nativeAndroidNetworkMonitor, long networkHandle);
+
+ private native void nativeNotifyOfActiveNetworkList(
+ long nativeAndroidNetworkMonitor, NetworkChangeDetector.NetworkInformation[] networkInfos);
+
+ private native void nativeNotifyOfNetworkPreference(
+ long nativeAndroidNetworkMonitor, NetworkChangeDetector.ConnectionType type, int preference);
+
+ // For testing only.
+ @Nullable
+ NetworkChangeDetector getNetworkChangeDetector() {
+ synchronized (networkChangeDetectorLock) {
+ return networkChangeDetector;
+ }
+ }
+
+ // For testing only.
+ int getNumObservers() {
+ synchronized (networkChangeDetectorLock) {
+ return numObservers;
+ }
+ }
+
+ // For testing only.
+ static NetworkMonitorAutoDetect createAndSetAutoDetectForTest(
+ Context context, String fieldTrialsString) {
+ NetworkMonitor networkMonitor = getInstance();
+ NetworkChangeDetector networkChangeDetector =
+ networkMonitor.createNetworkChangeDetector(context, fieldTrialsString);
+ networkMonitor.networkChangeDetector = networkChangeDetector;
+ return (NetworkMonitorAutoDetect) networkChangeDetector;
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkMonitorAutoDetect.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkMonitorAutoDetect.java
new file mode 100644
index 00000000..a6f24c28
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkMonitorAutoDetect.java
@@ -0,0 +1,901 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.annotation.SuppressLint;
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.net.ConnectivityManager;
+import android.net.ConnectivityManager.NetworkCallback;
+import android.net.LinkAddress;
+import android.net.LinkProperties;
+import android.net.Network;
+import android.net.NetworkCapabilities;
+import android.net.NetworkInfo;
+import android.net.NetworkRequest;
+import android.net.wifi.WifiInfo;
+import android.net.wifi.WifiManager;
+import android.net.wifi.p2p.WifiP2pGroup;
+import android.net.wifi.p2p.WifiP2pManager;
+import android.os.Build;
+import android.telephony.TelephonyManager;
+import androidx.annotation.GuardedBy;
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.VisibleForTesting;
+import java.net.InetAddress;
+import java.net.NetworkInterface;
+import java.net.SocketException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * Borrowed from Chromium's
+ * src/net/android/java/src/org/chromium/net/NetworkChangeNotifierAutoDetect.java
+ *
+ * Used by the NetworkMonitor to listen to platform changes in connectivity. Note that use of
+ * this class requires that the app have the platform ACCESS_NETWORK_STATE permission.
+ */
+public class NetworkMonitorAutoDetect extends BroadcastReceiver implements NetworkChangeDetector {
+ static class NetworkState {
+ private final boolean connected;
+ // Defined from ConnectivityManager.TYPE_XXX for non-mobile; for mobile, it is
+ // further divided into 2G, 3G, or 4G from the subtype.
+ private final int type;
+ // Defined from NetworkInfo.subtype, which is one of the TelephonyManager.NETWORK_TYPE_XXXs.
+ // Will be useful to find the maximum bandwidth.
+ private final int subtype;
+ // When the type is TYPE_VPN, the following two fields specify the similar type and subtype as
+ // above for the underlying network that is used by the VPN.
+ private final int underlyingNetworkTypeForVpn;
+ private final int underlyingNetworkSubtypeForVpn;
+
+ public NetworkState(boolean connected, int type, int subtype, int underlyingNetworkTypeForVpn,
+ int underlyingNetworkSubtypeForVpn) {
+ this.connected = connected;
+ this.type = type;
+ this.subtype = subtype;
+ this.underlyingNetworkTypeForVpn = underlyingNetworkTypeForVpn;
+ this.underlyingNetworkSubtypeForVpn = underlyingNetworkSubtypeForVpn;
+ }
+
+ public boolean isConnected() {
+ return connected;
+ }
+
+ public int getNetworkType() {
+ return type;
+ }
+
+ public int getNetworkSubType() {
+ return subtype;
+ }
+
+ public int getUnderlyingNetworkTypeForVpn() {
+ return underlyingNetworkTypeForVpn;
+ }
+
+ public int getUnderlyingNetworkSubtypeForVpn() {
+ return underlyingNetworkSubtypeForVpn;
+ }
+ }
+
+ @SuppressLint("NewApi")
+ @VisibleForTesting()
+ class SimpleNetworkCallback extends NetworkCallback {
+ @GuardedBy("availableNetworks") final Set<Network> availableNetworks;
+
+ SimpleNetworkCallback(Set<Network> availableNetworks) {
+ this.availableNetworks = availableNetworks;
+ }
+
+ @Override
+ public void onAvailable(Network network) {
+ Logging.d(TAG,
+ "Network"
+ + " handle: " + networkToNetId(network)
+ + " becomes available: " + network.toString());
+
+ synchronized (availableNetworks) {
+ availableNetworks.add(network);
+ }
+ onNetworkChanged(network);
+ }
+
+ @Override
+ public void onCapabilitiesChanged(Network network, NetworkCapabilities networkCapabilities) {
+ // A capabilities change may indicate the ConnectionType has changed,
+ // so forward the new NetworkInformation along to the observer.
+ Logging.d(TAG,
+ "handle: " + networkToNetId(network)
+ + " capabilities changed: " + networkCapabilities.toString());
+ onNetworkChanged(network);
+ }
+
+ @Override
+ public void onLinkPropertiesChanged(Network network, LinkProperties linkProperties) {
+ // A link property change may indicate the IP address changes.
+ // so forward the new NetworkInformation to the observer.
+ //
+ // linkProperties.toString() has PII that cannot be redacted
+ // very reliably, so do not include in log.
+ Logging.d(TAG, "handle: " + networkToNetId(network) + " link properties changed");
+ onNetworkChanged(network);
+ }
+
+ @Override
+ public void onLosing(Network network, int maxMsToLive) {
+ // Tell the network is going to lose in MaxMsToLive milliseconds.
+ // We may use this signal later.
+ Logging.d(TAG,
+ "Network"
+ + " handle: " + networkToNetId(network) + ", " + network.toString()
+ + " is about to lose in " + maxMsToLive + "ms");
+ }
+
+ @Override
+ public void onLost(Network network) {
+ Logging.d(TAG,
+ "Network"
+ + " handle: " + networkToNetId(network) + ", " + network.toString()
+ + " is disconnected");
+
+ synchronized (availableNetworks) {
+ availableNetworks.remove(network);
+ }
+ observer.onNetworkDisconnect(networkToNetId(network));
+ }
+
+ private void onNetworkChanged(Network network) {
+ NetworkInformation networkInformation = connectivityManagerDelegate.networkToInfo(network);
+ if (networkInformation != null) {
+ observer.onNetworkConnect(networkInformation);
+ }
+ }
+ }
+
+ /** Queries the ConnectivityManager for information about the current connection. */
+ static class ConnectivityManagerDelegate {
+ /**
+ * Note: In some rare Android systems connectivityManager is null. We handle that
+ * gracefully below.
+ */
+ @Nullable private final ConnectivityManager connectivityManager;
+
+ /**
+ * Note: The availableNetworks set is instantiated in NetworkMonitorAutoDetect
+ * and the instance is mutated by SimpleNetworkCallback.
+ */
+ @NonNull @GuardedBy("availableNetworks") private final Set<Network> availableNetworks;
+
+ /** field trials */
+ private final boolean getAllNetworksFromCache;
+ private final boolean requestVPN;
+ private final boolean includeOtherUidNetworks;
+
+ ConnectivityManagerDelegate(
+ Context context, Set<Network> availableNetworks, String fieldTrialsString) {
+ this((ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE),
+ availableNetworks, fieldTrialsString);
+ }
+
+ @VisibleForTesting
+ ConnectivityManagerDelegate(ConnectivityManager connectivityManager,
+ Set availableNetworks, String fieldTrialsString) {
+ this.connectivityManager = connectivityManager;
+ this.availableNetworks = availableNetworks;
+ this.getAllNetworksFromCache =
+ checkFieldTrial(fieldTrialsString, "getAllNetworksFromCache", false);
+ this.requestVPN = checkFieldTrial(fieldTrialsString, "requestVPN", false);
+ this.includeOtherUidNetworks =
+ checkFieldTrial(fieldTrialsString, "includeOtherUidNetworks", false);
+ }
+
+ // Parses a "key:true" / "key:false" flag out of the field-trials string,
+ // returning `defaultValue` when the key is absent. Matching is a plain
+ // substring search, and "key:true" takes precedence over "key:false" when
+ // both occur.
+ private static boolean checkFieldTrial(
+ String fieldTrialsString, String key, boolean defaultValue) {
+ if (fieldTrialsString.contains(key + ":true")) {
+ return true;
+ } else if (fieldTrialsString.contains(key + ":false")) {
+ return false;
+ }
+ return defaultValue;
+ }
+
+ /**
+ * Returns connection type and status information about the current
+ * default network. Falls back to a disconnected, type-unknown state
+ * when the ConnectivityManager service is unavailable.
+ */
+ NetworkState getNetworkState() {
+ if (connectivityManager == null) {
+ return new NetworkState(false, -1, -1, -1, -1);
+ }
+ return getNetworkState(connectivityManager.getActiveNetworkInfo());
+ }
+
+ /**
+ * Returns connection type and status information about `network`.
+ * Only callable on Lollipop and newer releases.
+ */
+ @SuppressLint("NewApi")
+ NetworkState getNetworkState(@Nullable Network network) {
+ if (network == null || connectivityManager == null) {
+ return new NetworkState(false, -1, -1, -1, -1);
+ }
+ NetworkInfo networkInfo = connectivityManager.getNetworkInfo(network);
+ if (networkInfo == null) {
+ Logging.w(TAG, "Couldn't retrieve information from network " + network.toString());
+ return new NetworkState(false, -1, -1, -1, -1);
+ }
+ // The general logic of handling a VPN in this method is as follows. getNetworkInfo will
+ // return the info of the network with the same id as in `network` when it is registered via
+ // ConnectivityManager.registerNetworkAgent in Android. `networkInfo` may or may not indicate
+ // the type TYPE_VPN if `network` is a VPN. To reliably detect the VPN interface, we need to
+ // query the network capability as below in the case when networkInfo.getType() is not
+ // TYPE_VPN. On the other hand when networkInfo.getType() is TYPE_VPN, the only solution so
+ // far to obtain the underlying network information is to query the active network interface.
+ // However, the active network interface may not be used for the VPN, for example, if the VPN
+ // is restricted to WiFi by the implementation but the WiFi interface is currently turned
+ // off and the active interface is the Cell. Using directly the result from
+ // getActiveNetworkInfo may thus give the wrong interface information, and one should note
+ // that getActiveNetworkInfo would return the default network interface if the VPN does not
+ // specify its underlying networks in the implementation. Therefore, we need further compare
+ // `network` to the active network. If they are not the same network, we will have to fall
+ // back to report an unknown network.
+
+ if (networkInfo.getType() != ConnectivityManager.TYPE_VPN) {
+ // Note that getNetworkCapabilities returns null if the network is unknown.
+ NetworkCapabilities networkCapabilities =
+ connectivityManager.getNetworkCapabilities(network);
+ if (networkCapabilities == null
+ || !networkCapabilities.hasTransport(NetworkCapabilities.TRANSPORT_VPN)) {
+ return getNetworkState(networkInfo);
+ }
+ // When `network` is in fact a VPN after querying its capability but `networkInfo` is not of
+ // type TYPE_VPN, `networkInfo` contains the info for the underlying network, and we return
+ // a NetworkState constructed from it.
+ return new NetworkState(networkInfo.isConnected(), ConnectivityManager.TYPE_VPN, -1,
+ networkInfo.getType(), networkInfo.getSubtype());
+ }
+
+ // When `networkInfo` is of type TYPE_VPN, which implies `network` is a VPN, we return the
+ // NetworkState of the active network via getActiveNetworkInfo(), if `network` is the active
+ // network that supports the VPN. Otherwise, NetworkState of an unknown network with type -1
+ // will be returned.
+ //
+ // Note that getActiveNetwork and getActiveNetworkInfo return null if no default network is
+ // currently active.
+ if (networkInfo.getType() == ConnectivityManager.TYPE_VPN) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M
+ && network.equals(connectivityManager.getActiveNetwork())) {
+ // If a VPN network is in place, we can find the underlying network type via querying the
+ // active network info thanks to
+ // https://android.googlesource.com/platform/frameworks/base/+/d6a7980d
+ NetworkInfo underlyingActiveNetworkInfo = connectivityManager.getActiveNetworkInfo();
+ // We use the NetworkInfo of the underlying network if it is not of TYPE_VPN itself.
+ if (underlyingActiveNetworkInfo != null
+ && underlyingActiveNetworkInfo.getType() != ConnectivityManager.TYPE_VPN) {
+ return new NetworkState(networkInfo.isConnected(), ConnectivityManager.TYPE_VPN, -1,
+ underlyingActiveNetworkInfo.getType(), underlyingActiveNetworkInfo.getSubtype());
+ }
+ }
+ return new NetworkState(
+ networkInfo.isConnected(), ConnectivityManager.TYPE_VPN, -1, -1, -1);
+ }
+
+ return getNetworkState(networkInfo);
+ }
+
+ /**
+ * Returns connection type and status information gleaned from networkInfo. Note that to obtain
+ * the complete information about a VPN including the type of the underlying network, one should
+ * use the above method getNetworkState with a Network object. The trailing -1 values indicate
+ * that no underlying-VPN network type/subtype is known for this state.
+ */
+ private NetworkState getNetworkState(@Nullable NetworkInfo networkInfo) {
+ if (networkInfo == null || !networkInfo.isConnected()) {
+ return new NetworkState(false, -1, -1, -1, -1);
+ }
+ return new NetworkState(true, networkInfo.getType(), networkInfo.getSubtype(), -1, -1);
+ }
+
+ /**
+ * Returns all connected networks.
+ * Only callable on Lollipop and newer releases.
+ * Returns an empty array when the ConnectivityManager service is unavailable.
+ */
+ @SuppressLint("NewApi")
+ Network[] getAllNetworks() {
+ if (connectivityManager == null) {
+ return new Network[0];
+ }
+
+ // With the getAllNetworksFromCache field trial enabled, serve the answer
+ // from the set maintained by the network callback instead of querying
+ // ConnectivityManager directly.
+ if (supportNetworkCallback() && getAllNetworksFromCache) {
+ synchronized (availableNetworks) {
+ return availableNetworks.toArray(new Network[0]);
+ }
+ }
+
+ return connectivityManager.getAllNetworks();
+ }
+
+ @Nullable
+ List getActiveNetworkList() {
+ if (!supportNetworkCallback()) {
+ return null;
+ }
+ ArrayList netInfoList = new ArrayList();
+ for (Network network : getAllNetworks()) {
+ NetworkInformation info = networkToInfo(network);
+ if (info != null) {
+ netInfoList.add(info);
+ }
+ }
+ return netInfoList;
+ }
+
+ /**
+ * Returns the NetID of the current default network. Returns
+ * INVALID_NET_ID if no current default network connected.
+ * Only callable on Lollipop and newer releases.
+ *
+ * @throws RuntimeException if two connected Internet-capable networks of the
+ * same type are found while resolving the default network (an
+ * unsupported configuration; see comment below).
+ */
+ @SuppressLint("NewApi")
+ long getDefaultNetId() {
+ if (!supportNetworkCallback()) {
+ return INVALID_NET_ID;
+ }
+ // Android Lollipop had no API to get the default network; only an
+ // API to return the NetworkInfo for the default network. To
+ // determine the default network one can find the network with
+ // type matching that of the default network.
+ final NetworkInfo defaultNetworkInfo = connectivityManager.getActiveNetworkInfo();
+ if (defaultNetworkInfo == null) {
+ return INVALID_NET_ID;
+ }
+ final Network[] networks = getAllNetworks();
+ long defaultNetId = INVALID_NET_ID;
+ for (Network network : networks) {
+ // Skip special-purpose networks (e.g. IMS) with no Internet access.
+ if (!hasInternetCapability(network)) {
+ continue;
+ }
+ final NetworkInfo networkInfo = connectivityManager.getNetworkInfo(network);
+ if (networkInfo != null && networkInfo.getType() == defaultNetworkInfo.getType()) {
+ // There should not be multiple connected networks of the
+ // same type. At least as of Android Marshmallow this is
+ // not supported. If this becomes supported this assertion
+ // may trigger. At that point we could consider using
+ // ConnectivityManager.getDefaultNetwork() though this
+ // may give confusing results with VPNs and is only
+ // available with Android Marshmallow.
+ if (defaultNetId != INVALID_NET_ID) {
+ throw new RuntimeException(
+ "Multiple connected networks of same type are not supported.");
+ }
+ defaultNetId = networkToNetId(network);
+ }
+ }
+ return defaultNetId;
+ }
+
+ @SuppressLint("NewApi")
+ private @Nullable NetworkInformation networkToInfo(@Nullable Network network) {
+ if (network == null || connectivityManager == null) {
+ return null;
+ }
+ LinkProperties linkProperties = connectivityManager.getLinkProperties(network);
+ // getLinkProperties will return null if the network is unknown.
+ if (linkProperties == null) {
+ Logging.w(TAG, "Detected unknown network: " + network.toString());
+ return null;
+ }
+ if (linkProperties.getInterfaceName() == null) {
+ Logging.w(TAG, "Null interface name for network " + network.toString());
+ return null;
+ }
+
+ NetworkState networkState = getNetworkState(network);
+ NetworkChangeDetector.ConnectionType connectionType = getConnectionType(networkState);
+ if (connectionType == NetworkChangeDetector.ConnectionType.CONNECTION_NONE) {
+ // This may not be an error. The OS may signal a network event with connection type
+ // NONE when the network disconnects.
+ Logging.d(TAG, "Network " + network.toString() + " is disconnected");
+ return null;
+ }
+
+ // Some android device may return a CONNECTION_UNKNOWN_CELLULAR or CONNECTION_UNKNOWN type,
+ // which appears to be usable. Just log them here.
+ if (connectionType == NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN
+ || connectionType == NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN_CELLULAR) {
+ Logging.d(TAG, "Network " + network.toString() + " connection type is " + connectionType
+ + " because it has type " + networkState.getNetworkType() + " and subtype "
+ + networkState.getNetworkSubType());
+ }
+ // NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN if the network is not a VPN or the
+ // underlying network is
+ // unknown.
+ ConnectionType underlyingConnectionTypeForVpn =
+ getUnderlyingConnectionTypeForVpn(networkState);
+
+ NetworkInformation networkInformation = new NetworkInformation(
+ linkProperties.getInterfaceName(), connectionType, underlyingConnectionTypeForVpn,
+ networkToNetId(network), getIPAddresses(linkProperties));
+ return networkInformation;
+ }
+
+ /**
+ * Returns true if {@code network} can provide Internet access. Can be used to
+ * ignore specialized networks (e.g. IMS, FOTA). Returns false when the
+ * ConnectivityManager service or the network's capabilities are unavailable.
+ */
+ @SuppressLint("NewApi")
+ boolean hasInternetCapability(Network network) {
+ if (connectivityManager == null) {
+ return false;
+ }
+ // getNetworkCapabilities returns null for unknown networks.
+ final NetworkCapabilities capabilities = connectivityManager.getNetworkCapabilities(network);
+ return capabilities != null
+ && capabilities.hasCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET);
+ }
+
+ @SuppressLint("NewApi")
+ @VisibleForTesting()
+ NetworkRequest createNetworkRequest() {
+ // Requests the following capabilities by default: NOT_VPN, NOT_RESTRICTED, TRUSTED
+ NetworkRequest.Builder builder =
+ new NetworkRequest.Builder().addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET);
+
+ if (requestVPN) {
+ builder.removeCapability(NetworkCapabilities.NET_CAPABILITY_NOT_VPN);
+ }
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S && includeOtherUidNetworks) {
+ builder.setIncludeOtherUidNetworks(true);
+ }
+ return builder.build();
+ }
+
+ /** Only callable on Lollipop and newer releases. */
+ @SuppressLint("NewApi")
+ public void registerNetworkCallback(NetworkCallback networkCallback) {
+ connectivityManager.registerNetworkCallback(createNetworkRequest(), networkCallback);
+ }
+
+ /** Only callable on Lollipop and newer releases. */
+ @SuppressLint("NewApi")
+ public void requestMobileNetwork(NetworkCallback networkCallback) {
+ NetworkRequest.Builder builder = new NetworkRequest.Builder();
+ builder.addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET)
+ .addTransportType(NetworkCapabilities.TRANSPORT_CELLULAR);
+ connectivityManager.requestNetwork(builder.build(), networkCallback);
+ }
+
+ // Collects the raw byte form of every link address on `linkProperties`
+ // into an IPAddress array (one entry per LinkAddress, in iteration order).
+ @SuppressLint("NewApi")
+ IPAddress[] getIPAddresses(LinkProperties linkProperties) {
+ IPAddress[] ipAddresses = new IPAddress[linkProperties.getLinkAddresses().size()];
+ int i = 0;
+ for (LinkAddress linkAddress : linkProperties.getLinkAddresses()) {
+ ipAddresses[i] = new IPAddress(linkAddress.getAddress().getAddress());
+ ++i;
+ }
+ return ipAddresses;
+ }
+
+ // Unregisters a previously registered NetworkCallback; no-op when the
+ // ConnectivityManager service is unavailable.
+ @SuppressLint("NewApi")
+ public void releaseCallback(NetworkCallback networkCallback) {
+ if (supportNetworkCallback()) {
+ Logging.d(TAG, "Unregister network callback");
+ connectivityManager.unregisterNetworkCallback(networkCallback);
+ }
+ }
+
+ // Network callbacks are usable whenever a ConnectivityManager is available.
+ public boolean supportNetworkCallback() {
+ return connectivityManager != null;
+ }
+ }
+
+ /** Queries the WifiManager for SSID of the current Wifi connection. */
+ static class WifiManagerDelegate {
+ @Nullable private final Context context;
+ WifiManagerDelegate(Context context) {
+ this.context = context;
+ }
+
+ // For testing.
+ WifiManagerDelegate() {
+ // All the methods below should be overridden.
+ context = null;
+ }
+
+ // Returns the SSID of the current Wifi connection, or "" if unavailable.
+ // Passing a null receiver to registerReceiver only fetches the current
+ // sticky NETWORK_STATE_CHANGED_ACTION broadcast; no receiver stays
+ // registered afterwards.
+ String getWifiSSID() {
+ final Intent intent = context.registerReceiver(
+ null, new IntentFilter(WifiManager.NETWORK_STATE_CHANGED_ACTION));
+ if (intent != null) {
+ final WifiInfo wifiInfo = intent.getParcelableExtra(WifiManager.EXTRA_WIFI_INFO);
+ if (wifiInfo != null) {
+ final String ssid = wifiInfo.getSSID();
+ if (ssid != null) {
+ return ssid;
+ }
+ }
+ }
+ return "";
+ }
+ }
+
+ /** Maintains the information about wifi direct (aka WifiP2p) networks. */
+ static class WifiDirectManagerDelegate extends BroadcastReceiver {
+ // Network "handle" for the Wifi P2p network. We have to bind to the default network id
+ // (NETWORK_UNSPECIFIED) for these addresses.
+ private static final int WIFI_P2P_NETWORK_HANDLE = 0;
+ private final Context context;
+ private final NetworkChangeDetector.Observer observer;
+ // Network information about a WifiP2p (aka WiFi-Direct) network, or null if no such network is
+ // connected.
+ @Nullable private NetworkInformation wifiP2pNetworkInfo;
+
+ WifiDirectManagerDelegate(NetworkChangeDetector.Observer observer, Context context) {
+ this.context = context;
+ this.observer = observer;
+ IntentFilter intentFilter = new IntentFilter();
+ intentFilter.addAction(WifiP2pManager.WIFI_P2P_STATE_CHANGED_ACTION);
+ intentFilter.addAction(WifiP2pManager.WIFI_P2P_CONNECTION_CHANGED_ACTION);
+ context.registerReceiver(this, intentFilter);
+ if (Build.VERSION.SDK_INT > Build.VERSION_CODES.P) {
+ // Starting with Android Q (10), WIFI_P2P_CONNECTION_CHANGED_ACTION is no longer sticky.
+ // This means we have to explicitly request WifiP2pGroup info during initialization in order
+ // to get this data if we are already connected to a Wi-Fi Direct network.
+ WifiP2pManager manager =
+ (WifiP2pManager) context.getSystemService(Context.WIFI_P2P_SERVICE);
+ WifiP2pManager.Channel channel =
+ manager.initialize(context, context.getMainLooper(), null /* listener */);
+ manager.requestGroupInfo(channel, wifiP2pGroup -> { onWifiP2pGroupChange(wifiP2pGroup); });
+ }
+ }
+
+ // BroadcastReceiver
+ @Override
+ @SuppressLint("InlinedApi")
+ public void onReceive(Context context, Intent intent) {
+ if (WifiP2pManager.WIFI_P2P_CONNECTION_CHANGED_ACTION.equals(intent.getAction())) {
+ WifiP2pGroup wifiP2pGroup = intent.getParcelableExtra(WifiP2pManager.EXTRA_WIFI_P2P_GROUP);
+ onWifiP2pGroupChange(wifiP2pGroup);
+ } else if (WifiP2pManager.WIFI_P2P_STATE_CHANGED_ACTION.equals(intent.getAction())) {
+ int state = intent.getIntExtra(WifiP2pManager.EXTRA_WIFI_STATE, 0 /* default to unknown */);
+ onWifiP2pStateChange(state);
+ }
+ }
+
+ /** Releases the broadcast receiver. */
+ public void release() {
+ context.unregisterReceiver(this);
+ }
+
+ public List getActiveNetworkList() {
+ if (wifiP2pNetworkInfo != null) {
+ return Collections.singletonList(wifiP2pNetworkInfo);
+ }
+
+ return Collections.emptyList();
+ }
+
+ /** Handle a change notification about the wifi p2p group. */
+ private void onWifiP2pGroupChange(@Nullable WifiP2pGroup wifiP2pGroup) {
+ if (wifiP2pGroup == null || wifiP2pGroup.getInterface() == null) {
+ return;
+ }
+
+ // Resolve the OS-level network interface for the P2p group so its IP
+ // addresses can be enumerated below.
+ NetworkInterface wifiP2pInterface;
+ try {
+ wifiP2pInterface = NetworkInterface.getByName(wifiP2pGroup.getInterface());
+ } catch (SocketException e) {
+ Logging.e(TAG, "Unable to get WifiP2p network interface", e);
+ return;
+ }
+
+ List interfaceAddresses = Collections.list(wifiP2pInterface.getInetAddresses());
+ IPAddress[] ipAddresses = new IPAddress[interfaceAddresses.size()];
+ for (int i = 0; i < interfaceAddresses.size(); ++i) {
+ ipAddresses[i] = new IPAddress(interfaceAddresses.get(i).getAddress());
+ }
+
+ // Report the P2p network as a Wifi connection bound to the fixed
+ // WIFI_P2P_NETWORK_HANDLE pseudo-handle (see field comment above).
+ wifiP2pNetworkInfo = new NetworkInformation(wifiP2pGroup.getInterface(),
+ NetworkChangeDetector.ConnectionType.CONNECTION_WIFI,
+ NetworkChangeDetector.ConnectionType.CONNECTION_NONE, WIFI_P2P_NETWORK_HANDLE,
+ ipAddresses);
+ observer.onNetworkConnect(wifiP2pNetworkInfo);
+ }
+
+ /** Handle a state change notification about wifi p2p. */
+ private void onWifiP2pStateChange(int state) {
+ // When Wifi P2p is disabled, drop the cached network info and tell the
+ // observer the pseudo-network is gone.
+ if (state == WifiP2pManager.WIFI_P2P_STATE_DISABLED) {
+ wifiP2pNetworkInfo = null;
+ observer.onNetworkDisconnect(WIFI_P2P_NETWORK_HANDLE);
+ }
+ }
+ }
+
+ private static final long INVALID_NET_ID = -1;
+ private static final String TAG = "NetworkMonitorAutoDetect";
+
+ // Observer for the connection type change.
+ private final NetworkChangeDetector.Observer observer;
+ private final IntentFilter intentFilter;
+ private final Context context;
+ // Used to request mobile network. It does not do anything except for keeping
+ // the callback for releasing the request.
+ @Nullable private final NetworkCallback mobileNetworkCallback;
+ // Used to receive updates on all networks.
+ @Nullable private final NetworkCallback allNetworkCallback;
+ // connectivityManagerDelegate and wifiManagerDelegate are only non-final for testing.
+ private ConnectivityManagerDelegate connectivityManagerDelegate;
+ private WifiManagerDelegate wifiManagerDelegate;
+ private WifiDirectManagerDelegate wifiDirectManagerDelegate;
+ private static boolean includeWifiDirect;
+
+ @GuardedBy("availableNetworks") final Set availableNetworks = new HashSet<>();
+
+ private boolean isRegistered;
+ private NetworkChangeDetector.ConnectionType connectionType;
+ private String wifiSSID;
+
+ /** Constructs a NetworkMonitorAutoDetect. Should only be called on UI thread. */
+ @SuppressLint("NewApi")
+ public NetworkMonitorAutoDetect(NetworkChangeDetector.Observer observer, Context context) {
+ this.observer = observer;
+ this.context = context;
+ String fieldTrialsString = observer.getFieldTrialsString();
+ connectivityManagerDelegate =
+ new ConnectivityManagerDelegate(context, availableNetworks, fieldTrialsString);
+ wifiManagerDelegate = new WifiManagerDelegate(context);
+
+ final NetworkState networkState = connectivityManagerDelegate.getNetworkState();
+ connectionType = getConnectionType(networkState);
+ wifiSSID = getWifiSSID(networkState);
+ intentFilter = new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION);
+
+ if (includeWifiDirect) {
+ wifiDirectManagerDelegate = new WifiDirectManagerDelegate(observer, context);
+ }
+
+ registerReceiver();
+ if (connectivityManagerDelegate.supportNetworkCallback()) {
+ // On Android 6.0.0, the WRITE_SETTINGS permission is necessary for
+ // requestNetwork, so it will fail. This was fixed in Android 6.0.1.
+ NetworkCallback tempNetworkCallback = new NetworkCallback();
+ try {
+ connectivityManagerDelegate.requestMobileNetwork(tempNetworkCallback);
+ } catch (java.lang.SecurityException e) {
+ Logging.w(TAG, "Unable to obtain permission to request a cellular network.");
+ tempNetworkCallback = null;
+ }
+ mobileNetworkCallback = tempNetworkCallback;
+ allNetworkCallback = new SimpleNetworkCallback(availableNetworks);
+ connectivityManagerDelegate.registerNetworkCallback(allNetworkCallback);
+ } else {
+ mobileNetworkCallback = null;
+ allNetworkCallback = null;
+ }
+ }
+
+ /** Enables WifiDirectManager. */
+ public static void setIncludeWifiDirect(boolean enable) {
+ includeWifiDirect = enable;
+ }
+
+ @Override
+ public boolean supportNetworkCallback() {
+ return connectivityManagerDelegate.supportNetworkCallback();
+ }
+
+ /**
+ * Allows overriding the ConnectivityManagerDelegate for tests.
+ */
+ void setConnectivityManagerDelegateForTests(ConnectivityManagerDelegate delegate) {
+ connectivityManagerDelegate = delegate;
+ }
+
+ /**
+ * Allows overriding the WifiManagerDelegate for tests.
+ */
+ void setWifiManagerDelegateForTests(WifiManagerDelegate delegate) {
+ wifiManagerDelegate = delegate;
+ }
+
+ /**
+ * Returns whether the object has registered to receive network connectivity intents.
+ * Visible for testing.
+ */
+ boolean isReceiverRegisteredForTesting() {
+ return isRegistered;
+ }
+
+ @Override
+ @Nullable
+ public List getActiveNetworkList() {
+ List connectivityManagerList =
+ connectivityManagerDelegate.getActiveNetworkList();
+ if (connectivityManagerList == null) {
+ return null;
+ }
+ ArrayList result =
+ new ArrayList(connectivityManagerList);
+ if (wifiDirectManagerDelegate != null) {
+ result.addAll(wifiDirectManagerDelegate.getActiveNetworkList());
+ }
+ return result;
+ }
+
+ @Override
+ public void destroy() {
+ // Tear down in reverse order of construction: network callbacks first,
+ // then the WifiDirect receiver, finally the connectivity broadcast receiver.
+ if (allNetworkCallback != null) {
+ connectivityManagerDelegate.releaseCallback(allNetworkCallback);
+ }
+ if (mobileNetworkCallback != null) {
+ connectivityManagerDelegate.releaseCallback(mobileNetworkCallback);
+ }
+ if (wifiDirectManagerDelegate != null) {
+ wifiDirectManagerDelegate.release();
+ }
+ unregisterReceiver();
+ }
+
+ /**
+ * Registers this instance as a BroadcastReceiver for CONNECTIVITY_ACTION
+ * in the given context. Idempotent: calls made while already registered
+ * are no-ops.
+ */
+ private void registerReceiver() {
+ if (isRegistered)
+ return;
+
+ isRegistered = true;
+ context.registerReceiver(this, intentFilter);
+ }
+
+ /**
+ * Unregisters the BroadcastReceiver in the given context. Idempotent:
+ * calls made while not registered are no-ops.
+ */
+ private void unregisterReceiver() {
+ if (!isRegistered)
+ return;
+
+ isRegistered = false;
+ context.unregisterReceiver(this);
+ }
+
+ public NetworkState getCurrentNetworkState() {
+ return connectivityManagerDelegate.getNetworkState();
+ }
+
+ /**
+ * Returns NetID of device's current default connected network used for
+ * communication.
+ * Only implemented on Lollipop and newer releases, returns INVALID_NET_ID
+ * when not implemented.
+ */
+ public long getDefaultNetId() {
+ return connectivityManagerDelegate.getDefaultNetId();
+ }
+
+ // Maps an Android ConnectivityManager network type (plus TelephonyManager
+ // subtype for mobile networks) onto WebRTC's ConnectionType enum. Returns
+ // CONNECTION_NONE for disconnected networks regardless of type.
+ private static NetworkChangeDetector.ConnectionType getConnectionType(
+ boolean isConnected, int networkType, int networkSubtype) {
+ if (!isConnected) {
+ return NetworkChangeDetector.ConnectionType.CONNECTION_NONE;
+ }
+
+ switch (networkType) {
+ case ConnectivityManager.TYPE_ETHERNET:
+ return NetworkChangeDetector.ConnectionType.CONNECTION_ETHERNET;
+ case ConnectivityManager.TYPE_WIFI:
+ return NetworkChangeDetector.ConnectionType.CONNECTION_WIFI;
+ case ConnectivityManager.TYPE_WIMAX:
+ return NetworkChangeDetector.ConnectionType.CONNECTION_4G;
+ case ConnectivityManager.TYPE_BLUETOOTH:
+ return NetworkChangeDetector.ConnectionType.CONNECTION_BLUETOOTH;
+ case ConnectivityManager.TYPE_MOBILE:
+ case ConnectivityManager.TYPE_MOBILE_DUN:
+ case ConnectivityManager.TYPE_MOBILE_HIPRI:
+ // Use information from TelephonyManager to classify the connection
+ // into 2G / 3G / 4G / 5G generations.
+ switch (networkSubtype) {
+ case TelephonyManager.NETWORK_TYPE_GPRS:
+ case TelephonyManager.NETWORK_TYPE_EDGE:
+ case TelephonyManager.NETWORK_TYPE_CDMA:
+ case TelephonyManager.NETWORK_TYPE_1xRTT:
+ case TelephonyManager.NETWORK_TYPE_IDEN:
+ case TelephonyManager.NETWORK_TYPE_GSM:
+ return NetworkChangeDetector.ConnectionType.CONNECTION_2G;
+ case TelephonyManager.NETWORK_TYPE_UMTS:
+ case TelephonyManager.NETWORK_TYPE_EVDO_0:
+ case TelephonyManager.NETWORK_TYPE_EVDO_A:
+ case TelephonyManager.NETWORK_TYPE_HSDPA:
+ case TelephonyManager.NETWORK_TYPE_HSUPA:
+ case TelephonyManager.NETWORK_TYPE_HSPA:
+ case TelephonyManager.NETWORK_TYPE_EVDO_B:
+ case TelephonyManager.NETWORK_TYPE_EHRPD:
+ case TelephonyManager.NETWORK_TYPE_HSPAP:
+ case TelephonyManager.NETWORK_TYPE_TD_SCDMA:
+ return NetworkChangeDetector.ConnectionType.CONNECTION_3G;
+ case TelephonyManager.NETWORK_TYPE_LTE:
+ case TelephonyManager.NETWORK_TYPE_IWLAN:
+ return NetworkChangeDetector.ConnectionType.CONNECTION_4G;
+ case TelephonyManager.NETWORK_TYPE_NR:
+ return NetworkChangeDetector.ConnectionType.CONNECTION_5G;
+ default:
+ return NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN_CELLULAR;
+ }
+ case ConnectivityManager.TYPE_VPN:
+ return NetworkChangeDetector.ConnectionType.CONNECTION_VPN;
+ default:
+ return NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN;
+ }
+ }
+
+ // Convenience overload that reads the relevant fields out of a NetworkState.
+ public static NetworkChangeDetector.ConnectionType getConnectionType(NetworkState networkState) {
+ return getConnectionType(networkState.isConnected(), networkState.getNetworkType(),
+ networkState.getNetworkSubType());
+ }
+
+ @Override
+ public NetworkChangeDetector.ConnectionType getCurrentConnectionType() {
+ // Snapshot of the default network's connection type at call time.
+ return getConnectionType(getCurrentNetworkState());
+ }
+
+ // For a VPN network, classifies the transport underneath the VPN; returns
+ // CONNECTION_NONE for non-VPN networks, where no underlying network applies.
+ private static NetworkChangeDetector.ConnectionType getUnderlyingConnectionTypeForVpn(
+ NetworkState networkState) {
+ if (networkState.getNetworkType() != ConnectivityManager.TYPE_VPN) {
+ return NetworkChangeDetector.ConnectionType.CONNECTION_NONE;
+ }
+ return getConnectionType(networkState.isConnected(),
+ networkState.getUnderlyingNetworkTypeForVpn(),
+ networkState.getUnderlyingNetworkSubtypeForVpn());
+ }
+
+ // Returns the SSID for the current Wifi connection, or "" when the active
+ // connection is not Wifi (the SSID is only meaningful for Wifi).
+ private String getWifiSSID(NetworkState networkState) {
+ if (getConnectionType(networkState) != NetworkChangeDetector.ConnectionType.CONNECTION_WIFI)
+ return "";
+ return wifiManagerDelegate.getWifiSSID();
+ }
+
+ // BroadcastReceiver
+ @Override
+ public void onReceive(Context context, Intent intent) {
+ // Invoked for CONNECTIVITY_ACTION broadcasts (see intentFilter);
+ // re-evaluate the current connection type on every delivery.
+ final NetworkState networkState = getCurrentNetworkState();
+ if (ConnectivityManager.CONNECTIVITY_ACTION.equals(intent.getAction())) {
+ connectionTypeChanged(networkState);
+ }
+ }
+
+ // Notifies the observer only when the effective connection actually changed:
+ // either a different connection type, or the same Wifi type with a new SSID.
+ private void connectionTypeChanged(NetworkState networkState) {
+ NetworkChangeDetector.ConnectionType newConnectionType = getConnectionType(networkState);
+ String newWifiSSID = getWifiSSID(networkState);
+ if (newConnectionType == connectionType && newWifiSSID.equals(wifiSSID))
+ return;
+
+ connectionType = newConnectionType;
+ wifiSSID = newWifiSSID;
+ Logging.d(TAG, "Network connectivity changed, type is: " + connectionType);
+ observer.onConnectionTypeChanged(newConnectionType);
+ }
+
+ /**
+ * Extracts NetID of network on Lollipop and NetworkHandle (which is mungled
+ * NetID) on Marshmallow and newer releases. Only available on Lollipop and
+ * newer releases. Returns long since getNetworkHandle returns long.
+ */
+ @SuppressLint("NewApi")
+ private static long networkToNetId(Network network) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ return network.getNetworkHandle();
+ }
+
+ // NOTE(honghaiz): This depends on Android framework implementation details.
+ // These details cannot change because Lollipop has been released.
+ // On Lollipop this relies on Network.toString() yielding the decimal NetID.
+ return Integer.parseInt(network.toString());
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkPreference.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkPreference.java
new file mode 100644
index 00000000..b96ad89c
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkPreference.java
@@ -0,0 +1,11 @@
+
+// IntelliJ API Decompiler stub source generated from a class file
+// Implementation of methods is not available
+
+package org.webrtc;
+
+@java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.SOURCE)
+public @interface NetworkPreference {
+ int NEUTRAL = 0;
+ int NOT_PREFERRED = -1;
+}
\ No newline at end of file
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkStatePredictorFactoryFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkStatePredictorFactoryFactory.java
new file mode 100644
index 00000000..bf965bcb
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkStatePredictorFactoryFactory.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Factory for creating webrtc::NetworkStatePredictorFactory instances. */
+public interface NetworkStatePredictorFactoryFactory {
+ /**
+ * Dynamically allocates a webrtc::NetworkStatePredictorFactory instance and returns a pointer to
+ * it. The caller takes ownership of the object.
+ */
+ public long createNativeNetworkStatePredictorFactory();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/OWNERS b/webrtc_player/android/zlm/src/main/java/org/webrtc/OWNERS
new file mode 100644
index 00000000..109bea27
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/OWNERS
@@ -0,0 +1,2 @@
+magjed@webrtc.org
+xalep@webrtc.org
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/PeerConnection.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/PeerConnection.java
new file mode 100644
index 00000000..d530bc2c
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/PeerConnection.java
@@ -0,0 +1,1316 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.webrtc.CandidatePairChangeEvent;
+import org.webrtc.DataChannel;
+import org.webrtc.MediaStreamTrack;
+import org.webrtc.RtpTransceiver;
+
+/**
+ * Java-land version of the PeerConnection APIs; wraps the C++ API
+ * http://www.webrtc.org/reference/native-apis, which in turn is inspired by the
+ * JS APIs: http://dev.w3.org/2011/webrtc/editor/webrtc.html and
+ * http://www.w3.org/TR/mediacapture-streams/
+ */
+public class PeerConnection {
+ /** Tracks PeerConnectionInterface::IceGatheringState */
+ public enum IceGatheringState {
+ NEW,
+ GATHERING,
+ COMPLETE;
+
+ @CalledByNative("IceGatheringState")
+ static IceGatheringState fromNativeIndex(int nativeIndex) {
+ // Ordinals must stay in sync with the corresponding C++ enum ordering.
+ return values()[nativeIndex];
+ }
+ }
+
+ /** Tracks PeerConnectionInterface::IceConnectionState */
+ public enum IceConnectionState {
+ NEW,
+ CHECKING,
+ CONNECTED,
+ COMPLETED,
+ FAILED,
+ DISCONNECTED,
+ CLOSED;
+
+ @CalledByNative("IceConnectionState")
+ static IceConnectionState fromNativeIndex(int nativeIndex) {
+ // Ordinals must stay in sync with the corresponding C++ enum ordering.
+ return values()[nativeIndex];
+ }
+ }
+
+ /** Tracks PeerConnectionInterface::PeerConnectionState */
+ public enum PeerConnectionState {
+ NEW,
+ CONNECTING,
+ CONNECTED,
+ DISCONNECTED,
+ FAILED,
+ CLOSED;
+
+ @CalledByNative("PeerConnectionState")
+ static PeerConnectionState fromNativeIndex(int nativeIndex) {
+ // Ordinals must stay in sync with the corresponding C++ enum ordering.
+ return values()[nativeIndex];
+ }
+ }
+
+ /** Tracks PeerConnectionInterface::TlsCertPolicy */
+ public enum TlsCertPolicy {
+ // Validate the TLS certificate.
+ TLS_CERT_POLICY_SECURE,
+ // Skip certificate validation (insecure; per the name, no checking is done).
+ TLS_CERT_POLICY_INSECURE_NO_CHECK,
+ }
+
+ /** Tracks PeerConnectionInterface::SignalingState */
+ public enum SignalingState {
+ STABLE,
+ HAVE_LOCAL_OFFER,
+ HAVE_LOCAL_PRANSWER,
+ HAVE_REMOTE_OFFER,
+ HAVE_REMOTE_PRANSWER,
+ CLOSED;
+
+ @CalledByNative("SignalingState")
+ static SignalingState fromNativeIndex(int nativeIndex) {
+ // Ordinals must stay in sync with the corresponding C++ enum ordering.
+ return values()[nativeIndex];
+ }
+ }
+
+ /** Java version of PeerConnectionObserver. */
+ public static interface Observer {
+ /** Triggered when the SignalingState changes. */
+ @CalledByNative("Observer") void onSignalingChange(SignalingState newState);
+
+ /** Triggered when the IceConnectionState changes. */
+ @CalledByNative("Observer") void onIceConnectionChange(IceConnectionState newState);
+
+ /* Triggered when the standard-compliant state transition of IceConnectionState happens. */
+ @CalledByNative("Observer")
+ default void onStandardizedIceConnectionChange(IceConnectionState newState) {}
+
+ /** Triggered when the PeerConnectionState changes. */
+ @CalledByNative("Observer")
+ default void onConnectionChange(PeerConnectionState newState) {}
+
+ /** Triggered when the ICE connection receiving status changes. */
+ @CalledByNative("Observer") void onIceConnectionReceivingChange(boolean receiving);
+
+ /** Triggered when the IceGatheringState changes. */
+ @CalledByNative("Observer") void onIceGatheringChange(IceGatheringState newState);
+
+ /** Triggered when a new ICE candidate has been found. */
+ @CalledByNative("Observer") void onIceCandidate(IceCandidate candidate);
+
+ /** Triggered when gathering of an ICE candidate failed. */
+ default @CalledByNative("Observer") void onIceCandidateError(IceCandidateErrorEvent event) {}
+
+ /** Triggered when some ICE candidates have been removed. */
+ @CalledByNative("Observer") void onIceCandidatesRemoved(IceCandidate[] candidates);
+
+ /** Triggered when the ICE candidate pair is changed. */
+ @CalledByNative("Observer")
+ default void onSelectedCandidatePairChanged(CandidatePairChangeEvent event) {}
+
+ /** Triggered when media is received on a new stream from remote peer. */
+ @CalledByNative("Observer") void onAddStream(MediaStream stream);
+
+ /** Triggered when a remote peer closes a stream. */
+ @CalledByNative("Observer") void onRemoveStream(MediaStream stream);
+
+ /** Triggered when a remote peer opens a DataChannel. */
+ @CalledByNative("Observer") void onDataChannel(DataChannel dataChannel);
+
+ /** Triggered when renegotiation is necessary. */
+ @CalledByNative("Observer") void onRenegotiationNeeded();
+
+ /**
+ * Triggered when a new track is signaled by the remote peer, as a result of
+ * setRemoteDescription.
+ */
+ @CalledByNative("Observer")
+ default void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams){};
+
+ /**
+ * Triggered when a previously added remote track is removed by the remote
+ * peer, as a result of setRemoteDescription.
+ */
+ @CalledByNative("Observer") default void onRemoveTrack(RtpReceiver receiver){};
+
+ /**
+ * Triggered when the signaling from SetRemoteDescription indicates that a transceiver
+ * will be receiving media from a remote endpoint. This is only called if UNIFIED_PLAN
+ * semantics are specified. The transceiver will be disposed automatically.
+ */
+ @CalledByNative("Observer") default void onTrack(RtpTransceiver transceiver){};
+ }
+
+ /** Java version of PeerConnectionInterface.IceServer. */
+ public static class IceServer {
+ // List of URIs associated with this server. Valid formats are described
+ // in RFC7064 and RFC7065, and more may be added in the future. The "host"
+ // part of the URI may contain either an IP address or a hostname.
+ @Deprecated public final String uri;
+ public final List urls;
+ public final String username;
+ public final String password;
+ public final TlsCertPolicy tlsCertPolicy;
+
+ // If the URIs in `urls` only contain IP addresses, this field can be used
+ // to indicate the hostname, which may be necessary for TLS (using the SNI
+ // extension). If `urls` itself contains the hostname, this isn't
+ // necessary.
+ public final String hostname;
+
+ // List of protocols to be used in the TLS ALPN extension.
+ public final List tlsAlpnProtocols;
+
+ // List of elliptic curves to be used in the TLS elliptic curves extension.
+ // Only curve names supported by OpenSSL should be used (eg. "P-256","X25519").
+ public final List tlsEllipticCurves;
+
+ /** Convenience constructor for STUN servers. */
+ @Deprecated
+ public IceServer(String uri) {
+ this(uri, "", "");
+ }
+
+ @Deprecated
+ public IceServer(String uri, String username, String password) {
+ this(uri, username, password, TlsCertPolicy.TLS_CERT_POLICY_SECURE);
+ }
+
+ @Deprecated
+ public IceServer(String uri, String username, String password, TlsCertPolicy tlsCertPolicy) {
+ this(uri, username, password, tlsCertPolicy, "");
+ }
+
+ @Deprecated
+ public IceServer(String uri, String username, String password, TlsCertPolicy tlsCertPolicy,
+ String hostname) {
+ this(uri, Collections.singletonList(uri), username, password, tlsCertPolicy, hostname, null,
+ null);
+ }
+
+ private IceServer(String uri, List urls, String username, String password,
+ TlsCertPolicy tlsCertPolicy, String hostname, List tlsAlpnProtocols,
+ List tlsEllipticCurves) {
+ if (uri == null || urls == null || urls.isEmpty()) {
+ throw new IllegalArgumentException("uri == null || urls == null || urls.isEmpty()");
+ }
+ for (String it : urls) {
+ if (it == null) {
+ throw new IllegalArgumentException("urls element is null: " + urls);
+ }
+ }
+ if (username == null) {
+ throw new IllegalArgumentException("username == null");
+ }
+ if (password == null) {
+ throw new IllegalArgumentException("password == null");
+ }
+ if (hostname == null) {
+ throw new IllegalArgumentException("hostname == null");
+ }
+ this.uri = uri;
+ this.urls = urls;
+ this.username = username;
+ this.password = password;
+ this.tlsCertPolicy = tlsCertPolicy;
+ this.hostname = hostname;
+ this.tlsAlpnProtocols = tlsAlpnProtocols;
+ this.tlsEllipticCurves = tlsEllipticCurves;
+ }
+
+ @Override
+ public String toString() {
+ return urls + " [" + username + ":" + password + "] [" + tlsCertPolicy + "] [" + hostname
+ + "] [" + tlsAlpnProtocols + "] [" + tlsEllipticCurves + "]";
+ }
+
+ @Override
+ public boolean equals(@Nullable Object obj) {
+ if (obj == null) {
+ return false;
+ }
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof IceServer)) {
+ return false;
+ }
+ IceServer other = (IceServer) obj;
+ return (uri.equals(other.uri) && urls.equals(other.urls) && username.equals(other.username)
+ && password.equals(other.password) && tlsCertPolicy.equals(other.tlsCertPolicy)
+ && hostname.equals(other.hostname) && tlsAlpnProtocols.equals(other.tlsAlpnProtocols)
+ && tlsEllipticCurves.equals(other.tlsEllipticCurves));
+ }
+
+ @Override
+ public int hashCode() {
+ Object[] values = {uri, urls, username, password, tlsCertPolicy, hostname, tlsAlpnProtocols,
+ tlsEllipticCurves};
+ return Arrays.hashCode(values);
+ }
+
+ public static Builder builder(String uri) {
+ return new Builder(Collections.singletonList(uri));
+ }
+
+ public static Builder builder(List urls) {
+ return new Builder(urls);
+ }
+
+ public static class Builder {
+ @Nullable private final List urls;
+ private String username = "";
+ private String password = "";
+ private TlsCertPolicy tlsCertPolicy = TlsCertPolicy.TLS_CERT_POLICY_SECURE;
+ private String hostname = "";
+ private List tlsAlpnProtocols;
+ private List tlsEllipticCurves;
+
+ private Builder(List urls) {
+ if (urls == null || urls.isEmpty()) {
+ throw new IllegalArgumentException("urls == null || urls.isEmpty(): " + urls);
+ }
+ this.urls = urls;
+ }
+
+ public Builder setUsername(String username) {
+ this.username = username;
+ return this;
+ }
+
+ public Builder setPassword(String password) {
+ this.password = password;
+ return this;
+ }
+
+ public Builder setTlsCertPolicy(TlsCertPolicy tlsCertPolicy) {
+ this.tlsCertPolicy = tlsCertPolicy;
+ return this;
+ }
+
+ public Builder setHostname(String hostname) {
+ this.hostname = hostname;
+ return this;
+ }
+
+ public Builder setTlsAlpnProtocols(List tlsAlpnProtocols) {
+ this.tlsAlpnProtocols = tlsAlpnProtocols;
+ return this;
+ }
+
+ public Builder setTlsEllipticCurves(List tlsEllipticCurves) {
+ this.tlsEllipticCurves = tlsEllipticCurves;
+ return this;
+ }
+
+ public IceServer createIceServer() {
+ return new IceServer(urls.get(0), urls, username, password, tlsCertPolicy, hostname,
+ tlsAlpnProtocols, tlsEllipticCurves);
+ }
+ }
+
+ @Nullable
+ @CalledByNative("IceServer")
+ List getUrls() {
+ return urls;
+ }
+
+ @Nullable
+ @CalledByNative("IceServer")
+ String getUsername() {
+ return username;
+ }
+
+ @Nullable
+ @CalledByNative("IceServer")
+ String getPassword() {
+ return password;
+ }
+
+ @CalledByNative("IceServer")
+ TlsCertPolicy getTlsCertPolicy() {
+ return tlsCertPolicy;
+ }
+
+ @Nullable
+ @CalledByNative("IceServer")
+ String getHostname() {
+ return hostname;
+ }
+
+ @CalledByNative("IceServer")
+ List getTlsAlpnProtocols() {
+ return tlsAlpnProtocols;
+ }
+
+ @CalledByNative("IceServer")
+ List getTlsEllipticCurves() {
+ return tlsEllipticCurves;
+ }
+ }
+
+ /** Java version of PeerConnectionInterface.IceTransportsType */
+ // The enums below mirror native PeerConnectionInterface enums; keep ordering in sync.
+ public enum IceTransportsType { NONE, RELAY, NOHOST, ALL }
+
+ /** Java version of PeerConnectionInterface.BundlePolicy */
+ public enum BundlePolicy { BALANCED, MAXBUNDLE, MAXCOMPAT }
+
+ /** Java version of PeerConnectionInterface.RtcpMuxPolicy */
+ public enum RtcpMuxPolicy { NEGOTIATE, REQUIRE }
+
+ /** Java version of PeerConnectionInterface.TcpCandidatePolicy */
+ public enum TcpCandidatePolicy { ENABLED, DISABLED }
+
+ /** Java version of PeerConnectionInterface.CandidateNetworkPolicy */
+ public enum CandidateNetworkPolicy { ALL, LOW_COST }
+
+ // Keep in sync with webrtc/rtc_base/network_constants.h.
+ public enum AdapterType {
+ UNKNOWN(0),
+ ETHERNET(1 << 0),
+ WIFI(1 << 1),
+ CELLULAR(1 << 2),
+ VPN(1 << 3),
+ LOOPBACK(1 << 4),
+ ADAPTER_TYPE_ANY(1 << 5),
+ CELLULAR_2G(1 << 6),
+ CELLULAR_3G(1 << 7),
+ CELLULAR_4G(1 << 8),
+ CELLULAR_5G(1 << 9);
+
+ public final Integer bitMask;
+ private AdapterType(Integer bitMask) {
+ this.bitMask = bitMask;
+ }
+ // Reverse lookup table from native bit mask to AdapterType.
+ private static final Map<Integer, AdapterType> BY_BITMASK = new HashMap<>();
+ static {
+ for (AdapterType t : values()) {
+ BY_BITMASK.put(t.bitMask, t);
+ }
+ }
+
+ @Nullable
+ @CalledByNative("AdapterType")
+ static AdapterType fromNativeIndex(int nativeIndex) {
+ return BY_BITMASK.get(nativeIndex);
+ }
+ }
+
+ /** Java version of rtc::KeyType */
+ public enum KeyType { RSA, ECDSA }
+
+ /** Java version of PeerConnectionInterface.ContinualGatheringPolicy */
+ // GATHER_ONCE gathers candidates a single time; GATHER_CONTINUALLY keeps gathering.
+ public enum ContinualGatheringPolicy { GATHER_ONCE, GATHER_CONTINUALLY }
+
+ /** Java version of webrtc::PortPrunePolicy */
+ public enum PortPrunePolicy {
+ NO_PRUNE, // Do not prune turn port.
+ PRUNE_BASED_ON_PRIORITY, // Prune turn port based the priority on the same network
+ KEEP_FIRST_READY // Keep the first ready port and prune the rest on the same network.
+ }
+
+ /**
+ * Java version of webrtc::SdpSemantics.
+ *
+ * Configure the SDP semantics used by this PeerConnection. By default, this
+ * is UNIFIED_PLAN which is compliant to the WebRTC 1.0 specification. It is
+ * possible to overwrite this to the deprecated PLAN_B SDP format, but note
+ * that PLAN_B will be deleted at some future date, see
+ * https://crbug.com/webrtc/13528.
+ *
+ * UNIFIED_PLAN will cause PeerConnection to create offers and answers with
+ * multiple m= sections where each m= section maps to one RtpSender and one
+ * RtpReceiver (an RtpTransceiver), either both audio or both video. This
+ * will also cause PeerConnection to ignore all but the first a=ssrc lines
+ * that form a Plan B stream.
+ *
+ * PLAN_B will cause PeerConnection to create offers and answers with at most
+ * one audio and one video m= section with multiple RtpSenders and
+ * RtpReceivers specified as multiple a=ssrc lines within the section. This
+ * will also cause PeerConnection to ignore all but the first m= section of
+ * the same media type.
+ */
+ public enum SdpSemantics {
+ // TODO(https://crbug.com/webrtc/13528): Remove support for PLAN_B.
+ @Deprecated PLAN_B,
+ UNIFIED_PLAN
+ }
+
+ /** Java version of PeerConnectionInterface.RTCConfiguration */
+ // TODO(qingsi): Resolve the naming inconsistency of fields with/without units.
+ public static class RTCConfiguration {
+ public IceTransportsType iceTransportsType;
+ public List<IceServer> iceServers;
+ public BundlePolicy bundlePolicy;
+ @Nullable public RtcCertificatePem certificate;
+ public RtcpMuxPolicy rtcpMuxPolicy;
+ public TcpCandidatePolicy tcpCandidatePolicy;
+ public CandidateNetworkPolicy candidateNetworkPolicy;
+ public int audioJitterBufferMaxPackets;
+ public boolean audioJitterBufferFastAccelerate;
+ public int iceConnectionReceivingTimeout;
+ public int iceBackupCandidatePairPingInterval;
+ public KeyType keyType;
+ public ContinualGatheringPolicy continualGatheringPolicy;
+ public int iceCandidatePoolSize;
+ @Deprecated // by the turnPortPrunePolicy. See bugs.webrtc.org/11026
+ public boolean pruneTurnPorts;
+ public PortPrunePolicy turnPortPrunePolicy;
+ public boolean presumeWritableWhenFullyRelayed;
+ public boolean surfaceIceCandidatesOnIceTransportTypeChanged;
+ // The following fields define intervals in milliseconds at which ICE
+ // connectivity checks are sent.
+ //
+ // We consider ICE is "strongly connected" for an agent when there is at
+ // least one candidate pair that currently succeeds in connectivity check
+ // from its direction i.e. sending a ping and receives a ping response, AND
+ // all candidate pairs have sent a minimum number of pings for connectivity
+ // (this number is implementation-specific). Otherwise, ICE is considered in
+ // "weak connectivity".
+ //
+ // Note that the above notion of strong and weak connectivity is not defined
+ // in RFC 5245, and they apply to our current ICE implementation only.
+ //
+ // 1) iceCheckIntervalStrongConnectivityMs defines the interval applied to
+ // ALL candidate pairs when ICE is strongly connected,
+ // 2) iceCheckIntervalWeakConnectivityMs defines the counterpart for ALL
+ // pairs when ICE is weakly connected, and
+ // 3) iceCheckMinInterval defines the minimal interval (equivalently the
+ // maximum rate) that overrides the above two intervals when either of them
+ // is less.
+ @Nullable public Integer iceCheckIntervalStrongConnectivityMs;
+ @Nullable public Integer iceCheckIntervalWeakConnectivityMs;
+ @Nullable public Integer iceCheckMinInterval;
+ // The time period in milliseconds for which a candidate pair must wait for response to
+ // connectivitiy checks before it becomes unwritable.
+ @Nullable public Integer iceUnwritableTimeMs;
+ // The minimum number of connectivity checks that a candidate pair must sent without receiving
+ // response before it becomes unwritable.
+ @Nullable public Integer iceUnwritableMinChecks;
+ // The interval in milliseconds at which STUN candidates will resend STUN binding requests
+ // to keep NAT bindings open.
+ // The default value in the implementation is used if this field is null.
+ @Nullable public Integer stunCandidateKeepaliveIntervalMs;
+ // The interval in milliseconds of pings sent when the connection is stable and writable.
+ // The default value in the implementation is used if this field is null.
+ @Nullable public Integer stableWritableConnectionPingIntervalMs;
+ public boolean disableIPv6OnWifi;
+ // By default, PeerConnection will use a limited number of IPv6 network
+ // interfaces, in order to avoid too many ICE candidate pairs being created
+ // and delaying ICE completion.
+ //
+ // Can be set to Integer.MAX_VALUE to effectively disable the limit.
+ public int maxIPv6Networks;
+
+ // These values will be overridden by MediaStream constraints if deprecated constraints-based
+ // create peerconnection interface is used.
+ public boolean enableDscp;
+ public boolean enableCpuOveruseDetection;
+ public boolean suspendBelowMinBitrate;
+ @Nullable public Integer screencastMinBitrate;
+ // Use "Unknown" to represent no preference of adapter types, not the
+ // preference of adapters of unknown types.
+ public AdapterType networkPreference;
+ public SdpSemantics sdpSemantics;
+
+ // This is an optional wrapper for the C++ webrtc::TurnCustomizer.
+ @Nullable public TurnCustomizer turnCustomizer;
+
+ // Actively reset the SRTP parameters whenever the DTLS transports underneath are reset for
+ // every offer/answer negotiation.This is only intended to be a workaround for crbug.com/835958
+ public boolean activeResetSrtpParams;
+
+ /**
+ * Defines advanced optional cryptographic settings related to SRTP and
+ * frame encryption for native WebRTC. Setting this will overwrite any
+ * options set through the PeerConnectionFactory (which is deprecated).
+ */
+ @Nullable public CryptoOptions cryptoOptions;
+
+ /**
+ * An optional string that if set will be attached to the
+ * TURN_ALLOCATE_REQUEST which can be used to correlate client
+ * logs with backend logs
+ */
+ @Nullable public String turnLoggingId;
+
+ /**
+ * Allow implicit rollback of local description when remote description
+ * conflicts with local description.
+ * See: https://w3c.github.io/webrtc-pc/#dom-peerconnection-setremotedescription
+ */
+ public boolean enableImplicitRollback;
+
+ /**
+ * Control if "a=extmap-allow-mixed" is included in the offer.
+ * See: https://www.chromestatus.com/feature/6269234631933952
+ */
+ public boolean offerExtmapAllowMixed;
+
+ // TODO(deadbeef): Instead of duplicating the defaults here, we should do
+ // something to pick up the defaults from C++. The Objective-C equivalent
+ // of RTCConfiguration does that.
+ public RTCConfiguration(List<IceServer> iceServers) {
+ iceTransportsType = IceTransportsType.ALL;
+ bundlePolicy = BundlePolicy.BALANCED;
+ rtcpMuxPolicy = RtcpMuxPolicy.REQUIRE;
+ tcpCandidatePolicy = TcpCandidatePolicy.ENABLED;
+ candidateNetworkPolicy = CandidateNetworkPolicy.ALL;
+ this.iceServers = iceServers;
+ audioJitterBufferMaxPackets = 50;
+ audioJitterBufferFastAccelerate = false;
+ iceConnectionReceivingTimeout = -1;
+ iceBackupCandidatePairPingInterval = -1;
+ keyType = KeyType.ECDSA;
+ continualGatheringPolicy = ContinualGatheringPolicy.GATHER_ONCE;
+ iceCandidatePoolSize = 0;
+ pruneTurnPorts = false;
+ turnPortPrunePolicy = PortPrunePolicy.NO_PRUNE;
+ presumeWritableWhenFullyRelayed = false;
+ surfaceIceCandidatesOnIceTransportTypeChanged = false;
+ iceCheckIntervalStrongConnectivityMs = null;
+ iceCheckIntervalWeakConnectivityMs = null;
+ iceCheckMinInterval = null;
+ iceUnwritableTimeMs = null;
+ iceUnwritableMinChecks = null;
+ stunCandidateKeepaliveIntervalMs = null;
+ stableWritableConnectionPingIntervalMs = null;
+ disableIPv6OnWifi = false;
+ maxIPv6Networks = 5;
+ enableDscp = false;
+ enableCpuOveruseDetection = true;
+ suspendBelowMinBitrate = false;
+ screencastMinBitrate = null;
+ networkPreference = AdapterType.UNKNOWN;
+ sdpSemantics = SdpSemantics.UNIFIED_PLAN;
+ activeResetSrtpParams = false;
+ cryptoOptions = null;
+ turnLoggingId = null;
+ enableImplicitRollback = false;
+ offerExtmapAllowMixed = true;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ IceTransportsType getIceTransportsType() {
+ return iceTransportsType;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ List<IceServer> getIceServers() {
+ return iceServers;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ BundlePolicy getBundlePolicy() {
+ return bundlePolicy;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ PortPrunePolicy getTurnPortPrunePolicy() {
+ return turnPortPrunePolicy;
+ }
+
+ @Nullable
+ @CalledByNative("RTCConfiguration")
+ RtcCertificatePem getCertificate() {
+ return certificate;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ RtcpMuxPolicy getRtcpMuxPolicy() {
+ return rtcpMuxPolicy;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ TcpCandidatePolicy getTcpCandidatePolicy() {
+ return tcpCandidatePolicy;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ CandidateNetworkPolicy getCandidateNetworkPolicy() {
+ return candidateNetworkPolicy;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ int getAudioJitterBufferMaxPackets() {
+ return audioJitterBufferMaxPackets;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ boolean getAudioJitterBufferFastAccelerate() {
+ return audioJitterBufferFastAccelerate;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ int getIceConnectionReceivingTimeout() {
+ return iceConnectionReceivingTimeout;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ int getIceBackupCandidatePairPingInterval() {
+ return iceBackupCandidatePairPingInterval;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ KeyType getKeyType() {
+ return keyType;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ ContinualGatheringPolicy getContinualGatheringPolicy() {
+ return continualGatheringPolicy;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ int getIceCandidatePoolSize() {
+ return iceCandidatePoolSize;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ boolean getPruneTurnPorts() {
+ return pruneTurnPorts;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ boolean getPresumeWritableWhenFullyRelayed() {
+ return presumeWritableWhenFullyRelayed;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ boolean getSurfaceIceCandidatesOnIceTransportTypeChanged() {
+ return surfaceIceCandidatesOnIceTransportTypeChanged;
+ }
+
+ @Nullable
+ @CalledByNative("RTCConfiguration")
+ Integer getIceCheckIntervalStrongConnectivity() {
+ return iceCheckIntervalStrongConnectivityMs;
+ }
+
+ @Nullable
+ @CalledByNative("RTCConfiguration")
+ Integer getIceCheckIntervalWeakConnectivity() {
+ return iceCheckIntervalWeakConnectivityMs;
+ }
+
+ @Nullable
+ @CalledByNative("RTCConfiguration")
+ Integer getIceCheckMinInterval() {
+ return iceCheckMinInterval;
+ }
+
+ @Nullable
+ @CalledByNative("RTCConfiguration")
+ Integer getIceUnwritableTimeout() {
+ return iceUnwritableTimeMs;
+ }
+
+ @Nullable
+ @CalledByNative("RTCConfiguration")
+ Integer getIceUnwritableMinChecks() {
+ return iceUnwritableMinChecks;
+ }
+
+ @Nullable
+ @CalledByNative("RTCConfiguration")
+ Integer getStunCandidateKeepaliveInterval() {
+ return stunCandidateKeepaliveIntervalMs;
+ }
+
+ @Nullable
+ @CalledByNative("RTCConfiguration")
+ Integer getStableWritableConnectionPingIntervalMs() {
+ return stableWritableConnectionPingIntervalMs;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ boolean getDisableIPv6OnWifi() {
+ return disableIPv6OnWifi;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ int getMaxIPv6Networks() {
+ return maxIPv6Networks;
+ }
+
+ @Nullable
+ @CalledByNative("RTCConfiguration")
+ TurnCustomizer getTurnCustomizer() {
+ return turnCustomizer;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ boolean getEnableDscp() {
+ return enableDscp;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ boolean getEnableCpuOveruseDetection() {
+ return enableCpuOveruseDetection;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ boolean getSuspendBelowMinBitrate() {
+ return suspendBelowMinBitrate;
+ }
+
+ @Nullable
+ @CalledByNative("RTCConfiguration")
+ Integer getScreencastMinBitrate() {
+ return screencastMinBitrate;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ AdapterType getNetworkPreference() {
+ return networkPreference;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ SdpSemantics getSdpSemantics() {
+ return sdpSemantics;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ boolean getActiveResetSrtpParams() {
+ return activeResetSrtpParams;
+ }
+
+ @Nullable
+ @CalledByNative("RTCConfiguration")
+ CryptoOptions getCryptoOptions() {
+ return cryptoOptions;
+ }
+
+ @Nullable
+ @CalledByNative("RTCConfiguration")
+ String getTurnLoggingId() {
+ return turnLoggingId;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ boolean getEnableImplicitRollback() {
+ return enableImplicitRollback;
+ }
+
+ @CalledByNative("RTCConfiguration")
+ boolean getOfferExtmapAllowMixed() {
+ return offerExtmapAllowMixed;
+ }
+ };
+
+ // Streams added via addStream(); tracked locally so removeStream() can untrack them.
+ private final List<MediaStream> localStreams = new ArrayList<>();
+ private final long nativePeerConnection;
+ // Caches of the last native query results; refreshed (and old entries disposed)
+ // by getSenders()/getReceivers()/getTransceivers().
+ private List<RtpSender> senders = new ArrayList<>();
+ private List<RtpReceiver> receivers = new ArrayList<>();
+ private List<RtpTransceiver> transceivers = new ArrayList<>();
+
+ /**
+ * Wraps a PeerConnection created by the factory. Can be used by clients that want to implement
+ * their PeerConnection creation in JNI.
+ */
+ public PeerConnection(NativePeerConnectionFactory factory) {
+ this(factory.createNativePeerConnection());
+ }
+
+ // Wraps an already-created native PeerConnection pointer.
+ PeerConnection(long nativePeerConnection) {
+ this.nativePeerConnection = nativePeerConnection;
+ }
+
+ // JsepInterface.
+ // The methods below are thin wrappers that delegate directly to the native
+ // (C++) PeerConnection implementation.
+ public SessionDescription getLocalDescription() {
+ return nativeGetLocalDescription();
+ }
+
+ public SessionDescription getRemoteDescription() {
+ return nativeGetRemoteDescription();
+ }
+
+ public RtcCertificatePem getCertificate() {
+ return nativeGetCertificate();
+ }
+
+ public DataChannel createDataChannel(String label, DataChannel.Init init) {
+ return nativeCreateDataChannel(label, init);
+ }
+
+ public void createOffer(SdpObserver observer, MediaConstraints constraints) {
+ nativeCreateOffer(observer, constraints);
+ }
+
+ public void createAnswer(SdpObserver observer, MediaConstraints constraints) {
+ nativeCreateAnswer(observer, constraints);
+ }
+
+ // Variant without an explicit description; the native layer chooses the
+ // description to apply (see nativeSetLocalDescriptionAutomatically).
+ public void setLocalDescription(SdpObserver observer) {
+ nativeSetLocalDescriptionAutomatically(observer);
+ }
+
+ public void setLocalDescription(SdpObserver observer, SessionDescription sdp) {
+ nativeSetLocalDescription(observer, sdp);
+ }
+
+ public void setRemoteDescription(SdpObserver observer, SessionDescription sdp) {
+ nativeSetRemoteDescription(observer, sdp);
+ }
+
+ /**
+ * Tells the PeerConnection that ICE should be restarted.
+ */
+ public void restartIce() {
+ nativeRestartIce();
+ }
+
+ /**
+ * Enables/disables playout of received audio streams. Enabled by default.
+ *
+ * Note that even if playout is enabled, streams will only be played out if
+ * the appropriate SDP is also applied. The main purpose of this API is to
+ * be able to control the exact time when audio playout starts.
+ */
+ public void setAudioPlayout(boolean playout) {
+ nativeSetAudioPlayout(playout);
+ }
+
+ /**
+ * Enables/disables recording of transmitted audio streams. Enabled by default.
+ *
+ * Note that even if recording is enabled, streams will only be recorded if
+ * the appropriate SDP is also applied. The main purpose of this API is to
+ * be able to control the exact time when audio recording starts.
+ */
+ public void setAudioRecording(boolean recording) {
+ nativeSetAudioRecording(recording);
+ }
+
+ /** Applies a new configuration; returns the result reported by the native layer. */
+ public boolean setConfiguration(RTCConfiguration config) {
+ return nativeSetConfiguration(config);
+ }
+
+ /** Adds a remote ICE candidate; returns the result reported by the native layer. */
+ public boolean addIceCandidate(IceCandidate candidate) {
+ return nativeAddIceCandidate(candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp);
+ }
+
+ /** Adds a remote ICE candidate, reporting the outcome through `observer`. */
+ public void addIceCandidate(IceCandidate candidate, AddIceObserver observer) {
+ nativeAddIceCandidateWithObserver(
+ candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp, observer);
+ }
+
+ /** Removes the given ICE candidates via the native layer. */
+ public boolean removeIceCandidates(final IceCandidate[] candidates) {
+ return nativeRemoveIceCandidates(candidates);
+ }
+
+ /**
+ * Adds a new MediaStream to be sent on this peer connection.
+ * Note: This method is not supported with SdpSemantics.UNIFIED_PLAN. Please
+ * use addTrack instead.
+ *
+ * @return true if the native layer accepted the stream.
+ */
+ public boolean addStream(MediaStream stream) {
+ boolean ret = nativeAddLocalStream(stream.getNativeMediaStream());
+ if (!ret) {
+ return false;
+ }
+ // Track the stream locally only when the native layer accepted it.
+ localStreams.add(stream);
+ return true;
+ }
+
+ /**
+ * Removes the given media stream from this peer connection.
+ * This method is not supported with SdpSemantics.UNIFIED_PLAN. Please use
+ * removeTrack instead.
+ */
+ public void removeStream(MediaStream stream) {
+ nativeRemoveLocalStream(stream.getNativeMediaStream());
+ localStreams.remove(stream);
+ }
+
+ /**
+ * Creates an RtpSender without a track.
+ *
+ * This method allows an application to cause the PeerConnection to negotiate
+ * sending/receiving a specific media type, but without having a track to
+ * send yet.
+ *
+ * <p>When the application does want to begin sending a track, it can call
+ * RtpSender.setTrack, which doesn't require any additional SDP negotiation.
+ *
+ * <p>Example use:
+ *
+ * {@code
+ * audioSender = pc.createSender("audio", "stream1");
+ * videoSender = pc.createSender("video", "stream1");
+ * // Do normal SDP offer/answer, which will kick off ICE/DTLS and negotiate
+ * // media parameters....
+ * // Later, when the endpoint is ready to actually begin sending:
+ * audioSender.setTrack(audioTrack, false);
+ * videoSender.setTrack(videoTrack, false);
+ * }
+ *
+ * Note: This corresponds most closely to "addTransceiver" in the official
+ * WebRTC API, in that it creates a sender without a track. It was
+ * implemented before addTransceiver because it provides useful
+ * functionality, and properly implementing transceivers would have required
+ * a great deal more work.
+ *
+ * <p>Note: This is only available with SdpSemantics.PLAN_B specified. Please use
+ * addTransceiver instead.
+ *
+ * @param kind Corresponds to MediaStreamTrack kinds (must be "audio" or
+ * "video").
+ * @param stream_id The ID of the MediaStream that this sender's track will
+ * be associated with when SDP is applied to the remote
+ * PeerConnection. If createSender is used to create an
+ * audio and video sender that should be synchronized, they
+ * should use the same stream ID.
+ * @return A new RtpSender object if successful, or null otherwise.
+ */
+ public RtpSender createSender(String kind, String stream_id) {
+ RtpSender newSender = nativeCreateSender(kind, stream_id);
+ if (newSender != null) {
+ senders.add(newSender);
+ }
+ return newSender;
+ }
+
+ /**
+ * Gets all RtpSenders associated with this peer connection.
+ * Note that calling getSenders will dispose of the senders previously
+ * returned.
+ */
+ public List<RtpSender> getSenders() {
+ for (RtpSender sender : senders) {
+ sender.dispose();
+ }
+ senders = nativeGetSenders();
+ return Collections.unmodifiableList(senders);
+ }
+
+ /**
+ * Gets all RtpReceivers associated with this peer connection.
+ * Note that calling getReceivers will dispose of the receivers previously
+ * returned.
+ */
+ public List<RtpReceiver> getReceivers() {
+ for (RtpReceiver receiver : receivers) {
+ receiver.dispose();
+ }
+ receivers = nativeGetReceivers();
+ return Collections.unmodifiableList(receivers);
+ }
+
+ /**
+ * Gets all RtpTransceivers associated with this peer connection.
+ * Note that calling getTransceivers will dispose of the transceivers previously
+ * returned.
+ * Note: This is only available with SdpSemantics.UNIFIED_PLAN specified.
+ */
+ public List<RtpTransceiver> getTransceivers() {
+ for (RtpTransceiver transceiver : transceivers) {
+ transceiver.dispose();
+ }
+ transceivers = nativeGetTransceivers();
+ return Collections.unmodifiableList(transceivers);
+ }
+
+ /**
+ * Adds a new media stream track to be sent on this peer connection, and returns
+ * the newly created RtpSender. If streamIds are specified, the RtpSender will
+ * be associated with the streams specified in the streamIds list.
+ *
+ * @throws IllegalStateException if an error occurs in C++ addTrack.
+ * An error can occur if:
+ * - A sender already exists for the track.
+ * - The peer connection is closed.
+ */
+ public RtpSender addTrack(MediaStreamTrack track) {
+ return addTrack(track, Collections.emptyList());
+ }
+
+ public RtpSender addTrack(MediaStreamTrack track, List<String> streamIds) {
+ if (track == null || streamIds == null) {
+ throw new NullPointerException("No MediaStreamTrack specified in addTrack.");
+ }
+ RtpSender newSender = nativeAddTrack(track.getNativeMediaStreamTrack(), streamIds);
+ if (newSender == null) {
+ throw new IllegalStateException("C++ addTrack failed.");
+ }
+ senders.add(newSender);
+ return newSender;
+ }
+
+ /**
+ * Stops sending media from sender. The sender will still appear in getSenders. Future
+ * calls to createOffer will mark the m section for the corresponding transceiver as
+ * receive only or inactive, as defined in JSEP. Returns true on success.
+ */
+ public boolean removeTrack(RtpSender sender) {
+ if (sender == null) {
+ throw new NullPointerException("No RtpSender specified for removeTrack.");
+ }
+ return nativeRemoveTrack(sender.getNativeRtpSender());
+ }
+
+ /**
+ * Creates a new RtpTransceiver and adds it to the set of transceivers. Adding a
+ * transceiver will cause future calls to CreateOffer to add a media description
+ * for the corresponding transceiver.
+ *
+ * The initial value of `mid` in the returned transceiver is null. Setting a
+ * new session description may change it to a non-null value.
+ *
+ *
+ * <p>https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-addtransceiver
+ *
+ *
+ * <p>If a MediaStreamTrack is specified then a transceiver will be added with a
+ * sender set to transmit the given track. The kind
+ * of the transceiver (and sender/receiver) will be derived from the kind of
+ * the track.
+ *
+ *
+ * <p>If MediaType is specified then a transceiver will be added based upon that type.
+ * This can be either MEDIA_TYPE_AUDIO or MEDIA_TYPE_VIDEO.
+ *
+ *
+ * <p>Optionally, an RtpTransceiverInit structure can be specified to configure
+ * the transceiver from construction. If not specified, the transceiver will
+ * default to having a direction of kSendRecv and not be part of any streams.
+ *
+ *
+ * <p>Note: These methods are only available with SdpSemantics.UNIFIED_PLAN specified.
+ * @throws IllegalStateException if an error occurs in C++ addTransceiver
+ */
+ public RtpTransceiver addTransceiver(MediaStreamTrack track) {
+ return addTransceiver(track, new RtpTransceiver.RtpTransceiverInit());
+ }
+
+ public RtpTransceiver addTransceiver(
+ MediaStreamTrack track, @Nullable RtpTransceiver.RtpTransceiverInit init) {
+ if (track == null) {
+ throw new NullPointerException("No MediaStreamTrack specified for addTransceiver.");
+ }
+ if (init == null) {
+ init = new RtpTransceiver.RtpTransceiverInit();
+ }
+ RtpTransceiver newTransceiver =
+ nativeAddTransceiverWithTrack(track.getNativeMediaStreamTrack(), init);
+ if (newTransceiver == null) {
+ throw new IllegalStateException("C++ addTransceiver failed.");
+ }
+ transceivers.add(newTransceiver);
+ return newTransceiver;
+ }
+
+ public RtpTransceiver addTransceiver(MediaStreamTrack.MediaType mediaType) {
+ return addTransceiver(mediaType, new RtpTransceiver.RtpTransceiverInit());
+ }
+
+ public RtpTransceiver addTransceiver(
+ MediaStreamTrack.MediaType mediaType, @Nullable RtpTransceiver.RtpTransceiverInit init) {
+ if (mediaType == null) {
+ throw new NullPointerException("No MediaType specified for addTransceiver.");
+ }
+ if (init == null) {
+ init = new RtpTransceiver.RtpTransceiverInit();
+ }
+ RtpTransceiver newTransceiver = nativeAddTransceiverOfType(mediaType, init);
+ if (newTransceiver == null) {
+ throw new IllegalStateException("C++ addTransceiver failed.");
+ }
+ transceivers.add(newTransceiver);
+ return newTransceiver;
+ }
+
+ // Older, non-standard implementation of getStats.
+ @Deprecated
+ public boolean getStats(StatsObserver observer, @Nullable MediaStreamTrack track) {
+ return nativeOldGetStats(observer, (track == null) ? 0 : track.getNativeMediaStreamTrack());
+ }
+
+ /**
+ * Gets stats using the new stats collection API, see webrtc/api/stats/. These
+ * will replace old stats collection API when the new API has matured enough.
+ */
+ public void getStats(RTCStatsCollectorCallback callback) {
+ nativeNewGetStats(callback);
+ }
+
+ /**
+ * Gets stats using the new stats collection API, see webrtc/api/stats/. These
+ * will replace old stats collection API when the new API has matured enough.
+ */
+ public void getStats(RtpSender sender, RTCStatsCollectorCallback callback) {
+ nativeNewGetStatsSender(sender.getNativeRtpSender(), callback);
+ }
+
+ /**
+ * Gets stats using the new stats collection API, see webrtc/api/stats/. These
+ * will replace old stats collection API when the new API has matured enough.
+ */
+ public void getStats(RtpReceiver receiver, RTCStatsCollectorCallback callback) {
+ nativeNewGetStatsReceiver(receiver.getNativeRtpReceiver(), callback);
+ }
+
+ /**
+ * Limits the bandwidth allocated for all RTP streams sent by this
+ * PeerConnection. Pass null to leave a value unchanged.
+ */
+ public boolean setBitrate(Integer min, Integer current, Integer max) {
+ return nativeSetBitrate(min, current, max);
+ }
+
+ /**
+ * Starts recording an RTC event log.
+ *
+ * Ownership of the file is transferred to the native code. If an RTC event
+ * log is already being recorded, it will be stopped and a new one will start
+ * using the provided file. Logging will continue until the stopRtcEventLog
+ * function is called. The max_size_bytes argument is ignored, it is added
+ * for future use.
+ */
+ public boolean startRtcEventLog(int file_descriptor, int max_size_bytes) {
+ return nativeStartRtcEventLog(file_descriptor, max_size_bytes);
+ }
+
+ /**
+ * Stops recording an RTC event log. If no RTC event log is currently being
+ * recorded, this call will have no effect.
+ */
+ public void stopRtcEventLog() {
+ nativeStopRtcEventLog();
+ }
+
+ // TODO(fischman): add support for DTMF-related methods once that API
+ // stabilizes.
+ public SignalingState signalingState() {
+ return nativeSignalingState();
+ }
+
+ public IceConnectionState iceConnectionState() {
+ return nativeIceConnectionState();
+ }
+
+ public PeerConnectionState connectionState() {
+ return nativeConnectionState();
+ }
+
+ public IceGatheringState iceGatheringState() {
+ return nativeIceGatheringState();
+ }
+
+ public void close() {
+ nativeClose();
+ }
+
+ /**
+ * Free native resources associated with this PeerConnection instance.
+ *
+ * This method removes a reference count from the C++ PeerConnection object,
+ * which should result in it being destroyed. It also calls equivalent
+ * "dispose" methods on the Java objects attached to this PeerConnection
+ * (streams, senders, receivers), such that their associated C++ objects
+ * will also be destroyed.
+ *
+ *
+ * <p>Note that this method cannot be safely called from an observer callback
+ * (PeerConnection.Observer, DataChannel.Observer, etc.). If you want to, for
+ * example, destroy the PeerConnection after an "ICE failed" callback, you
+ * must do this asynchronously (in other words, unwind the stack first). See
+ * <a href="https://bugs.chromium.org/p/webrtc/issues/detail?id=3721">bug
+ * 3721</a> for more details.
+ */
+ public void dispose() {
+ close();
+ for (MediaStream stream : localStreams) {
+ nativeRemoveLocalStream(stream.getNativeMediaStream());
+ stream.dispose();
+ }
+ localStreams.clear();
+ for (RtpSender sender : senders) {
+ sender.dispose();
+ }
+ senders.clear();
+ for (RtpReceiver receiver : receivers) {
+ receiver.dispose();
+ }
+ for (RtpTransceiver transceiver : transceivers) {
+ transceiver.dispose();
+ }
+ transceivers.clear();
+ receivers.clear();
+ nativeFreeOwnedPeerConnection(nativePeerConnection);
+ }
+
+ /** Returns a pointer to the native webrtc::PeerConnectionInterface. */
+ public long getNativePeerConnection() {
+ return nativeGetNativePeerConnection();
+ }
+
+ @CalledByNative
+ long getNativeOwnedPeerConnection() {
+ return nativePeerConnection;
+ }
+
+ public static long createNativePeerConnectionObserver(Observer observer) {
+ return nativeCreatePeerConnectionObserver(observer);
+ }
+
+ private native long nativeGetNativePeerConnection();
+ private native SessionDescription nativeGetLocalDescription();
+ private native SessionDescription nativeGetRemoteDescription();
+ private native RtcCertificatePem nativeGetCertificate();
+ private native DataChannel nativeCreateDataChannel(String label, DataChannel.Init init);
+ private native void nativeCreateOffer(SdpObserver observer, MediaConstraints constraints);
+ private native void nativeCreateAnswer(SdpObserver observer, MediaConstraints constraints);
+ private native void nativeSetLocalDescriptionAutomatically(SdpObserver observer);
+ private native void nativeSetLocalDescription(SdpObserver observer, SessionDescription sdp);
+ private native void nativeSetRemoteDescription(SdpObserver observer, SessionDescription sdp);
+ private native void nativeRestartIce();
+ private native void nativeSetAudioPlayout(boolean playout);
+ private native void nativeSetAudioRecording(boolean recording);
+ private native boolean nativeSetBitrate(Integer min, Integer current, Integer max);
+ private native SignalingState nativeSignalingState();
+ private native IceConnectionState nativeIceConnectionState();
+ private native PeerConnectionState nativeConnectionState();
+ private native IceGatheringState nativeIceGatheringState();
+ private native void nativeClose();
+ private static native long nativeCreatePeerConnectionObserver(Observer observer);
+ private static native void nativeFreeOwnedPeerConnection(long ownedPeerConnection);
+ private native boolean nativeSetConfiguration(RTCConfiguration config);
+ private native boolean nativeAddIceCandidate(
+ String sdpMid, int sdpMLineIndex, String iceCandidateSdp);
+ private native void nativeAddIceCandidateWithObserver(
+ String sdpMid, int sdpMLineIndex, String iceCandidateSdp, AddIceObserver observer);
+ private native boolean nativeRemoveIceCandidates(final IceCandidate[] candidates);
+ private native boolean nativeAddLocalStream(long stream);
+ private native void nativeRemoveLocalStream(long stream);
+ private native boolean nativeOldGetStats(StatsObserver observer, long nativeTrack);
+ private native void nativeNewGetStats(RTCStatsCollectorCallback callback);
+ private native void nativeNewGetStatsSender(long sender, RTCStatsCollectorCallback callback);
+ private native void nativeNewGetStatsReceiver(long receiver, RTCStatsCollectorCallback callback);
+ private native RtpSender nativeCreateSender(String kind, String stream_id);
+ private native List<RtpSender> nativeGetSenders();
+ private native List<RtpReceiver> nativeGetReceivers();
+ private native List<RtpTransceiver> nativeGetTransceivers();
+ private native RtpSender nativeAddTrack(long track, List<String> streamIds);
+ private native boolean nativeRemoveTrack(long sender);
+ private native RtpTransceiver nativeAddTransceiverWithTrack(
+ long track, RtpTransceiver.RtpTransceiverInit init);
+ private native RtpTransceiver nativeAddTransceiverOfType(
+ MediaStreamTrack.MediaType mediaType, RtpTransceiver.RtpTransceiverInit init);
+ private native boolean nativeStartRtcEventLog(int file_descriptor, int max_size_bytes);
+ private native void nativeStopRtcEventLog();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/PeerConnectionDependencies.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/PeerConnectionDependencies.java
new file mode 100644
index 00000000..ac6c94bb
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/PeerConnectionDependencies.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+
+/**
+ * PeerConnectionDependencies holds all PeerConnection dependencies that are
+ * applied per PeerConnection. A dependency is distinct from a configuration
+ * as it defines significant executable code that can be provided by a user of
+ * the API.
+ */
+public final class PeerConnectionDependencies {
+ // Mandatory dependencies.
+ private final PeerConnection.Observer observer;
+
+ // Optional fields.
+ private final SSLCertificateVerifier sslCertificateVerifier;
+
+ public static class Builder {
+ private PeerConnection.Observer observer;
+ private SSLCertificateVerifier sslCertificateVerifier;
+
+ private Builder(PeerConnection.Observer observer) {
+ this.observer = observer;
+ }
+
+ public Builder setSSLCertificateVerifier(SSLCertificateVerifier sslCertificateVerifier) {
+ this.sslCertificateVerifier = sslCertificateVerifier;
+ return this;
+ }
+
+ // Observer is a required dependency and so is forced in the construction of the object.
+ public PeerConnectionDependencies createPeerConnectionDependencies() {
+ return new PeerConnectionDependencies(observer, sslCertificateVerifier);
+ }
+ }
+
+ public static Builder builder(PeerConnection.Observer observer) {
+ return new Builder(observer);
+ }
+
+ PeerConnection.Observer getObserver() {
+ return observer;
+ }
+
+ @Nullable
+ SSLCertificateVerifier getSSLCertificateVerifier() {
+ return sslCertificateVerifier;
+ }
+
+ private PeerConnectionDependencies(
+ PeerConnection.Observer observer, SSLCertificateVerifier sslCertificateVerifier) {
+ this.observer = observer;
+ this.sslCertificateVerifier = sslCertificateVerifier;
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/PeerConnectionFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/PeerConnectionFactory.java
new file mode 100644
index 00000000..c46718fd
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/PeerConnectionFactory.java
@@ -0,0 +1,634 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.os.Process;
+import androidx.annotation.Nullable;
+import java.util.List;
+import org.webrtc.Logging.Severity;
+import org.webrtc.MediaStreamTrack;
+import org.webrtc.PeerConnection;
+import org.webrtc.RtpCapabilities;
+import org.webrtc.audio.AudioDeviceModule;
+import org.webrtc.audio.JavaAudioDeviceModule;
+
+/**
+ * Java wrapper for a C++ PeerConnectionFactoryInterface. Main entry point to
+ * the PeerConnection API for clients.
+ */
+public class PeerConnectionFactory {
+ public static final String TRIAL_ENABLED = "Enabled";
+ @Deprecated public static final String VIDEO_FRAME_EMIT_TRIAL = "VideoFrameEmit";
+
+ private static final String TAG = "PeerConnectionFactory";
+ private static final String VIDEO_CAPTURER_THREAD_NAME = "VideoCapturerThread";
+
+ /** Helper class holding both Java and C++ thread info. */
+ private static class ThreadInfo {
+ final Thread thread;
+ final int tid;
+
+ public static ThreadInfo getCurrent() {
+ return new ThreadInfo(Thread.currentThread(), Process.myTid());
+ }
+
+ private ThreadInfo(Thread thread, int tid) {
+ this.thread = thread;
+ this.tid = tid;
+ }
+ }
+
+ private static volatile boolean internalTracerInitialized;
+
+ // Remove these once deprecated static printStackTrace() is gone.
+ @Nullable private static ThreadInfo staticNetworkThread;
+ @Nullable private static ThreadInfo staticWorkerThread;
+ @Nullable private static ThreadInfo staticSignalingThread;
+
+ private long nativeFactory;
+ @Nullable private volatile ThreadInfo networkThread;
+ @Nullable private volatile ThreadInfo workerThread;
+ @Nullable private volatile ThreadInfo signalingThread;
+
+ public static class InitializationOptions {
+ final Context applicationContext;
+ final String fieldTrials;
+ final boolean enableInternalTracer;
+ final NativeLibraryLoader nativeLibraryLoader;
+ final String nativeLibraryName;
+ @Nullable Loggable loggable;
+ @Nullable Severity loggableSeverity;
+
+ private InitializationOptions(Context applicationContext, String fieldTrials,
+ boolean enableInternalTracer, NativeLibraryLoader nativeLibraryLoader,
+ String nativeLibraryName, @Nullable Loggable loggable,
+ @Nullable Severity loggableSeverity) {
+ this.applicationContext = applicationContext;
+ this.fieldTrials = fieldTrials;
+ this.enableInternalTracer = enableInternalTracer;
+ this.nativeLibraryLoader = nativeLibraryLoader;
+ this.nativeLibraryName = nativeLibraryName;
+ this.loggable = loggable;
+ this.loggableSeverity = loggableSeverity;
+ }
+
+ public static Builder builder(Context applicationContext) {
+ return new Builder(applicationContext);
+ }
+
+ public static class Builder {
+ private final Context applicationContext;
+ private String fieldTrials = "";
+ private boolean enableInternalTracer;
+ private NativeLibraryLoader nativeLibraryLoader = new NativeLibrary.DefaultLoader();
+ private String nativeLibraryName = "jingle_peerconnection_so";
+ @Nullable private Loggable loggable;
+ @Nullable private Severity loggableSeverity;
+
+ Builder(Context applicationContext) {
+ this.applicationContext = applicationContext;
+ }
+
+ public Builder setFieldTrials(String fieldTrials) {
+ this.fieldTrials = fieldTrials;
+ return this;
+ }
+
+ public Builder setEnableInternalTracer(boolean enableInternalTracer) {
+ this.enableInternalTracer = enableInternalTracer;
+ return this;
+ }
+
+ public Builder setNativeLibraryLoader(NativeLibraryLoader nativeLibraryLoader) {
+ this.nativeLibraryLoader = nativeLibraryLoader;
+ return this;
+ }
+
+ public Builder setNativeLibraryName(String nativeLibraryName) {
+ this.nativeLibraryName = nativeLibraryName;
+ return this;
+ }
+
+ public Builder setInjectableLogger(Loggable loggable, Severity severity) {
+ this.loggable = loggable;
+ this.loggableSeverity = severity;
+ return this;
+ }
+
+ public PeerConnectionFactory.InitializationOptions createInitializationOptions() {
+ return new PeerConnectionFactory.InitializationOptions(applicationContext, fieldTrials,
+ enableInternalTracer, nativeLibraryLoader, nativeLibraryName, loggable,
+ loggableSeverity);
+ }
+ }
+ }
+
+ public static class Options {
+ // Keep in sync with webrtc/rtc_base/network.h!
+ //
+ // These bit fields are defined for `networkIgnoreMask` below.
+ public static final int ADAPTER_TYPE_UNKNOWN = 0;
+ public static final int ADAPTER_TYPE_ETHERNET = 1 << 0;
+ public static final int ADAPTER_TYPE_WIFI = 1 << 1;
+ public static final int ADAPTER_TYPE_CELLULAR = 1 << 2;
+ public static final int ADAPTER_TYPE_VPN = 1 << 3;
+ public static final int ADAPTER_TYPE_LOOPBACK = 1 << 4;
+ public static final int ADAPTER_TYPE_ANY = 1 << 5;
+
+ public int networkIgnoreMask;
+ public boolean disableEncryption;
+ public boolean disableNetworkMonitor;
+
+ @CalledByNative("Options")
+ int getNetworkIgnoreMask() {
+ return networkIgnoreMask;
+ }
+
+ @CalledByNative("Options")
+ boolean getDisableEncryption() {
+ return disableEncryption;
+ }
+
+ @CalledByNative("Options")
+ boolean getDisableNetworkMonitor() {
+ return disableNetworkMonitor;
+ }
+ }
+
+ public static class Builder {
+ @Nullable private Options options;
+ @Nullable private AudioDeviceModule audioDeviceModule;
+ private AudioEncoderFactoryFactory audioEncoderFactoryFactory =
+ new BuiltinAudioEncoderFactoryFactory();
+ private AudioDecoderFactoryFactory audioDecoderFactoryFactory =
+ new BuiltinAudioDecoderFactoryFactory();
+ @Nullable private VideoEncoderFactory videoEncoderFactory;
+ @Nullable private VideoDecoderFactory videoDecoderFactory;
+ @Nullable private AudioProcessingFactory audioProcessingFactory;
+ @Nullable private FecControllerFactoryFactoryInterface fecControllerFactoryFactory;
+ @Nullable private NetworkControllerFactoryFactory networkControllerFactoryFactory;
+ @Nullable private NetworkStatePredictorFactoryFactory networkStatePredictorFactoryFactory;
+ @Nullable private NetEqFactoryFactory neteqFactoryFactory;
+
+ private Builder() {}
+
+ public Builder setOptions(Options options) {
+ this.options = options;
+ return this;
+ }
+
+ public Builder setAudioDeviceModule(AudioDeviceModule audioDeviceModule) {
+ this.audioDeviceModule = audioDeviceModule;
+ return this;
+ }
+
+ public Builder setAudioEncoderFactoryFactory(
+ AudioEncoderFactoryFactory audioEncoderFactoryFactory) {
+ if (audioEncoderFactoryFactory == null) {
+ throw new IllegalArgumentException(
+ "PeerConnectionFactory.Builder does not accept a null AudioEncoderFactoryFactory.");
+ }
+ this.audioEncoderFactoryFactory = audioEncoderFactoryFactory;
+ return this;
+ }
+
+ public Builder setAudioDecoderFactoryFactory(
+ AudioDecoderFactoryFactory audioDecoderFactoryFactory) {
+ if (audioDecoderFactoryFactory == null) {
+ throw new IllegalArgumentException(
+ "PeerConnectionFactory.Builder does not accept a null AudioDecoderFactoryFactory.");
+ }
+ this.audioDecoderFactoryFactory = audioDecoderFactoryFactory;
+ return this;
+ }
+
+ public Builder setVideoEncoderFactory(VideoEncoderFactory videoEncoderFactory) {
+ this.videoEncoderFactory = videoEncoderFactory;
+ return this;
+ }
+
+ public Builder setVideoDecoderFactory(VideoDecoderFactory videoDecoderFactory) {
+ this.videoDecoderFactory = videoDecoderFactory;
+ return this;
+ }
+
+ public Builder setAudioProcessingFactory(AudioProcessingFactory audioProcessingFactory) {
+ if (audioProcessingFactory == null) {
+ throw new NullPointerException(
+ "PeerConnectionFactory builder does not accept a null AudioProcessingFactory.");
+ }
+ this.audioProcessingFactory = audioProcessingFactory;
+ return this;
+ }
+
+ public Builder setFecControllerFactoryFactoryInterface(
+ FecControllerFactoryFactoryInterface fecControllerFactoryFactory) {
+ this.fecControllerFactoryFactory = fecControllerFactoryFactory;
+ return this;
+ }
+
+ public Builder setNetworkControllerFactoryFactory(
+ NetworkControllerFactoryFactory networkControllerFactoryFactory) {
+ this.networkControllerFactoryFactory = networkControllerFactoryFactory;
+ return this;
+ }
+
+ public Builder setNetworkStatePredictorFactoryFactory(
+ NetworkStatePredictorFactoryFactory networkStatePredictorFactoryFactory) {
+ this.networkStatePredictorFactoryFactory = networkStatePredictorFactoryFactory;
+ return this;
+ }
+
+ /**
+ * Sets a NetEqFactoryFactory for the PeerConnectionFactory. When using a
+ * custom NetEqFactoryFactory, the AudioDecoderFactoryFactory will be set
+ * to null. The AudioDecoderFactoryFactory should be wrapped in the
+ * NetEqFactoryFactory.
+ */
+ public Builder setNetEqFactoryFactory(NetEqFactoryFactory neteqFactoryFactory) {
+ this.neteqFactoryFactory = neteqFactoryFactory;
+ return this;
+ }
+
+ public PeerConnectionFactory createPeerConnectionFactory() {
+ checkInitializeHasBeenCalled();
+ if (audioDeviceModule == null) {
+ audioDeviceModule = JavaAudioDeviceModule.builder(ContextUtils.getApplicationContext())
+ .createAudioDeviceModule();
+ }
+ return nativeCreatePeerConnectionFactory(ContextUtils.getApplicationContext(), options,
+ audioDeviceModule.getNativeAudioDeviceModulePointer(),
+ audioEncoderFactoryFactory.createNativeAudioEncoderFactory(),
+ audioDecoderFactoryFactory.createNativeAudioDecoderFactory(), videoEncoderFactory,
+ videoDecoderFactory,
+ audioProcessingFactory == null ? 0 : audioProcessingFactory.createNative(),
+ fecControllerFactoryFactory == null ? 0 : fecControllerFactoryFactory.createNative(),
+ networkControllerFactoryFactory == null
+ ? 0
+ : networkControllerFactoryFactory.createNativeNetworkControllerFactory(),
+ networkStatePredictorFactoryFactory == null
+ ? 0
+ : networkStatePredictorFactoryFactory.createNativeNetworkStatePredictorFactory(),
+ neteqFactoryFactory == null ? 0 : neteqFactoryFactory.createNativeNetEqFactory());
+ }
+ }
+
+ public static Builder builder() {
+ return new Builder();
+ }
+
+ /**
+ * Loads and initializes WebRTC. This must be called at least once before creating a
+ * PeerConnectionFactory. Replaces all the old initialization methods. Must not be called while
+ * a PeerConnectionFactory is alive.
+ */
+ public static void initialize(InitializationOptions options) {
+ ContextUtils.initialize(options.applicationContext);
+ NativeLibrary.initialize(options.nativeLibraryLoader, options.nativeLibraryName);
+ nativeInitializeAndroidGlobals();
+ nativeInitializeFieldTrials(options.fieldTrials);
+ if (options.enableInternalTracer && !internalTracerInitialized) {
+ initializeInternalTracer();
+ }
+ if (options.loggable != null) {
+ Logging.injectLoggable(options.loggable, options.loggableSeverity);
+ nativeInjectLoggable(new JNILogging(options.loggable), options.loggableSeverity.ordinal());
+ } else {
+ Logging.d(TAG,
+ "PeerConnectionFactory was initialized without an injected Loggable. "
+ + "Any existing Loggable will be deleted.");
+ Logging.deleteInjectedLoggable();
+ nativeDeleteLoggable();
+ }
+ }
+
+ private static void checkInitializeHasBeenCalled() {
+ if (!NativeLibrary.isLoaded() || ContextUtils.getApplicationContext() == null) {
+ throw new IllegalStateException(
+ "PeerConnectionFactory.initialize was not called before creating a "
+ + "PeerConnectionFactory.");
+ }
+ }
+
+ private static void initializeInternalTracer() {
+ internalTracerInitialized = true;
+ nativeInitializeInternalTracer();
+ }
+
+ public static void shutdownInternalTracer() {
+ internalTracerInitialized = false;
+ nativeShutdownInternalTracer();
+ }
+
+ // Field trial initialization. Must be called before PeerConnectionFactory
+ // is created.
+ // Deprecated, use PeerConnectionFactory.initialize instead.
+ @Deprecated
+ public static void initializeFieldTrials(String fieldTrialsInitString) {
+ nativeInitializeFieldTrials(fieldTrialsInitString);
+ }
+
+ // Wrapper of webrtc::field_trial::FindFullName. Develop the feature with default behaviour off.
+ // Example usage:
+ // if (PeerConnectionFactory.fieldTrialsFindFullName("WebRTCExperiment").equals("Enabled")) {
+ // method1();
+ // } else {
+ // method2();
+ // }
+ public static String fieldTrialsFindFullName(String name) {
+ return NativeLibrary.isLoaded() ? nativeFindFieldTrialsFullName(name) : "";
+ }
+ // Start/stop internal capturing of internal tracing.
+ public static boolean startInternalTracingCapture(String tracingFilename) {
+ return nativeStartInternalTracingCapture(tracingFilename);
+ }
+
+ public static void stopInternalTracingCapture() {
+ nativeStopInternalTracingCapture();
+ }
+
+ @CalledByNative
+ PeerConnectionFactory(long nativeFactory) {
+ checkInitializeHasBeenCalled();
+ if (nativeFactory == 0) {
+ throw new RuntimeException("Failed to initialize PeerConnectionFactory!");
+ }
+ this.nativeFactory = nativeFactory;
+ }
+
+ /**
+ * Internal helper function to pass the parameters down into the native JNI bridge.
+ */
+ @Nullable
+ PeerConnection createPeerConnectionInternal(PeerConnection.RTCConfiguration rtcConfig,
+ MediaConstraints constraints, PeerConnection.Observer observer,
+ SSLCertificateVerifier sslCertificateVerifier) {
+ checkPeerConnectionFactoryExists();
+ long nativeObserver = PeerConnection.createNativePeerConnectionObserver(observer);
+ if (nativeObserver == 0) {
+ return null;
+ }
+ long nativePeerConnection = nativeCreatePeerConnection(
+ nativeFactory, rtcConfig, constraints, nativeObserver, sslCertificateVerifier);
+ if (nativePeerConnection == 0) {
+ return null;
+ }
+ return new PeerConnection(nativePeerConnection);
+ }
+
+ /**
+ * Deprecated. PeerConnection constraints are deprecated. Supply values in rtcConfig struct
+ * instead and use the method without constraints in the signature.
+ */
+ @Nullable
+ @Deprecated
+ public PeerConnection createPeerConnection(PeerConnection.RTCConfiguration rtcConfig,
+ MediaConstraints constraints, PeerConnection.Observer observer) {
+ return createPeerConnectionInternal(
+ rtcConfig, constraints, observer, /* sslCertificateVerifier= */ null);
+ }
+
+ /**
+ * Deprecated. PeerConnection constraints are deprecated. Supply values in rtcConfig struct
+ * instead and use the method without constraints in the signature.
+ */
+ @Nullable
+ @Deprecated
+ public PeerConnection createPeerConnection(List<PeerConnection.IceServer> iceServers,
+ MediaConstraints constraints, PeerConnection.Observer observer) {
+ PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
+ rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+ return createPeerConnection(rtcConfig, constraints, observer);
+ }
+
+ @Nullable
+ public PeerConnection createPeerConnection(
+ List<PeerConnection.IceServer> iceServers, PeerConnection.Observer observer) {
+ PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
+ rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+ return createPeerConnection(rtcConfig, observer);
+ }
+
+ @Nullable
+ public PeerConnection createPeerConnection(
+ PeerConnection.RTCConfiguration rtcConfig, PeerConnection.Observer observer) {
+ return createPeerConnection(rtcConfig, null /* constraints */, observer);
+ }
+
+ @Nullable
+ public PeerConnection createPeerConnection(
+ PeerConnection.RTCConfiguration rtcConfig, PeerConnectionDependencies dependencies) {
+ return createPeerConnectionInternal(rtcConfig, null /* constraints */,
+ dependencies.getObserver(), dependencies.getSSLCertificateVerifier());
+ }
+
+ public MediaStream createLocalMediaStream(String label) {
+ checkPeerConnectionFactoryExists();
+ return new MediaStream(nativeCreateLocalMediaStream(nativeFactory, label));
+ }
+
+ /**
+ * Create video source with given parameters. If alignTimestamps is false, the caller is
+ * responsible for aligning the frame timestamps to rtc::TimeNanos(). This can be used to achieve
+ * higher accuracy if there is a big delay between frame creation and frames being delivered to
+ * the returned video source. If alignTimestamps is true, timestamps will be aligned to
+ * rtc::TimeNanos() when they arrive to the returned video source.
+ *
+ * @throws IllegalStateException if the factory has been disposed
+ */
+ public VideoSource createVideoSource(boolean isScreencast, boolean alignTimestamps) {
+ checkPeerConnectionFactoryExists();
+ return new VideoSource(nativeCreateVideoSource(nativeFactory, isScreencast, alignTimestamps));
+ }
+
+ /**
+ * Same as above with alignTimestamps set to true.
+ *
+ * @see #createVideoSource(boolean, boolean)
+ */
+ public VideoSource createVideoSource(boolean isScreencast) {
+ return createVideoSource(isScreencast, /* alignTimestamps= */ true);
+ }
+
+ /**
+ * Creates a VideoTrack with the given id, backed by the given source.
+ *
+ * @throws IllegalStateException if the factory has been disposed
+ */
+ public VideoTrack createVideoTrack(String id, VideoSource source) {
+ checkPeerConnectionFactoryExists();
+ return new VideoTrack(
+ nativeCreateVideoTrack(nativeFactory, id, source.getNativeVideoTrackSource()));
+ }
+
+ /**
+ * Creates an AudioSource with the given media constraints.
+ *
+ * @throws IllegalStateException if the factory has been disposed
+ */
+ public AudioSource createAudioSource(MediaConstraints constraints) {
+ checkPeerConnectionFactoryExists();
+ return new AudioSource(nativeCreateAudioSource(nativeFactory, constraints));
+ }
+
+ /**
+ * Creates an AudioTrack with the given id, backed by the given source.
+ *
+ * @throws IllegalStateException if the factory has been disposed
+ */
+ public AudioTrack createAudioTrack(String id, AudioSource source) {
+ checkPeerConnectionFactoryExists();
+ return new AudioTrack(nativeCreateAudioTrack(nativeFactory, id, source.getNativeAudioSource()));
+ }
+
+ /** Returns the RTP receiver capabilities for the given media type (audio or video). */
+ public RtpCapabilities getRtpReceiverCapabilities(MediaStreamTrack.MediaType mediaType) {
+ checkPeerConnectionFactoryExists();
+ return nativeGetRtpReceiverCapabilities(nativeFactory, mediaType);
+ }
+
+ /** Returns the RTP sender capabilities for the given media type (audio or video). */
+ public RtpCapabilities getRtpSenderCapabilities(MediaStreamTrack.MediaType mediaType) {
+ checkPeerConnectionFactoryExists();
+ return nativeGetRtpSenderCapabilities(nativeFactory, mediaType);
+ }
+
+ // Starts recording an AEC dump. Ownership of the file is transferred to the
+ // native code. If an AEC dump is already in progress, it will be stopped and
+ // a new one will start using the provided file.
+ // Throws IllegalStateException if the factory has been disposed.
+ public boolean startAecDump(int file_descriptor, int filesize_limit_bytes) {
+ checkPeerConnectionFactoryExists();
+ return nativeStartAecDump(nativeFactory, file_descriptor, filesize_limit_bytes);
+ }
+
+ // Stops recording an AEC dump. If no AEC dump is currently being recorded,
+ // this call will have no effect.
+ public void stopAecDump() {
+ checkPeerConnectionFactoryExists();
+ nativeStopAecDump(nativeFactory);
+ }
+
+ /**
+ * Frees the native factory and clears the recorded thread info. After this call
+ * nativeFactory is 0, so any further use of this object throws IllegalStateException.
+ */
+ public void dispose() {
+ checkPeerConnectionFactoryExists();
+ nativeFreeFactory(nativeFactory);
+ networkThread = null;
+ workerThread = null;
+ signalingThread = null;
+ nativeFactory = 0;
+ }
+
+ /** Returns a pointer to the native webrtc::PeerConnectionFactoryInterface. */
+ public long getNativePeerConnectionFactory() {
+ checkPeerConnectionFactoryExists();
+ return nativeGetNativePeerConnectionFactory(nativeFactory);
+ }
+
+ /** Returns a pointer to the native OwnedFactoryAndThreads object */
+ public long getNativeOwnedFactoryAndThreads() {
+ checkPeerConnectionFactoryExists();
+ return nativeFactory;
+ }
+
+ // Guard used by every public method: fails fast once dispose() has zeroed nativeFactory.
+ private void checkPeerConnectionFactoryExists() {
+ if (nativeFactory == 0) {
+ throw new IllegalStateException("PeerConnectionFactory has been disposed.");
+ }
+ }
+
+ /**
+ * Logs the Java stack trace of the given thread and, optionally, its native (C++) stack.
+ *
+ * @param threadInfo thread to dump; no-op when null (thread callbacks not yet completed)
+ * @param printNativeStackTrace when true, also emits a tombstone-style header and asks the
+ * native layer to print the C++ stack for the thread (symbolizable with ndk-stack)
+ */
+ private static void printStackTrace(
+ @Nullable ThreadInfo threadInfo, boolean printNativeStackTrace) {
+ if (threadInfo == null) {
+ // Thread callbacks have not been completed yet, ignore call.
+ return;
+ }
+ final String threadName = threadInfo.thread.getName();
+ StackTraceElement[] stackTraces = threadInfo.thread.getStackTrace();
+ if (stackTraces.length > 0) {
+ Logging.w(TAG, threadName + " stacktrace:");
+ for (StackTraceElement stackTrace : stackTraces) {
+ Logging.w(TAG, stackTrace.toString());
+ }
+ }
+ if (printNativeStackTrace) {
+ // Imitate output from debuggerd/tombstone so that stack trace can easily be symbolized with
+ // ndk-stack.
+ Logging.w(TAG, "*** *** *** *** *** *** *** *** *** *** *** *** *** *** *** ***");
+ Logging.w(TAG,
+ "pid: " + Process.myPid() + ", tid: " + threadInfo.tid + ", name: " + threadName
+ + " >>> WebRTC <<<");
+ nativePrintStackTrace(threadInfo.tid);
+ }
+ }
+
+ /** Deprecated, use non-static {@link #printInternalStackTraces(boolean)} instead. */
+ @Deprecated
+ public static void printStackTraces() {
+ printStackTrace(staticNetworkThread, /* printNativeStackTrace= */ false);
+ printStackTrace(staticWorkerThread, /* printNativeStackTrace= */ false);
+ printStackTrace(staticSignalingThread, /* printNativeStackTrace= */ false);
+ }
+
+ /**
+ * Print the Java stack traces for the critical threads used by PeerConnectionFactory, namely;
+ * signaling thread, worker thread, and network thread. If printNativeStackTraces is true, also
+ * attempt to print the C++ stack traces for these threads.
+ */
+ public void printInternalStackTraces(boolean printNativeStackTraces) {
+ printStackTrace(signalingThread, printNativeStackTraces);
+ printStackTrace(workerThread, printNativeStackTraces);
+ printStackTrace(networkThread, printNativeStackTraces);
+ }
+
+ // Invoked from native code once the network thread is running; records its ThreadInfo
+ // (instance and static copies) so printInternalStackTraces/printStackTraces can dump it.
+ @CalledByNative
+ private void onNetworkThreadReady() {
+ networkThread = ThreadInfo.getCurrent();
+ staticNetworkThread = networkThread;
+ Logging.d(TAG, "onNetworkThreadReady");
+ }
+
+ // Invoked from native code once the worker thread is running; see onNetworkThreadReady.
+ @CalledByNative
+ private void onWorkerThreadReady() {
+ workerThread = ThreadInfo.getCurrent();
+ staticWorkerThread = workerThread;
+ Logging.d(TAG, "onWorkerThreadReady");
+ }
+
+ // Invoked from native code once the signaling thread is running; see onNetworkThreadReady.
+ @CalledByNative
+ private void onSignalingThreadReady() {
+ signalingThread = ThreadInfo.getCurrent();
+ staticSignalingThread = signalingThread;
+ Logging.d(TAG, "onSignalingThreadReady");
+ }
+
+ // ---- JNI bridge: the methods below are implemented in the native WebRTC library. ----
+
+ // Must be called at least once before creating a PeerConnectionFactory
+ // (for example, at application startup time).
+ private static native void nativeInitializeAndroidGlobals();
+ private static native void nativeInitializeFieldTrials(String fieldTrialsInitString);
+ private static native String nativeFindFieldTrialsFullName(String name);
+ private static native void nativeInitializeInternalTracer();
+ // Internal tracing shutdown, called to prevent resource leaks. Must be called after
+ // PeerConnectionFactory is gone to prevent races with code performing tracing.
+ private static native void nativeShutdownInternalTracer();
+ private static native boolean nativeStartInternalTracingCapture(String tracingFilename);
+ private static native void nativeStopInternalTracingCapture();
+
+ private static native PeerConnectionFactory nativeCreatePeerConnectionFactory(Context context,
+ Options options, long nativeAudioDeviceModule, long audioEncoderFactory,
+ long audioDecoderFactory, VideoEncoderFactory encoderFactory,
+ VideoDecoderFactory decoderFactory, long nativeAudioProcessor,
+ long nativeFecControllerFactory, long nativeNetworkControllerFactory,
+ long nativeNetworkStatePredictorFactory, long neteqFactory);
+
+ private static native long nativeCreatePeerConnection(long factory,
+ PeerConnection.RTCConfiguration rtcConfig, MediaConstraints constraints, long nativeObserver,
+ SSLCertificateVerifier sslCertificateVerifier);
+ private static native long nativeCreateLocalMediaStream(long factory, String label);
+ private static native long nativeCreateVideoSource(
+ long factory, boolean is_screencast, boolean alignTimestamps);
+ private static native long nativeCreateVideoTrack(
+ long factory, String id, long nativeVideoSource);
+ private static native long nativeCreateAudioSource(long factory, MediaConstraints constraints);
+ private static native long nativeCreateAudioTrack(long factory, String id, long nativeSource);
+ private static native boolean nativeStartAecDump(
+ long factory, int file_descriptor, int filesize_limit_bytes);
+ private static native void nativeStopAecDump(long factory);
+ private static native void nativeFreeFactory(long factory);
+ private static native long nativeGetNativePeerConnectionFactory(long factory);
+ private static native void nativeInjectLoggable(JNILogging jniLogging, int severity);
+ private static native void nativeDeleteLoggable();
+ private static native void nativePrintStackTrace(int tid);
+ private static native RtpCapabilities nativeGetRtpSenderCapabilities(
+ long factory, MediaStreamTrack.MediaType mediaType);
+ private static native RtpCapabilities nativeGetRtpReceiverCapabilities(
+ long factory, MediaStreamTrack.MediaType mediaType);
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/PlatformSoftwareVideoDecoderFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/PlatformSoftwareVideoDecoderFactory.java
new file mode 100644
index 00000000..caca5e58
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/PlatformSoftwareVideoDecoderFactory.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodecInfo;
+import androidx.annotation.Nullable;
+import java.util.Arrays;
+
+/** Factory for Android platform software VideoDecoders. */
+public class PlatformSoftwareVideoDecoderFactory extends MediaCodecVideoDecoderFactory {
+ /**
+ * Default allowed predicate: accepts only software-only codec implementations.
+ * (Restored the generic type argument; it had decayed to a raw Predicate.)
+ */
+ private static final Predicate<MediaCodecInfo> defaultAllowedPredicate =
+ new Predicate<MediaCodecInfo>() {
+ @Override
+ public boolean test(MediaCodecInfo arg) {
+ return MediaCodecUtils.isSoftwareOnly(arg);
+ }
+ };
+
+ /**
+ * Creates a PlatformSoftwareVideoDecoderFactory that supports surface texture rendering.
+ *
+ * @param sharedContext The textures generated will be accessible from this context. May be null,
+ * this disables texture support.
+ */
+ public PlatformSoftwareVideoDecoderFactory(@Nullable EglBase.Context sharedContext) {
+ super(sharedContext, defaultAllowedPredicate);
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Predicate.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Predicate.java
new file mode 100644
index 00000000..50e69750
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Predicate.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Represents a predicate (boolean-valued function) of one argument.
+ *
+ * @param <T> the type of the input to the predicate
+ */
+public interface Predicate<T> {
+ /**
+ * Evaluates this predicate on the given argument.
+ *
+ * @param arg the input argument
+ * @return true if the input argument matches the predicate, otherwise false
+ */
+ boolean test(T arg);
+
+ /**
+ * Returns a composed predicate that represents a short-circuiting logical OR of this predicate
+ * and another. When evaluating the composed predicate, if this predicate is true, then the other
+ * predicate is not evaluated.
+ *
+ * @param other a predicate that will be logically-ORed with this predicate
+ * @return a composed predicate that represents the short-circuiting logical OR of this predicate
+ * and the other predicate
+ */
+ default Predicate<T> or(Predicate<? super T> other) {
+ return new Predicate<T>() {
+ @Override
+ public boolean test(T arg) {
+ return Predicate.this.test(arg) || other.test(arg);
+ }
+ };
+ }
+
+ /**
+ * Returns a composed predicate that represents a short-circuiting logical AND of this predicate
+ * and another.
+ *
+ * @param other a predicate that will be logically-ANDed with this predicate
+ * @return a composed predicate that represents the short-circuiting logical AND of this predicate
+ * and the other predicate
+ */
+ default Predicate<T> and(Predicate<? super T> other) {
+ return new Predicate<T>() {
+ @Override
+ public boolean test(T arg) {
+ return Predicate.this.test(arg) && other.test(arg);
+ }
+ };
+ }
+
+ /**
+ * Returns a predicate that represents the logical negation of this predicate.
+ *
+ * @return a predicate that represents the logical negation of this predicate
+ */
+ default Predicate<T> negate() {
+ return new Predicate<T>() {
+ @Override
+ public boolean test(T arg) {
+ return !Predicate.this.test(arg);
+ }
+ };
+ }
+}
\ No newline at end of file
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Priority.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Priority.java
new file mode 100644
index 00000000..a858cc61
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Priority.java
@@ -0,0 +1,13 @@
+
+// IntelliJ API Decompiler stub source generated from a class file
+// Implementation of methods is not available
+
+package org.webrtc;
+
+// Source-retained annotation carrying integer priority constants (IntDef-style).
+// NOTE(review): decompiler stub — presumably used to annotate ints restricted to these
+// four values; confirm against callers.
+@java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.SOURCE)
+public @interface Priority {
+ int VERY_LOW = 0;
+ int LOW = 1;
+ int MEDIUM = 2;
+ int HIGH = 3;
+}
\ No newline at end of file
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/RTCStats.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/RTCStats.java
new file mode 100644
index 00000000..eaa28de1
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/RTCStats.java
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.Map;
+
+/**
+ * Java version of webrtc::RTCStats. Represents an RTCStats object, as
+ * described in https://w3c.github.io/webrtc-stats/. The `id`, `timestampUs`
+ * and `type` accessors have the same meaning for this class as for the
+ * RTCStats dictionary. Each RTCStatsReport produced by getStats contains
+ * multiple RTCStats objects; one for each underlying object (codec, stream,
+ * transport, etc.) that was inspected to produce the stats.
+ */
+public class RTCStats {
+ private final long timestampUs;
+ private final String type;
+ private final String id;
+ // Restored the generic type arguments; they had decayed to raw Map in this copy.
+ private final Map<String, Object> members;
+
+ public RTCStats(long timestampUs, String type, String id, Map<String, Object> members) {
+ this.timestampUs = timestampUs;
+ this.type = type;
+ this.id = id;
+ this.members = members;
+ }
+
+ // Timestamp in microseconds.
+ public double getTimestampUs() {
+ return timestampUs;
+ }
+
+ // Equivalent to RTCStatsType in the stats spec. Indicates the type of the
+ // object that was inspected to produce the stats.
+ public String getType() {
+ return type;
+ }
+
+ // Unique ID representing this stats object. May be referred to by members of
+ // other stats objects.
+ public String getId() {
+ return id;
+ }
+
+ /**
+ * Returns map of member names to values. Returns as an ordered map so that
+ * the stats object can be serialized with a consistent ordering.
+ *
+ * Values will be one of the following objects:
+ * - Boolean
+ * - Integer (for 32-bit signed integers)
+ * - Long (for 32-bit unsigned and 64-bit signed integers)
+ * - BigInteger (for 64-bit unsigned integers)
+ * - Double
+ * - String
+ * - The array form of any of the above (e.g., Integer[])
+ * - Map of String keys to BigInteger / Double values
+ */
+ public Map<String, Object> getMembers() {
+ return members;
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder builder = new StringBuilder();
+ builder.append("{ timestampUs: ")
+ .append(timestampUs)
+ .append(", type: ")
+ .append(type)
+ .append(", id: ")
+ .append(id);
+ // (Removed an unused local `first` that was declared but never read.)
+ for (Map.Entry<String, Object> entry : members.entrySet()) {
+ builder.append(", ").append(entry.getKey()).append(": ");
+ appendValue(builder, entry.getValue());
+ }
+ builder.append(" }");
+ return builder.toString();
+ }
+
+ // Recursively appends `value`: arrays are rendered in brackets, strings are quoted,
+ // everything else uses its default toString.
+ private static void appendValue(StringBuilder builder, Object value) {
+ if (value instanceof Object[]) {
+ Object[] arrayValue = (Object[]) value;
+ builder.append('[');
+ for (int i = 0; i < arrayValue.length; ++i) {
+ if (i != 0) {
+ builder.append(", ");
+ }
+ appendValue(builder, arrayValue[i]);
+ }
+ builder.append(']');
+ } else if (value instanceof String) {
+ // Enclose strings in quotes to make it clear they're strings.
+ builder.append('"').append(value).append('"');
+ } else {
+ builder.append(value);
+ }
+ }
+
+ // Raw Map kept intentionally: this is invoked from native code.
+ // TODO(bugs.webrtc.org/8557) Use ctor directly with full Map type.
+ @SuppressWarnings("unchecked")
+ @CalledByNative
+ static RTCStats create(long timestampUs, String type, String id, Map members) {
+ return new RTCStats(timestampUs, type, id, members);
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/RTCStatsCollectorCallback.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/RTCStatsCollectorCallback.java
new file mode 100644
index 00000000..dc8902c9
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/RTCStatsCollectorCallback.java
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Interface for receiving stats reports (see webrtc::RTCStatsCollectorCallback). */
+public interface RTCStatsCollectorCallback {
+ /** Called when the stats report is ready. Invoked from native code (@CalledByNative). */
+ @CalledByNative public void onStatsDelivered(RTCStatsReport report);
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/RTCStatsReport.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/RTCStatsReport.java
new file mode 100644
index 00000000..d4d90db1
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/RTCStatsReport.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.Map;
+
+/**
+ * Java version of webrtc::RTCStatsReport. Each RTCStatsReport produced by
+ * getStats contains multiple RTCStats objects; one for each underlying object
+ * (codec, stream, transport, etc.) that was inspected to produce the stats.
+ */
+public class RTCStatsReport {
+ private final long timestampUs;
+ // Restored the generic type arguments; they had decayed to raw Map in this copy.
+ private final Map<String, RTCStats> stats;
+
+ public RTCStatsReport(long timestampUs, Map<String, RTCStats> stats) {
+ this.timestampUs = timestampUs;
+ this.stats = stats;
+ }
+
+ // Timestamp in microseconds.
+ public double getTimestampUs() {
+ return timestampUs;
+ }
+
+ // Map of stats object IDs to stats objects. Can be used to easily look up
+ // other stats objects, when they refer to each other by ID.
+ public Map<String, RTCStats> getStatsMap() {
+ return stats;
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder builder = new StringBuilder();
+ builder.append("{ timestampUs: ").append(timestampUs).append(", stats: [\n");
+ boolean first = true;
+ for (RTCStats stat : stats.values()) {
+ if (!first) {
+ builder.append(",\n");
+ }
+ builder.append(stat);
+ first = false;
+ }
+ builder.append(" ] }");
+ return builder.toString();
+ }
+
+ // Raw Map kept intentionally: this is invoked from native code.
+ // TODO(bugs.webrtc.org/8557) Use ctor directly with full Map type.
+ @SuppressWarnings("unchecked")
+ @CalledByNative
+ private static RTCStatsReport create(long timestampUs, Map stats) {
+ return new RTCStatsReport(timestampUs, stats);
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/RefCountDelegate.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/RefCountDelegate.java
new file mode 100644
index 00000000..b9210d26
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/RefCountDelegate.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * Implementation of RefCounted that executes a Runnable once the ref count reaches zero.
+ * The count starts at 1 on construction.
+ */
+class RefCountDelegate implements RefCounted {
+ private final AtomicInteger refCount = new AtomicInteger(1);
+ private final @Nullable Runnable releaseCallback;
+
+ /**
+ * @param releaseCallback Callback that will be executed once the ref count reaches zero.
+ */
+ public RefCountDelegate(@Nullable Runnable releaseCallback) {
+ this.releaseCallback = releaseCallback;
+ }
+
+ @Override
+ public void retain() {
+ // A post-increment result below 2 means the count was already < 1 — i.e. the object
+ // had been fully released before this retain.
+ int updated_count = refCount.incrementAndGet();
+ if (updated_count < 2) {
+ throw new IllegalStateException("retain() called on an object with refcount < 1");
+ }
+ }
+
+ @Override
+ public void release() {
+ int updated_count = refCount.decrementAndGet();
+ if (updated_count < 0) {
+ throw new IllegalStateException("release() called on an object with refcount < 1");
+ }
+ // Exactly one caller observes the transition to 0 and runs the callback.
+ if (updated_count == 0 && releaseCallback != null) {
+ releaseCallback.run();
+ }
+ }
+
+ /**
+ * Tries to retain the object. Can be used in scenarios where it is unknown if the object has
+ * already been released. Returns true if successful or false if the object was already released.
+ */
+ boolean safeRetain() {
+ int currentRefCount = refCount.get();
+ while (currentRefCount != 0) {
+ // weakCompareAndSet may fail spuriously; the loop simply re-reads and retries,
+ // giving up only when the count has reached 0 (object released).
+ if (refCount.weakCompareAndSet(currentRefCount, currentRefCount + 1)) {
+ return true;
+ }
+ currentRefCount = refCount.get();
+ }
+ return false;
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/RefCounted.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/RefCounted.java
new file mode 100644
index 00000000..0c1c3bf1
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/RefCounted.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Interface for ref counted objects in WebRTC. These objects have significant resources that need
+ * to be freed when they are no longer in use. Each object starts with ref count of one when
+ * created. If a reference is passed as a parameter to a method, the caller has ownership of the
+ * object by default - calling release is not necessary unless retain is called.
+ */
+public interface RefCounted {
+ /** Increases ref count by one. */
+ @CalledByNative void retain();
+
+ /**
+ * Decreases ref count by one. When the ref count reaches zero, resources related to the object
+ * will be freed.
+ */
+ @CalledByNative void release();
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/RenderSynchronizer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/RenderSynchronizer.java
new file mode 100644
index 00000000..c89f798c
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/RenderSynchronizer.java
@@ -0,0 +1,127 @@
+/*
+ * Copyright 2023 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.os.Build.VERSION;
+import android.os.Build.VERSION_CODES;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Trace;
+import android.view.Choreographer;
+import androidx.annotation.GuardedBy;
+import java.util.List;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Class to synchronize rendering updates with display refresh cycles and save power by blocking
+ * updates that exceeds the target frame rate.
+ */
+public final class RenderSynchronizer {
+
+ /** Interface for listening to render window updates. */
+ public interface Listener {
+ void onRenderWindowOpen();
+
+ void onRenderWindowClose();
+ }
+
+ private static final String TAG = "RenderSynchronizer";
+ private static final float DEFAULT_TARGET_FPS = 30f;
+ private final Object lock = new Object();
+ private final List listeners = new CopyOnWriteArrayList<>();
+ private final long targetFrameIntervalNanos;
+ private final Handler mainThreadHandler;
+ private Choreographer choreographer;
+
+ @GuardedBy("lock")
+ private boolean isListening;
+
+ private boolean renderWindowOpen;
+ private long lastRefreshTimeNanos;
+ private long lastOpenedTimeNanos;
+
+ public RenderSynchronizer(float targetFrameRateFps) {
+ this.targetFrameIntervalNanos = Math.round(TimeUnit.SECONDS.toNanos(1) / targetFrameRateFps);
+ this.mainThreadHandler = new Handler(Looper.getMainLooper());
+ mainThreadHandler.post(() -> this.choreographer = Choreographer.getInstance());
+ Logging.d(TAG, "Created");
+ }
+
+ public RenderSynchronizer() {
+ this(DEFAULT_TARGET_FPS);
+ }
+
+ public void registerListener(Listener listener) {
+ listeners.add(listener);
+
+ synchronized (lock) {
+ if (!isListening) {
+ Logging.d(TAG, "First listener, subscribing to frame callbacks");
+ isListening = true;
+ mainThreadHandler.post(
+ () -> choreographer.postFrameCallback(this::onDisplayRefreshCycleBegin));
+ }
+ }
+ }
+
+ public void removeListener(Listener listener) {
+ listeners.remove(listener);
+ }
+
+ private void onDisplayRefreshCycleBegin(long refreshTimeNanos) {
+ synchronized (lock) {
+ if (listeners.isEmpty()) {
+ Logging.d(TAG, "No listeners, unsubscribing to frame callbacks");
+ isListening = false;
+ return;
+ }
+ }
+ choreographer.postFrameCallback(this::onDisplayRefreshCycleBegin);
+
+ long lastOpenDeltaNanos = refreshTimeNanos - lastOpenedTimeNanos;
+ long refreshDeltaNanos = refreshTimeNanos - lastRefreshTimeNanos;
+ lastRefreshTimeNanos = refreshTimeNanos;
+
+ // Make a greedy choice whether to open (or keep open) the render window. If the current time
+ // since the render window was last opened is closer to the target than what we predict it would
+ // be in the next refresh cycle then we open the window.
+ if (Math.abs(lastOpenDeltaNanos - targetFrameIntervalNanos)
+ < Math.abs(lastOpenDeltaNanos - targetFrameIntervalNanos + refreshDeltaNanos)) {
+ lastOpenedTimeNanos = refreshTimeNanos;
+ openRenderWindow();
+ } else if (renderWindowOpen) {
+ closeRenderWindow();
+ }
+ }
+
+ private void traceRenderWindowChange() {
+ if (VERSION.SDK_INT >= VERSION_CODES.Q) {
+ Trace.setCounter("RenderWindow", renderWindowOpen ? 1 : 0);
+ }
+ }
+
+ private void openRenderWindow() {
+ renderWindowOpen = true;
+ traceRenderWindowChange();
+ for (Listener listener : listeners) {
+ listener.onRenderWindowOpen();
+ }
+ }
+
+ private void closeRenderWindow() {
+ renderWindowOpen = false;
+ traceRenderWindowChange();
+ for (Listener listener : listeners) {
+ listener.onRenderWindowClose();
+ }
+ }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/RendererCommon.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/RendererCommon.java
new file mode 100644
index 00000000..b97901c6
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/RendererCommon.java
@@ -0,0 +1,259 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Point;
+import android.opengl.Matrix;
+import android.view.View;
+
+/**
+ * Static helper functions for renderer implementations.
+ */
+public class RendererCommon {
+  /** Interface for reporting rendering events. */
+  public interface RendererEvents {
+    /**
+     * Callback fired once first frame is rendered.
+     */
+    void onFirstFrameRendered();
+
+    /**
+     * Callback fired when rendered frame resolution or rotation has changed.
+     */
+    void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation);
+  }
+
+  /**
+   * Interface for rendering frames on an EGLSurface with specified viewport location. Rotation,
+   * mirror, and cropping is specified using a 4x4 texture coordinate transform matrix. The frame
+   * input can either be an OES texture, RGB texture, or YUV textures in I420 format. The function
+   * release() must be called manually to free the resources held by this object.
+   */
+  public interface GlDrawer {
+    /**
+     * Functions for drawing frames with different sources. The rendering surface target is
+     * implied by the current EGL context of the calling thread and requires no explicit argument.
+     * The coordinates specify the viewport location on the surface target.
+     */
+    void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
+        int viewportX, int viewportY, int viewportWidth, int viewportHeight);
+    void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight, int viewportX,
+        int viewportY, int viewportWidth, int viewportHeight);
+    void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
+        int viewportX, int viewportY, int viewportWidth, int viewportHeight);
+
+    /**
+     * Release all GL resources. This needs to be done manually, otherwise resources may leak.
+     */
+    void release();
+  }
+
+  /**
+   * Helper class for determining layout size based on layout requirements, scaling type, and video
+   * aspect ratio.
+   */
+  public static class VideoLayoutMeasure {
+    // The scaling type determines how the video will fill the allowed layout area in measure(). It
+    // can be specified separately for the case when video has matched orientation with layout size
+    // and when there is an orientation mismatch.
+    private float visibleFractionMatchOrientation =
+        convertScalingTypeToVisibleFraction(ScalingType.SCALE_ASPECT_BALANCED);
+    private float visibleFractionMismatchOrientation =
+        convertScalingTypeToVisibleFraction(ScalingType.SCALE_ASPECT_BALANCED);
+
+    /** Uses the same scaling type regardless of frame/layout orientation match. */
+    public void setScalingType(ScalingType scalingType) {
+      setScalingType(/* scalingTypeMatchOrientation= */ scalingType,
+          /* scalingTypeMismatchOrientation= */ scalingType);
+    }
+
+    /** Sets separate scaling types for the orientation-match and orientation-mismatch cases. */
+    public void setScalingType(
+        ScalingType scalingTypeMatchOrientation, ScalingType scalingTypeMismatchOrientation) {
+      this.visibleFractionMatchOrientation =
+          convertScalingTypeToVisibleFraction(scalingTypeMatchOrientation);
+      this.visibleFractionMismatchOrientation =
+          convertScalingTypeToVisibleFraction(scalingTypeMismatchOrientation);
+    }
+
+    /** Directly sets the minimum visible fractions instead of deriving them from scaling types. */
+    public void setVisibleFraction(
+        float visibleFractionMatchOrientation, float visibleFractionMismatchOrientation) {
+      this.visibleFractionMatchOrientation = visibleFractionMatchOrientation;
+      this.visibleFractionMismatchOrientation = visibleFractionMismatchOrientation;
+    }
+
+    /**
+     * Returns the layout size to use for the given measure specs and frame dimensions.
+     * EXACTLY-mode measure specs override the aspect-ratio-derived size.
+     */
+    public Point measure(int widthSpec, int heightSpec, int frameWidth, int frameHeight) {
+      // Calculate max allowed layout size.
+      final int maxWidth = View.getDefaultSize(Integer.MAX_VALUE, widthSpec);
+      final int maxHeight = View.getDefaultSize(Integer.MAX_VALUE, heightSpec);
+      if (frameWidth == 0 || frameHeight == 0 || maxWidth == 0 || maxHeight == 0) {
+        return new Point(maxWidth, maxHeight);
+      }
+      // Calculate desired display size based on scaling type, video aspect ratio,
+      // and maximum layout size.
+      final float frameAspect = frameWidth / (float) frameHeight;
+      final float displayAspect = maxWidth / (float) maxHeight;
+      final float visibleFraction = (frameAspect > 1.0f) == (displayAspect > 1.0f)
+          ? visibleFractionMatchOrientation
+          : visibleFractionMismatchOrientation;
+      final Point layoutSize = getDisplaySize(visibleFraction, frameAspect, maxWidth, maxHeight);
+
+      // If the measure specification is forcing a specific size - yield.
+      if (View.MeasureSpec.getMode(widthSpec) == View.MeasureSpec.EXACTLY) {
+        layoutSize.x = maxWidth;
+      }
+      if (View.MeasureSpec.getMode(heightSpec) == View.MeasureSpec.EXACTLY) {
+        layoutSize.y = maxHeight;
+      }
+      return layoutSize;
+    }
+  }
+
+  // Types of video scaling:
+  // SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
+  //    maintaining the aspect ratio (black borders may be displayed).
+  // SCALE_ASPECT_FILL - video frame is scaled to fill the size of the view by
+  //    maintaining the aspect ratio. Some portion of the video frame may be
+  //    clipped.
+  // SCALE_ASPECT_BALANCED - Compromise between FIT and FILL. Video frame will fill as much as
+  // possible of the view while maintaining aspect ratio, under the constraint that at least
+  // `BALANCED_VISIBLE_FRACTION` of the frame content will be shown.
+  public enum ScalingType { SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_ASPECT_BALANCED }
+  // The minimum fraction of the frame content that will be shown for `SCALE_ASPECT_BALANCED`.
+  // This limits excessive cropping when adjusting display size. Declared final: this is a
+  // constant, not mutable configuration.
+  private static final float BALANCED_VISIBLE_FRACTION = 0.5625f;
+
+  /**
+   * Returns layout transformation matrix that applies an optional mirror effect and compensates
+   * for video vs display aspect ratio.
+   */
+  public static float[] getLayoutMatrix(
+      boolean mirror, float videoAspectRatio, float displayAspectRatio) {
+    float scaleX = 1;
+    float scaleY = 1;
+    // Scale X or Y dimension so that video and display size have same aspect ratio.
+    if (displayAspectRatio > videoAspectRatio) {
+      scaleY = videoAspectRatio / displayAspectRatio;
+    } else {
+      scaleX = displayAspectRatio / videoAspectRatio;
+    }
+    // Apply optional horizontal flip.
+    if (mirror) {
+      scaleX *= -1;
+    }
+    final float[] matrix = new float[16];
+    Matrix.setIdentityM(matrix, 0);
+    Matrix.scaleM(matrix, 0, scaleX, scaleY, 1);
+    adjustOrigin(matrix);
+    return matrix;
+  }
+
+  /** Converts a float[16] matrix array to android.graphics.Matrix. */
+  public static android.graphics.Matrix convertMatrixToAndroidGraphicsMatrix(float[] matrix4x4) {
+    // Drop the z row/column of the column-major 4x4 matrix to get a row-major 3x3 matrix.
+    // clang-format off
+    float[] values = {
+        matrix4x4[0 * 4 + 0], matrix4x4[1 * 4 + 0], matrix4x4[3 * 4 + 0],
+        matrix4x4[0 * 4 + 1], matrix4x4[1 * 4 + 1], matrix4x4[3 * 4 + 1],
+        matrix4x4[0 * 4 + 3], matrix4x4[1 * 4 + 3], matrix4x4[3 * 4 + 3],
+    };
+    // clang-format on
+
+    android.graphics.Matrix matrix = new android.graphics.Matrix();
+    matrix.setValues(values);
+    return matrix;
+  }
+
+  /** Converts android.graphics.Matrix to a float[16] matrix array. */
+  public static float[] convertMatrixFromAndroidGraphicsMatrix(android.graphics.Matrix matrix) {
+    float[] values = new float[9];
+    matrix.getValues(values);
+
+    // The android.graphics.Matrix looks like this:
+    // [x1 y1 w1]
+    // [x2 y2 w2]
+    // [x3 y3 w3]
+    // We want to construct a matrix that looks like this:
+    // [x1 y1 0 w1]
+    // [x2 y2 0 w2]
+    // [ 0  0 1  0]
+    // [x3 y3 0 w3]
+    // Since it is stored in column-major order, it looks like this:
+    // [x1 x2 0 x3
+    //  y1 y2 0 y3
+    //  0  0  1  0
+    //  w1 w2 0 w3]
+    // clang-format off
+    float[] matrix4x4 = {
+        values[0 * 3 + 0], values[1 * 3 + 0], 0, values[2 * 3 + 0],
+        values[0 * 3 + 1], values[1 * 3 + 1], 0, values[2 * 3 + 1],
+        0,                 0,                 1, 0,
+        values[0 * 3 + 2], values[1 * 3 + 2], 0, values[2 * 3 + 2],
+    };
+    // clang-format on
+    return matrix4x4;
+  }
+
+  /**
+   * Calculate display size based on scaling type, video aspect ratio, and maximum display size.
+   */
+  public static Point getDisplaySize(
+      ScalingType scalingType, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
+    return getDisplaySize(convertScalingTypeToVisibleFraction(scalingType), videoAspectRatio,
+        maxDisplayWidth, maxDisplayHeight);
+  }
+
+  /**
+   * Move `matrix` transformation origin to (0.5, 0.5). This is the origin for texture coordinates
+   * that are in the range 0 to 1.
+   */
+  private static void adjustOrigin(float[] matrix) {
+    // Note that OpenGL is using column-major order.
+    // Pre translate with -0.5 to move coordinates to range [-0.5, 0.5].
+    matrix[12] -= 0.5f * (matrix[0] + matrix[4]);
+    matrix[13] -= 0.5f * (matrix[1] + matrix[5]);
+    // Post translate with 0.5 to move coordinates to range [0, 1].
+    matrix[12] += 0.5f;
+    matrix[13] += 0.5f;
+  }
+
+  /**
+   * Each scaling type has a one-to-one correspondence to a numeric minimum fraction of the video
+   * that must remain visible.
+   */
+  private static float convertScalingTypeToVisibleFraction(ScalingType scalingType) {
+    switch (scalingType) {
+      case SCALE_ASPECT_FIT:
+        return 1.0f;
+      case SCALE_ASPECT_FILL:
+        return 0.0f;
+      case SCALE_ASPECT_BALANCED:
+        return BALANCED_VISIBLE_FRACTION;
+      default:
+        throw new IllegalArgumentException();
+    }
+  }
+
+  /**
+   * Calculate display size based on minimum fraction of the video that must remain visible,
+   * video aspect ratio, and maximum display size.
+   */
+  public static Point getDisplaySize(
+      float minVisibleFraction, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
+    // If there is no constraint on the amount of cropping, fill the allowed display area.
+    if (minVisibleFraction == 0 || videoAspectRatio == 0) {
+      return new Point(maxDisplayWidth, maxDisplayHeight);
+    }
+    // Each dimension is constrained on max display size and how much we are allowed to crop.
+    final int width = Math.min(
+        maxDisplayWidth, Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
+    final int height = Math.min(
+        maxDisplayHeight, Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
+    return new Point(width, height);
+  }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/RtcCertificatePem.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/RtcCertificatePem.java
new file mode 100644
index 00000000..6070135b
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/RtcCertificatePem.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.PeerConnection;
+
+/**
+ * Easily storable/serializable version of a native C++ RTCCertificatePEM.
+ */
+public class RtcCertificatePem {
+  /** PEM string representation of the private key. */
+  public final String privateKey;
+  /** PEM string representation of the certificate. */
+  public final String certificate;
+  /** Default expiration time of 30 days (expressed in seconds). */
+  private static final long DEFAULT_EXPIRY = 60 * 60 * 24 * 30;
+
+  /** Instantiate an RtcCertificatePem object from stored strings. */
+  @CalledByNative
+  public RtcCertificatePem(String privateKey, String certificate) {
+    this.privateKey = privateKey;
+    this.certificate = certificate;
+  }
+
+  @CalledByNative
+  String getPrivateKey() {
+    return privateKey;
+  }
+
+  @CalledByNative
+  String getCertificate() {
+    return certificate;
+  }
+
+  /**
+   * Generate a new RtcCertificatePem with the default settings of KeyType = ECDSA and
+   * expires = 30 days.
+   */
+  public static RtcCertificatePem generateCertificate() {
+    return generateCertificate(PeerConnection.KeyType.ECDSA, DEFAULT_EXPIRY);
+  }
+
+  /**
+   * Generate a new RtcCertificatePem with a custom KeyType and the default setting of
+   * expires = 30 days.
+   */
+  public static RtcCertificatePem generateCertificate(PeerConnection.KeyType keyType) {
+    return generateCertificate(keyType, DEFAULT_EXPIRY);
+  }
+
+  /**
+   * Generate a new RtcCertificatePem with a custom expires and the default setting of
+   * KeyType = ECDSA.
+   */
+  public static RtcCertificatePem generateCertificate(long expires) {
+    return generateCertificate(PeerConnection.KeyType.ECDSA, expires);
+  }
+
+  /** Generate a new RtcCertificatePem with a custom KeyType and a custom expires. */
+  public static RtcCertificatePem generateCertificate(
+      PeerConnection.KeyType keyType, long expires) {
+    // All overloads funnel through here so the native entry point has a single Java call site.
+    return nativeGenerateCertificate(keyType, expires);
+  }
+
+  private static native RtcCertificatePem nativeGenerateCertificate(
+      PeerConnection.KeyType keyType, long expires);
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/RtpCapabilities.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/RtpCapabilities.java
new file mode 100644
index 00000000..02d17042
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/RtpCapabilities.java
@@ -0,0 +1,125 @@
+/*
+ * Copyright 2023 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.List;
+import java.util.Map;
+import org.webrtc.MediaStreamTrack;
+
+public class RtpCapabilities {
+ public static class CodecCapability {
+ public int preferredPayloadType;
+ // Name used to identify the codec. Equivalent to MIME subtype.
+ public String name;
+ // The media type of this codec. Equivalent to MIME top-level type.
+ public MediaStreamTrack.MediaType kind;
+ // Clock rate in Hertz.
+ public Integer clockRate;
+ // The number of audio channels used. Set to null for video codecs.
+ public Integer numChannels;
+ // The "format specific parameters" field from the "a=fmtp" line in the SDP
+ public Map parameters;
+ // The MIME type of the codec. This is a convenience field.
+ public String mimeType;
+
+ public CodecCapability() {}
+
+ @CalledByNative("CodecCapability")
+ CodecCapability(int preferredPayloadType, String name, MediaStreamTrack.MediaType kind,
+ Integer clockRate, Integer numChannels, String mimeType, Map parameters) {
+ this.preferredPayloadType = preferredPayloadType;
+ this.name = name;
+ this.kind = kind;
+ this.clockRate = clockRate;
+ this.numChannels = numChannels;
+ this.parameters = parameters;
+ this.mimeType = mimeType;
+ }
+
+ @CalledByNative("CodecCapability")
+ int getPreferredPayloadType() {
+ return preferredPayloadType;
+ }
+
+ @CalledByNative("CodecCapability")
+ String getName() {
+ return name;
+ }
+
+ @CalledByNative("CodecCapability")
+ MediaStreamTrack.MediaType getKind() {
+ return kind;
+ }
+
+ @CalledByNative("CodecCapability")
+ Integer getClockRate() {
+ return clockRate;
+ }
+
+ @CalledByNative("CodecCapability")
+ Integer getNumChannels() {
+ return numChannels;
+ }
+
+ @CalledByNative("CodecCapability")
+ Map getParameters() {
+ return parameters;
+ }
+ }
+
+  /**
+   * Immutable description of a supported RTP header extension: its identifying URI, the ID the
+   * stack prefers to negotiate for it, and whether encryption of the extension is preferred.
+   */
+  public static class HeaderExtensionCapability {
+    // URI identifying the header extension (presumably the "a=extmap" URI — confirm in native).
+    private final String uri;
+    private final int preferredId;
+    private final boolean preferredEncrypted;
+
+    // Instances are created from native code; keep the parameter order in sync with JNI.
+    @CalledByNative("HeaderExtensionCapability")
+    HeaderExtensionCapability(String uri, int preferredId, boolean preferredEncrypted) {
+      this.uri = uri;
+      this.preferredId = preferredId;
+      this.preferredEncrypted = preferredEncrypted;
+    }
+
+    @CalledByNative("HeaderExtensionCapability")
+    public String getUri() {
+      return uri;
+    }
+
+    @CalledByNative("HeaderExtensionCapability")
+    public int getPreferredId() {
+      return preferredId;
+    }
+
+    @CalledByNative("HeaderExtensionCapability")
+    public boolean getPreferredEncrypted() {
+      return preferredEncrypted;
+    }
+  }
+
+  // Parameterized element types instead of raw List: these hold the nested capability classes.
+  /** Codecs this endpoint supports. */
+  public List<CodecCapability> codecs;
+  /** RTP header extensions this endpoint supports. */
+  public List<HeaderExtensionCapability> headerExtensions;
+
+ @CalledByNative
+ RtpCapabilities(List