diff --git a/webrtc_player/android/app/build.gradle b/webrtc_player/android/app/build.gradle index ba13d606..04b816f6 100644 --- a/webrtc_player/android/app/build.gradle +++ b/webrtc_player/android/app/build.gradle @@ -12,7 +12,7 @@ android { defaultConfig { applicationId "com.zlmediakit.webrtc" - minSdk 21 + minSdk 24 targetSdk 32 versionCode 1 versionName "1.0" @@ -41,7 +41,6 @@ dependencies { implementation 'androidx.appcompat:appcompat:1.5.1' implementation 'com.google.android.material:material:1.6.1' implementation 'androidx.constraintlayout:constraintlayout:2.1.4' - implementation project(':zlm') testImplementation 'junit:junit:4.13.2' androidTestImplementation 'androidx.test.ext:junit:1.1.3' androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0' @@ -50,6 +49,6 @@ dependencies { implementation("com.squareup.okhttp3:okhttp:4.10.0") implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version" - implementation 'org.webrtc:google-webrtc:1.0.32006' + implementation project(':zlm') } \ No newline at end of file diff --git a/webrtc_player/android/app/src/main/AndroidManifest.xml b/webrtc_player/android/app/src/main/AndroidManifest.xml index f80d6143..c45d3017 100644 --- a/webrtc_player/android/app/src/main/AndroidManifest.xml +++ b/webrtc_player/android/app/src/main/AndroidManifest.xml @@ -28,12 +28,13 @@ android:roundIcon="@mipmap/ic_launcher_round" android:supportsRtl="true" android:theme="@style/Theme.AndroidWebRTC" + tools:overrideLibrary="com.zlm.rtc" android:usesCleartextTraffic="true" - - + android:name=".App" tools:targetApi="31"> @@ -43,7 +44,8 @@ - + \ No newline at end of file diff --git a/webrtc_player/android/app/src/main/java/com/zlmediakit/webrtc/App.kt b/webrtc_player/android/app/src/main/java/com/zlmediakit/webrtc/App.kt new file mode 100644 index 00000000..5d1796f6 --- /dev/null +++ b/webrtc_player/android/app/src/main/java/com/zlmediakit/webrtc/App.kt @@ -0,0 +1,11 @@ +package com.zlmediakit.webrtc + +import 
android.app.Application + +class App: Application() { + + override fun onCreate() { + super.onCreate() + + } +} \ No newline at end of file diff --git a/webrtc_player/android/app/src/main/java/com/zlmediakit/webrtc/MainActivity.kt b/webrtc_player/android/app/src/main/java/com/zlmediakit/webrtc/MainActivity.kt index 14f55008..18126d70 100644 --- a/webrtc_player/android/app/src/main/java/com/zlmediakit/webrtc/MainActivity.kt +++ b/webrtc_player/android/app/src/main/java/com/zlmediakit/webrtc/MainActivity.kt @@ -10,10 +10,12 @@ class MainActivity : AppCompatActivity() { override fun onCreate(savedInstanceState: Bundle?) { super.onCreate(savedInstanceState) setContentView(R.layout.activity_main) + + } fun toPlayActivity(view: View) { - startActivity(Intent(this, PlayDemoActivity::class.java)) + startActivity(Intent(this, PlayerDemoActivity::class.java)) } fun toPushActivity(view: View) { diff --git a/webrtc_player/android/app/src/main/java/com/zlmediakit/webrtc/PlayerDemoActivity.kt b/webrtc_player/android/app/src/main/java/com/zlmediakit/webrtc/PlayerDemoActivity.kt index 26ed3462..5a81cc25 100644 --- a/webrtc_player/android/app/src/main/java/com/zlmediakit/webrtc/PlayerDemoActivity.kt +++ b/webrtc_player/android/app/src/main/java/com/zlmediakit/webrtc/PlayerDemoActivity.kt @@ -1,6 +1,7 @@ package com.zlmediakit.webrtc import android.os.Bundle +import android.os.Handler import androidx.appcompat.app.AppCompatActivity import com.zlm.rtc.ZLMRTCPlayer import kotlinx.android.synthetic.main.activity_player.surface_view_renderer @@ -10,10 +11,14 @@ class PlayerDemoActivity:AppCompatActivity() { override fun onCreate(savedInstanceState: Bundle?) 
{ super.onCreate(savedInstanceState) - setContentView(R.layout.activity_play) + setContentView(R.layout.activity_player) - ZLMRTCPlayer.shareInstance().bind(this,surface_view_renderer,true) + ZLMRTCPlayer.shareInstance().bind(applicationContext,surface_view_renderer,true) + Handler().postDelayed({ + ZLMRTCPlayer.shareInstance().play("live","test") + },1000) + } } \ No newline at end of file diff --git a/webrtc_player/android/zlm/build.gradle b/webrtc_player/android/zlm/build.gradle index cbefd241..033cb6fc 100644 --- a/webrtc_player/android/zlm/build.gradle +++ b/webrtc_player/android/zlm/build.gradle @@ -38,17 +38,24 @@ android { kotlinOptions { jvmTarget = '1.8' } + sourceSets { + main{ + jniLibs.srcDirs = ['libs'] + } + } } dependencies { - implementation 'androidx.core:core-ktx:1.13.1' - implementation platform('org.jetbrains.kotlin:kotlin-bom:1.8.0') - implementation 'androidx.appcompat:appcompat:1.6.1' - implementation 'com.google.android.material:material:1.12.0' + implementation 'androidx.core:core-ktx:1.7.0' + implementation 'androidx.appcompat:appcompat:1.5.1' + implementation 'com.google.android.material:material:1.6.1' + implementation 'androidx.constraintlayout:constraintlayout:2.1.4' testImplementation 'junit:junit:4.13.2' - androidTestImplementation 'androidx.test.ext:junit:1.1.5' - androidTestImplementation 'androidx.test.espresso:espresso-core:3.5.1' + androidTestImplementation 'androidx.test.ext:junit:1.1.3' + androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0' + implementation 'com.google.code.gson:gson:2.8.9' - implementation 'org.webrtc:google-webrtc:1.0.32006' + implementation("com.squareup.okhttp3:okhttp:4.10.0") + implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version" } \ No newline at end of file diff --git a/webrtc_player/android/zlm/libs/arm64-v8a/libjingle_peerconnection_so.so b/webrtc_player/android/zlm/libs/arm64-v8a/libjingle_peerconnection_so.so new file mode 100644 index 00000000..942fe90f Binary 
files /dev/null and b/webrtc_player/android/zlm/libs/arm64-v8a/libjingle_peerconnection_so.so differ diff --git a/webrtc_player/android/zlm/libs/armeabi-v7a/libjingle_peerconnection_so.so b/webrtc_player/android/zlm/libs/armeabi-v7a/libjingle_peerconnection_so.so new file mode 100644 index 00000000..51ca139f Binary files /dev/null and b/webrtc_player/android/zlm/libs/armeabi-v7a/libjingle_peerconnection_so.so differ diff --git a/webrtc_player/android/zlm/src/main/AndroidManifest.xml b/webrtc_player/android/zlm/src/main/AndroidManifest.xml index a5918e68..bd8d57bb 100644 --- a/webrtc_player/android/zlm/src/main/AndroidManifest.xml +++ b/webrtc_player/android/zlm/src/main/AndroidManifest.xml @@ -1,4 +1,16 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/webrtc_player/android/zlm/src/main/cpp/CMakeLists.txt b/webrtc_player/android/zlm/src/main/cpp/CMakeLists.txt index 7b804cd9..76b6ee11 100644 --- a/webrtc_player/android/zlm/src/main/cpp/CMakeLists.txt +++ b/webrtc_player/android/zlm/src/main/cpp/CMakeLists.txt @@ -14,7 +14,7 @@ project("rtc") add_library(ZLToolKit IMPORTED STATIC) -set_target_properties(tools PROPERTIES +set_target_properties(ZLToolKit PROPERTIES IMPORTED_LOCATION "${CMAKE_CURRENT_SOURCE_DIR}/../../../libs/${CMAKE_ANDROID_ARCH_ABI}/libZLToolKit.a" INTERFACE_INCLUDE_DIRECTORIES "${CMAKE_CURRENT_SOURCE_DIR}/include" ) diff --git a/webrtc_player/android/zlm/src/main/cpp/rtc.cpp b/webrtc_player/android/zlm/src/main/cpp/rtc.cpp index 1ba75583..c5490091 100644 --- a/webrtc_player/android/zlm/src/main/cpp/rtc.cpp +++ b/webrtc_player/android/zlm/src/main/cpp/rtc.cpp @@ -1,5 +1,12 @@ #include #include +#include +#include "Http/HttpClient.h" + + +using namespace toolkit; +using namespace mediakit; +using namespace std; extern "C" JNIEXPORT jstring JNICALL Java_com_zlm_rtc_NativeLib_stringFromJNI( @@ -7,4 +14,60 @@ Java_com_zlm_rtc_NativeLib_stringFromJNI( jobject /* this */) { std::string hello = "Hello from C++"; return 
env->NewStringUTF(hello.c_str()); +} +extern "C" +JNIEXPORT jstring JNICALL +Java_com_zlm_rtc_NativeLib_exchangeSessionDescription(JNIEnv *env, jobject thiz, + jstring description) { + static semaphore sem; + + //加载证书,证书包含公钥和私钥 + SSL_Initor::Instance().loadCertificate((exeDir() + "ssl.p12").data()); + //信任某个自签名证书 + SSL_Initor::Instance().trustCertificate((exeDir() + "ssl.p12").data()); + //不忽略无效证书证书(例如自签名或过期证书) + SSL_Initor::Instance().ignoreInvalidCertificate(false); + + //创建一个Http请求器 + HttpRequester::Ptr requesterUploader(new HttpRequester()); + //使用POST方式请求 + requesterUploader->setMethod("POST"); + //设置http请求头 + HttpArgs argsUploader; + argsUploader["query"] = "test"; + + static string boundary = "0xKhTmLbOuNdArY"; + HttpMultiFormBody::Ptr body(new HttpMultiFormBody(argsUploader, exePath(), boundary)); + requesterUploader->setBody(body); + requesterUploader->addHeader("Content-Type", HttpMultiFormBody::multiFormContentType(boundary)); + //开启请求 + requesterUploader->startRequester("https://zlmediakit.com/index/api/webrtc?app=live&stream=test&type=play",//url地址 + [](const SockException &ex, //网络相关的失败信息,如果为空就代表成功 + const Parser &parser) { //http回复body + DebugL << "=====================HttpRequester Uploader=========================="; + if (ex) { + //网络相关的错误 + WarnL << "network err:" << ex.getErrCode() << " " << ex.what(); + } else { + //打印http回复信息 + _StrPrinter printer; + for (auto &pr: parser.getHeader()) { + printer << pr.first << ":" << pr.second << "\r\n"; + } + InfoL << "status:" << parser.status() << "\r\n" + << "header:\r\n" << (printer << endl) + << "\r\nbody:" << parser.content(); + } + }); + + sem.wait(); return description; //NOTE(review): function is declared jstring but previously fell off the end (UB); returning the input keeps behavior defined — the real answer SDP should be returned here +} +extern "C" +JNIEXPORT jstring JNICALL +Java_com_zlm_rtc_NativeLib_makeUrl(JNIEnv *env, jobject thiz, jstring app, jstring stream_id) { + const char *appString = env->GetStringUTFChars(app, 0); + const char *streamIdString = env->GetStringUTFChars(stream_id, 0); + char url[100]; + 
snprintf(url,sizeof(url),"https://zlmediakit.com/index/api/webrtc?app=%s&stream=%s&type=play",appString,streamIdString); env->ReleaseStringUTFChars(app, appString); env->ReleaseStringUTFChars(stream_id, streamIdString); + return env->NewStringUTF(url); } \ No newline at end of file diff --git a/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/NativeLib.kt b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/NativeLib.kt index 6c65f05d..142b469a 100644 --- a/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/NativeLib.kt +++ b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/NativeLib.kt @@ -8,6 +8,11 @@ class NativeLib { */ external fun stringFromJNI(): String + external fun exchangeSessionDescription(description:String): String + + external fun makeUrl(app:String,streamId:String): String + + companion object { // Used to load the 'rtc' library on application startup. init { diff --git a/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/ZLMRTCPlayer.kt b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/ZLMRTCPlayer.kt index e2614b34..ecdae242 100644 --- a/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/ZLMRTCPlayer.kt +++ b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/ZLMRTCPlayer.kt @@ -14,6 +14,9 @@ abstract class ZLMRTCPlayer { } + + + constructor() public abstract fun bind(context: Context,surface: SurfaceViewRenderer, localPreview:Boolean) diff --git a/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/HttpClient.java b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/HttpClient.java new file mode 100644 index 00000000..52997772 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/HttpClient.java @@ -0,0 +1,395 @@ +package com.zlm.rtc.client; + +import java.io.*; +import java.net.*; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class HttpClient { + + //MIME部分文件类型对照表 + private static final Map FILE_TYPE = new HashMap<>(); + + static { + FILE_TYPE.put(".jpeg", "image/jpeg"); + 
FILE_TYPE.put(".jpg", "image/jpg"); + FILE_TYPE.put(".png", "image/png"); + FILE_TYPE.put(".bmp", "image/bmp"); + FILE_TYPE.put(".gif", "image/gif"); + FILE_TYPE.put(".mp4", "video/mp4"); + FILE_TYPE.put(".txt", "text/plain"); + FILE_TYPE.put(".xlsx", "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"); + FILE_TYPE.put(".docx", "application/vnd.openxmlformats-officedocument.wordprocessingml.document"); + FILE_TYPE.put(".pptx", "application/vnd.openxmlformats-officedocument.presentationml.presentation"); + FILE_TYPE.put(".pdf", "application/pdf"); + } + + /** + * GET请求 + * + * @param url + * @param params + * @param headers + * @return + */ + public static String doGet(String url, Map params, Map headers) { + BufferedReader reader = null; + try { + //1、拼接url + StringBuffer stringBuffer = new StringBuffer(url); + if (params != null && !params.isEmpty()) { + stringBuffer.append("?"); + for (Map.Entry entry : params.entrySet()) { + stringBuffer.append(entry.getKey()).append("=").append(entry.getValue()).append("&"); + } + stringBuffer.deleteCharAt(stringBuffer.length() - 1); + } + URL testUrl = new URL(stringBuffer.toString()); + + //2、建立链接 + HttpURLConnection connection = (HttpURLConnection) testUrl.openConnection(); + connection.setConnectTimeout(3000); //设置连接超时 + connection.setReadTimeout(3000); //设置读取响应超时 + if (headers != null && !headers.isEmpty()) { + for (Map.Entry entry : headers.entrySet()) { + connection.setRequestProperty(entry.getKey(), entry.getValue()); + } + } + + //3、发送请求 + InputStream inputStream = connection.getInputStream(); + reader = new BufferedReader(new InputStreamReader(inputStream)); + String line = ""; + StringBuffer response = new StringBuffer(); + while ((line = reader.readLine()) != null) { + response.append(line); + } + reader.close(); + return response.toString(); + + } catch (Exception e) { + e.printStackTrace(); + } finally { + if (reader != null) { + try { + reader.close(); + } catch (IOException e) { + 
System.out.println("输入流关闭失败"); + } + } + } + return null; + } + + /** + * POST请求 + * + * @param url + * @param params + * @param headers + * @return + */ + public static String doPost(String url, Map params, Map headers) { + OutputStream outputStream = null; + BufferedReader reader = null; + try { + //建立连接 + URL testUrl = new URL(url); + HttpURLConnection connection = (HttpURLConnection) testUrl.openConnection(); + connection.setRequestMethod("POST"); + connection.setDoOutput(true); //允许写入输出流 + connection.setUseCaches(false); //禁用缓存 + connection.setRequestProperty("Content-Type", "application/json; charset=utf-8"); + if (headers != null && !headers.isEmpty()) { + for (Map.Entry entry : headers.entrySet()) { + connection.setRequestProperty(entry.getKey(), entry.getValue()); + } + } + + //写入请求体 + outputStream = connection.getOutputStream(); + StringBuffer payload = new StringBuffer(); + if (params != null && !params.isEmpty()) { + for (Map.Entry entry : params.entrySet()) { + payload.append(entry.getKey()).append("=").append(entry.getValue()).append("&"); + } + payload.deleteCharAt(payload.length() - 1); + } + outputStream.write(payload.toString().getBytes()); + outputStream.flush(); + outputStream.close(); + + //发送请求 + InputStream inputStream = connection.getInputStream(); + reader = new BufferedReader(new InputStreamReader(inputStream)); + String line = ""; + StringBuffer response = new StringBuffer(); + while ((line = reader.readLine()) != null) { + response.append(line); + } + reader.close(); + return response.toString(); + + } catch (Exception e) { + e.printStackTrace(); + } finally { + if (outputStream != null) { + try { + outputStream.close(); + } catch (IOException e) { + System.out.println("输出流关闭失败"); + } + } + if (reader != null) { + try { + reader.close(); + } catch (IOException e) { + System.out.println("输入流关闭失败"); + } + } + } + return null; + } + + /** + * GET请求下载文件 + * + * @param url + * @param params + * @param headers + * @param filePath + */ + public 
static void doGetDownload(String url, Map params, Map headers, String filePath) { + BufferedInputStream inputStream = null; + FileOutputStream outputStream = null; + try { + //1、拼接url + StringBuffer stringBuffer = new StringBuffer(url); + if (params != null && !params.isEmpty()) { + stringBuffer.append("?"); + for (Map.Entry entry : params.entrySet()) { + stringBuffer.append(entry.getKey()).append("=").append(entry.getValue()).append("&"); + } + stringBuffer.deleteCharAt(stringBuffer.length() - 1); + } + URL testUrl = new URL(stringBuffer.toString()); + + //2、建立链接 + HttpURLConnection connection = (HttpURLConnection) testUrl.openConnection(); + connection.setConnectTimeout(3000); //设置连接超时 + connection.setReadTimeout(3000); //设置读取响应超时 + if (headers != null && !headers.isEmpty()) { + for (Map.Entry entry : headers.entrySet()) { + connection.setRequestProperty(entry.getKey(), entry.getValue()); + } + } + + //3、发送请求 + inputStream = new BufferedInputStream(connection.getInputStream()); + String contentDisposition = connection.getHeaderField("Content-Disposition"); + String regex = "attachment; filename=(.+\\.\\w+)"; + Pattern pattern = Pattern.compile(regex); + Matcher matcher = pattern.matcher(contentDisposition); + if (matcher.find()) { + String fileName = matcher.group(1); + File file = new File(filePath + File.separator + fileName); + outputStream = new FileOutputStream(file); + int n; + while ((n = inputStream.read()) != -1) { + outputStream.write(n); + } + outputStream.flush(); + outputStream.close(); + } + inputStream.close(); + + } catch (Exception e) { + e.printStackTrace(); + } finally { + if (outputStream != null) { + try { + outputStream.close(); + } catch (IOException e) { + System.out.println("输出流关闭失败"); + } + } + if (inputStream != null) { + try { + inputStream.close(); + } catch (IOException e) { + System.out.println("输入流关闭失败"); + } + } + } + } + + /** + * POST请求上传文件 + * + * @param url + * @param fileUrl + * @param params + * @param headers + * @return + */ + 
public static String doPostUpload(String url, String fileUrl, Map params, Map headers) { + FileInputStream fileInputStream = null; + OutputStream outputStream = null; + BufferedReader reader = null; + try { + //读文件 + File file = new File(fileUrl); + fileInputStream = new FileInputStream(file); + byte[] bytes = new byte[(int) file.length()]; + fileInputStream.read(bytes); + fileInputStream.close(); + + URL testUrl = new URL(url); + HttpURLConnection connection = (HttpURLConnection) testUrl.openConnection(); + connection.setRequestMethod("POST"); + connection.setDoOutput(true); //允许写入输出流 + connection.setUseCaches(false); //禁用缓存 + String boundary = UUID.randomUUID().toString(); + connection.setRequestProperty("Content-Type", "multipart/form-data; boundary=" + boundary); + if (headers != null && !headers.isEmpty()) { + for (Map.Entry entry : headers.entrySet()) { + connection.setRequestProperty(entry.getKey(), entry.getValue()); + } + } + + //写入请求体 + outputStream = connection.getOutputStream(); + + StringBuffer start = new StringBuffer(); + start.append("--").append(boundary).append("\r\n"); + String fileName = file.getName(); + String fileExtension = fileName.substring(fileName.lastIndexOf('.')); + start.append("Content-Disposition: form-data; name=\"file\"; filename=").append(fileName).append("\r\n"); + start.append("Content-Type: ").append(FILE_TYPE.get(fileExtension)).append("\r\n\r\n"); + outputStream.write(start.toString().getBytes()); + outputStream.write(bytes); + outputStream.write("\r\n".getBytes()); + + StringBuffer mid = new StringBuffer(); + if (params != null && !params.isEmpty()) { + for (Map.Entry entry : params.entrySet()) { + mid.append("--").append(boundary).append("\r\n"); + mid.append("Content-Disposition: form-data; name=\"").append(entry.getKey()).append("\"\r\n\r\n"); + mid.append(entry.getValue()).append("\r\n"); + } + outputStream.write(mid.toString().getBytes()); + } + + String end = "--" + boundary + "--"; + 
outputStream.write(end.getBytes()); + outputStream.flush(); + outputStream.close(); + + //发送请求 + InputStream inputStream = connection.getInputStream(); + reader = new BufferedReader(new InputStreamReader(inputStream)); + String line = ""; + StringBuffer response = new StringBuffer(); + while ((line = reader.readLine()) != null) { + response.append(line); + } + reader.close(); + return response.toString(); + + } catch (Exception e) { + e.printStackTrace(); + } finally { + if (fileInputStream != null) { + try { + fileInputStream.close(); + } catch (IOException e) { + System.out.println("文件流关闭失败"); + } + } + if (outputStream != null) { + try { + outputStream.close(); + } catch (IOException e) { + System.out.println("输出流关闭失败"); + } + } + if (reader != null) { + try { + reader.close(); + } catch (IOException e) { + System.out.println("输入流关闭失败"); + } + } + } + return null; + } + + /** + * 从返回头中获取登录token + * + * @param url + * @param params + * @param headers + * @return + */ + public static String getToken(String url, Map params, Map headers) { + OutputStream outputStream = null; + try { + //建立连接 + URL testUrl = new URL(url); + HttpURLConnection connection = (HttpURLConnection) testUrl.openConnection(); + connection.setRequestMethod("POST"); + connection.setDoOutput(true); //允许写入输出流 + connection.setUseCaches(false); //禁用缓存 + connection.setRequestProperty("Content-Type", "application/x-www-form-urlencoded"); + connection.setInstanceFollowRedirects(false); //禁用跟随重定向 + + //写入请求体 + outputStream = connection.getOutputStream(); + StringBuffer payload = new StringBuffer(); + if (params != null && !params.isEmpty()) { + for (Map.Entry entry : params.entrySet()) { + payload.append(entry.getKey()).append("=").append(entry.getValue()).append("&"); + } + payload.deleteCharAt(payload.length() - 1); + } + outputStream.write(payload.toString().getBytes()); + outputStream.flush(); + outputStream.close(); + + //发送请求,重定向到返回头中的Location + connection.connect(); + URL location = new 
URL(connection.getHeaderField("Location")); + HttpURLConnection connection2 = (HttpURLConnection) location.openConnection(); + + //请求Location,获取返回头中的所有Set-Cookie + connection2.setRequestMethod("GET"); + connection2.setInstanceFollowRedirects(false); + connection2.connect(); + List cookies = connection2.getHeaderFields().get("Set-Cookie"); + for (String cookie : cookies) { + if (cookie.contains("token-test=")) { + return cookie; + } + } + + } catch (Exception e) { + e.printStackTrace(); + } finally { + if (outputStream != null) { + try { + outputStream.close(); + } catch (IOException e) { + System.out.println("输出流关闭失败"); + } + } + } + return null; + } +} + + diff --git a/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/PeerConnectionClient.java b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/PeerConnectionClient.java new file mode 100644 index 00000000..460d9499 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/PeerConnectionClient.java @@ -0,0 +1,1511 @@ +/* + * Copyright 2014 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package com.zlm.rtc.client; + +import android.content.Context; +import android.os.Environment; +import android.os.ParcelFileDescriptor; +import android.util.Log; + +import androidx.annotation.Nullable; + +import org.webrtc.AudioSource; +import org.webrtc.AudioTrack; +import org.webrtc.CameraVideoCapturer; +import org.webrtc.DataChannel; +import org.webrtc.DefaultVideoDecoderFactory; +import org.webrtc.DefaultVideoEncoderFactory; +import org.webrtc.EglBase; +import org.webrtc.IceCandidate; +import org.webrtc.Logging; +import org.webrtc.MediaConstraints; +import org.webrtc.MediaStream; +import org.webrtc.MediaStreamTrack; +import org.webrtc.PeerConnection; +import org.webrtc.PeerConnection.IceConnectionState; +import org.webrtc.PeerConnectionFactory; +import org.webrtc.RtpParameters; +import org.webrtc.RtpReceiver; +import org.webrtc.RtpSender; +import org.webrtc.RtpTransceiver; +import org.webrtc.SdpObserver; +import org.webrtc.SessionDescription; +import org.webrtc.SoftwareVideoDecoderFactory; +import org.webrtc.SoftwareVideoEncoderFactory; +import org.webrtc.StatsObserver; +import org.webrtc.StatsReport; +import org.webrtc.SurfaceTextureHelper; +import org.webrtc.SurfaceViewRenderer; +import org.webrtc.VideoCapturer; +import org.webrtc.VideoDecoderFactory; +import org.webrtc.VideoEncoderFactory; +import org.webrtc.VideoFrame; +import org.webrtc.VideoSink; +import org.webrtc.VideoSource; +import org.webrtc.VideoTrack; +import org.webrtc.audio.AudioDeviceModule; +import org.webrtc.audio.JavaAudioDeviceModule; +import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordErrorCallback; +import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback; +import org.webrtc.audio.LegacyAudioDeviceModule; +import org.webrtc.voiceengine.WebRtcAudioManager; +import org.webrtc.voiceengine.WebRtcAudioRecord; +import org.webrtc.voiceengine.WebRtcAudioRecord.AudioRecordStartErrorCode; +import org.webrtc.voiceengine.WebRtcAudioRecord.WebRtcAudioRecordErrorCallback; 
+import org.webrtc.voiceengine.WebRtcAudioTrack; +import org.webrtc.voiceengine.WebRtcAudioTrack.AudioTrackStartErrorCode; +import org.webrtc.voiceengine.WebRtcAudioUtils; + +import java.io.File; +import java.io.IOException; +import java.math.BigInteger; +import java.nio.ByteBuffer; +import java.nio.charset.Charset; +import java.text.DateFormat; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Date; +import java.util.Iterator; +import java.util.List; +import java.util.Locale; +import java.util.Timer; +import java.util.TimerTask; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + + +/** + * Peer connection client implementation. + * + *

All public methods are routed to local looper thread. + * All PeerConnectionEvents callbacks are invoked from the same looper thread. + * This class is a singleton.CreateOffer + */ +public class PeerConnectionClient { + public static final String VIDEO_TRACK_ID = "ARDAMSv0"; + public static final String AUDIO_TRACK_ID = "ARDAMSa0"; + public static final String VIDEO_TRACK_TYPE = "video"; + private static final String TAG = "PCRTCClient"; + private static final String VIDEO_CODEC_VP8 = "VP8"; + private static final String VIDEO_CODEC_VP9 = "VP9"; + private static final String VIDEO_CODEC_H264 = "H264"; + private static final String VIDEO_CODEC_H264_BASELINE = "H264 Baseline"; + private static final String VIDEO_CODEC_H264_HIGH = "H264 High"; + private static final String AUDIO_CODEC_OPUS = "opus"; + private static final String AUDIO_CODEC_ISAC = "ISAC"; + private static final String VIDEO_CODEC_PARAM_START_BITRATE = "x-google-start-bitrate"; + private static final String VIDEO_FLEXFEC_FIELDTRIAL = + "WebRTC-FlexFEC-03-Advertised/Enabled/WebRTC-FlexFEC-03/Enabled/"; + private static final String VIDEO_VP8_INTEL_HW_ENCODER_FIELDTRIAL = "WebRTC-IntelVP8/Enabled/"; + private static final String DISABLE_WEBRTC_AGC_FIELDTRIAL = + "WebRTC-Audio-MinimizeResamplingOnMobile/Enabled/"; + private static final String AUDIO_CODEC_PARAM_BITRATE = "maxaveragebitrate"; + private static final String AUDIO_ECHO_CANCELLATION_CONSTRAINT = "googEchoCancellation"; + private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT = "googAutoGainControl"; + private static final String AUDIO_HIGH_PASS_FILTER_CONSTRAINT = "googHighpassFilter"; + private static final String AUDIO_NOISE_SUPPRESSION_CONSTRAINT = "googNoiseSuppression"; + private static final String DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT = "DtlsSrtpKeyAgreement"; + private static final int HD_VIDEO_WIDTH = 1280; + private static final int HD_VIDEO_HEIGHT = 720; + private static final int BPS_IN_KBPS = 1000; + private static final 
String RTCEVENTLOG_OUTPUT_DIR_NAME = "rtc_event_log"; + + // Executor thread is started once in private ctor and is used for all + // peer connection API calls to ensure new peer connection factory is + // created on the same thread as previously destroyed factory. + private static final ExecutorService executor = Executors.newSingleThreadExecutor(); + + //private final PCObserver pcObserver = new PCObserver(); + //private final SDPObserver sdpObserver = new SDPObserver(); + private final Timer statsTimer = new Timer(); + private final EglBase rootEglBase; + private final Context appContext; + private final PeerConnectionParameters peerConnectionParameters; + private final PeerConnectionEvents events; + + @Nullable + private PeerConnectionFactory factory; + @Nullable + private ConcurrentHashMap peerConnectionMap; + @Nullable + private ConcurrentHashMap videoSinkMap; + @Nullable + private AudioSource audioSource; + @Nullable + private SurfaceTextureHelper surfaceTextureHelper; + @Nullable + private VideoSource videoSource; + private boolean preferIsac; + private boolean videoCapturerStopped; + private boolean isError; + @Nullable + private VideoSink localSink; + @Nullable + private List remoteSinks; + private int videoWidth; + private int videoHeight; + private int videoFps; + private MediaConstraints audioConstraints; + private MediaConstraints sdpMediaConstraints; + // Queued remote ICE candidates are consumed only after both local and + // remote descriptions are set. Similarly local ICE candidates are sent to + // remote peer after both local and remote description are set. + @Nullable + private List queuedRemoteCandidates; + private boolean isInitiator; + @Nullable + private SessionDescription localSdp; // either offer or answer SDP + @Nullable + private VideoCapturer videoCapturer; + // enableVideo is set to true if video should be rendered and sent. 
    // ---- Local/remote media state ----
    // Gates whether video tracks (local and remote) are enabled for rendering.
    private boolean renderVideo = true;
    @Nullable
    private VideoTrack localVideoTrack;
    @Nullable
    private VideoTrack remoteVideoTrack;
    @Nullable
    private RtpSender localVideoSender;
    // enableAudio is set to true if audio should be sent.
    private boolean enableAudio = true;
    @Nullable
    private AudioTrack localAudioTrack;
    @Nullable
    private DataChannel dataChannel;
    private final boolean dataChannelEnabled;
    // Enable RtcEventLog.
    @Nullable
    private RtcEventLog rtcEventLog;
    // Implements the WebRtcAudioRecordSamplesReadyCallback interface and writes
    // recorded audio samples to an output file.
    @Nullable
    private RecordedAudioToFileController saveRecordedAudioToFile = null;


    /**
     * Data channel parameters.
     * (NOTE(review): original Javadoc said "Peer connection parameters", which
     * mislabeled this class.)
     */
    public static class DataChannelParameters {
        public final boolean ordered;
        public final int maxRetransmitTimeMs;
        public final int maxRetransmits;
        public final String protocol;
        public final boolean negotiated;
        public final int id;

        public DataChannelParameters(boolean ordered, int maxRetransmitTimeMs, int maxRetransmits,
                String protocol, boolean negotiated, int id) {
            this.ordered = ordered;
            this.maxRetransmitTimeMs = maxRetransmitTimeMs;
            this.maxRetransmits = maxRetransmits;
            this.protocol = protocol;
            this.negotiated = negotiated;
            this.id = id;
        }
    }

    /**
     * Peer connection parameters. Immutable bag of call configuration consumed by
     * the factory/peer-connection creation paths below.
     */
    public static class PeerConnectionParameters {
        public final boolean videoCallEnabled;
        public final boolean loopback;
        public final boolean tracing;
        public final int videoWidth;
        public final int videoHeight;
        public final int videoFps;
        public final int videoMaxBitrate;
        public final String videoCodec;
        public final boolean videoCodecHwAcceleration;
        public final boolean videoFlexfecEnabled;
        public final int audioStartBitrate;
        public final String audioCodec;
        public final boolean noAudioProcessing;
        public final boolean aecDump;
        public final boolean saveInputAudioToFile;
        public final boolean useOpenSLES;
        public final boolean disableBuiltInAEC;
        public final boolean disableBuiltInAGC;
        public final boolean disableBuiltInNS;
        public final boolean disableWebRtcAGCAndHPF;
        public final boolean enableRtcEventLog;
        public final boolean useLegacyAudioDevice;
        private final DataChannelParameters dataChannelParameters;

        public PeerConnectionParameters(boolean videoCallEnabled, boolean loopback, boolean tracing,
                int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate, String videoCodec,
                boolean videoCodecHwAcceleration, boolean videoFlexfecEnabled, int audioStartBitrate,
                String audioCodec, boolean noAudioProcessing, boolean aecDump, boolean saveInputAudioToFile,
                boolean useOpenSLES, boolean disableBuiltInAEC, boolean disableBuiltInAGC,
                boolean disableBuiltInNS, boolean disableWebRtcAGCAndHPF, boolean enableRtcEventLog,
                boolean useLegacyAudioDevice, DataChannelParameters dataChannelParameters) {
            this.videoCallEnabled = videoCallEnabled;
            this.loopback = loopback;
            this.tracing = tracing;
            this.videoWidth = videoWidth;
            this.videoHeight = videoHeight;
            this.videoFps = videoFps;
            this.videoMaxBitrate = videoMaxBitrate;
            this.videoCodec = videoCodec;
            this.videoFlexfecEnabled = videoFlexfecEnabled;
            this.videoCodecHwAcceleration = videoCodecHwAcceleration;
            this.audioStartBitrate = audioStartBitrate;
            this.audioCodec = audioCodec;
            this.noAudioProcessing = noAudioProcessing;
            this.aecDump = aecDump;
            this.saveInputAudioToFile = saveInputAudioToFile;
            this.useOpenSLES = useOpenSLES;
            this.disableBuiltInAEC = disableBuiltInAEC;
            this.disableBuiltInAGC = disableBuiltInAGC;
            this.disableBuiltInNS = disableBuiltInNS;
            this.disableWebRtcAGCAndHPF = disableWebRtcAGCAndHPF;
            this.enableRtcEventLog = enableRtcEventLog;
            this.useLegacyAudioDevice = useLegacyAudioDevice;
            this.dataChannelParameters = dataChannelParameters;
        }
    }

    /**
     * Peer connection events. All callbacks are invoked from the executor thread and
     * carry the Janus-style handleId identifying which connection they belong to.
     */
    public interface PeerConnectionEvents {
        /**
         * Callback fired once local SDP is created and set.
         */
        void onLocalDescription(final BigInteger handleId, final SessionDescription sdp);


        /**
         * Callback fired once local Ice candidate is generated.
         */
        void onIceCandidate(final BigInteger handleId, final IceCandidate candidate);

        /**
         * Callback fired once local ICE candidates are removed.
         */
        void onIceCandidatesRemoved(final BigInteger handleId, final IceCandidate[] candidates);

        /**
         * Callback fired once connection is established (IceConnectionState is
         * CONNECTED).
         */
        void onIceConnected(final BigInteger handleId);

        /**
         * Callback fired once connection is closed (IceConnectionState is
         * DISCONNECTED).
         */
        void onIceDisconnected(final BigInteger handleId);

        /**
         * Callback fired once peer connection is closed.
         */
        void onPeerConnectionClosed(final BigInteger handleId);

        /**
         * Callback fired once peer connection statistics is ready.
         */
        void onPeerConnectionStatsReady(final BigInteger handleId, final StatsReport[] reports);

        /**
         * Callback fired once peer connection error happened.
         */
        void onPeerConnectionError(final BigInteger handleId, final String description);

        void onLocalRender(final BigInteger handleId);

        void onRemoteRender(final BigInteger handleId);
    }

    /**
     * VideoSink that forwards frames to a swappable target; frames arriving while no
     * target is set are dropped. NOTE(review): class name breaks Java UpperCamelCase,
     * but it is referenced elsewhere in this file so it is left unchanged here.
     */
    public static class proxyVideoSinks implements VideoSink {
        private VideoSink target = null;

        @Override
        synchronized public void onFrame(VideoFrame frame) {
            if (target == null) {
                Logging.d(TAG, "Dropping frame in proxy because target is null.");
                return;
            }

            target.onFrame(frame);
        }

        synchronized public void setTarget(VideoSink target) {
            this.target = target;
        }

        synchronized public void reset() {
            target = null;
        }
    }

    /**
     * Create a PeerConnectionClient with the specified parameters. PeerConnectionClient takes
     * ownership of |eglBase|. Global PeerConnectionFactory initialization is queued on
     * the executor so it happens off the caller's thread.
     */
    public PeerConnectionClient(Context appContext, EglBase eglBase,
            PeerConnectionParameters peerConnectionParameters, PeerConnectionEvents events) {
        this.rootEglBase = eglBase;
        this.appContext = appContext;
        this.events = events;
        this.peerConnectionParameters = peerConnectionParameters;
        this.dataChannelEnabled = peerConnectionParameters.dataChannelParameters != null;

        this.peerConnectionMap = new ConcurrentHashMap<>();
        this.videoSinkMap = new ConcurrentHashMap<>();

        Log.d(TAG, "Preferred video codec: " + getSdpVideoCodecName(peerConnectionParameters));

        final String fieldTrials = getFieldTrials(peerConnectionParameters);
        executor.execute(() -> {
            Log.d(TAG, "Initialize WebRTC. Field trials: " + fieldTrials);
            PeerConnectionFactory.initialize(
                    PeerConnectionFactory.InitializationOptions.builder(appContext)
                            .setFieldTrials(fieldTrials)
                            .setEnableInternalTracer(true)
                            .createInitializationOptions());
        });
    }

    /**
     * This function should only be called once.
     */
    public void createPeerConnectionFactory(PeerConnectionFactory.Options options) {
        if (factory != null) {
            throw new IllegalStateException("PeerConnectionFactory has already been constructed");
        }
        executor.execute(() -> createPeerConnectionFactoryInternal(options));
    }

    /**
     * Creates constraints and a PeerConnection for |handleId| on the executor thread.
     * |videoCapturer| may be null for audio-only calls.
     */
    public void createPeerConnection(final VideoCapturer videoCapturer, final BigInteger handleId) {
        // NOTE(review): peerConnectionParameters is dereferenced here BEFORE the null
        // check below — if it can actually be null, this line throws NPE first.
        if (peerConnectionParameters.videoCallEnabled && videoCapturer == null) {
            Log.w(TAG, "Video call enabled but no video capturer provided.");
        }
        if (peerConnectionParameters == null) {
            Log.e(TAG, "Creating peer connection without initializing factory.");
            return;
        }
        this.videoCapturer = videoCapturer;
        executor.execute(() -> {
            try {
                createMediaConstraintsInternal();
                createPeerConnectionInternal(handleId);
                //maybeCreateAndStartRtcEventLog();
            } catch (Exception e) {
                reportError("Failed to create peer connection: " + e.getMessage());
                throw e;
            }
        });
    }

    /** Asynchronously tears down all connections, capturers and the factory. */
    public void close() {
        executor.execute(this::closeInternal);
    }

    // Video is only "on" when the call was configured for video AND a capturer exists.
    private boolean isVideoCallEnabled() {
        return peerConnectionParameters.videoCallEnabled && videoCapturer != null;
    }

    /** Builds the PeerConnectionFactory (codec factories + audio device module). Executor thread only. */
    private void createPeerConnectionFactoryInternal(PeerConnectionFactory.Options options) {
        isError = false;

        if (peerConnectionParameters.tracing) {
            PeerConnectionFactory.startInternalTracingCapture(
                    Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator
                            + "webrtc-trace.txt");
        }

        // Check if ISAC is used by default.
        preferIsac = peerConnectionParameters.audioCodec != null
                && peerConnectionParameters.audioCodec.equals(AUDIO_CODEC_ISAC);

        final AudioDeviceModule adm = peerConnectionParameters.useLegacyAudioDevice
                ? createLegacyAudioDevice()
                : createJavaAudioDevice();

        // Create peer connection factory.
        if (options != null) {
            Log.d(TAG, "Factory networkIgnoreMask option: " + options.networkIgnoreMask);
        }
        final boolean enableH264HighProfile =
                VIDEO_CODEC_H264_HIGH.equals(peerConnectionParameters.videoCodec);
        final VideoEncoderFactory encoderFactory;
        final VideoDecoderFactory decoderFactory;

        if (peerConnectionParameters.videoCodecHwAcceleration) {
            encoderFactory = new DefaultVideoEncoderFactory(
                    rootEglBase.getEglBaseContext(), true /* enableIntelVp8Encoder */, enableH264HighProfile);
            decoderFactory = new DefaultVideoDecoderFactory(rootEglBase.getEglBaseContext());
        } else {
            encoderFactory = new SoftwareVideoEncoderFactory();
            decoderFactory = new SoftwareVideoDecoderFactory();
        }

        factory = PeerConnectionFactory.builder()
                .setOptions(options)
                .setAudioDeviceModule(adm)
                .setVideoEncoderFactory(encoderFactory)
                .setVideoDecoderFactory(decoderFactory)
                .createPeerConnectionFactory();
        Log.d(TAG, "Peer connection factory created.");
        // The factory holds its own reference; release ours.
        adm.release();
    }

    /** Builds the deprecated (pre-JavaAudioDeviceModule) audio device and wires error callbacks. */
    AudioDeviceModule createLegacyAudioDevice() {
        // Enable/disable OpenSL ES playback.
        if (!peerConnectionParameters.useOpenSLES) {
            Log.d(TAG, "Disable OpenSL ES audio even if device supports it");
            WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true /* blacklist: disables OpenSL ES */);
        } else {
            Log.d(TAG, "Allow OpenSL ES audio if device supports it");
            WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(false);
        }

        // When the hardware AEC is disabled, fall back to the WebRTC software AEC.
        if (peerConnectionParameters.disableBuiltInAEC) {
            Log.d(TAG, "Disable built-in AEC even if device supports it");
            WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(true);
        } else {
            Log.d(TAG, "Enable built-in AEC if device supports it");
            WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(false);
        }

        // Same pattern for the noise suppressor.
        if (peerConnectionParameters.disableBuiltInNS) {
            Log.d(TAG, "Disable built-in NS even if device supports it");
            WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(true);
        } else {
            Log.d(TAG, "Enable built-in NS if device supports it");
            WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(false);
        }

        WebRtcAudioRecord.setOnAudioSamplesReady(saveRecordedAudioToFile);

        // Set audio record error callbacks.
        WebRtcAudioRecord.setErrorCallback(new WebRtcAudioRecordErrorCallback() {
            @Override
            public void onWebRtcAudioRecordInitError(String errorMessage) {
                Log.e(TAG, "onWebRtcAudioRecordInitError: " + errorMessage);
                reportError(errorMessage);
            }

            @Override
            public void onWebRtcAudioRecordStartError(
                    AudioRecordStartErrorCode errorCode, String errorMessage) {
                Log.e(TAG, "onWebRtcAudioRecordStartError: " + errorCode + ". " + errorMessage);
                reportError(errorMessage);
            }

            @Override
            public void onWebRtcAudioRecordError(String errorMessage) {
                Log.e(TAG, "onWebRtcAudioRecordError: " + errorMessage);
                reportError(errorMessage);
            }
        });

        WebRtcAudioTrack.setErrorCallback(new WebRtcAudioTrack.ErrorCallback() {
            @Override
            public void onWebRtcAudioTrackInitError(String errorMessage) {
                Log.e(TAG, "onWebRtcAudioTrackInitError: " + errorMessage);
                reportError(errorMessage);
            }

            @Override
            public void onWebRtcAudioTrackStartError(
                    AudioTrackStartErrorCode errorCode, String errorMessage) {
                Log.e(TAG, "onWebRtcAudioTrackStartError: " + errorCode + ". " + errorMessage);
                reportError(errorMessage);
            }

            @Override
            public void onWebRtcAudioTrackError(String errorMessage) {
                Log.e(TAG, "onWebRtcAudioTrackError: " + errorMessage);
                reportError(errorMessage);
            }
        });


        return new LegacyAudioDeviceModule();
    }

    /** Builds the modern JavaAudioDeviceModule with error callbacks and HW AEC/NS toggles. */
    AudioDeviceModule createJavaAudioDevice() {
        // Enable/disable OpenSL ES playback.
        if (!peerConnectionParameters.useOpenSLES) {
            Log.w(TAG, "External OpenSLES ADM not implemented yet.");
            // TODO(magjed): Add support for external OpenSLES ADM.
        }

        // Set audio record error callbacks.
        AudioRecordErrorCallback audioRecordErrorCallback = new AudioRecordErrorCallback() {
            @Override
            public void onWebRtcAudioRecordInitError(String errorMessage) {
                Log.e(TAG, "onWebRtcAudioRecordInitError: " + errorMessage);
                reportError(errorMessage);
            }

            @Override
            public void onWebRtcAudioRecordStartError(
                    JavaAudioDeviceModule.AudioRecordStartErrorCode errorCode, String errorMessage) {
                Log.e(TAG, "onWebRtcAudioRecordStartError: " + errorCode + ". " + errorMessage);
                reportError(errorMessage);
            }

            @Override
            public void onWebRtcAudioRecordError(String errorMessage) {
                Log.e(TAG, "onWebRtcAudioRecordError: " + errorMessage);
                reportError(errorMessage);
            }
        };

        AudioTrackErrorCallback audioTrackErrorCallback = new AudioTrackErrorCallback() {
            @Override
            public void onWebRtcAudioTrackInitError(String errorMessage) {
                Log.e(TAG, "onWebRtcAudioTrackInitError: " + errorMessage);
                reportError(errorMessage);
            }

            @Override
            public void onWebRtcAudioTrackStartError(
                    JavaAudioDeviceModule.AudioTrackStartErrorCode errorCode, String errorMessage) {
                Log.e(TAG, "onWebRtcAudioTrackStartError: " + errorCode + ". " + errorMessage);
                reportError(errorMessage);
            }

            @Override
            public void onWebRtcAudioTrackError(String errorMessage) {
                Log.e(TAG, "onWebRtcAudioTrackError: " + errorMessage);
                reportError(errorMessage);
            }
        };

        return JavaAudioDeviceModule.builder(appContext)
                .setSamplesReadyCallback(saveRecordedAudioToFile)
                .setUseHardwareAcousticEchoCanceler(!peerConnectionParameters.disableBuiltInAEC)
                .setUseHardwareNoiseSuppressor(!peerConnectionParameters.disableBuiltInNS)
                .setAudioRecordErrorCallback(audioRecordErrorCallback)
                .setAudioTrackErrorCallback(audioTrackErrorCallback)
                .createAudioDeviceModule();
    }

    /** Populates video resolution/fps fields and audio/SDP MediaConstraints. Executor thread only. */
    private void createMediaConstraintsInternal() {
        // Create video constraints if video call is enabled.
        if (isVideoCallEnabled()) {
            videoWidth = peerConnectionParameters.videoWidth;
            videoHeight = peerConnectionParameters.videoHeight;
            videoFps = peerConnectionParameters.videoFps;

            // If video resolution is not specified, default to HD.
            if (videoWidth == 0 || videoHeight == 0) {
                videoWidth = HD_VIDEO_WIDTH;
                videoHeight = HD_VIDEO_HEIGHT;
            }

            // If fps is not specified, default to 30.
            if (videoFps == 0) {
                videoFps = 30;
            }
            Logging.d(TAG, "Capturing format: " + videoWidth + "x" + videoHeight + "@" + videoFps);
        }

        // Create audio constraints.
        audioConstraints = new MediaConstraints();
        // added for audio performance measurements
        if (peerConnectionParameters.noAudioProcessing) {
            Log.d(TAG, "Disabling audio processing");
            audioConstraints.mandatory.add(
                    new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
            audioConstraints.mandatory.add(
                    new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
            audioConstraints.mandatory.add(
                    new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
            audioConstraints.mandatory.add(
                    new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false"));
        }
        // Create SDP constraints.
        sdpMediaConstraints = new MediaConstraints();
        sdpMediaConstraints.mandatory.add(
                new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
        sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
                "OfferToReceiveVideo", Boolean.toString(isVideoCallEnabled())));
    }

    /** Creates the PeerConnection for |handleId|, attaches local tracks and optional AEC dump. */
    private void createPeerConnectionInternal(final BigInteger handleId) {
        if (factory == null || isError) {
            Log.e(TAG, "Peerconnection factory is not created");
            return;
        }
        Log.d(TAG, "Create peer connection.");

        queuedRemoteCandidates = new ArrayList<>();

        Log.d(TAG, "createPeerConnectioning...");

        PeerConnection peerConnection = createPeerConnection(handleId, true);

        // Set INFO libjingle logging.
        // NOTE: this _must_ happen while |factory| is alive!
        Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO);

        // NOTE(review): raw List — should be List<String>; left as-is here.
        List mediaStreamLabels = Collections.singletonList("ARDAMS");
        if (isVideoCallEnabled()) {
            // NOTE(review): peerConnection is not null-checked; factory.createPeerConnection
            // can return null on failure.
            peerConnection.addTrack(createVideoTrack(handleId, videoCapturer), mediaStreamLabels);
            events.onLocalRender(handleId);
            // We can add the renderers right away because we don't need to wait for an
            // answer to get the remote track.
            /*remoteVideoTrack = getRemoteVideoTrack(handleId);
            remoteVideoTrack.setEnabled(renderVideo);
            for (VideoSink remoteSink : remoteSinks) {
              remoteVideoTrack.addSink(remoteSink);
            }*/
        }

        peerConnection.addTrack(createAudioTrack(), mediaStreamLabels);

        if (isVideoCallEnabled()) {
            findVideoSender(handleId);
        }

        if (peerConnectionParameters.aecDump) {
            try {
                ParcelFileDescriptor aecDumpFileDescriptor =
                        ParcelFileDescriptor.open(new File(Environment.getExternalStorageDirectory().getPath()
                                        + File.separator + "Download/audio.aecdump"),
                                ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE
                                        | ParcelFileDescriptor.MODE_TRUNCATE);
                factory.startAecDump(aecDumpFileDescriptor.detachFd(), -1);
            } catch (IOException e) {
                Log.e(TAG, "Can not open aecdump file", e);
            }
        }

        if (saveRecordedAudioToFile != null) {
            if (saveRecordedAudioToFile.start()) {
                Log.d(TAG, "Recording input audio to file is activated");
            }
        }
        Log.d(TAG, "Peer connection created.");

    }

    /**
     * Creates a PeerConnection with this app's RTCConfiguration and registers it (plus its
     * observers and a proxy video sink) in the per-handle maps. |type| marks publisher (true)
     * vs subscriber (false) connections.
     */
    private PeerConnection createPeerConnection(final BigInteger handleId, final boolean type) {
        Log.d(TAG, "Create peer connection.");
        PeerConnection.IceServer iceServer = new PeerConnection.IceServer("stun:stun.freeswitch.org");
        // NOTE(review): raw List — should be List<PeerConnection.IceServer>; left as-is.
        List iceServers = new ArrayList<>();
        iceServers.add(iceServer);
        PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
        //added by pcg
        // (translated) If the following line is enabled, local ICE candidates will no longer
        // be sent; keep it disabled for now so local ICE candidates are still sent.
        //rtcConfig.iceTransportsType = PeerConnection.IceTransportsType.RELAY;
        //added end
        // TCP candidates are only useful when connecting to a server that supports
        // ICE-TCP.
        rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED;
        rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
        rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;
        rtcConfig.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY;
        // Use ECDSA encryption.
        rtcConfig.keyType = PeerConnection.KeyType.ECDSA;
        // Enable DTLS for normal calls and disable for loopback calls.
        rtcConfig.activeResetSrtpParams = true;//!peerConnectionParameters.loopback;
        rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;

        PCObserver pcObserver = new PCObserver();
        SDPObserver sdpObserver = new SDPObserver();
        PeerConnection peerConnection = factory.createPeerConnection(rtcConfig, pcObserver);

        // NOTE(review): local name kept from the Janus-derived original.
        ZLMConnection JanusConnection2 = new ZLMConnection();
        JanusConnection2.handleId = handleId;
        JanusConnection2.sdpObserver = sdpObserver;
        JanusConnection2.peerConnection = peerConnection;
        JanusConnection2.type = type;
        Log.d(TAG, "We are putting handleId=" + handleId);
        peerConnectionMap.put(handleId, JanusConnection2);
        videoSinkMap.put(handleId, new proxyVideoSinks());
        pcObserver.setConnection(JanusConnection2);
        sdpObserver.setConnection(JanusConnection2);
        Log.d(TAG, "Peer connection created.");
        return peerConnection;
    }

    /** Returns a timestamped output file for the RTC event log, under the app-private dir. */
    private File createRtcEventLogOutputFile() {
        DateFormat dateFormat = new SimpleDateFormat("yyyyMMdd_hhmm_ss", Locale.getDefault());
        Date date = new Date();
        final String outputFileName = "event_log_" + dateFormat.format(date) + ".log";
        return new File(
                appContext.getDir(RTCEVENTLOG_OUTPUT_DIR_NAME, Context.MODE_PRIVATE), outputFileName);
    }


    /**
     * Tears down every peer connection, sink, capturer, source and finally the factory.
     * Executor thread only.
     */
    private void closeInternal() {
        if (factory != null && peerConnectionParameters.aecDump) {
            factory.stopAecDump();
        }
        Log.d(TAG, "Closing peer connection.");
        statsTimer.cancel();

        if (peerConnectionMap != null) {
            for (ZLMConnection conn : peerConnectionMap.values()) {
                if (conn.peerConnection != null) {
                    conn.peerConnection.dispose();
                    conn.peerConnection = null;
                }
            }
        }
        peerConnectionMap.clear();

        if (videoSinkMap != null) {
            for (proxyVideoSinks sink : videoSinkMap.values()) {
                if (sink != null) {
                    sink.reset();
                }
            }
        }
        videoSinkMap.clear();


        Log.d(TAG, "Closing audio source.");
        if (audioSource != null) {
            audioSource.dispose();
            audioSource = null;
        }
        Log.d(TAG, "Stopping capture.");
        if (videoCapturer != null) {
            try {
                videoCapturer.stopCapture();
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
            videoCapturerStopped = true;
            videoCapturer.dispose();
            videoCapturer = null;
        }
        Log.d(TAG, "Closing video source.");
        if (videoSource != null) {
            videoSource.dispose();
            videoSource = null;
        }
        if (surfaceTextureHelper != null) {
            surfaceTextureHelper.dispose();
            surfaceTextureHelper = null;
        }

        localSink = null;
        remoteSinks = null;
        Log.d(TAG, "Closing peer connection factory.");
        if (factory != null) {
            factory.dispose();
            factory = null;
        }
        rootEglBase.release();
        Log.d(TAG, "Closing peer connection done.");
        //events.onPeerConnectionClosed(); fixme:
        PeerConnectionFactory.stopInternalTracingCapture();
        PeerConnectionFactory.shutdownInternalTracer();
    }

    /** True when video is enabled and the configured capture resolution is >= 720p. */
    public boolean isHDVideo() {
        return isVideoCallEnabled() && videoWidth * videoHeight >= 1280 * 720;
    }

    @SuppressWarnings("deprecation") // TODO(sakal): getStats is deprecated.
+ private void getStats(final BigInteger handleId) { + PeerConnection peerConnection = peerConnectionMap.get(handleId).peerConnection; + if (peerConnection == null || isError) { + return; + } + boolean success = peerConnection.getStats(new StatsObserver() { + @Override + public void onComplete(final StatsReport[] reports) { + //events.onPeerConnectionStatsReady(reports); fixme: + } + }, null); + if (!success) { + Log.e(TAG, "getStats() returns false!"); + } + } + + public void enableStatsEvents(boolean enable, int periodMs, final BigInteger handleId) { + if (enable) { + try { + statsTimer.schedule(new TimerTask() { + @Override + public void run() { + executor.execute(() -> getStats(handleId)); + } + }, 0, periodMs); + } catch (Exception e) { + Log.e(TAG, "Can not schedule statistics timer", e); + } + } else { + statsTimer.cancel(); + } + } + + public void setAudioEnabled(final boolean enable) { + executor.execute(() -> { + enableAudio = enable; + if (localAudioTrack != null) { + localAudioTrack.setEnabled(enableAudio); + } + }); + } + + public void setVideoEnabled(final boolean enable) { + executor.execute(() -> { + renderVideo = enable; + if (localVideoTrack != null) { + localVideoTrack.setEnabled(renderVideo); + } + if (remoteVideoTrack != null) { + remoteVideoTrack.setEnabled(renderVideo); + } + }); + } + + public void createOffer(final BigInteger handleId) { + Log.d(TAG, "peerConnectionMap get handleId=" + peerConnectionMap.size()); + executor.execute(() -> { + Log.d(TAG, "peerConnectionMap get handleId=" + handleId); + ZLMConnection connection = peerConnectionMap.get(handleId); + PeerConnection peerConnection = connection.peerConnection; + if (peerConnection != null && !isError) { + Log.d(TAG, "PC Create OFFER"); + peerConnection.createOffer(connection.sdpObserver, sdpMediaConstraints); + } + }); + } + + public void subscriberHandleRemoteJsep(final BigInteger handleId, final SessionDescription sdp) { + executor.execute(() -> { + PeerConnection peerConnection = 
createPeerConnection(handleId, false); + SDPObserver sdpObserver = peerConnectionMap.get(handleId).sdpObserver; + if (peerConnection != null && !isError) { + Log.d(TAG, "PC create ANSWER"); + ZLMConnection connection = peerConnectionMap.get(handleId); + peerConnection.setRemoteDescription(sdpObserver, sdp); + peerConnection.createAnswer(connection.sdpObserver, sdpMediaConstraints); + } + }); + } + + public void addRemoteIceCandidate(final IceCandidate candidate, final BigInteger handleId) { + executor.execute(() -> { + PeerConnection peerConnection = createPeerConnection(handleId, false); + SDPObserver sdpObserver = peerConnectionMap.get(handleId).sdpObserver; + if (peerConnection != null && !isError) { + if (queuedRemoteCandidates != null) { + queuedRemoteCandidates.add(candidate); + } else { + peerConnection.addIceCandidate(candidate); + } + } + }); + } + + public void removeRemoteIceCandidates(final IceCandidate[] candidates, final BigInteger handleId) { + executor.execute(() -> { + PeerConnection peerConnection = peerConnectionMap.get(handleId).peerConnection; + SDPObserver sdpObserver = peerConnectionMap.get(handleId).sdpObserver; + if (peerConnection == null || isError) { + return; + } + // Drain the queued remote candidates if there is any so that + // they are processed in the proper order. 
+ drainCandidates(handleId); + peerConnection.removeIceCandidates(candidates); + }); + } + + public void setRemoteDescription(BigInteger handleId, final SessionDescription sdp) { + executor.execute(() -> { + PeerConnection peerConnection = peerConnectionMap.get(handleId).peerConnection; + SDPObserver sdpObserver = peerConnectionMap.get(handleId).sdpObserver; + if (peerConnection == null || isError) { + return; + } + String sdpDescription = sdp.description; + if (preferIsac) { + sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true); + } + if (isVideoCallEnabled()) { + sdpDescription = + preferCodec(sdpDescription, getSdpVideoCodecName(peerConnectionParameters), false); + } + if (peerConnectionParameters.audioStartBitrate > 0) { + sdpDescription = setStartBitrate( + AUDIO_CODEC_OPUS, false, sdpDescription, peerConnectionParameters.audioStartBitrate); + } + Log.d(TAG, "Set remote SDP."); + SessionDescription sdpRemote = new SessionDescription(sdp.type, sdpDescription); + peerConnection.setRemoteDescription(sdpObserver, sdpRemote); + }); + } + + public void setVideoRender(BigInteger handleId, SurfaceViewRenderer videoRender) { + executor.execute(() -> { + videoSinkMap.get(handleId).setTarget(videoRender); + }); + } + + public void dispose(BigInteger handleId) { + executor.execute(() -> { + videoSinkMap.remove(handleId); + ZLMConnection conn = peerConnectionMap.get(handleId); + if (conn.peerConnection != null) { + conn.peerConnection.dispose(); + conn.peerConnection = null; + } + peerConnectionMap.remove(handleId); + }); + } + + + public void stopVideoSource() { + executor.execute(() -> { + if (videoCapturer != null && !videoCapturerStopped) { + Log.d(TAG, "Stop video source."); + try { + videoCapturer.stopCapture(); + } catch (InterruptedException e) { + } + videoCapturerStopped = true; + } + }); + } + + public void startVideoSource() { + executor.execute(() -> { + if (videoCapturer != null && videoCapturerStopped) { + Log.d(TAG, "Restart video 
source."); + videoCapturer.startCapture(videoWidth, videoHeight, videoFps); + videoCapturerStopped = false; + } + }); + } + + public void setVideoMaxBitrate(@Nullable final Integer maxBitrateKbps, final BigInteger handleId) { + PeerConnection peerConnection = peerConnectionMap.get(handleId).peerConnection; + SDPObserver sdpObserver = peerConnectionMap.get(handleId).sdpObserver; + executor.execute(() -> { + if (peerConnection == null || localVideoSender == null || isError) { + return; + } + Log.d(TAG, "Requested max video bitrate: " + maxBitrateKbps); + if (localVideoSender == null) { + Log.w(TAG, "Sender is not ready."); + return; + } + + RtpParameters parameters = localVideoSender.getParameters(); + if (parameters.encodings.size() == 0) { + Log.w(TAG, "RtpParameters are not ready."); + return; + } + + for (RtpParameters.Encoding encoding : parameters.encodings) { + // Null value means no limit. + encoding.maxBitrateBps = maxBitrateKbps == null ? null : maxBitrateKbps * BPS_IN_KBPS; + } + if (!localVideoSender.setParameters(parameters)) { + Log.e(TAG, "RtpSender.setParameters failed."); + } + Log.d(TAG, "Configured max video bitrate to: " + maxBitrateKbps); + }); + } + + private void reportError(final String errorMessage) { + Log.e(TAG, "Peerconnection error: " + errorMessage); + executor.execute(() -> { + if (!isError) { + //events.onPeerConnectionError(errorMessage); fixme: + isError = true; + } + }); + } + + @Nullable + private AudioTrack createAudioTrack() { + audioSource = factory.createAudioSource(audioConstraints); + localAudioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource); + localAudioTrack.setEnabled(enableAudio); + return localAudioTrack; + } + + @Nullable + private VideoTrack createVideoTrack(BigInteger handleId, VideoCapturer capturer) { + surfaceTextureHelper = + SurfaceTextureHelper.create("CaptureThread", rootEglBase.getEglBaseContext()); + videoSource = factory.createVideoSource(capturer.isScreencast()); + 
capturer.initialize(surfaceTextureHelper, appContext, videoSource.getCapturerObserver()); + capturer.startCapture(videoWidth, videoHeight, videoFps); + + localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource); + localVideoTrack.setEnabled(renderVideo); + localVideoTrack.addSink(videoSinkMap.get(handleId)); + return localVideoTrack; + } + + private void findVideoSender(final BigInteger handleId) { + PeerConnection peerConnection = peerConnectionMap.get(handleId).peerConnection; + for (RtpSender sender : peerConnection.getSenders()) { + if (sender.track() != null) { + String trackType = sender.track().kind(); + if (trackType.equals(VIDEO_TRACK_TYPE)) { + Log.d(TAG, "Found video sender."); + localVideoSender = sender; + } + } + } + } + + // Returns the remote VideoTrack, assuming there is only one. + private @Nullable VideoTrack getRemoteVideoTrack(BigInteger handleId) { + PeerConnection peerConnection = peerConnectionMap.get(handleId).peerConnection; + for (RtpTransceiver transceiver : peerConnection.getTransceivers()) { + MediaStreamTrack track = transceiver.getReceiver().track(); + if (track instanceof VideoTrack) { + return (VideoTrack) track; + } + } + return null; + } + + private static String getSdpVideoCodecName(PeerConnectionParameters parameters) { + switch (parameters.videoCodec) { + case VIDEO_CODEC_VP8: + return VIDEO_CODEC_VP8; + case VIDEO_CODEC_VP9: + return VIDEO_CODEC_VP9; + case VIDEO_CODEC_H264_HIGH: + case VIDEO_CODEC_H264_BASELINE: + return VIDEO_CODEC_H264; + default: + return VIDEO_CODEC_VP8; + } + } + + private static String getFieldTrials(PeerConnectionParameters peerConnectionParameters) { + String fieldTrials = ""; + if (peerConnectionParameters.videoFlexfecEnabled) { + fieldTrials += VIDEO_FLEXFEC_FIELDTRIAL; + Log.d(TAG, "Enable FlexFEC field trial."); + } + fieldTrials += VIDEO_VP8_INTEL_HW_ENCODER_FIELDTRIAL; + if (peerConnectionParameters.disableWebRtcAGCAndHPF) { + fieldTrials += DISABLE_WEBRTC_AGC_FIELDTRIAL; + 
Log.d(TAG, "Disable WebRTC AGC field trial."); + } + return fieldTrials; + } + + @SuppressWarnings("StringSplitter") + private static String setStartBitrate( + String codec, boolean isVideoCodec, String sdpDescription, int bitrateKbps) { + String[] lines = sdpDescription.split("\r\n"); + int rtpmapLineIndex = -1; + boolean sdpFormatUpdated = false; + String codecRtpMap = null; + // Search for codec rtpmap in format + // a=rtpmap: / [/] + String regex = "^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$"; + Pattern codecPattern = Pattern.compile(regex); + for (int i = 0; i < lines.length; i++) { + Matcher codecMatcher = codecPattern.matcher(lines[i]); + if (codecMatcher.matches()) { + codecRtpMap = codecMatcher.group(1); + rtpmapLineIndex = i; + break; + } + } + if (codecRtpMap == null) { + Log.w(TAG, "No rtpmap for " + codec + " codec"); + return sdpDescription; + } + Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + " at " + lines[rtpmapLineIndex]); + + // Check if a=fmtp string already exist in remote SDP for this codec and + // update it with new bitrate parameter. + regex = "^a=fmtp:" + codecRtpMap + " \\w+=\\d+.*[\r]?$"; + codecPattern = Pattern.compile(regex); + for (int i = 0; i < lines.length; i++) { + Matcher codecMatcher = codecPattern.matcher(lines[i]); + if (codecMatcher.matches()) { + Log.d(TAG, "Found " + codec + " " + lines[i]); + if (isVideoCodec) { + lines[i] += "; " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps; + } else { + lines[i] += "; " + AUDIO_CODEC_PARAM_BITRATE + "=" + (bitrateKbps * 1000); + } + Log.d(TAG, "Update remote SDP line: " + lines[i]); + sdpFormatUpdated = true; + break; + } + } + + StringBuilder newSdpDescription = new StringBuilder(); + for (int i = 0; i < lines.length; i++) { + newSdpDescription.append(lines[i]).append("\r\n"); + // Append new a=fmtp line if no such line exist for a codec. 
+ if (!sdpFormatUpdated && i == rtpmapLineIndex) { + String bitrateSet; + if (isVideoCodec) { + bitrateSet = + "a=fmtp:" + codecRtpMap + " " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps; + } else { + bitrateSet = "a=fmtp:" + codecRtpMap + " " + AUDIO_CODEC_PARAM_BITRATE + "=" + + (bitrateKbps * 1000); + } + Log.d(TAG, "Add remote SDP line: " + bitrateSet); + newSdpDescription.append(bitrateSet).append("\r\n"); + } + } + return newSdpDescription.toString(); + } + + /** + * Returns the line number containing "m=audio|video", or -1 if no such line exists. + */ + private static int findMediaDescriptionLine(boolean isAudio, String[] sdpLines) { + final String mediaDescription = isAudio ? "m=audio " : "m=video "; + for (int i = 0; i < sdpLines.length; ++i) { + if (sdpLines[i].startsWith(mediaDescription)) { + return i; + } + } + return -1; + } + + private static String joinString( + Iterable s, String delimiter, boolean delimiterAtEnd) { + Iterator iter = s.iterator(); + if (!iter.hasNext()) { + return ""; + } + StringBuilder buffer = new StringBuilder(iter.next()); + while (iter.hasNext()) { + buffer.append(delimiter).append(iter.next()); + } + if (delimiterAtEnd) { + buffer.append(delimiter); + } + return buffer.toString(); + } + + private static @Nullable String movePayloadTypesToFront( + List preferredPayloadTypes, String mLine) { + // The format of the media description line should be: m= ... + final List origLineParts = Arrays.asList(mLine.split(" ")); + if (origLineParts.size() <= 3) { + Log.e(TAG, "Wrong SDP media description format: " + mLine); + return null; + } + final List header = origLineParts.subList(0, 3); + final List unpreferredPayloadTypes = + new ArrayList<>(origLineParts.subList(3, origLineParts.size())); + unpreferredPayloadTypes.removeAll(preferredPayloadTypes); + // Reconstruct the line with |preferredPayloadTypes| moved to the beginning of the payload + // types. 
+ final List newLineParts = new ArrayList<>(); + newLineParts.addAll(header); + newLineParts.addAll(preferredPayloadTypes); + newLineParts.addAll(unpreferredPayloadTypes); + return joinString(newLineParts, " ", false /* delimiterAtEnd */); + } + + private static String preferCodec(String sdpDescription, String codec, boolean isAudio) { + final String[] lines = sdpDescription.split("\r\n"); + final int mLineIndex = findMediaDescriptionLine(isAudio, lines); + if (mLineIndex == -1) { + Log.w(TAG, "No mediaDescription line, so can't prefer " + codec); + return sdpDescription; + } + // A list with all the payload types with name |codec|. The payload types are integers in the + // range 96-127, but they are stored as strings here. + final List codecPayloadTypes = new ArrayList<>(); + // a=rtpmap: / [/] + final Pattern codecPattern = Pattern.compile("^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$"); + for (String line : lines) { + Matcher codecMatcher = codecPattern.matcher(line); + if (codecMatcher.matches()) { + codecPayloadTypes.add(codecMatcher.group(1)); + } + } + if (codecPayloadTypes.isEmpty()) { + Log.w(TAG, "No payload types with name " + codec); + return sdpDescription; + } + + final String newMLine = movePayloadTypesToFront(codecPayloadTypes, lines[mLineIndex]); + if (newMLine == null) { + return sdpDescription; + } + Log.d(TAG, "Change media description from: " + lines[mLineIndex] + " to " + newMLine); + lines[mLineIndex] = newMLine; + return joinString(Arrays.asList(lines), "\r\n", true /* delimiterAtEnd */); + } + + private void drainCandidates(BigInteger handleId) { + PeerConnection peerConnection = peerConnectionMap.get(handleId).peerConnection; + if (queuedRemoteCandidates != null) { + Log.d(TAG, "Add " + queuedRemoteCandidates.size() + " remote candidates"); + for (IceCandidate candidate : queuedRemoteCandidates) { + peerConnection.addIceCandidate(candidate); + } + queuedRemoteCandidates = null; + } + } + + private void switchCameraInternal() { + if 
(videoCapturer instanceof CameraVideoCapturer) { + if (!isVideoCallEnabled() || isError) { + Log.e(TAG, + "Failed to switch camera. Video: " + isVideoCallEnabled() + ". Error : " + isError); + return; // No video is sent or only one camera is available or error happened. + } + Log.d(TAG, "Switch camera"); + CameraVideoCapturer cameraVideoCapturer = (CameraVideoCapturer) videoCapturer; + cameraVideoCapturer.switchCamera(null); + } else { + Log.d(TAG, "Will not switch camera, video capturer is not a camera"); + } + } + + public void switchCamera() { + executor.execute(this::switchCameraInternal); + } + + public void changeCaptureFormat(final int width, final int height, final int framerate) { + executor.execute(() -> changeCaptureFormatInternal(width, height, framerate)); + } + + private void changeCaptureFormatInternal(int width, int height, int framerate) { + if (!isVideoCallEnabled() || isError || videoCapturer == null) { + Log.e(TAG, + "Failed to change capture format. Video: " + isVideoCallEnabled() + + ". Error : " + isError); + return; + } + Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate); + videoSource.adaptOutputFormat(width, height, framerate); + } + + // Implementation detail: observe ICE & stream changes and react accordingly. 
+ private class PCObserver implements PeerConnection.Observer { + private ZLMConnection connection; + private PeerConnection peerConnection; + + public void setConnection(ZLMConnection connection) { + this.connection = connection; + this.peerConnection = connection.peerConnection; + } + + @Override + public void onIceCandidate(final IceCandidate candidate) { + executor.execute(() -> events.onIceCandidate(connection.handleId, candidate)); + } + + @Override + public void onIceCandidatesRemoved(final IceCandidate[] candidates) { + executor.execute(() -> events.onIceCandidatesRemoved(connection.handleId, candidates)); + } + + @Override + public void onSignalingChange(PeerConnection.SignalingState newState) { + Log.d(TAG, "SignalingState: " + newState); + } + + @Override + public void onIceConnectionChange(final IceConnectionState newState) { + executor.execute(() -> { + Log.d(TAG, "IceConnectionState: " + newState); + if (newState == IceConnectionState.CONNECTED) { + events.onIceConnected(connection.handleId); + } else if (newState == IceConnectionState.DISCONNECTED) { + events.onIceDisconnected(connection.handleId); + } else if (newState == IceConnectionState.FAILED) { + reportError("ICE connection failed."); + } + }); + } + + @Override + public void onIceGatheringChange(PeerConnection.IceGatheringState newState) { + Log.d(TAG, "IceGatheringState: " + newState); + } + + @Override + public void onIceConnectionReceivingChange(boolean receiving) { + Log.d(TAG, "IceConnectionReceiving changed to " + receiving); + } + + @Override + public void onAddStream(final MediaStream stream) { + executor.execute(new Runnable() { + @Override + public void run() { + if (peerConnection == null || isError) { + return; + } + Log.d(TAG, "==onAddStream tracks size:" + stream.videoTracks.size()); + if (stream.videoTracks.size() == 1) { + remoteVideoTrack = stream.videoTracks.get(0); + remoteVideoTrack.setEnabled(true); + connection.videoTrack = remoteVideoTrack; + 
connection.videoTrack.addSink(videoSinkMap.get(connection.handleId)); + events.onRemoteRender(connection.handleId); + } + } + }); + } + + @Override + public void onRemoveStream(final MediaStream stream) { + } + + @Override + public void onDataChannel(final DataChannel dc) { + Log.d(TAG, "New Data channel " + dc.label()); + + if (!dataChannelEnabled) + return; + + dc.registerObserver(new DataChannel.Observer() { + @Override + public void onBufferedAmountChange(long previousAmount) { + Log.d(TAG, "Data channel buffered amount changed: " + dc.label() + ": " + dc.state()); + } + + @Override + public void onStateChange() { + Log.d(TAG, "Data channel state changed: " + dc.label() + ": " + dc.state()); + } + + @Override + public void onMessage(final DataChannel.Buffer buffer) { + if (buffer.binary) { + Log.d(TAG, "Received binary msg over " + dc); + return; + } + ByteBuffer data = buffer.data; + final byte[] bytes = new byte[data.capacity()]; + data.get(bytes); + String strData = new String(bytes, Charset.forName("UTF-8")); + Log.d(TAG, "Got msg: " + strData + " over " + dc); + } + }); + } + + @Override + public void onRenegotiationNeeded() { + // No need to do anything; AppRTC follows a pre-agreed-upon + // signaling/negotiation protocol. + } + + @Override + public void onAddTrack(final RtpReceiver receiver, final MediaStream[] mediaStreams) { + Log.d(TAG, "==onAddTrack mediaStreams size:" + mediaStreams.length); + } + } + + // Implementation detail: handle offer creation/signaling and answer setting, + // as well as adding remote ICE candidates once the answer SDP is set. 
+ public class SDPObserver implements SdpObserver { + private PeerConnection peerConnection; + private SDPObserver sdpObserver; + private BigInteger handleId; + private SessionDescription localSdp; + private boolean type; + + public void setConnection(ZLMConnection connection) { + this.peerConnection = connection.peerConnection; + this.sdpObserver = connection.sdpObserver; + this.handleId = connection.handleId; + this.type = connection.type; + } + + @Override + public void onCreateSuccess(final SessionDescription origSdp) { + if (localSdp != null) { + reportError("Multiple SDP create."); + return; + } + String sdpDescription = origSdp.description; + if (preferIsac) { + sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true); + } + if (isVideoCallEnabled()) { + sdpDescription = + preferCodec(sdpDescription, getSdpVideoCodecName(peerConnectionParameters), false); + } + final SessionDescription sdp = new SessionDescription(origSdp.type, sdpDescription); + localSdp = sdp; + executor.execute(() -> { + if (peerConnection != null && !isError) { + Log.d(TAG, "Set local SDP from " + sdp.type); + peerConnection.setLocalDescription(sdpObserver, sdp); + +// MediaStream localMediaStream = factory.createLocalMediaStream("ARDAMS"); +// localMediaStream.addTrack(localAudioTrack); +// peerConnection.addStream(localMediaStream); + } + }); + } + + @Override + public void onSetSuccess() { + executor.execute(() -> { + if (peerConnection == null || isError) { + return; + } + if (type) { + // For offering peer connection we first create offer and set + // local SDP, then after receiving answer set remote SDP. + if (peerConnection.getRemoteDescription() == null) { + // We've just set our local SDP so time to send it. + Log.d(TAG, "Local SDP set successfully"); + events.onLocalDescription(handleId, localSdp); + } else { + // We've just set remote description, so drain remote + // and send local ICE candidates. 
+ Log.d(TAG, "Remote SDP set successfully"); + drainCandidates(handleId); + } + } else { + // For answering peer connection we set remote SDP and then + // create answer and set local SDP. + if (peerConnection.getLocalDescription() != null) { + // We've just set our local SDP so time to send it, drain + // remote and send local ICE candidates. + Log.d(TAG, "Local SDP set successfully"); + events.onLocalDescription(handleId, localSdp); + drainCandidates(handleId); + } else { + // We've just set remote SDP - do nothing for now - + // answer will be created soon. + Log.d(TAG, "Remote SDP set successfully"); + } + } + }); + } + + @Override + public void onCreateFailure(final String error) { + reportError("createSDP error: " + error); + } + + @Override + public void onSetFailure(final String error) { + reportError("setSDP error: " + error); + } + } +} diff --git a/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/RecordedAudioToFileController.java b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/RecordedAudioToFileController.java new file mode 100644 index 00000000..0fda70bb --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/RecordedAudioToFileController.java @@ -0,0 +1,158 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package com.zlm.rtc.client; + +import android.media.AudioFormat; +import android.os.Environment; +import android.util.Log; + +import androidx.annotation.Nullable; + +import org.webrtc.audio.JavaAudioDeviceModule; +import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback; +import org.webrtc.voiceengine.WebRtcAudioRecord; +import org.webrtc.voiceengine.WebRtcAudioRecord.WebRtcAudioRecordSamplesReadyCallback; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.util.concurrent.ExecutorService; + + +/** + * Implements the AudioRecordSamplesReadyCallback interface and writes + * recorded raw audio samples to an output file. + */ +public class RecordedAudioToFileController + implements SamplesReadyCallback, WebRtcAudioRecordSamplesReadyCallback { + private static final String TAG = "RecordedAudioToFile"; + private static final long MAX_FILE_SIZE_IN_BYTES = 58348800L; + + private final Object lock = new Object(); + private final ExecutorService executor; + @Nullable + private OutputStream rawAudioFileOutputStream = null; + private boolean isRunning; + private long fileSizeInBytes = 0; + + public RecordedAudioToFileController(ExecutorService executor) { + Log.d(TAG, "ctor"); + this.executor = executor; + } + + /** + * Should be called on the same executor thread as the one provided at + * construction. + */ + public boolean start() { + Log.d(TAG, "start"); + if (!isExternalStorageWritable()) { + Log.e(TAG, "Writing to external media is not possible"); + return false; + } + synchronized (lock) { + isRunning = true; + } + return true; + } + + /** + * Should be called on the same executor thread as the one provided at + * construction. 
+ */ + public void stop() { + Log.d(TAG, "stop"); + synchronized (lock) { + isRunning = false; + if (rawAudioFileOutputStream != null) { + try { + rawAudioFileOutputStream.close(); + } catch (IOException e) { + Log.e(TAG, "Failed to close file with saved input audio: " + e); + } + rawAudioFileOutputStream = null; + } + fileSizeInBytes = 0; + } + } + + // Checks if external storage is available for read and write. + private boolean isExternalStorageWritable() { + String state = Environment.getExternalStorageState(); + if (Environment.MEDIA_MOUNTED.equals(state)) { + return true; + } + return false; + } + + // Utilizes audio parameters to create a file name which contains sufficient + // information so that the file can be played using an external file player. + // Example: /sdcard/recorded_audio_16bits_48000Hz_mono.pcm. + private void openRawAudioOutputFile(int sampleRate, int channelCount) { + final String fileName = Environment.getExternalStorageDirectory().getPath() + File.separator + + "recorded_audio_16bits_" + String.valueOf(sampleRate) + "Hz" + + ((channelCount == 1) ? "_mono" : "_stereo") + ".pcm"; + final File outputFile = new File(fileName); + try { + rawAudioFileOutputStream = new FileOutputStream(outputFile); + } catch (FileNotFoundException e) { + Log.e(TAG, "Failed to open audio output file: " + e.getMessage()); + } + Log.d(TAG, "Opened file for recording: " + fileName); + } + + // Called when new audio samples are ready. + @Override + public void onWebRtcAudioRecordSamplesReady(WebRtcAudioRecord.AudioSamples samples) { + onWebRtcAudioRecordSamplesReady(new JavaAudioDeviceModule.AudioSamples(samples.getAudioFormat(), + samples.getChannelCount(), samples.getSampleRate(), samples.getData())); + } + + // Called when new audio samples are ready. + @Override + public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples samples) { + // The native audio layer on Android should use 16-bit PCM format. 
+ if (samples.getAudioFormat() != AudioFormat.ENCODING_PCM_16BIT) { + Log.e(TAG, "Invalid audio format"); + return; + } + synchronized (lock) { + // Abort early if stop() has been called. + if (!isRunning) { + return; + } + // Open a new file for the first callback only since it allows us to add audio parameters to + // the file name. + if (rawAudioFileOutputStream == null) { + openRawAudioOutputFile(samples.getSampleRate(), samples.getChannelCount()); + fileSizeInBytes = 0; + } + } + // Append the recorded 16-bit audio samples to the open output file. + executor.execute(() -> { + if (rawAudioFileOutputStream != null) { + try { + // Set a limit on max file size. 58348800 bytes corresponds to + // approximately 10 minutes of recording in mono at 48kHz. + if (fileSizeInBytes < MAX_FILE_SIZE_IN_BYTES) { + // Writes samples.getData().length bytes to output stream. + rawAudioFileOutputStream.write(samples.getData()); + fileSizeInBytes += samples.getData().length; + } + } catch (IOException e) { + Log.e(TAG, "Failed to write audio to file: " + e.getMessage()); + } + } + }); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/RtcEventLog.java b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/RtcEventLog.java new file mode 100644 index 00000000..94875cda --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/RtcEventLog.java @@ -0,0 +1,75 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package com.zlm.rtc.client; + +import android.os.ParcelFileDescriptor; +import android.util.Log; + +import org.webrtc.PeerConnection; + +import java.io.File; +import java.io.IOException; + +public class RtcEventLog { + private static final String TAG = "RtcEventLog"; + private static final int OUTPUT_FILE_MAX_BYTES = 10_000_000; + private final PeerConnection peerConnection; + private RtcEventLogState state = RtcEventLogState.INACTIVE; + + enum RtcEventLogState { + INACTIVE, + STARTED, + STOPPED, + } + + public RtcEventLog(PeerConnection peerConnection) { + if (peerConnection == null) { + throw new NullPointerException("The peer connection is null."); + } + this.peerConnection = peerConnection; + } + + public void start(final File outputFile) { + if (state == RtcEventLogState.STARTED) { + Log.e(TAG, "RtcEventLog has already started."); + return; + } + final ParcelFileDescriptor fileDescriptor; + try { + fileDescriptor = ParcelFileDescriptor.open(outputFile, + ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE + | ParcelFileDescriptor.MODE_TRUNCATE); + } catch (IOException e) { + Log.e(TAG, "Failed to create a new file", e); + return; + } + + // Passes ownership of the file to WebRTC. 
+ boolean success = + peerConnection.startRtcEventLog(fileDescriptor.detachFd(), OUTPUT_FILE_MAX_BYTES); + if (!success) { + Log.e(TAG, "Failed to start RTC event log."); + return; + } + state = RtcEventLogState.STARTED; + Log.d(TAG, "RtcEventLog started."); + } + + public void stop() { + if (state != RtcEventLogState.STARTED) { + Log.e(TAG, "RtcEventLog was not started."); + return; + } + peerConnection.stopRtcEventLog(); + state = RtcEventLogState.STOPPED; + Log.d(TAG, "RtcEventLog stopped."); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/SdpBean.kt b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/SdpBean.kt new file mode 100644 index 00000000..26cc2439 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/SdpBean.kt @@ -0,0 +1,13 @@ +package com.zlm.rtc.client + +class SdpBean { + var code = 0 + + var id:String?=null + + var msg:String?=null + + var sdp: String? = null + + var type:String?=null +} \ No newline at end of file diff --git a/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/ZLMConnection.java b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/ZLMConnection.java new file mode 100644 index 00000000..2946b4f3 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/client/ZLMConnection.java @@ -0,0 +1,14 @@ +package com.zlm.rtc.client; + +import org.webrtc.PeerConnection; +import org.webrtc.VideoTrack; + +import java.math.BigInteger; + +public class ZLMConnection { + public BigInteger handleId; + public PeerConnection peerConnection; + public PeerConnectionClient.SDPObserver sdpObserver; + public VideoTrack videoTrack; + public boolean type; +} diff --git a/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/play/ZLMRTCPlayerImpl.kt b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/play/ZLMRTCPlayerImpl.kt index 9e66365a..ebc522e3 100644 --- a/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/play/ZLMRTCPlayerImpl.kt +++ 
b/webrtc_player/android/zlm/src/main/java/com/zlm/rtc/play/ZLMRTCPlayerImpl.kt @@ -2,54 +2,113 @@ package com.zlm.rtc.play import android.content.Context import android.graphics.Bitmap +import android.util.Log +import com.zlm.rtc.NativeLib import com.zlm.rtc.ZLMRTCPlayer -import org.webrtc.AudioSource -import org.webrtc.AudioTrack +import com.zlm.rtc.client.HttpClient +import com.zlm.rtc.client.PeerConnectionClient +import org.json.JSONObject +import org.webrtc.Camera1Enumerator +import org.webrtc.Camera2Enumerator +import org.webrtc.CameraEnumerator import org.webrtc.EglBase -import org.webrtc.PeerConnection +import org.webrtc.IceCandidate import org.webrtc.PeerConnectionFactory -import org.webrtc.SurfaceTextureHelper +import org.webrtc.SessionDescription +import org.webrtc.StatsReport import org.webrtc.SurfaceViewRenderer import org.webrtc.VideoCapturer -import org.webrtc.VideoSource -import org.webrtc.VideoTrack +import java.math.BigInteger -class ZLMRTCPlayerImpl: ZLMRTCPlayer() { +class ZLMRTCPlayerImpl : ZLMRTCPlayer(), PeerConnectionClient.PeerConnectionEvents { - private val context: Context? = null + private var context: Context? = null - private val eglBase: EglBase? by lazy { - EglBase.create() + private val peerConnectionClient: PeerConnectionClient? by lazy { + PeerConnectionClient( + context, EglBase.create(), + PeerConnectionClient.PeerConnectionParameters( + true, + false, + false, + 1080, + 960, + 0, + 0, + "VP8", + true, + false, + 0, + "OPUS", + false, + false, + false, + false, + false, + false, + false, + false, false, false, null + ), this + ) } - private var playUrl: String? = null - private var peerConnection: PeerConnection? = null - private var surfaceViewRenderer: SurfaceViewRenderer? = null - private var peerConnectionFactory: PeerConnectionFactory? = null - private var audioSource: AudioSource? = null - private var videoSource: VideoSource? = null - private var localAudioTrack: AudioTrack? 
= null - private var localVideoTrack: VideoTrack? = null - private var captureAndroid: VideoCapturer? = null - private var surfaceTextureHelper: SurfaceTextureHelper? = null - private var isShowCamera = true - private var isPublishMode = false //isPublish true为推流 false为拉流 - private var defaultFps = 24 - private var isPreviewing = false - private var isFirst = true init { } + private fun logger(msg: String) { + Log.i("ZLMRTCPlayerImpl", msg) + } + + fun createVideoCapture(context: Context?): VideoCapturer? { + val videoCapturer: VideoCapturer? = if (Camera2Enumerator.isSupported(context)) { + createCameraCapture(Camera2Enumerator(context)) + } else { + createCameraCapture(Camera1Enumerator(true)) + } + return videoCapturer + } + + /** + * 创建相机媒体流 + */ + private fun createCameraCapture(enumerator: CameraEnumerator): VideoCapturer? { + val deviceNames = enumerator.deviceNames + + // Front facing camera not found, try something else + for (deviceName in deviceNames) { + if (!enumerator.isFrontFacing(deviceName)) { + val videoCapturer: VideoCapturer? = enumerator.createCapturer(deviceName, null) + if (videoCapturer != null) { + return videoCapturer + } + } + } + // First, try to find front facing camera + for (deviceName in deviceNames) { + if (enumerator.isFrontFacing(deviceName)) { + val videoCapturer: VideoCapturer? 
= enumerator.createCapturer(deviceName, null) + if (videoCapturer != null) { + return videoCapturer + } + } + } + + + return null + } + override fun bind(context: Context, surface: SurfaceViewRenderer, localPreview: Boolean) { + this.context = context + peerConnectionClient?.setAudioEnabled(true) + peerConnectionClient?.createPeerConnectionFactory(PeerConnectionFactory.Options()) + peerConnectionClient?.createPeerConnection(createVideoCapture(context), BigInteger.ZERO) + peerConnectionClient?.createOffer((BigInteger.ZERO)) + - override fun bind(surface: SurfaceViewRenderer, localPreview: Boolean) { - this.surfaceViewRenderer = surface - surfaceViewRenderer?.init(eglBase?.eglBaseContext,null) } override fun play(app: String, streamId: String) { - } override fun setSpeakerphoneOn(on: Boolean) { @@ -81,4 +140,71 @@ class ZLMRTCPlayerImpl: ZLMRTCPlayer() { } + override fun onLocalDescription(handleId: BigInteger?, sdp: SessionDescription?) { + + val url = NativeLib().makeUrl("live", "li") + logger("handleId: " + url) + logger("handleId: " + sdp?.description) + val doPost = HttpClient.doPost( + url, + mutableMapOf(Pair("sdp", sdp?.description)), + mutableMapOf() + ) + + val result = JSONObject(doPost) + + val code = result.getInt("code") + if (code == 0) { + logger("handleId: " + doPost) + val sdp = result.getString("sdp") + peerConnectionClient?.setRemoteDescription(handleId,SessionDescription(SessionDescription.Type.ANSWER,sdp)) + } else { + val msg = result.getString("msg") + logger("handleId: " + msg) + } + } + + override fun onIceCandidate(handleId: BigInteger?, candidate: IceCandidate?) { + + } + + override fun onIceCandidatesRemoved( + handleId: BigInteger?, + candidates: Array? + ) { + + } + + override fun onIceConnected(handleId: BigInteger?) { + + } + + override fun onIceDisconnected(handleId: BigInteger?) { + + } + + override fun onPeerConnectionClosed(handleId: BigInteger?) 
{ + + } + + override fun onPeerConnectionStatsReady( + handleId: BigInteger?, + reports: Array? + ) { + + } + + override fun onPeerConnectionError(handleId: BigInteger?, description: String?) { + + } + + override fun onLocalRender(handleId: BigInteger?) { + + } + + override fun onRemoteRender(handleId: BigInteger?) { + + } + + } \ No newline at end of file diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/AddIceObserver.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/AddIceObserver.java new file mode 100644 index 00000000..ff2c6900 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/AddIceObserver.java @@ -0,0 +1,20 @@ +/* + * Copyright 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** Interface to handle completion of addIceCandidate */ +public interface AddIceObserver { + /** Called when ICE candidate added successfully.*/ + @CalledByNative public void onAddSuccess(); + + /** Called when ICE candidate addition failed.*/ + @CalledByNative public void onAddFailure(String error); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/AndroidVideoDecoder.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/AndroidVideoDecoder.java new file mode 100644 index 00000000..47cb5689 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/AndroidVideoDecoder.java @@ -0,0 +1,673 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.media.MediaCodec; +import android.media.MediaCodecInfo.CodecCapabilities; +import android.media.MediaFormat; +import android.os.SystemClock; +import android.view.Surface; +import androidx.annotation.Nullable; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.concurrent.BlockingDeque; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.TimeUnit; +import org.webrtc.ThreadUtils.ThreadChecker; + +/** + * Android hardware video decoder. + */ +class AndroidVideoDecoder implements VideoDecoder, VideoSink { + private static final String TAG = "AndroidVideoDecoder"; + + // MediaCodec.release() occasionally hangs. Release stops waiting and reports failure after + // this timeout. + private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; + + // WebRTC queues input frames quickly in the beginning on the call. Wait for input buffers with a + // long timeout (500 ms) to prevent this from causing the codec to return an error. + private static final int DEQUEUE_INPUT_TIMEOUT_US = 500000; + + // Dequeuing an output buffer will block until a buffer is available (up to 100 milliseconds). + // If this timeout is exceeded, the output thread will unblock and check if the decoder is still + // running. If it is, it will block on dequeue again. Otherwise, it will stop and release the + // MediaCodec. 
+ private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000; + + private final MediaCodecWrapperFactory mediaCodecWrapperFactory; + private final String codecName; + private final VideoCodecMimeType codecType; + + private static class FrameInfo { + final long decodeStartTimeMs; + final int rotation; + + FrameInfo(long decodeStartTimeMs, int rotation) { + this.decodeStartTimeMs = decodeStartTimeMs; + this.rotation = rotation; + } + } + + private final BlockingDeque frameInfos; + private int colorFormat; + + // Output thread runs a loop which polls MediaCodec for decoded output buffers. It reformats + // those buffers into VideoFrames and delivers them to the callback. Variable is set on decoder + // thread and is immutable while the codec is running. + @Nullable private Thread outputThread; + + // Checker that ensures work is run on the output thread. + private ThreadChecker outputThreadChecker; + + // Checker that ensures work is run on the decoder thread. The decoder thread is owned by the + // caller and must be used to call initDecode, decode, and release. + private ThreadChecker decoderThreadChecker; + + private volatile boolean running; + @Nullable private volatile Exception shutdownException; + + // Dimensions (width, height, stride, and sliceHeight) may be accessed by either the decode thread + // or the output thread. Accesses should be protected with this lock. + private final Object dimensionLock = new Object(); + private int width; + private int height; + private int stride; + private int sliceHeight; + + // Whether the decoder has finished the first frame. The codec may not change output dimensions + // after delivering the first frame. Only accessed on the output thread while the decoder is + // running. + private boolean hasDecodedFirstFrame; + // Whether the decoder has seen a key frame. The first frame must be a key frame. Only accessed + // on the decoder thread. 
+ private boolean keyFrameRequired; + + private final @Nullable EglBase.Context sharedContext; + // Valid and immutable while the decoder is running. + @Nullable private SurfaceTextureHelper surfaceTextureHelper; + @Nullable private Surface surface; + + private static class DecodedTextureMetadata { + final long presentationTimestampUs; + final Integer decodeTimeMs; + + DecodedTextureMetadata(long presentationTimestampUs, Integer decodeTimeMs) { + this.presentationTimestampUs = presentationTimestampUs; + this.decodeTimeMs = decodeTimeMs; + } + } + + // Metadata for the last frame rendered to the texture. + private final Object renderedTextureMetadataLock = new Object(); + @Nullable private DecodedTextureMetadata renderedTextureMetadata; + + // Decoding proceeds asynchronously. This callback returns decoded frames to the caller. Valid + // and immutable while the decoder is running. + @Nullable private Callback callback; + + // Valid and immutable while the decoder is running. + @Nullable private MediaCodecWrapper codec; + + AndroidVideoDecoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName, + VideoCodecMimeType codecType, int colorFormat, @Nullable EglBase.Context sharedContext) { + if (!isSupportedColorFormat(colorFormat)) { + throw new IllegalArgumentException("Unsupported color format: " + colorFormat); + } + Logging.d(TAG, + "ctor name: " + codecName + " type: " + codecType + " color format: " + colorFormat + + " context: " + sharedContext); + this.mediaCodecWrapperFactory = mediaCodecWrapperFactory; + this.codecName = codecName; + this.codecType = codecType; + this.colorFormat = colorFormat; + this.sharedContext = sharedContext; + this.frameInfos = new LinkedBlockingDeque<>(); + } + + @Override + public VideoCodecStatus initDecode(Settings settings, Callback callback) { + this.decoderThreadChecker = new ThreadChecker(); + + this.callback = callback; + if (sharedContext != null) { + surfaceTextureHelper = createSurfaceTextureHelper(); + 
surface = new Surface(surfaceTextureHelper.getSurfaceTexture()); + surfaceTextureHelper.startListening(this); + } + return initDecodeInternal(settings.width, settings.height); + } + + // Internal variant is used when restarting the codec due to reconfiguration. + private VideoCodecStatus initDecodeInternal(int width, int height) { + decoderThreadChecker.checkIsOnValidThread(); + Logging.d(TAG, + "initDecodeInternal name: " + codecName + " type: " + codecType + " width: " + width + + " height: " + height + " color format: " + colorFormat); + if (outputThread != null) { + Logging.e(TAG, "initDecodeInternal called while the codec is already running"); + return VideoCodecStatus.FALLBACK_SOFTWARE; + } + + // Note: it is not necessary to initialize dimensions under the lock, since the output thread + // is not running. + this.width = width; + this.height = height; + + stride = width; + sliceHeight = height; + hasDecodedFirstFrame = false; + keyFrameRequired = true; + + try { + codec = mediaCodecWrapperFactory.createByCodecName(codecName); + } catch (IOException | IllegalArgumentException | IllegalStateException e) { + Logging.e(TAG, "Cannot create media decoder " + codecName); + return VideoCodecStatus.FALLBACK_SOFTWARE; + } + try { + MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height); + if (sharedContext == null) { + format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat); + } + codec.configure(format, surface, null, 0); + codec.start(); + } catch (IllegalStateException | IllegalArgumentException e) { + Logging.e(TAG, "initDecode failed", e); + release(); + return VideoCodecStatus.FALLBACK_SOFTWARE; + } + running = true; + outputThread = createOutputThread(); + outputThread.start(); + + Logging.d(TAG, "initDecodeInternal done"); + return VideoCodecStatus.OK; + } + + @Override + public VideoCodecStatus decode(EncodedImage frame, DecodeInfo info) { + decoderThreadChecker.checkIsOnValidThread(); + if (codec == null || callback == 
null) { + Logging.d(TAG, "decode uninitialized, codec: " + (codec != null) + ", callback: " + callback); + return VideoCodecStatus.UNINITIALIZED; + } + + if (frame.buffer == null) { + Logging.e(TAG, "decode() - no input data"); + return VideoCodecStatus.ERR_PARAMETER; + } + + int size = frame.buffer.remaining(); + if (size == 0) { + Logging.e(TAG, "decode() - input buffer empty"); + return VideoCodecStatus.ERR_PARAMETER; + } + + // Load dimensions from shared memory under the dimension lock. + final int width; + final int height; + synchronized (dimensionLock) { + width = this.width; + height = this.height; + } + + // Check if the resolution changed and reset the codec if necessary. + if (frame.encodedWidth * frame.encodedHeight > 0 + && (frame.encodedWidth != width || frame.encodedHeight != height)) { + VideoCodecStatus status = reinitDecode(frame.encodedWidth, frame.encodedHeight); + if (status != VideoCodecStatus.OK) { + return status; + } + } + + if (keyFrameRequired) { + // Need to process a key frame first. + if (frame.frameType != EncodedImage.FrameType.VideoFrameKey) { + Logging.e(TAG, "decode() - key frame required first"); + return VideoCodecStatus.NO_OUTPUT; + } + } + + int index; + try { + index = codec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT_US); + } catch (IllegalStateException e) { + Logging.e(TAG, "dequeueInputBuffer failed", e); + return VideoCodecStatus.ERROR; + } + if (index < 0) { + // Decoder is falling behind. No input buffers available. + // The decoder can't simply drop frames; it might lose a key frame. 
+ Logging.e(TAG, "decode() - no HW buffers available; decoder falling behind"); + return VideoCodecStatus.ERROR; + } + + ByteBuffer buffer; + try { + buffer = codec.getInputBuffer(index); + } catch (IllegalStateException e) { + Logging.e(TAG, "getInputBuffer with index=" + index + " failed", e); + return VideoCodecStatus.ERROR; + } + + if (buffer.capacity() < size) { + Logging.e(TAG, "decode() - HW buffer too small"); + return VideoCodecStatus.ERROR; + } + buffer.put(frame.buffer); + + frameInfos.offer(new FrameInfo(SystemClock.elapsedRealtime(), frame.rotation)); + try { + codec.queueInputBuffer(index, 0 /* offset */, size, + TimeUnit.NANOSECONDS.toMicros(frame.captureTimeNs), 0 /* flags */); + } catch (IllegalStateException e) { + Logging.e(TAG, "queueInputBuffer failed", e); + frameInfos.pollLast(); + return VideoCodecStatus.ERROR; + } + if (keyFrameRequired) { + keyFrameRequired = false; + } + return VideoCodecStatus.OK; + } + + @Override + public String getImplementationName() { + return codecName; + } + + @Override + public VideoCodecStatus release() { + // TODO(sakal): This is not called on the correct thread but is still called synchronously. + // Re-enable the check once this is called on the correct thread. + // decoderThreadChecker.checkIsOnValidThread(); + Logging.d(TAG, "release"); + VideoCodecStatus status = releaseInternal(); + if (surface != null) { + releaseSurface(); + surface = null; + surfaceTextureHelper.stopListening(); + surfaceTextureHelper.dispose(); + surfaceTextureHelper = null; + } + synchronized (renderedTextureMetadataLock) { + renderedTextureMetadata = null; + } + callback = null; + frameInfos.clear(); + return status; + } + + // Internal variant is used when restarting the codec due to reconfiguration. 
+ private VideoCodecStatus releaseInternal() { + if (!running) { + Logging.d(TAG, "release: Decoder is not running."); + return VideoCodecStatus.OK; + } + try { + // The outputThread actually stops and releases the codec once running is false. + running = false; + if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) { + // Log an exception to capture the stack trace and turn it into a TIMEOUT error. + Logging.e(TAG, "Media decoder release timeout", new RuntimeException()); + return VideoCodecStatus.TIMEOUT; + } + if (shutdownException != null) { + // Log the exception and turn it into an error. Wrap the exception in a new exception to + // capture both the output thread's stack trace and this thread's stack trace. + Logging.e(TAG, "Media decoder release error", new RuntimeException(shutdownException)); + shutdownException = null; + return VideoCodecStatus.ERROR; + } + } finally { + codec = null; + outputThread = null; + } + return VideoCodecStatus.OK; + } + + private VideoCodecStatus reinitDecode(int newWidth, int newHeight) { + decoderThreadChecker.checkIsOnValidThread(); + VideoCodecStatus status = releaseInternal(); + if (status != VideoCodecStatus.OK) { + return status; + } + return initDecodeInternal(newWidth, newHeight); + } + + private Thread createOutputThread() { + return new Thread("AndroidVideoDecoder.outputThread") { + @Override + public void run() { + outputThreadChecker = new ThreadChecker(); + while (running) { + deliverDecodedFrame(); + } + releaseCodecOnOutputThread(); + } + }; + } + + // Visible for testing. + protected void deliverDecodedFrame() { + outputThreadChecker.checkIsOnValidThread(); + try { + MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); + // Block until an output buffer is available (up to 100 milliseconds). If the timeout is + // exceeded, deliverDecodedFrame() will be called again on the next iteration of the output + // thread's loop. 
Blocking here prevents the output thread from busy-waiting while the codec + // is idle. + int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US); + if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + reformat(codec.getOutputFormat()); + return; + } + + if (index < 0) { + Logging.v(TAG, "dequeueOutputBuffer returned " + index); + return; + } + + FrameInfo frameInfo = frameInfos.poll(); + Integer decodeTimeMs = null; + int rotation = 0; + if (frameInfo != null) { + decodeTimeMs = (int) (SystemClock.elapsedRealtime() - frameInfo.decodeStartTimeMs); + rotation = frameInfo.rotation; + } + + hasDecodedFirstFrame = true; + + if (surfaceTextureHelper != null) { + deliverTextureFrame(index, info, rotation, decodeTimeMs); + } else { + deliverByteFrame(index, info, rotation, decodeTimeMs); + } + + } catch (IllegalStateException e) { + Logging.e(TAG, "deliverDecodedFrame failed", e); + } + } + + private void deliverTextureFrame(final int index, final MediaCodec.BufferInfo info, + final int rotation, final Integer decodeTimeMs) { + // Load dimensions from shared memory under the dimension lock. + final int width; + final int height; + synchronized (dimensionLock) { + width = this.width; + height = this.height; + } + + synchronized (renderedTextureMetadataLock) { + if (renderedTextureMetadata != null) { + codec.releaseOutputBuffer(index, false); + return; // We are still waiting for texture for the previous frame, drop this one. 
+ } + surfaceTextureHelper.setTextureSize(width, height); + surfaceTextureHelper.setFrameRotation(rotation); + renderedTextureMetadata = new DecodedTextureMetadata(info.presentationTimeUs, decodeTimeMs); + codec.releaseOutputBuffer(index, /* render= */ true); + } + } + + @Override + public void onFrame(VideoFrame frame) { + final VideoFrame newFrame; + final Integer decodeTimeMs; + final long timestampNs; + synchronized (renderedTextureMetadataLock) { + if (renderedTextureMetadata == null) { + throw new IllegalStateException( + "Rendered texture metadata was null in onTextureFrameAvailable."); + } + timestampNs = renderedTextureMetadata.presentationTimestampUs * 1000; + decodeTimeMs = renderedTextureMetadata.decodeTimeMs; + renderedTextureMetadata = null; + } + // Change timestamp of frame. + final VideoFrame frameWithModifiedTimeStamp = + new VideoFrame(frame.getBuffer(), frame.getRotation(), timestampNs); + callback.onDecodedFrame(frameWithModifiedTimeStamp, decodeTimeMs, null /* qp */); + } + + private void deliverByteFrame( + int index, MediaCodec.BufferInfo info, int rotation, Integer decodeTimeMs) { + // Load dimensions from shared memory under the dimension lock. + int width; + int height; + int stride; + int sliceHeight; + synchronized (dimensionLock) { + width = this.width; + height = this.height; + stride = this.stride; + sliceHeight = this.sliceHeight; + } + + // Output must be at least width * height bytes for Y channel, plus (width / 2) * (height / 2) + // bytes for each of the U and V channels. + if (info.size < width * height * 3 / 2) { + Logging.e(TAG, "Insufficient output buffer size: " + info.size); + return; + } + + if (info.size < stride * height * 3 / 2 && sliceHeight == height && stride > width) { + // Some codecs (Exynos) report an incorrect stride. Correct it here. + // Expected size == stride * height * 3 / 2. A bit of algebra gives the correct stride as + // 2 * size / (3 * height). 
+ stride = info.size * 2 / (height * 3); + } + + ByteBuffer buffer = codec.getOutputBuffer(index); + buffer.position(info.offset); + buffer.limit(info.offset + info.size); + buffer = buffer.slice(); + + final VideoFrame.Buffer frameBuffer; + if (colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) { + frameBuffer = copyI420Buffer(buffer, stride, sliceHeight, width, height); + } else { + // All other supported color formats are NV12. + frameBuffer = copyNV12ToI420Buffer(buffer, stride, sliceHeight, width, height); + } + codec.releaseOutputBuffer(index, /* render= */ false); + + long presentationTimeNs = info.presentationTimeUs * 1000; + VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs); + + // Note that qp is parsed on the C++ side. + callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */); + frame.release(); + } + + private VideoFrame.Buffer copyNV12ToI420Buffer( + ByteBuffer buffer, int stride, int sliceHeight, int width, int height) { + // toI420 copies the buffer. + return new NV12Buffer(width, height, stride, sliceHeight, buffer, null /* releaseCallback */) + .toI420(); + } + + private VideoFrame.Buffer copyI420Buffer( + ByteBuffer buffer, int stride, int sliceHeight, int width, int height) { + if (stride % 2 != 0) { + throw new AssertionError("Stride is not divisible by two: " + stride); + } + + // Note that the case with odd `sliceHeight` is handled in a special way. + // The chroma height contained in the payload is rounded down instead of + // up, making it one row less than what we expect in WebRTC. Therefore, we + // have to duplicate the last chroma rows for this case. Also, the offset + // between the Y plane and the U plane is unintuitive for this case. See + // http://bugs.webrtc.org/6651 for more info. + final int chromaWidth = (width + 1) / 2; + final int chromaHeight = (sliceHeight % 2 == 0) ? 
(height + 1) / 2 : height / 2; + + final int uvStride = stride / 2; + + final int yPos = 0; + final int yEnd = yPos + stride * height; + final int uPos = yPos + stride * sliceHeight; + final int uEnd = uPos + uvStride * chromaHeight; + final int vPos = uPos + uvStride * sliceHeight / 2; + final int vEnd = vPos + uvStride * chromaHeight; + + VideoFrame.I420Buffer frameBuffer = allocateI420Buffer(width, height); + + buffer.limit(yEnd); + buffer.position(yPos); + copyPlane( + buffer.slice(), stride, frameBuffer.getDataY(), frameBuffer.getStrideY(), width, height); + + buffer.limit(uEnd); + buffer.position(uPos); + copyPlane(buffer.slice(), uvStride, frameBuffer.getDataU(), frameBuffer.getStrideU(), + chromaWidth, chromaHeight); + if (sliceHeight % 2 == 1) { + buffer.position(uPos + uvStride * (chromaHeight - 1)); // Seek to beginning of last full row. + + ByteBuffer dataU = frameBuffer.getDataU(); + dataU.position(frameBuffer.getStrideU() * chromaHeight); // Seek to beginning of last row. + dataU.put(buffer); // Copy the last row. + } + + buffer.limit(vEnd); + buffer.position(vPos); + copyPlane(buffer.slice(), uvStride, frameBuffer.getDataV(), frameBuffer.getStrideV(), + chromaWidth, chromaHeight); + if (sliceHeight % 2 == 1) { + buffer.position(vPos + uvStride * (chromaHeight - 1)); // Seek to beginning of last full row. + + ByteBuffer dataV = frameBuffer.getDataV(); + dataV.position(frameBuffer.getStrideV() * chromaHeight); // Seek to beginning of last row. + dataV.put(buffer); // Copy the last row. 
+ } + + return frameBuffer; + } + + private void reformat(MediaFormat format) { + outputThreadChecker.checkIsOnValidThread(); + Logging.d(TAG, "Decoder format changed: " + format); + final int newWidth; + final int newHeight; + if (format.containsKey(MediaFormat.KEY_CROP_LEFT) + && format.containsKey(MediaFormat.KEY_CROP_RIGHT) + && format.containsKey(MediaFormat.KEY_CROP_BOTTOM) + && format.containsKey(MediaFormat.KEY_CROP_TOP)) { + newWidth = 1 + format.getInteger(MediaFormat.KEY_CROP_RIGHT) + - format.getInteger(MediaFormat.KEY_CROP_LEFT); + newHeight = 1 + format.getInteger(MediaFormat.KEY_CROP_BOTTOM) + - format.getInteger(MediaFormat.KEY_CROP_TOP); + } else { + newWidth = format.getInteger(MediaFormat.KEY_WIDTH); + newHeight = format.getInteger(MediaFormat.KEY_HEIGHT); + } + // Compare to existing width, height, and save values under the dimension lock. + synchronized (dimensionLock) { + if (newWidth != width || newHeight != height) { + if (hasDecodedFirstFrame) { + stopOnOutputThread(new RuntimeException("Unexpected size change. " + + "Configured " + width + "*" + height + ". " + + "New " + newWidth + "*" + newHeight)); + return; + } else if (newWidth <= 0 || newHeight <= 0) { + Logging.w(TAG, + "Unexpected format dimensions. Configured " + width + "*" + height + ". " + + "New " + newWidth + "*" + newHeight + ". Skip it"); + return; + } + width = newWidth; + height = newHeight; + } + } + + // Note: texture mode ignores colorFormat. Hence, if the texture helper is non-null, skip + // color format updates. + if (surfaceTextureHelper == null && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) { + colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT); + Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat)); + if (!isSupportedColorFormat(colorFormat)) { + stopOnOutputThread(new IllegalStateException("Unsupported color format: " + colorFormat)); + return; + } + } + + // Save stride and sliceHeight under the dimension lock. 
+ synchronized (dimensionLock) { + if (format.containsKey(MediaFormat.KEY_STRIDE)) { + stride = format.getInteger(MediaFormat.KEY_STRIDE); + } + if (format.containsKey(MediaFormat.KEY_SLICE_HEIGHT)) { + sliceHeight = format.getInteger(MediaFormat.KEY_SLICE_HEIGHT); + } + Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight); + stride = Math.max(width, stride); + sliceHeight = Math.max(height, sliceHeight); + } + } + + private void releaseCodecOnOutputThread() { + outputThreadChecker.checkIsOnValidThread(); + Logging.d(TAG, "Releasing MediaCodec on output thread"); + try { + codec.stop(); + } catch (Exception e) { + Logging.e(TAG, "Media decoder stop failed", e); + } + try { + codec.release(); + } catch (Exception e) { + Logging.e(TAG, "Media decoder release failed", e); + // Propagate exceptions caught during release back to the main thread. + shutdownException = e; + } + Logging.d(TAG, "Release on output thread done"); + } + + private void stopOnOutputThread(Exception e) { + outputThreadChecker.checkIsOnValidThread(); + running = false; + shutdownException = e; + } + + private boolean isSupportedColorFormat(int colorFormat) { + for (int supported : MediaCodecUtils.DECODER_COLOR_FORMATS) { + if (supported == colorFormat) { + return true; + } + } + return false; + } + + // Visible for testing. + protected SurfaceTextureHelper createSurfaceTextureHelper() { + return SurfaceTextureHelper.create("decoder-texture-thread", sharedContext); + } + + // Visible for testing. + // TODO(sakal): Remove once Robolectric commit fa991a0 has been rolled to WebRTC. + protected void releaseSurface() { + surface.release(); + } + + // Visible for testing. + protected VideoFrame.I420Buffer allocateI420Buffer(int width, int height) { + return JavaI420Buffer.allocate(width, height); + } + + // Visible for testing. 
+ protected void copyPlane( + ByteBuffer src, int srcStride, ByteBuffer dst, int dstStride, int width, int height) { + YuvHelper.copyPlane(src, srcStride, dst, dstStride, width, height); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/ApplicationContextProvider.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/ApplicationContextProvider.java new file mode 100644 index 00000000..6400a047 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/ApplicationContextProvider.java @@ -0,0 +1,20 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.content.Context; + +public class ApplicationContextProvider { + @CalledByNative + public static Context getApplicationContext() { + return ContextUtils.getApplicationContext(); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioDecoderFactoryFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioDecoderFactoryFactory.java new file mode 100644 index 00000000..dd3e2628 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioDecoderFactoryFactory.java @@ -0,0 +1,21 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +/** + * Implementations of this interface can create a native {@code webrtc::AudioDecoderFactory}. + */ +public interface AudioDecoderFactoryFactory { + /** + * Returns a pointer to a {@code webrtc::AudioDecoderFactory}. The caller takes ownership. + */ + long createNativeAudioDecoderFactory(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioEncoderFactoryFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioEncoderFactoryFactory.java new file mode 100644 index 00000000..814b71ab --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioEncoderFactoryFactory.java @@ -0,0 +1,21 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * Implementations of this interface can create a native {@code webrtc::AudioEncoderFactory}. + */ +public interface AudioEncoderFactoryFactory { + /** + * Returns a pointer to a {@code webrtc::AudioEncoderFactory}. The caller takes ownership. + */ + long createNativeAudioEncoderFactory(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioProcessingFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioProcessingFactory.java new file mode 100644 index 00000000..bd8fdb89 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioProcessingFactory.java @@ -0,0 +1,20 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** Factory for creating webrtc::AudioProcessing instances. */ +public interface AudioProcessingFactory { + /** + * Dynamically allocates a webrtc::AudioProcessing instance and returns a pointer to it. + * The caller takes ownership of the object. + */ + public long createNative(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioSource.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioSource.java new file mode 100644 index 00000000..f8104e59 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioSource.java @@ -0,0 +1,26 @@ +/* + * Copyright 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * Java wrapper for a C++ AudioSourceInterface. Used as the source for one or + * more {@code AudioTrack} objects. + */ +public class AudioSource extends MediaSource { + public AudioSource(long nativeSource) { + super(nativeSource); + } + + /** Returns a pointer to webrtc::AudioSourceInterface. */ + long getNativeAudioSource() { + return getNativeMediaSource(); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioTrack.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioTrack.java new file mode 100644 index 00000000..ca745db6 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/AudioTrack.java @@ -0,0 +1,32 @@ +/* + * Copyright 2013 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** Java wrapper for a C++ AudioTrackInterface */ +public class AudioTrack extends MediaStreamTrack { + public AudioTrack(long nativeTrack) { + super(nativeTrack); + } + + /** Sets the volume for the underlying MediaSource. Volume is a gain value in the range + * 0 to 10. + */ + public void setVolume(double volume) { + nativeSetVolume(getNativeAudioTrack(), volume); + } + + /** Returns a pointer to webrtc::AudioTrackInterface. */ + long getNativeAudioTrack() { + return getNativeMediaStreamTrack(); + } + + private static native void nativeSetVolume(long track, double volume); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/BaseBitrateAdjuster.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/BaseBitrateAdjuster.java new file mode 100644 index 00000000..3b5f5d29 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/BaseBitrateAdjuster.java @@ -0,0 +1,38 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** BitrateAdjuster that tracks bitrate and framerate but does not adjust them. 
*/ +class BaseBitrateAdjuster implements BitrateAdjuster { + protected int targetBitrateBps; + protected double targetFramerateFps; + + @Override + public void setTargets(int targetBitrateBps, double targetFramerateFps) { + this.targetBitrateBps = targetBitrateBps; + this.targetFramerateFps = targetFramerateFps; + } + + @Override + public void reportEncodedFrame(int size) { + // No op. + } + + @Override + public int getAdjustedBitrateBps() { + return targetBitrateBps; + } + + @Override + public double getAdjustedFramerateFps() { + return targetFramerateFps; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/BitrateAdjuster.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/BitrateAdjuster.java new file mode 100644 index 00000000..bfa08bad --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/BitrateAdjuster.java @@ -0,0 +1,31 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** Object that adjusts the bitrate of a hardware codec. */ +interface BitrateAdjuster { + /** + * Sets the target bitrate in bits per second and framerate in frames per second. + */ + void setTargets(int targetBitrateBps, double targetFramerateFps); + + /** + * Should be used to report the size of an encoded frame to the bitrate adjuster. Use + * getAdjustedBitrateBps to get the updated bitrate after calling this method. + */ + void reportEncodedFrame(int size); + + /** Gets the current bitrate. */ + int getAdjustedBitrateBps(); + + /** Gets the current framerate. 
*/ + double getAdjustedFramerateFps(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/BuiltinAudioDecoderFactoryFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/BuiltinAudioDecoderFactoryFactory.java new file mode 100644 index 00000000..5ebc19f2 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/BuiltinAudioDecoderFactoryFactory.java @@ -0,0 +1,23 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * Creates a native {@code webrtc::AudioDecoderFactory} with the builtin audio decoders. + */ +public class BuiltinAudioDecoderFactoryFactory implements AudioDecoderFactoryFactory { + @Override + public long createNativeAudioDecoderFactory() { + return nativeCreateBuiltinAudioDecoderFactory(); + } + + private static native long nativeCreateBuiltinAudioDecoderFactory(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/BuiltinAudioEncoderFactoryFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/BuiltinAudioEncoderFactoryFactory.java new file mode 100644 index 00000000..e884d4c3 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/BuiltinAudioEncoderFactoryFactory.java @@ -0,0 +1,23 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * This class creates a native {@code webrtc::AudioEncoderFactory} with the builtin audio encoders. + */ +public class BuiltinAudioEncoderFactoryFactory implements AudioEncoderFactoryFactory { + @Override + public long createNativeAudioEncoderFactory() { + return nativeCreateBuiltinAudioEncoderFactory(); + } + + private static native long nativeCreateBuiltinAudioEncoderFactory(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/CallSessionFileRotatingLogSink.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/CallSessionFileRotatingLogSink.java new file mode 100644 index 00000000..f4edb588 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/CallSessionFileRotatingLogSink.java @@ -0,0 +1,41 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +public class CallSessionFileRotatingLogSink { + private long nativeSink; + + public static byte[] getLogData(String dirPath) { + if (dirPath == null) { + throw new IllegalArgumentException("dirPath may not be null."); + } + return nativeGetLogData(dirPath); + } + + public CallSessionFileRotatingLogSink( + String dirPath, int maxFileSize, Logging.Severity severity) { + if (dirPath == null) { + throw new IllegalArgumentException("dirPath may not be null."); + } + nativeSink = nativeAddSink(dirPath, maxFileSize, severity.ordinal()); + } + + public void dispose() { + if (nativeSink != 0) { + nativeDeleteSink(nativeSink); + nativeSink = 0; + } + } + + private static native long nativeAddSink(String dirPath, int maxFileSize, int severity); + private static native void nativeDeleteSink(long sink); + private static native byte[] nativeGetLogData(String dirPath); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/CalledByNative.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/CalledByNative.java new file mode 100644 index 00000000..9b410cea --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/CalledByNative.java @@ -0,0 +1,29 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * @CalledByNative is used by the JNI generator to create the necessary JNI + * bindings and expose this method to native code. 
+ */ +@Target({ElementType.CONSTRUCTOR, ElementType.METHOD}) +@Retention(RetentionPolicy.CLASS) +public @interface CalledByNative { + /* + * If present, tells which inner class the method belongs to. + */ + public String value() default ""; +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/CalledByNativeUnchecked.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/CalledByNativeUnchecked.java new file mode 100644 index 00000000..8a00a7fa --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/CalledByNativeUnchecked.java @@ -0,0 +1,33 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * @CalledByNativeUnchecked is used to generate JNI bindings that do not check for exceptions. + * It only makes sense to use this annotation on methods that declare a throws... spec. + * However, note that the exception received native side maybe an 'unchecked' (RuntimeExpception) + * such as NullPointerException, so the native code should differentiate these cases. + * Usage of this should be very rare; where possible handle exceptions in the Java side and use a + * return value to indicate success / failure. + */ +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.CLASS) +public @interface CalledByNativeUnchecked { + /* + * If present, tells which inner class the method belongs to. 
+ */ + public String value() default ""; +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera1Capturer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera1Capturer.java new file mode 100644 index 00000000..de172aa1 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera1Capturer.java @@ -0,0 +1,33 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.content.Context; + +public class Camera1Capturer extends CameraCapturer { + private final boolean captureToTexture; + + public Camera1Capturer( + String cameraName, CameraEventsHandler eventsHandler, boolean captureToTexture) { + super(cameraName, eventsHandler, new Camera1Enumerator(captureToTexture)); + + this.captureToTexture = captureToTexture; + } + + @Override + protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback, + CameraSession.Events events, Context applicationContext, + SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height, + int framerate) { + Camera1Session.create(createSessionCallback, events, captureToTexture, applicationContext, + surfaceTextureHelper, cameraName, width, height, framerate); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera1Enumerator.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera1Enumerator.java new file mode 100644 index 00000000..fb1a21f3 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera1Enumerator.java @@ -0,0 +1,185 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.os.SystemClock; +import androidx.annotation.Nullable; +import java.util.ArrayList; +import java.util.List; +import org.webrtc.CameraEnumerationAndroid.CaptureFormat; + +@SuppressWarnings("deprecation") +public class Camera1Enumerator implements CameraEnumerator { + private final static String TAG = "Camera1Enumerator"; + // Each entry contains the supported formats for corresponding camera index. The formats for all + // cameras are enumerated on the first call to getSupportedFormats(), and cached for future + // reference. + private static List> cachedSupportedFormats; + + private final boolean captureToTexture; + + public Camera1Enumerator() { + this(true /* captureToTexture */); + } + + public Camera1Enumerator(boolean captureToTexture) { + this.captureToTexture = captureToTexture; + } + + // Returns device names that can be used to create a new VideoCapturerAndroid. + @Override + public String[] getDeviceNames() { + ArrayList namesList = new ArrayList<>(); + for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) { + String name = getDeviceName(i); + if (name != null) { + namesList.add(name); + Logging.d(TAG, "Index: " + i + ". " + name); + } else { + Logging.e(TAG, "Index: " + i + ". 
Failed to query camera name."); + } + } + String[] namesArray = new String[namesList.size()]; + return namesList.toArray(namesArray); + } + + @Override + public boolean isFrontFacing(String deviceName) { + android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName)); + return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT; + } + + @Override + public boolean isBackFacing(String deviceName) { + android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName)); + return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK; + } + + @Override + public List getSupportedFormats(String deviceName) { + return getSupportedFormats(getCameraIndex(deviceName)); + } + + @Override + public CameraVideoCapturer createCapturer( + String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) { + return new Camera1Capturer(deviceName, eventsHandler, captureToTexture); + } + + private static @Nullable android.hardware.Camera.CameraInfo getCameraInfo(int index) { + android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo(); + try { + android.hardware.Camera.getCameraInfo(index, info); + } catch (Exception e) { + Logging.e(TAG, "getCameraInfo failed on index " + index, e); + return null; + } + return info; + } + + static synchronized List getSupportedFormats(int cameraId) { + if (cachedSupportedFormats == null) { + cachedSupportedFormats = new ArrayList>(); + for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) { + cachedSupportedFormats.add(enumerateFormats(i)); + } + } + return cachedSupportedFormats.get(cameraId); + } + + private static List enumerateFormats(int cameraId) { + Logging.d(TAG, "Get supported formats for camera index " + cameraId + "."); + final long startTimeMs = SystemClock.elapsedRealtime(); + final android.hardware.Camera.Parameters parameters; + android.hardware.Camera camera = null; + try { + 
Logging.d(TAG, "Opening camera with index " + cameraId); + camera = android.hardware.Camera.open(cameraId); + parameters = camera.getParameters(); + } catch (RuntimeException e) { + Logging.e(TAG, "Open camera failed on camera index " + cameraId, e); + return new ArrayList(); + } finally { + if (camera != null) { + camera.release(); + } + } + + final List formatList = new ArrayList(); + try { + int minFps = 0; + int maxFps = 0; + final List listFpsRange = parameters.getSupportedPreviewFpsRange(); + if (listFpsRange != null) { + // getSupportedPreviewFpsRange() returns a sorted list. Take the fps range + // corresponding to the highest fps. + final int[] range = listFpsRange.get(listFpsRange.size() - 1); + minFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX]; + maxFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]; + } + for (android.hardware.Camera.Size size : parameters.getSupportedPreviewSizes()) { + formatList.add(new CaptureFormat(size.width, size.height, minFps, maxFps)); + } + } catch (Exception e) { + Logging.e(TAG, "getSupportedFormats() failed on camera index " + cameraId, e); + } + + final long endTimeMs = SystemClock.elapsedRealtime(); + Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done." + + " Time spent: " + (endTimeMs - startTimeMs) + " ms."); + return formatList; + } + + // Convert from android.hardware.Camera.Size to Size. + static List convertSizes(List cameraSizes) { + final List sizes = new ArrayList(); + for (android.hardware.Camera.Size size : cameraSizes) { + sizes.add(new Size(size.width, size.height)); + } + return sizes; + } + + // Convert from int[2] to CaptureFormat.FramerateRange. 
+ static List convertFramerates(List arrayRanges) { + final List ranges = new ArrayList(); + for (int[] range : arrayRanges) { + ranges.add(new CaptureFormat.FramerateRange( + range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX], + range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX])); + } + return ranges; + } + + // Returns the camera index for camera with name `deviceName`, or throws IllegalArgumentException + // if no such camera can be found. + static int getCameraIndex(String deviceName) { + Logging.d(TAG, "getCameraIndex: " + deviceName); + for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) { + if (deviceName.equals(getDeviceName(i))) { + return i; + } + } + throw new IllegalArgumentException("No such camera: " + deviceName); + } + + // Returns the name of the camera with camera index. Returns null if the + // camera can not be used. + static @Nullable String getDeviceName(int index) { + android.hardware.Camera.CameraInfo info = getCameraInfo(index); + if (info == null) { + return null; + } + + String facing = + (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back"; + return "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera1Session.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera1Session.java new file mode 100644 index 00000000..a54f7201 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera1Session.java @@ -0,0 +1,340 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import android.content.Context; +import android.hardware.Camera; +import android.os.Handler; +import android.os.SystemClock; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.webrtc.CameraEnumerationAndroid.CaptureFormat; + +@SuppressWarnings("deprecation") +class Camera1Session implements CameraSession { + private static final String TAG = "Camera1Session"; + private static final int NUMBER_OF_CAPTURE_BUFFERS = 3; + + private static final Histogram camera1StartTimeMsHistogram = + Histogram.createCounts("WebRTC.Android.Camera1.StartTimeMs", 1, 10000, 50); + private static final Histogram camera1StopTimeMsHistogram = + Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50); + private static final Histogram camera1ResolutionHistogram = Histogram.createEnumeration( + "WebRTC.Android.Camera1.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size()); + + private static enum SessionState { RUNNING, STOPPED } + + private final Handler cameraThreadHandler; + private final Events events; + private final boolean captureToTexture; + private final Context applicationContext; + private final SurfaceTextureHelper surfaceTextureHelper; + private final int cameraId; + private final Camera camera; + private final Camera.CameraInfo info; + private final CaptureFormat captureFormat; + // Used only for stats. Only used on the camera thread. + private final long constructionTimeNs; // Construction time of this class. 
+ + private SessionState state; + private boolean firstFrameReported; + + // TODO(titovartem) make correct fix during webrtc:9175 + @SuppressWarnings("ByteBufferBackingArray") + public static void create(final CreateSessionCallback callback, final Events events, + final boolean captureToTexture, final Context applicationContext, + final SurfaceTextureHelper surfaceTextureHelper, final String cameraName, + final int width, final int height, final int framerate) { + final long constructionTimeNs = System.nanoTime(); + Logging.d(TAG, "Open camera " + cameraName); + events.onCameraOpening(); + + final int cameraId; + try { + cameraId = Camera1Enumerator.getCameraIndex(cameraName); + } catch (IllegalArgumentException e) { + callback.onFailure(FailureType.ERROR, e.getMessage()); + return; + } + + final Camera camera; + try { + camera = Camera.open(cameraId); + } catch (RuntimeException e) { + callback.onFailure(FailureType.ERROR, e.getMessage()); + return; + } + + if (camera == null) { + callback.onFailure( + FailureType.ERROR, "Camera.open returned null for camera id = " + cameraId); + return; + } + + try { + camera.setPreviewTexture(surfaceTextureHelper.getSurfaceTexture()); + } catch (IOException | RuntimeException e) { + camera.release(); + callback.onFailure(FailureType.ERROR, e.getMessage()); + return; + } + + final Camera.CameraInfo info = new Camera.CameraInfo(); + Camera.getCameraInfo(cameraId, info); + + final CaptureFormat captureFormat; + try { + final Camera.Parameters parameters = camera.getParameters(); + captureFormat = findClosestCaptureFormat(parameters, width, height, framerate); + final Size pictureSize = findClosestPictureSize(parameters, width, height); + updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture); + } catch (RuntimeException e) { + camera.release(); + callback.onFailure(FailureType.ERROR, e.getMessage()); + return; + } + + if (!captureToTexture) { + final int frameSize = captureFormat.frameSize(); + for 
(int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) { + final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize); + camera.addCallbackBuffer(buffer.array()); + } + } + + // Calculate orientation manually and send it as CVO instead. + try { + camera.setDisplayOrientation(0 /* degrees */); + } catch (RuntimeException e) { + camera.release(); + callback.onFailure(FailureType.ERROR, e.getMessage()); + return; + } + + callback.onDone(new Camera1Session(events, captureToTexture, applicationContext, + surfaceTextureHelper, cameraId, camera, info, captureFormat, constructionTimeNs)); + } + + private static void updateCameraParameters(Camera camera, Camera.Parameters parameters, + CaptureFormat captureFormat, Size pictureSize, boolean captureToTexture) { + final List focusModes = parameters.getSupportedFocusModes(); + + parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max); + parameters.setPreviewSize(captureFormat.width, captureFormat.height); + parameters.setPictureSize(pictureSize.width, pictureSize.height); + if (!captureToTexture) { + parameters.setPreviewFormat(captureFormat.imageFormat); + } + + if (parameters.isVideoStabilizationSupported()) { + parameters.setVideoStabilization(true); + } + if (focusModes != null && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) { + parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO); + } + camera.setParameters(parameters); + } + + private static CaptureFormat findClosestCaptureFormat( + Camera.Parameters parameters, int width, int height, int framerate) { + // Find closest supported format for `width` x `height` @ `framerate`. 
+ final List supportedFramerates = + Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange()); + Logging.d(TAG, "Available fps ranges: " + supportedFramerates); + + final CaptureFormat.FramerateRange fpsRange = + CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate); + + final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize( + Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height); + CameraEnumerationAndroid.reportCameraResolution(camera1ResolutionHistogram, previewSize); + + return new CaptureFormat(previewSize.width, previewSize.height, fpsRange); + } + + private static Size findClosestPictureSize(Camera.Parameters parameters, int width, int height) { + return CameraEnumerationAndroid.getClosestSupportedSize( + Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height); + } + + private Camera1Session(Events events, boolean captureToTexture, Context applicationContext, + SurfaceTextureHelper surfaceTextureHelper, int cameraId, Camera camera, + Camera.CameraInfo info, CaptureFormat captureFormat, long constructionTimeNs) { + Logging.d(TAG, "Create new camera1 session on camera " + cameraId); + + this.cameraThreadHandler = new Handler(); + this.events = events; + this.captureToTexture = captureToTexture; + this.applicationContext = applicationContext; + this.surfaceTextureHelper = surfaceTextureHelper; + this.cameraId = cameraId; + this.camera = camera; + this.info = info; + this.captureFormat = captureFormat; + this.constructionTimeNs = constructionTimeNs; + + surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height); + + startCapturing(); + } + + @Override + public void stop() { + Logging.d(TAG, "Stop camera1 session on camera " + cameraId); + checkIsOnCameraThread(); + if (state != SessionState.STOPPED) { + final long stopStartTime = System.nanoTime(); + stopInternal(); + final int stopTimeMs = (int) 
TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime); + camera1StopTimeMsHistogram.addSample(stopTimeMs); + } + } + + private void startCapturing() { + Logging.d(TAG, "Start capturing"); + checkIsOnCameraThread(); + + state = SessionState.RUNNING; + + camera.setErrorCallback(new Camera.ErrorCallback() { + @Override + public void onError(int error, Camera camera) { + String errorMessage; + if (error == Camera.CAMERA_ERROR_SERVER_DIED) { + errorMessage = "Camera server died!"; + } else { + errorMessage = "Camera error: " + error; + } + Logging.e(TAG, errorMessage); + stopInternal(); + if (error == Camera.CAMERA_ERROR_EVICTED) { + events.onCameraDisconnected(Camera1Session.this); + } else { + events.onCameraError(Camera1Session.this, errorMessage); + } + } + }); + + if (captureToTexture) { + listenForTextureFrames(); + } else { + listenForBytebufferFrames(); + } + try { + camera.startPreview(); + } catch (RuntimeException e) { + stopInternal(); + events.onCameraError(this, e.getMessage()); + } + } + + private void stopInternal() { + Logging.d(TAG, "Stop internal"); + checkIsOnCameraThread(); + if (state == SessionState.STOPPED) { + Logging.d(TAG, "Camera is already stopped"); + return; + } + + state = SessionState.STOPPED; + surfaceTextureHelper.stopListening(); + // Note: stopPreview or other driver code might deadlock. Deadlock in + // Camera._stopPreview(Native Method) has been observed on + // Nexus 5 (hammerhead), OS version LMY48I. 
+ camera.stopPreview(); + camera.release(); + events.onCameraClosed(this); + Logging.d(TAG, "Stop done"); + } + + private void listenForTextureFrames() { + surfaceTextureHelper.startListening((VideoFrame frame) -> { + checkIsOnCameraThread(); + + if (state != SessionState.RUNNING) { + Logging.d(TAG, "Texture frame captured but camera is no longer running."); + return; + } + + if (!firstFrameReported) { + final int startTimeMs = + (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs); + camera1StartTimeMsHistogram.addSample(startTimeMs); + firstFrameReported = true; + } + + // Undo the mirror that the OS "helps" us with. + // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int) + final VideoFrame modifiedFrame = + new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix( + (TextureBufferImpl) frame.getBuffer(), + /* mirror= */ info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT, + /* rotation= */ 0), + /* rotation= */ getFrameOrientation(), frame.getTimestampNs()); + events.onFrameCaptured(Camera1Session.this, modifiedFrame); + modifiedFrame.release(); + }); + } + + private void listenForBytebufferFrames() { + camera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() { + @Override + public void onPreviewFrame(final byte[] data, Camera callbackCamera) { + checkIsOnCameraThread(); + + if (callbackCamera != camera) { + Logging.e(TAG, "Callback from a different camera. 
This should never happen."); + return; + } + + if (state != SessionState.RUNNING) { + Logging.d(TAG, "Bytebuffer frame captured but camera is no longer running."); + return; + } + + final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime()); + + if (!firstFrameReported) { + final int startTimeMs = + (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs); + camera1StartTimeMsHistogram.addSample(startTimeMs); + firstFrameReported = true; + } + + VideoFrame.Buffer frameBuffer = new NV21Buffer( + data, captureFormat.width, captureFormat.height, () -> cameraThreadHandler.post(() -> { + if (state == SessionState.RUNNING) { + camera.addCallbackBuffer(data); + } + })); + final VideoFrame frame = new VideoFrame(frameBuffer, getFrameOrientation(), captureTimeNs); + events.onFrameCaptured(Camera1Session.this, frame); + frame.release(); + } + }); + } + + private int getFrameOrientation() { + int rotation = CameraSession.getDeviceOrientation(applicationContext); + if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) { + rotation = 360 - rotation; + } + return (info.orientation + rotation) % 360; + } + + private void checkIsOnCameraThread() { + if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) { + throw new IllegalStateException("Wrong thread"); + } + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera2Capturer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera2Capturer.java new file mode 100644 index 00000000..c4becf48 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera2Capturer.java @@ -0,0 +1,36 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.content.Context; +import android.hardware.camera2.CameraManager; +import androidx.annotation.Nullable; + +public class Camera2Capturer extends CameraCapturer { + private final Context context; + @Nullable private final CameraManager cameraManager; + + public Camera2Capturer(Context context, String cameraName, CameraEventsHandler eventsHandler) { + super(cameraName, eventsHandler, new Camera2Enumerator(context)); + + this.context = context; + cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE); + } + + @Override + protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback, + CameraSession.Events events, Context applicationContext, + SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height, + int framerate) { + Camera2Session.create(createSessionCallback, events, applicationContext, cameraManager, + surfaceTextureHelper, cameraName, width, height, framerate); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera2Enumerator.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera2Enumerator.java new file mode 100644 index 00000000..456d8cd0 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera2Enumerator.java @@ -0,0 +1,239 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import android.content.Context; +import android.graphics.Rect; +import android.graphics.SurfaceTexture; +import android.hardware.camera2.CameraAccessException; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CameraManager; +import android.hardware.camera2.CameraMetadata; +import android.hardware.camera2.params.StreamConfigurationMap; +import android.os.Build; +import android.os.SystemClock; +import android.util.Range; +import androidx.annotation.Nullable; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.webrtc.CameraEnumerationAndroid.CaptureFormat; + +public class Camera2Enumerator implements CameraEnumerator { + private final static String TAG = "Camera2Enumerator"; + private final static double NANO_SECONDS_PER_SECOND = 1.0e9; + + // Each entry contains the supported formats for a given camera index. The formats are enumerated + // lazily in getSupportedFormats(), and cached for future reference. 
+ private static final Map> cachedSupportedFormats = + new HashMap>(); + + final Context context; + @Nullable final CameraManager cameraManager; + + public Camera2Enumerator(Context context) { + this.context = context; + this.cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE); + } + + @Override + public String[] getDeviceNames() { + try { + return cameraManager.getCameraIdList(); + } catch (CameraAccessException e) { + Logging.e(TAG, "Camera access exception", e); + return new String[] {}; + } + } + + @Override + public boolean isFrontFacing(String deviceName) { + CameraCharacteristics characteristics = getCameraCharacteristics(deviceName); + + return characteristics != null + && characteristics.get(CameraCharacteristics.LENS_FACING) + == CameraMetadata.LENS_FACING_FRONT; + } + + @Override + public boolean isBackFacing(String deviceName) { + CameraCharacteristics characteristics = getCameraCharacteristics(deviceName); + + return characteristics != null + && characteristics.get(CameraCharacteristics.LENS_FACING) + == CameraMetadata.LENS_FACING_BACK; + } + + @Nullable + @Override + public List getSupportedFormats(String deviceName) { + return getSupportedFormats(context, deviceName); + } + + @Override + public CameraVideoCapturer createCapturer( + String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) { + return new Camera2Capturer(context, deviceName, eventsHandler); + } + + private @Nullable CameraCharacteristics getCameraCharacteristics(String deviceName) { + try { + return cameraManager.getCameraCharacteristics(deviceName); + } catch (CameraAccessException | RuntimeException e) { + Logging.e(TAG, "Camera access exception", e); + return null; + } + } + + /** + * Checks if API is supported and all cameras have better than legacy support. 
+ */ + public static boolean isSupported(Context context) { + CameraManager cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE); + try { + String[] cameraIds = cameraManager.getCameraIdList(); + for (String id : cameraIds) { + CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(id); + if (characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL) + == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) { + return false; + } + } + } catch (CameraAccessException | RuntimeException e) { + Logging.e(TAG, "Failed to check if camera2 is supported", e); + return false; + } + return true; + } + + static int getFpsUnitFactor(Range[] fpsRanges) { + if (fpsRanges.length == 0) { + return 1000; + } + return fpsRanges[0].getUpper() < 1000 ? 1000 : 1; + } + + static List getSupportedSizes(CameraCharacteristics cameraCharacteristics) { + final StreamConfigurationMap streamMap = + cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); + final int supportLevel = + cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); + + final android.util.Size[] nativeSizes = streamMap.getOutputSizes(SurfaceTexture.class); + final List sizes = convertSizes(nativeSizes); + + // Video may be stretched pre LMR1 on legacy implementations. + // Filter out formats that have different aspect ratio than the sensor array. 
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1 + && supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) { + final Rect activeArraySize = + cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); + final ArrayList filteredSizes = new ArrayList(); + + for (Size size : sizes) { + if (activeArraySize.width() * size.height == activeArraySize.height() * size.width) { + filteredSizes.add(size); + } + } + + return filteredSizes; + } else { + return sizes; + } + } + + @Nullable + static List getSupportedFormats(Context context, String cameraId) { + return getSupportedFormats( + (CameraManager) context.getSystemService(Context.CAMERA_SERVICE), cameraId); + } + + @Nullable + static List getSupportedFormats(CameraManager cameraManager, String cameraId) { + synchronized (cachedSupportedFormats) { + if (cachedSupportedFormats.containsKey(cameraId)) { + return cachedSupportedFormats.get(cameraId); + } + + Logging.d(TAG, "Get supported formats for camera index " + cameraId + "."); + final long startTimeMs = SystemClock.elapsedRealtime(); + + final CameraCharacteristics cameraCharacteristics; + try { + cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId); + } catch (Exception ex) { + Logging.e(TAG, "getCameraCharacteristics()", ex); + return new ArrayList(); + } + + final StreamConfigurationMap streamMap = + cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); + + Range[] fpsRanges = + cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES); + List framerateRanges = + convertFramerates(fpsRanges, getFpsUnitFactor(fpsRanges)); + List sizes = getSupportedSizes(cameraCharacteristics); + + int defaultMaxFps = 0; + for (CaptureFormat.FramerateRange framerateRange : framerateRanges) { + defaultMaxFps = Math.max(defaultMaxFps, framerateRange.max); + } + + final List formatList = new ArrayList(); + for (Size size : sizes) { + long 
minFrameDurationNs = 0; + try { + minFrameDurationNs = streamMap.getOutputMinFrameDuration( + SurfaceTexture.class, new android.util.Size(size.width, size.height)); + } catch (Exception e) { + // getOutputMinFrameDuration() is not supported on all devices. Ignore silently. + } + final int maxFps = (minFrameDurationNs == 0) + ? defaultMaxFps + : (int) Math.round(NANO_SECONDS_PER_SECOND / minFrameDurationNs) * 1000; + formatList.add(new CaptureFormat(size.width, size.height, 0, maxFps)); + Logging.d(TAG, "Format: " + size.width + "x" + size.height + "@" + maxFps); + } + + cachedSupportedFormats.put(cameraId, formatList); + final long endTimeMs = SystemClock.elapsedRealtime(); + Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done." + + " Time spent: " + (endTimeMs - startTimeMs) + " ms."); + return formatList; + } + } + + // Convert from android.util.Size to Size. + private static List convertSizes(android.util.Size[] cameraSizes) { + if (cameraSizes == null || cameraSizes.length == 0) { + return Collections.emptyList(); + } + final List sizes = new ArrayList<>(cameraSizes.length); + for (android.util.Size size : cameraSizes) { + sizes.add(new Size(size.getWidth(), size.getHeight())); + } + return sizes; + } + + // Convert from android.util.Range to CaptureFormat.FramerateRange. + static List convertFramerates( + Range[] arrayRanges, int unitFactor) { + final List ranges = new ArrayList(); + for (Range range : arrayRanges) { + ranges.add(new CaptureFormat.FramerateRange( + range.getLower() * unitFactor, range.getUpper() * unitFactor)); + } + return ranges; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera2Session.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera2Session.java new file mode 100644 index 00000000..dec97a2c --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Camera2Session.java @@ -0,0 +1,426 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. 
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.content.Context;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.os.Handler;
import android.util.Range;
import android.view.Surface;
import androidx.annotation.Nullable;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;

/**
 * {@link CameraSession} implementation backed by the android.hardware.camera2 API.
 *
 * <p>Vendored copy of the upstream WebRTC class. Every method (including the camera2
 * callbacks, which are posted to {@code cameraThreadHandler}) runs on the camera thread;
 * this is enforced by {@link #checkIsOnCameraThread()} throughout.
 */
class Camera2Session implements CameraSession {
  private static final String TAG = "Camera2Session";

  // Stats histograms; samples are reported to the WebRTC metrics backend.
  private static final Histogram camera2StartTimeMsHistogram =
      Histogram.createCounts("WebRTC.Android.Camera2.StartTimeMs", 1, 10000, 50);
  private static final Histogram camera2StopTimeMsHistogram =
      Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50);
  private static final Histogram camera2ResolutionHistogram = Histogram.createEnumeration(
      "WebRTC.Android.Camera2.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());

  // A session is RUNNING from construction until stop()/fatal error flips it to STOPPED.
  private static enum SessionState { RUNNING, STOPPED }

  private final Handler cameraThreadHandler;
  private final CreateSessionCallback callback;
  private final Events events;
  private final Context applicationContext;
  private final CameraManager cameraManager;
  private final SurfaceTextureHelper surfaceTextureHelper;
  private final String cameraId;
  private final int width;
  private final int height;
  private final int framerate;

  // Initialized at start
  private CameraCharacteristics cameraCharacteristics;
  private int cameraOrientation;
  private boolean isCameraFrontFacing;
  // NOTE(review): presumably 1 or 1000 depending on whether the HAL reports fps ranges
  // scaled by 1000 — determined by Camera2Enumerator.getFpsUnitFactor; confirm there.
  private int fpsUnitFactor;
  private CaptureFormat captureFormat;

  // Initialized when camera opens
  @Nullable private CameraDevice cameraDevice;
  @Nullable private Surface surface;

  // Initialized when capture session is created
  @Nullable private CameraCaptureSession captureSession;

  // State
  private SessionState state = SessionState.RUNNING;
  private boolean firstFrameReported;

  // Used only for stats. Only used on the camera thread.
  private final long constructionTimeNs; // Construction time of this class.

  /** Handles CameraDevice lifecycle callbacks (open/close/disconnect/error). */
  private class CameraStateCallback extends CameraDevice.StateCallback {
    private String getErrorDescription(int errorCode) {
      switch (errorCode) {
        case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
          return "Camera device has encountered a fatal error.";
        case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
          return "Camera device could not be opened due to a device policy.";
        case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
          return "Camera device is in use already.";
        case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
          return "Camera service has encountered a fatal error.";
        case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
          return "Camera device could not be opened because"
              + " there are too many other open camera devices.";
        default:
          return "Unknown camera error: " + errorCode;
      }
    }

    @Override
    public void onDisconnected(CameraDevice camera) {
      checkIsOnCameraThread();
      // A disconnect before the capture session was configured counts as a start failure;
      // after that point it is reported as a runtime disconnect event instead.
      final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
      state = SessionState.STOPPED;
      stopInternal();
      if (startFailure) {
        callback.onFailure(FailureType.DISCONNECTED, "Camera disconnected / evicted.");
      } else {
        events.onCameraDisconnected(Camera2Session.this);
      }
    }

    @Override
    public void onError(CameraDevice camera, int errorCode) {
      checkIsOnCameraThread();
      reportError(getErrorDescription(errorCode));
    }

    @Override
    public void onOpened(CameraDevice camera) {
      checkIsOnCameraThread();

      Logging.d(TAG, "Camera opened.");
      cameraDevice = camera;

      // Frames are delivered through the SurfaceTexture owned by surfaceTextureHelper.
      surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
      surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
      try {
        camera.createCaptureSession(
            Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler);
      } catch (CameraAccessException e) {
        reportError("Failed to create capture session. " + e);
        return;
      }
    }

    @Override
    public void onClosed(CameraDevice camera) {
      checkIsOnCameraThread();

      Logging.d(TAG, "Camera device closed.");
      events.onCameraClosed(Camera2Session.this);
    }
  }

  /** Configures the repeating capture request and starts frame delivery once configured. */
  private class CaptureSessionCallback extends CameraCaptureSession.StateCallback {
    @Override
    public void onConfigureFailed(CameraCaptureSession session) {
      checkIsOnCameraThread();
      session.close();
      reportError("Failed to configure capture session.");
    }

    @Override
    public void onConfigured(CameraCaptureSession session) {
      checkIsOnCameraThread();
      Logging.d(TAG, "Camera capture session configured.");
      captureSession = session;
      try {
        /*
         * The viable options for video capture requests are:
         * TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality
         * post-processing.
         * TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording
         * quality.
         */
        final CaptureRequest.Builder captureRequestBuilder =
            cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
        // Set auto exposure fps range.
        captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
            new Range<>(captureFormat.framerate.min / fpsUnitFactor,
                captureFormat.framerate.max / fpsUnitFactor));
        captureRequestBuilder.set(
            CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
        captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
        chooseStabilizationMode(captureRequestBuilder);
        chooseFocusMode(captureRequestBuilder);

        captureRequestBuilder.addTarget(surface);
        session.setRepeatingRequest(
            captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
      } catch (CameraAccessException e) {
        reportError("Failed to start capture request. " + e);
        return;
      }

      surfaceTextureHelper.startListening((VideoFrame frame) -> {
        checkIsOnCameraThread();

        if (state != SessionState.RUNNING) {
          Logging.d(TAG, "Texture frame captured but camera is no longer running.");
          return;
        }

        if (!firstFrameReported) {
          firstFrameReported = true;
          final int startTimeMs =
              (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
          camera2StartTimeMsHistogram.addSample(startTimeMs);
        }

        // Undo the mirror that the OS "helps" us with.
        // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
        // Also, undo camera orientation, we report it as rotation instead.
        final VideoFrame modifiedFrame =
            new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix(
                               (TextureBufferImpl) frame.getBuffer(),
                               /* mirror= */ isCameraFrontFacing,
                               /* rotation= */ -cameraOrientation),
                /* rotation= */ getFrameOrientation(), frame.getTimestampNs());
        events.onFrameCaptured(Camera2Session.this, modifiedFrame);
        modifiedFrame.release();
      });
      Logging.d(TAG, "Camera device successfully started.");
      callback.onDone(Camera2Session.this);
    }

    // Prefers optical stabilization over software stabilization if available. Only enables one of
    // the stabilization modes at a time because having both enabled can cause strange results.
    private void chooseStabilizationMode(CaptureRequest.Builder captureRequestBuilder) {
      final int[] availableOpticalStabilization = cameraCharacteristics.get(
          CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION);
      if (availableOpticalStabilization != null) {
        for (int mode : availableOpticalStabilization) {
          if (mode == CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) {
            captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
                CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON);
            captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
                CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
            Logging.d(TAG, "Using optical stabilization.");
            return;
          }
        }
      }
      // If no optical mode is available, try software.
      final int[] availableVideoStabilization = cameraCharacteristics.get(
          CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
      if (availableVideoStabilization != null) {
        for (int mode : availableVideoStabilization) {
          if (mode == CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON) {
            captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
                CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON);
            captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
                CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF);
            Logging.d(TAG, "Using video stabilization.");
            return;
          }
        }
      }
      Logging.d(TAG, "Stabilization not available.");
    }

    // Uses continuous-video auto-focus when the device supports it; otherwise leaves the
    // template's default focus behavior in place.
    private void chooseFocusMode(CaptureRequest.Builder captureRequestBuilder) {
      final int[] availableFocusModes =
          cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
      for (int mode : availableFocusModes) {
        if (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) {
          captureRequestBuilder.set(
              CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
          Logging.d(TAG, "Using continuous video auto-focus.");
          return;
        }
      }
      Logging.d(TAG, "Auto-focus is not available.");
    }
  }

  /** Logs per-request capture failures; frame delivery itself goes through the SurfaceTexture. */
  private static class CameraCaptureCallback extends CameraCaptureSession.CaptureCallback {
    @Override
    public void onCaptureFailed(
        CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
      Logging.d(TAG, "Capture failed: " + failure);
    }
  }

  /**
   * Creates and starts a new session. Must be called on the camera thread; the result is
   * delivered asynchronously via {@code callback} (onDone/onFailure).
   */
  public static void create(CreateSessionCallback callback, Events events,
      Context applicationContext, CameraManager cameraManager,
      SurfaceTextureHelper surfaceTextureHelper, String cameraId, int width, int height,
      int framerate) {
    new Camera2Session(callback, events, applicationContext, cameraManager, surfaceTextureHelper,
        cameraId, width, height, framerate);
  }

  private Camera2Session(CreateSessionCallback callback, Events events, Context applicationContext,
      CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper, String cameraId,
      int width, int height, int framerate) {
    Logging.d(TAG, "Create new camera2 session on camera " + cameraId);

    constructionTimeNs = System.nanoTime();

    // Binds this session to the thread the constructor runs on (the camera thread).
    this.cameraThreadHandler = new Handler();
    this.callback = callback;
    this.events = events;
    this.applicationContext = applicationContext;
    this.cameraManager = cameraManager;
    this.surfaceTextureHelper = surfaceTextureHelper;
    this.cameraId = cameraId;
    this.width = width;
    this.height = height;
    this.framerate = framerate;

    start();
  }

  // Reads camera characteristics, picks a capture format and opens the device.
  private void start() {
    checkIsOnCameraThread();
    Logging.d(TAG, "start");

    try {
      cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
    } catch (CameraAccessException | IllegalArgumentException e) {
      reportError("getCameraCharacteristics(): " + e.getMessage());
      return;
    }
    cameraOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
    isCameraFrontFacing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)
        == CameraMetadata.LENS_FACING_FRONT;

    findCaptureFormat();

    if (captureFormat == null) {
      // findCaptureFormat reports an error already.
      return;
    }

    openCamera();
  }

  // Chooses the supported size and fps range closest to the requested width/height/framerate.
  private void findCaptureFormat() {
    checkIsOnCameraThread();

    Range<Integer>[] fpsRanges =
        cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
    fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
    List<CaptureFormat.FramerateRange> framerateRanges =
        Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
    List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
    Logging.d(TAG, "Available preview sizes: " + sizes);
    Logging.d(TAG, "Available fps ranges: " + framerateRanges);

    if (framerateRanges.isEmpty() || sizes.isEmpty()) {
      reportError("No supported capture formats.");
      return;
    }

    final CaptureFormat.FramerateRange bestFpsRange =
        CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);

    final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
    CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize);

    captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
    Logging.d(TAG, "Using capture format: " + captureFormat);
  }

  private void openCamera() {
    checkIsOnCameraThread();

    Logging.d(TAG, "Opening camera " + cameraId);
    events.onCameraOpening();

    try {
      cameraManager.openCamera(cameraId, new CameraStateCallback(), cameraThreadHandler);
    } catch (CameraAccessException | IllegalArgumentException | SecurityException e) {
      reportError("Failed to open camera: " + e);
      return;
    }
  }

  @Override
  public void stop() {
    Logging.d(TAG, "Stop camera2 session on camera " + cameraId);
    checkIsOnCameraThread();
    if (state != SessionState.STOPPED) {
      final long stopStartTime = System.nanoTime();
      state = SessionState.STOPPED;
      stopInternal();
      final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
      camera2StopTimeMsHistogram.addSample(stopTimeMs);
    }
  }

  // Releases all camera resources. Safe to call from any failure path; fields are null-checked
  // so partially-initialized sessions are torn down cleanly.
  private void stopInternal() {
    Logging.d(TAG, "Stop internal");
    checkIsOnCameraThread();

    surfaceTextureHelper.stopListening();

    if (captureSession != null) {
      captureSession.close();
      captureSession = null;
    }
    if (surface != null) {
      surface.release();
      surface = null;
    }
    if (cameraDevice != null) {
      cameraDevice.close();
      cameraDevice = null;
    }

    Logging.d(TAG, "Stop done");
  }

  // Stops the session and reports the error either as a start failure (if the capture session
  // never got configured) or as a runtime camera error.
  private void reportError(String error) {
    checkIsOnCameraThread();
    Logging.e(TAG, "Error: " + error);

    final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
    state = SessionState.STOPPED;
    stopInternal();
    if (startFailure) {
      callback.onFailure(FailureType.ERROR, error);
    } else {
      events.onCameraError(this, error);
    }
  }

  // Combines sensor orientation with current device rotation into the frame rotation reported
  // to consumers; back-facing cameras rotate in the opposite direction.
  private int getFrameOrientation() {
    int rotation = CameraSession.getDeviceOrientation(applicationContext);
    if (!isCameraFrontFacing) {
      rotation = 360 - rotation;
    }
    return (cameraOrientation + rotation) % 360;
  }

  private void checkIsOnCameraThread() {
    if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
      throw new IllegalStateException("Wrong thread");
    }
  }
}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraCapturer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraCapturer.java
new file mode 100644
index 00000000..1922a529
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraCapturer.java
@@ -0,0 +1,458 @@
/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS.
All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.content.Context;
import android.os.Handler;
import android.os.Looper;
import androidx.annotation.Nullable;
import java.util.Arrays;
import java.util.List;

/**
 * Base {@link CameraVideoCapturer} that manages the lifecycle of a single {@link CameraSession}:
 * opening with retries, restart-on-format-change, and camera switching.
 *
 * <p>Vendored copy of the upstream WebRTC class. All mutable capture state is guarded by
 * {@code stateLock}; session callbacks arrive on the camera thread (the thread of
 * {@code surfaceTextureHelper.getHandler()}), while the open-timeout watchdog runs on the
 * UI thread.
 */
@SuppressWarnings("deprecation")
abstract class CameraCapturer implements CameraVideoCapturer {
  enum SwitchState {
    IDLE, // No switch requested.
    PENDING, // Waiting for previous capture session to open.
    IN_PROGRESS, // Waiting for new switched capture session to start.
  }

  private static final String TAG = "CameraCapturer";
  private final static int MAX_OPEN_CAMERA_ATTEMPTS = 3;
  private final static int OPEN_CAMERA_DELAY_MS = 500;
  private final static int OPEN_CAMERA_TIMEOUT = 10000;

  private final CameraEnumerator cameraEnumerator;
  private final CameraEventsHandler eventsHandler;
  private final Handler uiThreadHandler;

  // Invoked on the camera thread when session creation finishes (success or failure).
  @Nullable
  private final CameraSession.CreateSessionCallback createSessionCallback =
      new CameraSession.CreateSessionCallback() {
        @Override
        public void onDone(CameraSession session) {
          checkIsOnCameraThread();
          Logging.d(TAG, "Create session done. Switch state: " + switchState);
          uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
          synchronized (stateLock) {
            capturerObserver.onCapturerStarted(true /* success */);
            sessionOpening = false;
            currentSession = session;
            cameraStatistics = new CameraStatistics(surfaceHelper, eventsHandler);
            firstFrameObserved = false;
            // Wakes stopCapture() if it is blocked waiting for the open to finish.
            stateLock.notifyAll();

            if (switchState == SwitchState.IN_PROGRESS) {
              switchState = SwitchState.IDLE;
              if (switchEventsHandler != null) {
                switchEventsHandler.onCameraSwitchDone(cameraEnumerator.isFrontFacing(cameraName));
                switchEventsHandler = null;
              }
            } else if (switchState == SwitchState.PENDING) {
              // A switch was requested while this session was opening; perform it now.
              String selectedCameraName = pendingCameraName;
              pendingCameraName = null;
              switchState = SwitchState.IDLE;
              switchCameraInternal(switchEventsHandler, selectedCameraName);
            }
          }
        }

        @Override
        public void onFailure(CameraSession.FailureType failureType, String error) {
          checkIsOnCameraThread();
          uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
          synchronized (stateLock) {
            capturerObserver.onCapturerStarted(false /* success */);
            openAttemptsRemaining--;

            if (openAttemptsRemaining <= 0) {
              Logging.w(TAG, "Opening camera failed, passing: " + error);
              sessionOpening = false;
              stateLock.notifyAll();

              if (switchState != SwitchState.IDLE) {
                if (switchEventsHandler != null) {
                  switchEventsHandler.onCameraSwitchError(error);
                  switchEventsHandler = null;
                }
                switchState = SwitchState.IDLE;
              }

              if (failureType == CameraSession.FailureType.DISCONNECTED) {
                eventsHandler.onCameraDisconnected();
              } else {
                eventsHandler.onCameraError(error);
              }
            } else {
              // Retry after a short delay; attempts remaining were set when capture started.
              Logging.w(TAG, "Opening camera failed, retry: " + error);
              createSessionInternal(OPEN_CAMERA_DELAY_MS);
            }
          }
        }
      };

  // Runtime events from the active session; all are validated against currentSession so stale
  // callbacks from an already-replaced session are ignored.
  @Nullable
  private final CameraSession.Events cameraSessionEventsHandler = new CameraSession.Events() {
    @Override
    public void onCameraOpening() {
      checkIsOnCameraThread();
      synchronized (stateLock) {
        if (currentSession != null) {
          Logging.w(TAG, "onCameraOpening while session was open.");
          return;
        }
        eventsHandler.onCameraOpening(cameraName);
      }
    }

    @Override
    public void onCameraError(CameraSession session, String error) {
      checkIsOnCameraThread();
      synchronized (stateLock) {
        if (session != currentSession) {
          Logging.w(TAG, "onCameraError from another session: " + error);
          return;
        }
        eventsHandler.onCameraError(error);
        stopCapture();
      }
    }

    @Override
    public void onCameraDisconnected(CameraSession session) {
      checkIsOnCameraThread();
      synchronized (stateLock) {
        if (session != currentSession) {
          Logging.w(TAG, "onCameraDisconnected from another session.");
          return;
        }
        eventsHandler.onCameraDisconnected();
        stopCapture();
      }
    }

    @Override
    public void onCameraClosed(CameraSession session) {
      checkIsOnCameraThread();
      synchronized (stateLock) {
        if (session != currentSession && currentSession != null) {
          Logging.d(TAG, "onCameraClosed from another session.");
          return;
        }
        eventsHandler.onCameraClosed();
      }
    }

    @Override
    public void onFrameCaptured(CameraSession session, VideoFrame frame) {
      checkIsOnCameraThread();
      synchronized (stateLock) {
        if (session != currentSession) {
          Logging.w(TAG, "onFrameCaptured from another session.");
          return;
        }
        if (!firstFrameObserved) {
          eventsHandler.onFirstFrameAvailable();
          firstFrameObserved = true;
        }
        cameraStatistics.addFrame();
        capturerObserver.onFrameCaptured(frame);
      }
    }
  };

  // Posted (delayed) on the UI thread when an open starts; removed when the open resolves.
  private final Runnable openCameraTimeoutRunnable = new Runnable() {
    @Override
    public void run() {
      eventsHandler.onCameraError("Camera failed to start within timeout.");
    }
  };

  // Initialized on initialize
  // -------------------------
  private Handler cameraThreadHandler;
  private Context applicationContext;
  private org.webrtc.CapturerObserver capturerObserver;
  private SurfaceTextureHelper surfaceHelper;

  private final Object stateLock = new Object();
  private boolean sessionOpening; /* guarded by stateLock */
  @Nullable private CameraSession currentSession; /* guarded by stateLock */
  private String cameraName; /* guarded by stateLock */
  private String pendingCameraName; /* guarded by stateLock */
  private int width; /* guarded by stateLock */
  private int height; /* guarded by stateLock */
  private int framerate; /* guarded by stateLock */
  private int openAttemptsRemaining; /* guarded by stateLock */
  private SwitchState switchState = SwitchState.IDLE; /* guarded by stateLock */
  @Nullable private CameraSwitchHandler switchEventsHandler; /* guarded by stateLock */
  // Valid from onDone call until stopCapture, otherwise null.
  @Nullable private CameraStatistics cameraStatistics; /* guarded by stateLock */
  private boolean firstFrameObserved; /* guarded by stateLock */

  /**
   * @param cameraName must be one of the names reported by {@code cameraEnumerator}.
   * @param eventsHandler may be null; a no-op handler is substituted in that case.
   * @throws RuntimeException if no cameras are attached.
   * @throws IllegalArgumentException if {@code cameraName} is unknown.
   */
  public CameraCapturer(String cameraName, @Nullable CameraEventsHandler eventsHandler,
      CameraEnumerator cameraEnumerator) {
    if (eventsHandler == null) {
      eventsHandler = new CameraEventsHandler() {
        @Override
        public void onCameraError(String errorDescription) {}
        @Override
        public void onCameraDisconnected() {}
        @Override
        public void onCameraFreezed(String errorDescription) {}
        @Override
        public void onCameraOpening(String cameraName) {}
        @Override
        public void onFirstFrameAvailable() {}
        @Override
        public void onCameraClosed() {}
      };
    }

    this.eventsHandler = eventsHandler;
    this.cameraEnumerator = cameraEnumerator;
    this.cameraName = cameraName;
    List<String> deviceNames = Arrays.asList(cameraEnumerator.getDeviceNames());
    uiThreadHandler = new Handler(Looper.getMainLooper());

    if (deviceNames.isEmpty()) {
      throw new RuntimeException("No cameras attached.");
    }
    if (!deviceNames.contains(this.cameraName)) {
      throw new IllegalArgumentException(
          "Camera name " + this.cameraName + " does not match any known camera device.");
    }
  }

  @Override
  public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
      org.webrtc.CapturerObserver capturerObserver) {
    this.applicationContext = applicationContext;
    this.capturerObserver = capturerObserver;
    this.surfaceHelper = surfaceTextureHelper;
    // The helper's handler thread doubles as the camera thread.
    this.cameraThreadHandler = surfaceTextureHelper.getHandler();
  }

  @Override
  public void startCapture(int width, int height, int framerate) {
    Logging.d(TAG, "startCapture: " + width + "x" + height + "@" + framerate);
    if (applicationContext == null) {
      throw new RuntimeException("CameraCapturer must be initialized before calling startCapture.");
    }

    synchronized (stateLock) {
      if (sessionOpening || currentSession != null) {
        Logging.w(TAG, "Session already open");
        return;
      }

      this.width = width;
      this.height = height;
      this.framerate = framerate;

      sessionOpening = true;
      openAttemptsRemaining = MAX_OPEN_CAMERA_ATTEMPTS;
      createSessionInternal(0);
    }
  }

  // Schedules session creation on the camera thread and arms the UI-thread open timeout.
  private void createSessionInternal(int delayMs) {
    uiThreadHandler.postDelayed(openCameraTimeoutRunnable, delayMs + OPEN_CAMERA_TIMEOUT);
    cameraThreadHandler.postDelayed(new Runnable() {
      @Override
      public void run() {
        createCameraSession(createSessionCallback, cameraSessionEventsHandler, applicationContext,
            surfaceHelper, cameraName, width, height, framerate);
      }
    }, delayMs);
  }

  @Override
  public void stopCapture() {
    Logging.d(TAG, "Stop capture");

    synchronized (stateLock) {
      // If an open is in flight, wait for it to settle before tearing down.
      while (sessionOpening) {
        Logging.d(TAG, "Stop capture: Waiting for session to open");
        try {
          stateLock.wait();
        } catch (InterruptedException e) {
          Logging.w(TAG, "Stop capture interrupted while waiting for the session to open.");
          Thread.currentThread().interrupt();
          return;
        }
      }

      if (currentSession != null) {
        Logging.d(TAG, "Stop capture: Nulling session");
        cameraStatistics.release();
        cameraStatistics = null;
        final CameraSession oldSession = currentSession;
        // The actual stop runs on the camera thread; only the bookkeeping happens here.
        cameraThreadHandler.post(new Runnable() {
          @Override
          public void run() {
            oldSession.stop();
          }
        });
        currentSession = null;
        capturerObserver.onCapturerStopped();
      } else {
        Logging.d(TAG, "Stop capture: No session open");
      }
    }

    Logging.d(TAG, "Stop capture done");
  }

  @Override
  public void changeCaptureFormat(int width, int height, int framerate) {
    Logging.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
    synchronized (stateLock) {
      stopCapture();
      startCapture(width, height, framerate);
    }
  }

  @Override
  public void dispose() {
    Logging.d(TAG, "dispose");
    stopCapture();
  }

  // Cycles to the next camera in enumeration order.
  @Override
  public void switchCamera(final CameraSwitchHandler switchEventsHandler) {
    Logging.d(TAG, "switchCamera");
    cameraThreadHandler.post(new Runnable() {
      @Override
      public void run() {
        List<String> deviceNames = Arrays.asList(cameraEnumerator.getDeviceNames());

        if (deviceNames.size() < 2) {
          reportCameraSwitchError("No camera to switch to.", switchEventsHandler);
          return;
        }

        int cameraNameIndex = deviceNames.indexOf(cameraName);
        String cameraName = deviceNames.get((cameraNameIndex + 1) % deviceNames.size());
        switchCameraInternal(switchEventsHandler, cameraName);
      }
    });
  }

  @Override
  public void switchCamera(final CameraSwitchHandler switchEventsHandler, final String cameraName) {
    Logging.d(TAG, "switchCamera");
    cameraThreadHandler.post(new Runnable() {
      @Override
      public void run() {
        switchCameraInternal(switchEventsHandler, cameraName);
      }
    });
  }

  @Override
  public boolean isScreencast() {
    return false;
  }

  // Debug aid: dumps the camera thread's current stack trace to the log.
  public void printStackTrace() {
    Thread cameraThread = null;
    if (cameraThreadHandler != null) {
      cameraThread = cameraThreadHandler.getLooper().getThread();
    }
    if (cameraThread != null) {
      StackTraceElement[] cameraStackTrace = cameraThread.getStackTrace();
      if (cameraStackTrace.length > 0) {
        Logging.d(TAG, "CameraCapturer stack trace:");
        for (StackTraceElement traceElem : cameraStackTrace) {
          Logging.d(TAG, traceElem.toString());
        }
      }
    }
  }

  private void reportCameraSwitchError(
      String error, @Nullable CameraSwitchHandler switchEventsHandler) {
    Logging.e(TAG, error);
    if (switchEventsHandler != null) {
      switchEventsHandler.onCameraSwitchError(error);
    }
  }

  // Runs on the camera thread. Either defers the switch (PENDING) if a session is still opening,
  // or stops the current session and opens the selected camera with a single attempt.
  private void switchCameraInternal(
      @Nullable final CameraSwitchHandler switchEventsHandler, final String selectedCameraName) {
    Logging.d(TAG, "switchCamera internal");
    List<String> deviceNames = Arrays.asList(cameraEnumerator.getDeviceNames());

    if (!deviceNames.contains(selectedCameraName)) {
      reportCameraSwitchError("Attempted to switch to unknown camera device " + selectedCameraName,
          switchEventsHandler);
      return;
    }

    synchronized (stateLock) {
      if (switchState != SwitchState.IDLE) {
        reportCameraSwitchError("Camera switch already in progress.", switchEventsHandler);
        return;
      }
      if (!sessionOpening && currentSession == null) {
        reportCameraSwitchError("switchCamera: camera is not running.", switchEventsHandler);
        return;
      }

      this.switchEventsHandler = switchEventsHandler;
      if (sessionOpening) {
        switchState = SwitchState.PENDING;
        pendingCameraName = selectedCameraName;
        return;
      } else {
        switchState = SwitchState.IN_PROGRESS;
      }

      Logging.d(TAG, "switchCamera: Stopping session");
      cameraStatistics.release();
      cameraStatistics = null;
      final CameraSession oldSession = currentSession;
      cameraThreadHandler.post(new Runnable() {
        @Override
        public void run() {
          oldSession.stop();
        }
      });
      currentSession = null;

      cameraName = selectedCameraName;

      sessionOpening = true;
      openAttemptsRemaining = 1;
      createSessionInternal(0);
    }
    Logging.d(TAG, "switchCamera done");
  }

  private void checkIsOnCameraThread() {
    if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
      Logging.e(TAG, "Check is on camera thread failed.");
      throw new RuntimeException("Not on camera thread.");
    }
  }

  protected String getCameraName() {
    synchronized (stateLock) {
      return cameraName;
    }
  }

  /** Subclass hook: creates the concrete (camera1/camera2) session on the camera thread. */
  abstract protected void createCameraSession(
      CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
      Context applicationContext, SurfaceTextureHelper surfaceTextureHelper, String cameraName,
      int width, int height, int framerate);
}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraEnumerationAndroid.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraEnumerationAndroid.java
new file mode 100644
index 00000000..0c3188ff
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraEnumerationAndroid.java
@@ -0,0 +1,206 @@
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import static java.lang.Math.abs;

import android.graphics.ImageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

/**
 * Shared camera-enumeration helpers: the capture-format value type and the logic for picking
 * the supported size / fps range closest to a request. Vendored copy of the upstream WebRTC
 * class; used by both the camera1 and camera2 paths.
 */
@SuppressWarnings("deprecation")
public class CameraEnumerationAndroid {
  private final static String TAG = "CameraEnumerationAndroid";

  // Well-known resolutions, indexed for the resolution histogram (index 0 = unknown).
  static final ArrayList<Size> COMMON_RESOLUTIONS = new ArrayList<Size>(Arrays.asList(
      // 0, Unknown resolution
      new Size(160, 120), // 1, QQVGA
      new Size(240, 160), // 2, HQVGA
      new Size(320, 240), // 3, QVGA
      new Size(400, 240), // 4, WQVGA
      new Size(480, 320), // 5, HVGA
      new Size(640, 360), // 6, nHD
      new Size(640, 480), // 7, VGA
      new Size(768, 480), // 8, WVGA
      new Size(854, 480), // 9, FWVGA
      new Size(800, 600), // 10, SVGA
      new Size(960, 540), // 11, qHD
      new Size(960, 640), // 12, DVGA
      new Size(1024, 576), // 13, WSVGA
      new Size(1024, 600), // 14, WVSGA
      new Size(1280, 720), // 15, HD
      new Size(1280, 1024), // 16, SXGA
      new Size(1920, 1080), // 17, Full HD
      new Size(1920, 1440), // 18, Full HD 4:3
      new Size(2560, 1440), // 19, QHD
      new Size(3840, 2160) // 20, UHD
      ));

  /** Immutable width/height/fps-range triple describing one capture configuration. */
  public static class CaptureFormat {
    // Class to represent a framerate range. The framerate varies because of lightning conditions.
    // The values are multiplied by 1000, so 1000 represents one frame per second.
    public static class FramerateRange {
      public int min;
      public int max;

      public FramerateRange(int min, int max) {
        this.min = min;
        this.max = max;
      }

      @Override
      public String toString() {
        return "[" + (min / 1000.0f) + ":" + (max / 1000.0f) + "]";
      }

      @Override
      public boolean equals(Object other) {
        if (!(other instanceof FramerateRange)) {
          return false;
        }
        final FramerateRange otherFramerate = (FramerateRange) other;
        return min == otherFramerate.min && max == otherFramerate.max;
      }

      @Override
      public int hashCode() {
        // Use prime close to 2^16 to avoid collisions for normal values less than 2^16.
        return 1 + 65537 * min + max;
      }
    }

    public final int width;
    public final int height;
    public final FramerateRange framerate;

    // TODO(hbos): If VideoCapturer.startCapture is updated to support other image formats then this
    // needs to be updated and VideoCapturer.getSupportedFormats need to return CaptureFormats of
    // all imageFormats.
    public final int imageFormat = ImageFormat.NV21;

    public CaptureFormat(int width, int height, int minFramerate, int maxFramerate) {
      this.width = width;
      this.height = height;
      this.framerate = new FramerateRange(minFramerate, maxFramerate);
    }

    public CaptureFormat(int width, int height, FramerateRange framerate) {
      this.width = width;
      this.height = height;
      this.framerate = framerate;
    }

    // Calculates the frame size of this capture format.
    public int frameSize() {
      return frameSize(width, height, imageFormat);
    }

    // Calculates the frame size of the specified image format. Currently only
    // supporting ImageFormat.NV21.
    // The size is width * height * number of bytes per pixel.
    // http://developer.android.com/reference/android/hardware/Camera.html#addCallbackBuffer(byte[])
    public static int frameSize(int width, int height, int imageFormat) {
      if (imageFormat != ImageFormat.NV21) {
        throw new UnsupportedOperationException("Don't know how to calculate "
            + "the frame size of non-NV21 image formats.");
      }
      return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
    }

    @Override
    public String toString() {
      return width + "x" + height + "@" + framerate;
    }

    @Override
    public boolean equals(Object other) {
      if (!(other instanceof CaptureFormat)) {
        return false;
      }
      final CaptureFormat otherFormat = (CaptureFormat) other;
      return width == otherFormat.width && height == otherFormat.height
          && framerate.equals(otherFormat.framerate);
    }

    @Override
    public int hashCode() {
      return 1 + (width * 65497 + height) * 251 + framerate.hashCode();
    }
  }

  // Helper class for finding the closest supported format for the two functions below. It creates a
  // comparator based on the difference to some requested parameters, where the element with the
  // minimum difference is the element that is closest to the requested parameters.
  private static abstract class ClosestComparator<T> implements Comparator<T> {
    // Difference between supported and requested parameter.
    abstract int diff(T supportedParameter);

    @Override
    public int compare(T t1, T t2) {
      return diff(t1) - diff(t2);
    }
  }

  // Prefer a fps range with an upper bound close to `framerate`. Also prefer a fps range with a low
  // lower bound, to allow the framerate to fluctuate based on lightning conditions.
  public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
      List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
    return Collections.min(
        supportedFramerates, new ClosestComparator<CaptureFormat.FramerateRange>() {
          // Progressive penalty if the upper bound is further away than `MAX_FPS_DIFF_THRESHOLD`
          // from requested.
          private static final int MAX_FPS_DIFF_THRESHOLD = 5000;
          private static final int MAX_FPS_LOW_DIFF_WEIGHT = 1;
          private static final int MAX_FPS_HIGH_DIFF_WEIGHT = 3;

          // Progressive penalty if the lower bound is bigger than `MIN_FPS_THRESHOLD`.
          private static final int MIN_FPS_THRESHOLD = 8000;
          private static final int MIN_FPS_LOW_VALUE_WEIGHT = 1;
          private static final int MIN_FPS_HIGH_VALUE_WEIGHT = 4;

          // Use one weight for small `value` less than `threshold`, and another weight above.
          private int progressivePenalty(int value, int threshold, int lowWeight, int highWeight) {
            return (value < threshold) ? value * lowWeight
                                       : threshold * lowWeight + (value - threshold) * highWeight;
          }

          @Override
          int diff(CaptureFormat.FramerateRange range) {
            final int minFpsError = progressivePenalty(
                range.min, MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
            final int maxFpsError = progressivePenalty(Math.abs(requestedFps * 1000 - range.max),
                MAX_FPS_DIFF_THRESHOLD, MAX_FPS_LOW_DIFF_WEIGHT, MAX_FPS_HIGH_DIFF_WEIGHT);
            return minFpsError + maxFpsError;
          }
        });
  }

  // Picks the supported size minimizing the Manhattan distance to the requested dimensions.
  public static Size getClosestSupportedSize(
      List<Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
    return Collections.min(supportedSizes, new ClosestComparator<Size>() {
      @Override
      int diff(Size size) {
        return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
      }
    });
  }

  // Helper method for camera classes.
  static void reportCameraResolution(Histogram histogram, Size resolution) {
    int index = COMMON_RESOLUTIONS.indexOf(resolution);
    // 0 is reserved for unknown resolution, so add 1.
    // indexOf returns -1 for unknown resolutions so it becomes 0 automatically.
    histogram.addSample(index + 1);
  }
}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraEnumerator.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraEnumerator.java
new file mode 100644
index 00000000..dc954b62
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraEnumerator.java
@@ -0,0 +1,25 @@
/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import org.webrtc.CameraEnumerationAndroid.CaptureFormat;

import java.util.List;

/**
 * Abstraction over the available camera devices (camera1 or camera2 backed); also acts as the
 * factory for {@link CameraVideoCapturer} instances bound to a particular device.
 */
public interface CameraEnumerator {
  public String[] getDeviceNames();
  public boolean isFrontFacing(String deviceName);
  public boolean isBackFacing(String deviceName);
  public List<CaptureFormat> getSupportedFormats(String deviceName);

  public CameraVideoCapturer createCapturer(
      String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler);
}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraSession.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraSession.java
new file mode 100644
index 00000000..8d137854
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraSession.java
@@ -0,0 +1,72 @@
/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
+ */ + +package org.webrtc; + +import android.content.Context; +import android.graphics.Matrix; +import android.view.WindowManager; +import android.view.Surface; + +interface CameraSession { + enum FailureType { ERROR, DISCONNECTED } + + // Callbacks are fired on the camera thread. + interface CreateSessionCallback { + void onDone(CameraSession session); + void onFailure(FailureType failureType, String error); + } + + // Events are fired on the camera thread. + interface Events { + void onCameraOpening(); + void onCameraError(CameraSession session, String error); + void onCameraDisconnected(CameraSession session); + void onCameraClosed(CameraSession session); + void onFrameCaptured(CameraSession session, VideoFrame frame); + } + + /** + * Stops the capture. Waits until no more calls to capture observer will be made. + * If waitCameraStop is true, also waits for the camera to stop. + */ + void stop(); + + static int getDeviceOrientation(Context context) { + final WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); + switch (wm.getDefaultDisplay().getRotation()) { + case Surface.ROTATION_90: + return 90; + case Surface.ROTATION_180: + return 180; + case Surface.ROTATION_270: + return 270; + case Surface.ROTATION_0: + default: + return 0; + } + } + + static VideoFrame.TextureBuffer createTextureBufferWithModifiedTransformMatrix( + TextureBufferImpl buffer, boolean mirror, int rotation) { + final Matrix transformMatrix = new Matrix(); + // Perform mirror and rotation around (0.5, 0.5) since that is the center of the texture. + transformMatrix.preTranslate(/* dx= */ 0.5f, /* dy= */ 0.5f); + if (mirror) { + transformMatrix.preScale(/* sx= */ -1f, /* sy= */ 1f); + } + transformMatrix.preRotate(rotation); + transformMatrix.preTranslate(/* dx= */ -0.5f, /* dy= */ -0.5f); + + // The width and height are not affected by rotation since Camera2Session has set them to the + // value they should be after undoing the rotation. 
+ return buffer.applyTransformMatrix(transformMatrix, buffer.getWidth(), buffer.getHeight()); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraVideoCapturer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraVideoCapturer.java new file mode 100644 index 00000000..ec26868b --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/CameraVideoCapturer.java @@ -0,0 +1,172 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.media.MediaRecorder; + +/** + * Base interface for camera1 and camera2 implementations. Extends VideoCapturer with a + * switchCamera() function. Also provides subinterfaces for handling camera events, and a helper + * class for detecting camera freezes. + */ +public interface CameraVideoCapturer extends VideoCapturer { + /** + * Camera events handler - can be used to be notifed about camera events. The callbacks are + * executed from an arbitrary thread. + */ + public interface CameraEventsHandler { + // Camera error handler - invoked when camera can not be opened + // or any camera exception happens on camera thread. + void onCameraError(String errorDescription); + + // Called when camera is disconnected. + void onCameraDisconnected(); + + // Invoked when camera stops receiving frames. + void onCameraFreezed(String errorDescription); + + // Callback invoked when camera is opening. + void onCameraOpening(String cameraName); + + // Callback invoked when first camera frame is available after camera is started. + void onFirstFrameAvailable(); + + // Callback invoked when camera is closed. 
+ void onCameraClosed(); + } + + /** + * Camera switch handler - one of these functions are invoked with the result of switchCamera(). + * The callback may be called on an arbitrary thread. + */ + public interface CameraSwitchHandler { + // Invoked on success. `isFrontCamera` is true if the new camera is front facing. + void onCameraSwitchDone(boolean isFrontCamera); + + // Invoked on failure, e.g. camera is stopped or only one camera available. + void onCameraSwitchError(String errorDescription); + } + + /** + * Switch camera to the next valid camera id. This can only be called while the camera is running. + * This function can be called from any thread. + */ + void switchCamera(CameraSwitchHandler switchEventsHandler); + + /** + * Switch camera to the specified camera id. This can only be called while the camera is running. + * This function can be called from any thread. + */ + void switchCamera(CameraSwitchHandler switchEventsHandler, String cameraName); + + /** + * MediaRecorder add/remove handler - one of these functions are invoked with the result of + * addMediaRecorderToCamera() or removeMediaRecorderFromCamera calls. + * The callback may be called on an arbitrary thread. + */ + @Deprecated + public interface MediaRecorderHandler { + // Invoked on success. + void onMediaRecorderSuccess(); + + // Invoked on failure, e.g. camera is stopped or any exception happens. + void onMediaRecorderError(String errorDescription); + } + + /** + * Add MediaRecorder to camera pipeline. This can only be called while the camera is running. + * Once MediaRecorder is added to camera pipeline camera switch is not allowed. + * This function can be called from any thread. + */ + @Deprecated + default void addMediaRecorderToCamera( + MediaRecorder mediaRecorder, MediaRecorderHandler resultHandler) { + throw new UnsupportedOperationException("Deprecated and not implemented."); + } + + /** + * Remove MediaRecorder from camera pipeline. 
This can only be called while the camera is running. + * This function can be called from any thread. + */ + @Deprecated + default void removeMediaRecorderFromCamera(MediaRecorderHandler resultHandler) { + throw new UnsupportedOperationException("Deprecated and not implemented."); + } + + /** + * Helper class to log framerate and detect if the camera freezes. It will run periodic callbacks + * on the SurfaceTextureHelper thread passed in the ctor, and should only be operated from that + * thread. + */ + public static class CameraStatistics { + private final static String TAG = "CameraStatistics"; + private final static int CAMERA_OBSERVER_PERIOD_MS = 2000; + private final static int CAMERA_FREEZE_REPORT_TIMOUT_MS = 4000; + + private final SurfaceTextureHelper surfaceTextureHelper; + private final CameraEventsHandler eventsHandler; + private int frameCount; + private int freezePeriodCount; + // Camera observer - monitors camera framerate. Observer is executed on camera thread. + private final Runnable cameraObserver = new Runnable() { + @Override + public void run() { + final int cameraFps = Math.round(frameCount * 1000.0f / CAMERA_OBSERVER_PERIOD_MS); + Logging.d(TAG, "Camera fps: " + cameraFps + "."); + if (frameCount == 0) { + ++freezePeriodCount; + if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount >= CAMERA_FREEZE_REPORT_TIMOUT_MS + && eventsHandler != null) { + Logging.e(TAG, "Camera freezed."); + if (surfaceTextureHelper.isTextureInUse()) { + // This can only happen if we are capturing to textures. + eventsHandler.onCameraFreezed("Camera failure. 
Client must return video buffers."); + } else { + eventsHandler.onCameraFreezed("Camera failure."); + } + return; + } + } else { + freezePeriodCount = 0; + } + frameCount = 0; + surfaceTextureHelper.getHandler().postDelayed(this, CAMERA_OBSERVER_PERIOD_MS); + } + }; + + public CameraStatistics( + SurfaceTextureHelper surfaceTextureHelper, CameraEventsHandler eventsHandler) { + if (surfaceTextureHelper == null) { + throw new IllegalArgumentException("SurfaceTextureHelper is null"); + } + this.surfaceTextureHelper = surfaceTextureHelper; + this.eventsHandler = eventsHandler; + this.frameCount = 0; + this.freezePeriodCount = 0; + surfaceTextureHelper.getHandler().postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS); + } + + private void checkThread() { + if (Thread.currentThread() != surfaceTextureHelper.getHandler().getLooper().getThread()) { + throw new IllegalStateException("Wrong thread"); + } + } + + public void addFrame() { + checkThread(); + ++frameCount; + } + + public void release() { + surfaceTextureHelper.getHandler().removeCallbacks(cameraObserver); + } + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/CandidatePairChangeEvent.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/CandidatePairChangeEvent.java new file mode 100644 index 00000000..b8e6685a --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/CandidatePairChangeEvent.java @@ -0,0 +1,39 @@ +/* + * Copyright 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * Representation of a change in selected ICE candidate pair. + * {@code CandidatePairChangeEvent} in the C++ API. 
+ */ +public final class CandidatePairChangeEvent { + public final IceCandidate local; + public final IceCandidate remote; + public final int lastDataReceivedMs; + public final String reason; + + /** + * An estimate from the ICE stack on how long it was disconnected before + * changing to the new candidate pair in this event. + * The first time an candidate pair is signaled the value will be 0. + */ + public final int estimatedDisconnectedTimeMs; + + @CalledByNative + CandidatePairChangeEvent(IceCandidate local, IceCandidate remote, int lastDataReceivedMs, + String reason, int estimatedDisconnectedTimeMs) { + this.local = local; + this.remote = remote; + this.lastDataReceivedMs = lastDataReceivedMs; + this.reason = reason; + this.estimatedDisconnectedTimeMs = estimatedDisconnectedTimeMs; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/CapturerObserver.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/CapturerObserver.java new file mode 100644 index 00000000..382dc15b --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/CapturerObserver.java @@ -0,0 +1,27 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * Interface for observering a capturer. Passed to {@link VideoCapturer#initialize}. Provided by + * {@link VideoSource#getCapturerObserver}. + * + * All callbacks must be executed on a single thread. + */ +public interface CapturerObserver { + /** Notify if the capturer have been started successfully or not. */ + void onCapturerStarted(boolean success); + /** Notify that the capturer has been stopped. 
*/ + void onCapturerStopped(); + + /** Delivers a captured frame. */ + void onFrameCaptured(VideoFrame frame); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/ContextUtils.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/ContextUtils.java new file mode 100644 index 00000000..e36ab728 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/ContextUtils.java @@ -0,0 +1,45 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.content.Context; + +/** + * Class for storing the application context and retrieving it in a static context. Similar to + * org.chromium.base.ContextUtils. + */ +public class ContextUtils { + private static final String TAG = "ContextUtils"; + private static Context applicationContext; + + /** + * Stores the application context that will be returned by getApplicationContext. This is called + * by PeerConnectionFactory.initialize. The application context must be set before creating + * a PeerConnectionFactory and must not be modified while it is alive. + */ + public static void initialize(Context applicationContext) { + if (applicationContext == null) { + throw new IllegalArgumentException( + "Application context cannot be null for ContextUtils.initialize."); + } + ContextUtils.applicationContext = applicationContext; + } + + /** + * Returns the stored application context. 
+ * + * @deprecated crbug.com/webrtc/8937 + */ + @Deprecated + public static Context getApplicationContext() { + return applicationContext; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/CryptoOptions.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/CryptoOptions.java new file mode 100644 index 00000000..6e06bc64 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/CryptoOptions.java @@ -0,0 +1,145 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * CryptoOptions defines advanced cryptographic settings for native WebRTC. + * These settings must be passed into RTCConfiguration. WebRTC is secur by + * default and you should not need to set any of these options unless you are + * specifically looking for an additional crypto feature such as AES_GCM + * support. This class is the Java binding of native api/crypto/cryptooptions.h + */ +public final class CryptoOptions { + /** + * SRTP Related Peer Connection Options. + */ + public final class Srtp { + /** + * Enable GCM crypto suites from RFC 7714 for SRTP. GCM will only be used + * if both sides enable it + */ + private final boolean enableGcmCryptoSuites; + /** + * If set to true, the (potentially insecure) crypto cipher + * kSrtpAes128CmSha1_32 will be included in the list of supported ciphers + * during negotiation. It will only be used if both peers support it and no + * other ciphers get preferred. + */ + private final boolean enableAes128Sha1_32CryptoCipher; + /** + * If set to true, encrypted RTP header extensions as defined in RFC 6904 + * will be negotiated. 
They will only be used if both peers support them. + */ + private final boolean enableEncryptedRtpHeaderExtensions; + + private Srtp(boolean enableGcmCryptoSuites, boolean enableAes128Sha1_32CryptoCipher, + boolean enableEncryptedRtpHeaderExtensions) { + this.enableGcmCryptoSuites = enableGcmCryptoSuites; + this.enableAes128Sha1_32CryptoCipher = enableAes128Sha1_32CryptoCipher; + this.enableEncryptedRtpHeaderExtensions = enableEncryptedRtpHeaderExtensions; + } + + @CalledByNative("Srtp") + public boolean getEnableGcmCryptoSuites() { + return enableGcmCryptoSuites; + } + + @CalledByNative("Srtp") + public boolean getEnableAes128Sha1_32CryptoCipher() { + return enableAes128Sha1_32CryptoCipher; + } + + @CalledByNative("Srtp") + public boolean getEnableEncryptedRtpHeaderExtensions() { + return enableEncryptedRtpHeaderExtensions; + } + } + + /** + * Options to be used when the FrameEncryptor / FrameDecryptor APIs are used. + */ + public final class SFrame { + /** + * If set all RtpSenders must have an FrameEncryptor attached to them before + * they are allowed to send packets. All RtpReceivers must have a + * FrameDecryptor attached to them before they are able to receive packets. 
+ */ + private final boolean requireFrameEncryption; + + private SFrame(boolean requireFrameEncryption) { + this.requireFrameEncryption = requireFrameEncryption; + } + + @CalledByNative("SFrame") + public boolean getRequireFrameEncryption() { + return requireFrameEncryption; + } + } + + private final Srtp srtp; + private final SFrame sframe; + + private CryptoOptions(boolean enableGcmCryptoSuites, boolean enableAes128Sha1_32CryptoCipher, + boolean enableEncryptedRtpHeaderExtensions, boolean requireFrameEncryption) { + this.srtp = new Srtp( + enableGcmCryptoSuites, enableAes128Sha1_32CryptoCipher, enableEncryptedRtpHeaderExtensions); + this.sframe = new SFrame(requireFrameEncryption); + } + + public static Builder builder() { + return new Builder(); + } + + @CalledByNative + public Srtp getSrtp() { + return srtp; + } + + @CalledByNative + public SFrame getSFrame() { + return sframe; + } + + public static class Builder { + private boolean enableGcmCryptoSuites; + private boolean enableAes128Sha1_32CryptoCipher; + private boolean enableEncryptedRtpHeaderExtensions; + private boolean requireFrameEncryption; + + private Builder() {} + + public Builder setEnableGcmCryptoSuites(boolean enableGcmCryptoSuites) { + this.enableGcmCryptoSuites = enableGcmCryptoSuites; + return this; + } + + public Builder setEnableAes128Sha1_32CryptoCipher(boolean enableAes128Sha1_32CryptoCipher) { + this.enableAes128Sha1_32CryptoCipher = enableAes128Sha1_32CryptoCipher; + return this; + } + + public Builder setEnableEncryptedRtpHeaderExtensions( + boolean enableEncryptedRtpHeaderExtensions) { + this.enableEncryptedRtpHeaderExtensions = enableEncryptedRtpHeaderExtensions; + return this; + } + + public Builder setRequireFrameEncryption(boolean requireFrameEncryption) { + this.requireFrameEncryption = requireFrameEncryption; + return this; + } + + public CryptoOptions createCryptoOptions() { + return new CryptoOptions(enableGcmCryptoSuites, enableAes128Sha1_32CryptoCipher, + 
enableEncryptedRtpHeaderExtensions, requireFrameEncryption); + } + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/DataChannel.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/DataChannel.java new file mode 100644 index 00000000..b9301f1f --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/DataChannel.java @@ -0,0 +1,196 @@ +/* + * Copyright 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import java.nio.ByteBuffer; + +/** Java wrapper for a C++ DataChannelInterface. */ +public class DataChannel { + /** Java wrapper for WebIDL RTCDataChannel. */ + public static class Init { + public boolean ordered = true; + // Optional unsigned short in WebIDL, -1 means unspecified. + public int maxRetransmitTimeMs = -1; + // Optional unsigned short in WebIDL, -1 means unspecified. + public int maxRetransmits = -1; + public String protocol = ""; + public boolean negotiated; + // Optional unsigned short in WebIDL, -1 means unspecified. + public int id = -1; + + @CalledByNative("Init") + boolean getOrdered() { + return ordered; + } + + @CalledByNative("Init") + int getMaxRetransmitTimeMs() { + return maxRetransmitTimeMs; + } + + @CalledByNative("Init") + int getMaxRetransmits() { + return maxRetransmits; + } + + @CalledByNative("Init") + String getProtocol() { + return protocol; + } + + @CalledByNative("Init") + boolean getNegotiated() { + return negotiated; + } + + @CalledByNative("Init") + int getId() { + return id; + } + } + + /** Java version of C++ DataBuffer. The atom of data in a DataChannel. */ + public static class Buffer { + /** The underlying data. 
*/ + public final ByteBuffer data; + + /** + * Indicates whether `data` contains UTF-8 text or "binary data" + * (i.e. anything else). + */ + public final boolean binary; + + @CalledByNative("Buffer") + public Buffer(ByteBuffer data, boolean binary) { + this.data = data; + this.binary = binary; + } + } + + /** Java version of C++ DataChannelObserver. */ + public interface Observer { + /** The data channel's bufferedAmount has changed. */ + @CalledByNative("Observer") public void onBufferedAmountChange(long previousAmount); + /** The data channel state has changed. */ + @CalledByNative("Observer") public void onStateChange(); + /** + * A data buffer was successfully received. NOTE: `buffer.data` will be + * freed once this function returns so callers who want to use the data + * asynchronously must make sure to copy it first. + */ + @CalledByNative("Observer") public void onMessage(Buffer buffer); + } + + /** Keep in sync with DataChannelInterface::DataState. */ + public enum State { + CONNECTING, + OPEN, + CLOSING, + CLOSED; + + @CalledByNative("State") + static State fromNativeIndex(int nativeIndex) { + return values()[nativeIndex]; + } + } + + private long nativeDataChannel; + private long nativeObserver; + + @CalledByNative + public DataChannel(long nativeDataChannel) { + this.nativeDataChannel = nativeDataChannel; + } + + /** Register `observer`, replacing any previously-registered observer. */ + public void registerObserver(Observer observer) { + checkDataChannelExists(); + if (nativeObserver != 0) { + nativeUnregisterObserver(nativeObserver); + } + nativeObserver = nativeRegisterObserver(observer); + } + + /** Unregister the (only) observer. 
*/ + public void unregisterObserver() { + checkDataChannelExists(); + nativeUnregisterObserver(nativeObserver); + nativeObserver = 0; + } + + public String label() { + checkDataChannelExists(); + return nativeLabel(); + } + + public int id() { + checkDataChannelExists(); + return nativeId(); + } + + public State state() { + checkDataChannelExists(); + return nativeState(); + } + + /** + * Return the number of bytes of application data (UTF-8 text and binary data) + * that have been queued using SendBuffer but have not yet been transmitted + * to the network. + */ + public long bufferedAmount() { + checkDataChannelExists(); + return nativeBufferedAmount(); + } + + /** Close the channel. */ + public void close() { + checkDataChannelExists(); + nativeClose(); + } + + /** Send `data` to the remote peer; return success. */ + public boolean send(Buffer buffer) { + checkDataChannelExists(); + // TODO(fischman): this could be cleverer about avoiding copies if the + // ByteBuffer is direct and/or is backed by an array. + byte[] data = new byte[buffer.data.remaining()]; + buffer.data.get(data); + return nativeSend(data, buffer.binary); + } + + /** Dispose of native resources attached to this channel. 
*/ + public void dispose() { + checkDataChannelExists(); + JniCommon.nativeReleaseRef(nativeDataChannel); + nativeDataChannel = 0; + } + + @CalledByNative + long getNativeDataChannel() { + return nativeDataChannel; + } + + private void checkDataChannelExists() { + if (nativeDataChannel == 0) { + throw new IllegalStateException("DataChannel has been disposed."); + } + } + + private native long nativeRegisterObserver(Observer observer); + private native void nativeUnregisterObserver(long observer); + private native String nativeLabel(); + private native int nativeId(); + private native State nativeState(); + private native long nativeBufferedAmount(); + private native void nativeClose(); + private native boolean nativeSend(byte[] data, boolean binary); +}; diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Dav1dDecoder.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Dav1dDecoder.java new file mode 100644 index 00000000..ecb16bc3 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Dav1dDecoder.java @@ -0,0 +1,20 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +public class Dav1dDecoder extends WrappedNativeVideoDecoder { + @Override + public long createNativeVideoDecoder() { + return nativeCreateDecoder(); + } + + static native long nativeCreateDecoder(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/DefaultVideoDecoderFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/DefaultVideoDecoderFactory.java new file mode 100644 index 00000000..d7a8694d --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/DefaultVideoDecoderFactory.java @@ -0,0 +1,69 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.util.Arrays; +import java.util.LinkedHashSet; + +/** + * Helper class that combines HW and SW decoders. + */ +public class DefaultVideoDecoderFactory implements VideoDecoderFactory { + private final VideoDecoderFactory hardwareVideoDecoderFactory; + private final VideoDecoderFactory softwareVideoDecoderFactory = new SoftwareVideoDecoderFactory(); + private final @Nullable VideoDecoderFactory platformSoftwareVideoDecoderFactory; + + /** + * Create decoder factory using default hardware decoder factory. + */ + public DefaultVideoDecoderFactory(@Nullable EglBase.Context eglContext) { + this.hardwareVideoDecoderFactory = new HardwareVideoDecoderFactory(eglContext); + this.platformSoftwareVideoDecoderFactory = new PlatformSoftwareVideoDecoderFactory(eglContext); + } + + /** + * Create decoder factory using explicit hardware decoder factory. 
+ */ + DefaultVideoDecoderFactory(VideoDecoderFactory hardwareVideoDecoderFactory) { + this.hardwareVideoDecoderFactory = hardwareVideoDecoderFactory; + this.platformSoftwareVideoDecoderFactory = null; + } + + @Override + public @Nullable VideoDecoder createDecoder(VideoCodecInfo codecType) { + VideoDecoder softwareDecoder = softwareVideoDecoderFactory.createDecoder(codecType); + final VideoDecoder hardwareDecoder = hardwareVideoDecoderFactory.createDecoder(codecType); + if (softwareDecoder == null && platformSoftwareVideoDecoderFactory != null) { + softwareDecoder = platformSoftwareVideoDecoderFactory.createDecoder(codecType); + } + if (hardwareDecoder != null && softwareDecoder != null) { + // Both hardware and software supported, wrap it in a software fallback + return new VideoDecoderFallback( + /* fallback= */ softwareDecoder, /* primary= */ hardwareDecoder); + } + return hardwareDecoder != null ? hardwareDecoder : softwareDecoder; + } + + @Override + public VideoCodecInfo[] getSupportedCodecs() { + LinkedHashSet supportedCodecInfos = new LinkedHashSet(); + + supportedCodecInfos.addAll(Arrays.asList(softwareVideoDecoderFactory.getSupportedCodecs())); + supportedCodecInfos.addAll(Arrays.asList(hardwareVideoDecoderFactory.getSupportedCodecs())); + if (platformSoftwareVideoDecoderFactory != null) { + supportedCodecInfos.addAll( + Arrays.asList(platformSoftwareVideoDecoderFactory.getSupportedCodecs())); + } + + return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/DefaultVideoEncoderFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/DefaultVideoEncoderFactory.java new file mode 100644 index 00000000..76896b6b --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/DefaultVideoEncoderFactory.java @@ -0,0 +1,56 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.util.Arrays; +import java.util.LinkedHashSet; + +/** Helper class that combines HW and SW encoders. */ +public class DefaultVideoEncoderFactory implements VideoEncoderFactory { + private final VideoEncoderFactory hardwareVideoEncoderFactory; + private final VideoEncoderFactory softwareVideoEncoderFactory = new SoftwareVideoEncoderFactory(); + + /** Create encoder factory using default hardware encoder factory. */ + public DefaultVideoEncoderFactory( + EglBase.Context eglContext, boolean enableIntelVp8Encoder, boolean enableH264HighProfile) { + this.hardwareVideoEncoderFactory = + new HardwareVideoEncoderFactory(eglContext, enableIntelVp8Encoder, enableH264HighProfile); + } + + /** Create encoder factory using explicit hardware encoder factory. */ + DefaultVideoEncoderFactory(VideoEncoderFactory hardwareVideoEncoderFactory) { + this.hardwareVideoEncoderFactory = hardwareVideoEncoderFactory; + } + + @Nullable + @Override + public VideoEncoder createEncoder(VideoCodecInfo info) { + final VideoEncoder softwareEncoder = softwareVideoEncoderFactory.createEncoder(info); + final VideoEncoder hardwareEncoder = hardwareVideoEncoderFactory.createEncoder(info); + if (hardwareEncoder != null && softwareEncoder != null) { + // Both hardware and software supported, wrap it in a software fallback + return new VideoEncoderFallback( + /* fallback= */ softwareEncoder, /* primary= */ hardwareEncoder); + } + return hardwareEncoder != null ? 
hardwareEncoder : softwareEncoder; + } + + @Override + public VideoCodecInfo[] getSupportedCodecs() { + LinkedHashSet supportedCodecInfos = new LinkedHashSet(); + + supportedCodecInfos.addAll(Arrays.asList(softwareVideoEncoderFactory.getSupportedCodecs())); + supportedCodecInfos.addAll(Arrays.asList(hardwareVideoEncoderFactory.getSupportedCodecs())); + + return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/DtmfSender.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/DtmfSender.java new file mode 100644 index 00000000..65498230 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/DtmfSender.java @@ -0,0 +1,96 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** Java wrapper for a C++ DtmfSenderInterface. */ +public class DtmfSender { + private long nativeDtmfSender; + + public DtmfSender(long nativeDtmfSender) { + this.nativeDtmfSender = nativeDtmfSender; + } + + /** + * @return true if this DtmfSender is capable of sending DTMF. Otherwise false. + */ + public boolean canInsertDtmf() { + checkDtmfSenderExists(); + return nativeCanInsertDtmf(nativeDtmfSender); + } + + /** + * Queues a task that sends the provided DTMF tones. + *

+ * If insertDtmf is called on the same object while an existing task for this + * object to generate DTMF is still running, the previous task is canceled. + * + * @param tones This parameter is treated as a series of characters. The characters 0 + * through 9, A through D, #, and * generate the associated DTMF tones. The + * characters a to d are equivalent to A to D. The character ',' indicates a + * delay of 2 seconds before processing the next character in the tones + * parameter. Unrecognized characters are ignored. + * @param duration Indicates the duration in ms to use for each character passed in the tones + * parameter. The duration cannot be more than 6000 or less than 70. + * @param interToneGap Indicates the gap between tones in ms. Must be at least 50 ms but should be + * as short as possible. + * @return true on success and false on failure. + */ + public boolean insertDtmf(String tones, int duration, int interToneGap) { + checkDtmfSenderExists(); + return nativeInsertDtmf(nativeDtmfSender, tones, duration, interToneGap); + } + + /** + * @return The tones remaining to be played out + */ + public String tones() { + checkDtmfSenderExists(); + return nativeTones(nativeDtmfSender); + } + + /** + * @return The current tone duration value in ms. This value will be the value last set via the + * insertDtmf() method, or the default value of 100 ms if insertDtmf() was never called. + */ + public int duration() { + checkDtmfSenderExists(); + return nativeDuration(nativeDtmfSender); + } + + /** + * @return The current value of the between-tone gap in ms. This value will be the value last set + * via the insertDtmf() method, or the default value of 50 ms if insertDtmf() was never + * called. 
+ */ + public int interToneGap() { + checkDtmfSenderExists(); + return nativeInterToneGap(nativeDtmfSender); + } + + public void dispose() { + checkDtmfSenderExists(); + JniCommon.nativeReleaseRef(nativeDtmfSender); + nativeDtmfSender = 0; + } + + private void checkDtmfSenderExists() { + if (nativeDtmfSender == 0) { + throw new IllegalStateException("DtmfSender has been disposed."); + } + } + + private static native boolean nativeCanInsertDtmf(long dtmfSender); + private static native boolean nativeInsertDtmf( + long dtmfSender, String tones, int duration, int interToneGap); + private static native String nativeTones(long dtmfSender); + private static native int nativeDuration(long dtmfSender); + private static native int nativeInterToneGap(long dtmfSender); +}; diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/DynamicBitrateAdjuster.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/DynamicBitrateAdjuster.java new file mode 100644 index 00000000..96a15bbf --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/DynamicBitrateAdjuster.java @@ -0,0 +1,98 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * BitrateAdjuster that tracks the bandwidth produced by an encoder and dynamically adjusts the + * bitrate. Used for hardware codecs that pay attention to framerate but still deviate from the + * target bitrate by unacceptable margins. + */ +class DynamicBitrateAdjuster extends BaseBitrateAdjuster { + // Change the bitrate at most once every three seconds. 
+ private static final double BITRATE_ADJUSTMENT_SEC = 3.0; + // Maximum bitrate adjustment scale - no more than 4 times. + private static final double BITRATE_ADJUSTMENT_MAX_SCALE = 4; + // Amount of adjustment steps to reach maximum scale. + private static final int BITRATE_ADJUSTMENT_STEPS = 20; + + private static final double BITS_PER_BYTE = 8.0; + + // How far the codec has deviated above (or below) the target bitrate (tracked in bytes). + private double deviationBytes; + private double timeSinceLastAdjustmentMs; + private int bitrateAdjustmentScaleExp; + + @Override + public void setTargets(int targetBitrateBps, double targetFramerateFps) { + if (this.targetBitrateBps > 0 && targetBitrateBps < this.targetBitrateBps) { + // Rescale the accumulator level if the accumulator max decreases + deviationBytes = deviationBytes * targetBitrateBps / this.targetBitrateBps; + } + super.setTargets(targetBitrateBps, targetFramerateFps); + } + + @Override + public void reportEncodedFrame(int size) { + if (targetFramerateFps == 0) { + return; + } + + // Accumulate the difference between actual and expected frame sizes. + double expectedBytesPerFrame = (targetBitrateBps / BITS_PER_BYTE) / targetFramerateFps; + deviationBytes += (size - expectedBytesPerFrame); + timeSinceLastAdjustmentMs += 1000.0 / targetFramerateFps; + + // Adjust the bitrate when the encoder accumulates one second's worth of data in excess or + // shortfall of the target. + double deviationThresholdBytes = targetBitrateBps / BITS_PER_BYTE; + + // Cap the deviation, i.e., don't let it grow beyond some level to avoid using too old data for + // bitrate adjustment. This also prevents taking more than 3 "steps" in a given 3-second cycle. 
+ double deviationCap = BITRATE_ADJUSTMENT_SEC * deviationThresholdBytes; + deviationBytes = Math.min(deviationBytes, deviationCap); + deviationBytes = Math.max(deviationBytes, -deviationCap); + + // Do bitrate adjustment every 3 seconds if actual encoder bitrate deviates too much + // from the target value. + if (timeSinceLastAdjustmentMs <= 1000 * BITRATE_ADJUSTMENT_SEC) { + return; + } + + if (deviationBytes > deviationThresholdBytes) { + // Encoder generates too high bitrate - need to reduce the scale. + int bitrateAdjustmentInc = (int) (deviationBytes / deviationThresholdBytes + 0.5); + bitrateAdjustmentScaleExp -= bitrateAdjustmentInc; + // Don't let the adjustment scale drop below -BITRATE_ADJUSTMENT_STEPS. + // This sets a minimum exponent of -1 (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS). + bitrateAdjustmentScaleExp = Math.max(bitrateAdjustmentScaleExp, -BITRATE_ADJUSTMENT_STEPS); + deviationBytes = deviationThresholdBytes; + } else if (deviationBytes < -deviationThresholdBytes) { + // Encoder generates too low bitrate - need to increase the scale. + int bitrateAdjustmentInc = (int) (-deviationBytes / deviationThresholdBytes + 0.5); + bitrateAdjustmentScaleExp += bitrateAdjustmentInc; + // Don't let the adjustment scale exceed BITRATE_ADJUSTMENT_STEPS. + // This sets a maximum exponent of 1 (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS). 
+ bitrateAdjustmentScaleExp = Math.min(bitrateAdjustmentScaleExp, BITRATE_ADJUSTMENT_STEPS); + deviationBytes = -deviationThresholdBytes; + } + timeSinceLastAdjustmentMs = 0; + } + + private double getBitrateAdjustmentScale() { + return Math.pow(BITRATE_ADJUSTMENT_MAX_SCALE, + (double) bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS); + } + + @Override + public int getAdjustedBitrateBps() { + return (int) (targetBitrateBps * getBitrateAdjustmentScale()); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase.java new file mode 100644 index 00000000..3b45e357 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase.java @@ -0,0 +1,305 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.graphics.SurfaceTexture; +import android.view.Surface; +import androidx.annotation.Nullable; +import java.util.ArrayList; +import javax.microedition.khronos.egl.EGL10; + +/** + * Holds EGL state and utility methods for handling an egl 1.0 EGLContext, an EGLDisplay, + * and an EGLSurface. + */ +public interface EglBase { + // EGL wrapper for an actual EGLContext. + public interface Context { + public final static long NO_CONTEXT = 0; + + /** + * Returns an EGL context that can be used by native code. Returns NO_CONTEXT if the method is + * unsupported. + * + * @note This is currently only supported for EGL 1.4 and not for EGL 1.0. + */ + long getNativeEglContext(); + } + + /** + * Wraps the objects needed to interact with EGL that are independent of a particular EGLSurface. 
+ * In practice this means EGLContext, EGLDisplay and EGLConfig objects. Separating them out in a + * standalone object allows for multiple EglBase instances to use the same underlying EGLContext, + * while still operating on their own EGLSurface. + */ + public interface EglConnection extends RefCounted { + /** Analogous to corresponding EglBase#create below. */ + public static EglConnection create(@Nullable Context sharedContext, int[] configAttributes) { + if (sharedContext == null) { + return EglConnection.createEgl14(configAttributes); + } else if (sharedContext instanceof EglBase14.Context) { + return new EglBase14Impl.EglConnection( + ((EglBase14.Context) sharedContext).getRawContext(), configAttributes); + } else if (sharedContext instanceof EglBase10.Context) { + return new EglBase10Impl.EglConnection( + ((EglBase10.Context) sharedContext).getRawContext(), configAttributes); + } + throw new IllegalArgumentException("Unrecognized Context"); + } + + /** Analogous to corresponding EglBase#createEgl10 below. */ + public static EglConnection createEgl10(int[] configAttributes) { + return new EglBase10Impl.EglConnection(/* sharedContext= */ null, configAttributes); + } + + /** Analogous to corresponding EglBase#createEgl14 below. */ + public static EglConnection createEgl14(int[] configAttributes) { + return new EglBase14Impl.EglConnection(/* sharedContext= */ null, configAttributes); + } + } + + // According to the documentation, EGL can be used from multiple threads at the same time if each + // thread has its own EGLContext, but in practice it deadlocks on some devices when doing this. + // Therefore, synchronize on this global lock before calling dangerous EGL functions that might + // deadlock. See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info. + public static final Object lock = new Object(); + + // These constants are taken from EGL14.EGL_OPENGL_ES2_BIT and EGL14.EGL_CONTEXT_CLIENT_VERSION. 
+ // https://android.googlesource.com/platform/frameworks/base/+/master/opengl/java/android/opengl/EGL14.java + // This is similar to how GlSurfaceView does: + // http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/opengl/GLSurfaceView.java#760 + public static final int EGL_OPENGL_ES2_BIT = 4; + public static final int EGL_OPENGL_ES3_BIT = 0x40; + // Android-specific extension. + public static final int EGL_RECORDABLE_ANDROID = 0x3142; + + public static ConfigBuilder configBuilder() { + return new ConfigBuilder(); + } + + public static class ConfigBuilder { + private int openGlesVersion = 2; + private boolean hasAlphaChannel; + private boolean supportsPixelBuffer; + private boolean isRecordable; + + public ConfigBuilder setOpenGlesVersion(int version) { + if (version < 1 || version > 3) { + throw new IllegalArgumentException("OpenGL ES version " + version + " not supported"); + } + this.openGlesVersion = version; + return this; + } + + public ConfigBuilder setHasAlphaChannel(boolean hasAlphaChannel) { + this.hasAlphaChannel = hasAlphaChannel; + return this; + } + + public ConfigBuilder setSupportsPixelBuffer(boolean supportsPixelBuffer) { + this.supportsPixelBuffer = supportsPixelBuffer; + return this; + } + + public ConfigBuilder setIsRecordable(boolean isRecordable) { + this.isRecordable = isRecordable; + return this; + } + + public int[] createConfigAttributes() { + ArrayList list = new ArrayList<>(); + list.add(EGL10.EGL_RED_SIZE); + list.add(8); + list.add(EGL10.EGL_GREEN_SIZE); + list.add(8); + list.add(EGL10.EGL_BLUE_SIZE); + list.add(8); + if (hasAlphaChannel) { + list.add(EGL10.EGL_ALPHA_SIZE); + list.add(8); + } + if (openGlesVersion == 2 || openGlesVersion == 3) { + list.add(EGL10.EGL_RENDERABLE_TYPE); + list.add(openGlesVersion == 3 ? 
EGL_OPENGL_ES3_BIT : EGL_OPENGL_ES2_BIT); + } + if (supportsPixelBuffer) { + list.add(EGL10.EGL_SURFACE_TYPE); + list.add(EGL10.EGL_PBUFFER_BIT); + } + if (isRecordable) { + list.add(EGL_RECORDABLE_ANDROID); + list.add(1); + } + list.add(EGL10.EGL_NONE); + + final int[] res = new int[list.size()]; + for (int i = 0; i < list.size(); ++i) { + res[i] = list.get(i); + } + return res; + } + } + + public static final int[] CONFIG_PLAIN = configBuilder().createConfigAttributes(); + public static final int[] CONFIG_RGBA = + configBuilder().setHasAlphaChannel(true).createConfigAttributes(); + public static final int[] CONFIG_PIXEL_BUFFER = + configBuilder().setSupportsPixelBuffer(true).createConfigAttributes(); + public static final int[] CONFIG_PIXEL_RGBA_BUFFER = configBuilder() + .setHasAlphaChannel(true) + .setSupportsPixelBuffer(true) + .createConfigAttributes(); + public static final int[] CONFIG_RECORDABLE = + configBuilder().setIsRecordable(true).createConfigAttributes(); + + static int getOpenGlesVersionFromConfig(int[] configAttributes) { + for (int i = 0; i < configAttributes.length - 1; ++i) { + if (configAttributes[i] == EGL10.EGL_RENDERABLE_TYPE) { + switch (configAttributes[i + 1]) { + case EGL_OPENGL_ES2_BIT: + return 2; + case EGL_OPENGL_ES3_BIT: + return 3; + default: + return 1; + } + } + } + // Default to V1 if no renderable type is specified. + return 1; + } + + /** + * Creates a new EglBase with a shared EglConnection. EglBase instances sharing the same + * EglConnection should be used on the same thread to avoid the underlying EGLContext being made + * current on multiple threads. It is up to the client of EglBase to ensure that instances with a + * shared EglConnection are current on that thread before each use since other EglBase instances + * may have used the same EGLContext since the last interaction. 
+ */ + public static EglBase create(EglConnection eglConnection) { + if (eglConnection == null) { + return create(); + } else if (eglConnection instanceof EglBase14Impl.EglConnection) { + return new EglBase14Impl((EglBase14Impl.EglConnection) eglConnection); + } else if (eglConnection instanceof EglBase10Impl.EglConnection) { + return new EglBase10Impl((EglBase10Impl.EglConnection) eglConnection); + } + throw new IllegalArgumentException("Unrecognized EglConnection"); + } + + /** + * Create a new context with the specified config attributes, sharing data with `sharedContext`. + * If `sharedContext` is null, a root EGL 1.4 context is created. + */ + public static EglBase create(@Nullable Context sharedContext, int[] configAttributes) { + if (sharedContext == null) { + return createEgl14(configAttributes); + } else if (sharedContext instanceof EglBase14.Context) { + return createEgl14((EglBase14.Context) sharedContext, configAttributes); + } else if (sharedContext instanceof EglBase10.Context) { + return createEgl10((EglBase10.Context) sharedContext, configAttributes); + } + throw new IllegalArgumentException("Unrecognized Context"); + } + + /** + * Helper function for creating a plain root context. This function will try to create an EGL 1.4 + * context if possible, and an EGL 1.0 context otherwise. + */ + public static EglBase create() { + return create(null /* shaderContext */, CONFIG_PLAIN); + } + + /** + * Helper function for creating a plain context, sharing data with `sharedContext`. This function + * will try to create an EGL 1.4 context if possible, and an EGL 1.0 context otherwise. + */ + public static EglBase create(Context sharedContext) { + return create(sharedContext, CONFIG_PLAIN); + } + + /** Explicitly create a root EGl 1.0 context with the specified config attributes. 
*/ + public static EglBase10 createEgl10(int[] configAttributes) { + return new EglBase10Impl(/* sharedContext= */ null, configAttributes); + } + + /** + * Explicitly create a root EGl 1.0 context with the specified config attributes and shared + * context. + */ + public static EglBase10 createEgl10(EglBase10.Context sharedContext, int[] configAttributes) { + return new EglBase10Impl( + sharedContext == null ? null : sharedContext.getRawContext(), configAttributes); + } + + /** + * Explicitly create a root EGl 1.0 context with the specified config attributes + * and shared context. + */ + public static EglBase10 createEgl10( + javax.microedition.khronos.egl.EGLContext sharedContext, int[] configAttributes) { + return new EglBase10Impl(sharedContext, configAttributes); + } + + /** Explicitly create a root EGl 1.4 context with the specified config attributes. */ + public static EglBase14 createEgl14(int[] configAttributes) { + return new EglBase14Impl(/* sharedContext= */ null, configAttributes); + } + + /** + * Explicitly create a root EGl 1.4 context with the specified config attributes and shared + * context. + */ + public static EglBase14 createEgl14(EglBase14.Context sharedContext, int[] configAttributes) { + return new EglBase14Impl( + sharedContext == null ? null : sharedContext.getRawContext(), configAttributes); + } + + /** + * Explicitly create a root EGl 1.4 context with the specified config attributes + * and shared context. + */ + public static EglBase14 createEgl14( + android.opengl.EGLContext sharedContext, int[] configAttributes) { + return new EglBase14Impl(sharedContext, configAttributes); + } + + void createSurface(Surface surface); + + // Create EGLSurface from the Android SurfaceTexture. + void createSurface(SurfaceTexture surfaceTexture); + + // Create dummy 1x1 pixel buffer surface so the context can be made current. 
+ void createDummyPbufferSurface(); + + void createPbufferSurface(int width, int height); + + Context getEglBaseContext(); + + boolean hasSurface(); + + int surfaceWidth(); + + int surfaceHeight(); + + void releaseSurface(); + + void release(); + + void makeCurrent(); + + // Detach the current EGL context, so that it can be made current on another thread. + void detachCurrent(); + + void swapBuffers(); + + void swapBuffers(long presentationTimeStampNs); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase10.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase10.java new file mode 100644 index 00000000..ad2eb1c0 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase10.java @@ -0,0 +1,33 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import javax.microedition.khronos.egl.EGL10; +import javax.microedition.khronos.egl.EGLConfig; +import javax.microedition.khronos.egl.EGLContext; +import javax.microedition.khronos.egl.EGLDisplay; + +/** EGL 1.0 implementation of EglBase. 
*/ +public interface EglBase10 extends EglBase { + interface Context extends EglBase.Context { + EGLContext getRawContext(); + } + + interface EglConnection extends EglBase.EglConnection { + EGL10 getEgl(); + + EGLContext getContext(); + + EGLDisplay getDisplay(); + + EGLConfig getConfig(); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase10Impl.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase10Impl.java new file mode 100644 index 00000000..caa10e7e --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase10Impl.java @@ -0,0 +1,448 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.graphics.Canvas; +import android.graphics.Rect; +import android.graphics.SurfaceTexture; +import android.opengl.GLException; +import android.view.Surface; +import android.view.SurfaceHolder; +import androidx.annotation.Nullable; +import javax.microedition.khronos.egl.EGL10; +import javax.microedition.khronos.egl.EGLConfig; +import javax.microedition.khronos.egl.EGLContext; +import javax.microedition.khronos.egl.EGLDisplay; +import javax.microedition.khronos.egl.EGLSurface; + +/** + * Holds EGL state and utility methods for handling an egl 1.0 EGLContext, an EGLDisplay, + * and an EGLSurface. + */ +class EglBase10Impl implements EglBase10 { + private static final String TAG = "EglBase10Impl"; + // This constant is taken from EGL14.EGL_CONTEXT_CLIENT_VERSION. 
+ private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098; + + private static final EglConnection EGL_NO_CONNECTION = new EglConnection(); + + private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE; + private EglConnection eglConnection; + + // EGL wrapper for an actual EGLContext. + private static class Context implements EglBase10.Context { + private final EGL10 egl; + private final EGLContext eglContext; + private final EGLConfig eglContextConfig; + + @Override + public EGLContext getRawContext() { + return eglContext; + } + + @Override + public long getNativeEglContext() { + EGLContext previousContext = egl.eglGetCurrentContext(); + EGLDisplay currentDisplay = egl.eglGetCurrentDisplay(); + EGLSurface previousDrawSurface = egl.eglGetCurrentSurface(EGL10.EGL_DRAW); + EGLSurface previousReadSurface = egl.eglGetCurrentSurface(EGL10.EGL_READ); + EGLSurface tempEglSurface = null; + + if (currentDisplay == EGL10.EGL_NO_DISPLAY) { + currentDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY); + } + + try { + if (previousContext != eglContext) { + int[] surfaceAttribs = {EGL10.EGL_WIDTH, 1, EGL10.EGL_HEIGHT, 1, EGL10.EGL_NONE}; + tempEglSurface = + egl.eglCreatePbufferSurface(currentDisplay, eglContextConfig, surfaceAttribs); + if (!egl.eglMakeCurrent(currentDisplay, tempEglSurface, tempEglSurface, eglContext)) { + throw new GLException(egl.eglGetError(), + "Failed to make temporary EGL surface active: " + egl.eglGetError()); + } + } + + return nativeGetCurrentNativeEGLContext(); + } finally { + if (tempEglSurface != null) { + egl.eglMakeCurrent( + currentDisplay, previousDrawSurface, previousReadSurface, previousContext); + egl.eglDestroySurface(currentDisplay, tempEglSurface); + } + } + } + + public Context(EGL10 egl, EGLContext eglContext, EGLConfig eglContextConfig) { + this.egl = egl; + this.eglContext = eglContext; + this.eglContextConfig = eglContextConfig; + } + } + + public static class EglConnection implements EglBase10.EglConnection { + private final 
EGL10 egl; + private final EGLContext eglContext; + private final EGLDisplay eglDisplay; + private final EGLConfig eglConfig; + private final RefCountDelegate refCountDelegate; + private EGLSurface currentSurface = EGL10.EGL_NO_SURFACE; + + public EglConnection(EGLContext sharedContext, int[] configAttributes) { + egl = (EGL10) EGLContext.getEGL(); + eglDisplay = getEglDisplay(egl); + eglConfig = getEglConfig(egl, eglDisplay, configAttributes); + final int openGlesVersion = EglBase.getOpenGlesVersionFromConfig(configAttributes); + Logging.d(TAG, "Using OpenGL ES version " + openGlesVersion); + eglContext = createEglContext(egl, sharedContext, eglDisplay, eglConfig, openGlesVersion); + + // Ref count delegate with release callback. + refCountDelegate = new RefCountDelegate(() -> { + synchronized (EglBase.lock) { + egl.eglMakeCurrent( + eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT); + } + egl.eglDestroyContext(eglDisplay, eglContext); + egl.eglTerminate(eglDisplay); + currentSurface = EGL10.EGL_NO_SURFACE; + }); + } + + // Returns a "null" EglConnection. Useful to represent a released instance with default values. 
+ private EglConnection() { + egl = (EGL10) EGLContext.getEGL(); + eglContext = EGL10.EGL_NO_CONTEXT; + eglDisplay = EGL10.EGL_NO_DISPLAY; + eglConfig = null; + refCountDelegate = new RefCountDelegate(() -> {}); + } + + @Override + public void retain() { + refCountDelegate.retain(); + } + + @Override + public void release() { + refCountDelegate.release(); + } + + @Override + public EGL10 getEgl() { + return egl; + } + + @Override + public EGLContext getContext() { + return eglContext; + } + + @Override + public EGLDisplay getDisplay() { + return eglDisplay; + } + + @Override + public EGLConfig getConfig() { + return eglConfig; + } + + public void makeCurrent(EGLSurface eglSurface) { + if (egl.eglGetCurrentContext() == eglContext && currentSurface == eglSurface) { + return; + } + + synchronized (EglBase.lock) { + if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) { + throw new GLException(egl.eglGetError(), + "eglMakeCurrent failed: 0x" + Integer.toHexString(egl.eglGetError())); + } + } + currentSurface = eglSurface; + } + + public void detachCurrent() { + synchronized (EglBase.lock) { + if (!egl.eglMakeCurrent( + eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) { + throw new GLException(egl.eglGetError(), + "eglDetachCurrent failed: 0x" + Integer.toHexString(egl.eglGetError())); + } + } + currentSurface = EGL10.EGL_NO_SURFACE; + } + } + + // Create a new context with the specified config type, sharing data with sharedContext. 
+ public EglBase10Impl(EGLContext sharedContext, int[] configAttributes) { + this.eglConnection = new EglConnection(sharedContext, configAttributes); + } + + public EglBase10Impl(EglConnection eglConnection) { + this.eglConnection = eglConnection; + this.eglConnection.retain(); + } + + @Override + public void createSurface(Surface surface) { + /** + * We have to wrap Surface in a SurfaceHolder because for some reason eglCreateWindowSurface + * couldn't actually take a Surface object until API 17. Older versions fortunately just call + * SurfaceHolder.getSurface(), so we'll do that. No other methods are relevant. + */ + class FakeSurfaceHolder implements SurfaceHolder { + private final Surface surface; + + FakeSurfaceHolder(Surface surface) { + this.surface = surface; + } + + @Override + public void addCallback(Callback callback) {} + + @Override + public void removeCallback(Callback callback) {} + + @Override + public boolean isCreating() { + return false; + } + + @Deprecated + @Override + public void setType(int i) {} + + @Override + public void setFixedSize(int i, int i2) {} + + @Override + public void setSizeFromLayout() {} + + @Override + public void setFormat(int i) {} + + @Override + public void setKeepScreenOn(boolean b) {} + + @Nullable + @Override + public Canvas lockCanvas() { + return null; + } + + @Nullable + @Override + public Canvas lockCanvas(Rect rect) { + return null; + } + + @Override + public void unlockCanvasAndPost(Canvas canvas) {} + + @Nullable + @Override + public Rect getSurfaceFrame() { + return null; + } + + @Override + public Surface getSurface() { + return surface; + } + } + + createSurfaceInternal(new FakeSurfaceHolder(surface)); + } + + // Create EGLSurface from the Android SurfaceTexture. + @Override + public void createSurface(SurfaceTexture surfaceTexture) { + createSurfaceInternal(surfaceTexture); + } + + // Create EGLSurface from either a SurfaceHolder or a SurfaceTexture. 
+ private void createSurfaceInternal(Object nativeWindow) { + if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) { + throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture"); + } + checkIsNotReleased(); + if (eglSurface != EGL10.EGL_NO_SURFACE) { + throw new RuntimeException("Already has an EGLSurface"); + } + + EGL10 egl = eglConnection.getEgl(); + int[] surfaceAttribs = {EGL10.EGL_NONE}; + eglSurface = egl.eglCreateWindowSurface( + eglConnection.getDisplay(), eglConnection.getConfig(), nativeWindow, surfaceAttribs); + if (eglSurface == EGL10.EGL_NO_SURFACE) { + throw new GLException(egl.eglGetError(), + "Failed to create window surface: 0x" + Integer.toHexString(egl.eglGetError())); + } + } + + // Create dummy 1x1 pixel buffer surface so the context can be made current. + @Override + public void createDummyPbufferSurface() { + createPbufferSurface(1, 1); + } + + @Override + public void createPbufferSurface(int width, int height) { + checkIsNotReleased(); + if (eglSurface != EGL10.EGL_NO_SURFACE) { + throw new RuntimeException("Already has an EGLSurface"); + } + EGL10 egl = eglConnection.getEgl(); + int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE}; + eglSurface = egl.eglCreatePbufferSurface( + eglConnection.getDisplay(), eglConnection.getConfig(), surfaceAttribs); + if (eglSurface == EGL10.EGL_NO_SURFACE) { + throw new GLException(egl.eglGetError(), + "Failed to create pixel buffer surface with size " + width + "x" + height + ": 0x" + + Integer.toHexString(egl.eglGetError())); + } + } + + @Override + public org.webrtc.EglBase.Context getEglBaseContext() { + return new Context( + eglConnection.getEgl(), eglConnection.getContext(), eglConnection.getConfig()); + } + + @Override + public boolean hasSurface() { + return eglSurface != EGL10.EGL_NO_SURFACE; + } + + @Override + public int surfaceWidth() { + final int widthArray[] = new int[1]; + 
eglConnection.getEgl().eglQuerySurface( + eglConnection.getDisplay(), eglSurface, EGL10.EGL_WIDTH, widthArray); + return widthArray[0]; + } + + @Override + public int surfaceHeight() { + final int heightArray[] = new int[1]; + eglConnection.getEgl().eglQuerySurface( + eglConnection.getDisplay(), eglSurface, EGL10.EGL_HEIGHT, heightArray); + return heightArray[0]; + } + + @Override + public void releaseSurface() { + if (eglSurface != EGL10.EGL_NO_SURFACE) { + eglConnection.getEgl().eglDestroySurface(eglConnection.getDisplay(), eglSurface); + eglSurface = EGL10.EGL_NO_SURFACE; + } + } + + private void checkIsNotReleased() { + if (eglConnection == EGL_NO_CONNECTION) { + throw new RuntimeException("This object has been released"); + } + } + + @Override + public void release() { + checkIsNotReleased(); + releaseSurface(); + eglConnection.release(); + eglConnection = EGL_NO_CONNECTION; + } + + @Override + public void makeCurrent() { + checkIsNotReleased(); + if (eglSurface == EGL10.EGL_NO_SURFACE) { + throw new RuntimeException("No EGLSurface - can't make current"); + } + eglConnection.makeCurrent(eglSurface); + } + + // Detach the current EGL context, so that it can be made current on another thread. + @Override + public void detachCurrent() { + eglConnection.detachCurrent(); + } + + @Override + public void swapBuffers() { + checkIsNotReleased(); + if (eglSurface == EGL10.EGL_NO_SURFACE) { + throw new RuntimeException("No EGLSurface - can't swap buffers"); + } + synchronized (EglBase.lock) { + eglConnection.getEgl().eglSwapBuffers(eglConnection.getDisplay(), eglSurface); + } + } + + @Override + public void swapBuffers(long timeStampNs) { + // Setting presentation time is not supported for EGL 1.0. + swapBuffers(); + } + + // Return an EGLDisplay, or die trying. 
  private static EGLDisplay getEglDisplay(EGL10 egl) {
    EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
    if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
      throw new GLException(egl.eglGetError(),
          "Unable to get EGL10 display: 0x" + Integer.toHexString(egl.eglGetError()));
    }
    int[] version = new int[2];
    if (!egl.eglInitialize(eglDisplay, version)) {
      throw new GLException(egl.eglGetError(),
          "Unable to initialize EGL10: 0x" + Integer.toHexString(egl.eglGetError()));
    }
    return eglDisplay;
  }

  // Return an EGLConfig, or die trying.
  private static EGLConfig getEglConfig(EGL10 egl, EGLDisplay eglDisplay, int[] configAttributes) {
    EGLConfig[] configs = new EGLConfig[1];
    int[] numConfigs = new int[1];
    if (!egl.eglChooseConfig(eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
      throw new GLException(
          egl.eglGetError(), "eglChooseConfig failed: 0x" + Integer.toHexString(egl.eglGetError()));
    }
    if (numConfigs[0] <= 0) {
      throw new RuntimeException("Unable to find any matching EGL config");
    }
    final EGLConfig eglConfig = configs[0];
    if (eglConfig == null) {
      throw new RuntimeException("eglChooseConfig returned null");
    }
    return eglConfig;
  }

  // Return an EGLContext, or die trying. (Upstream comment said "EGLConfig" - copy/paste slip.)
  private static EGLContext createEglContext(EGL10 egl, @Nullable EGLContext sharedContext,
      EGLDisplay eglDisplay, EGLConfig eglConfig, int openGlesVersion) {
    if (sharedContext != null && sharedContext == EGL10.EGL_NO_CONTEXT) {
      throw new RuntimeException("Invalid sharedContext");
    }
    int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, openGlesVersion, EGL10.EGL_NONE};
    EGLContext rootContext = sharedContext == null ? EGL10.EGL_NO_CONTEXT : sharedContext;
    final EGLContext eglContext;
    // Context creation is not thread-safe across EGL implementations; guard globally.
    synchronized (EglBase.lock) {
      eglContext = egl.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes);
    }
    if (eglContext == EGL10.EGL_NO_CONTEXT) {
      throw new GLException(egl.eglGetError(),
          "Failed to create EGL context: 0x" + Integer.toHexString(egl.eglGetError()));
    }
    return eglContext;
  }

  private static native long nativeGetCurrentNativeEGLContext();
}

// ==== begin file: webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase14.java ====

/*
 * Copyright 2019 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;

/** EGL 1.4 implementation of EglBase. */
public interface EglBase14 extends EglBase {
  // Context wrapper exposing the raw EGL14 EGLContext for sharing between EglBase instances.
  interface Context extends EglBase.Context {
    EGLContext getRawContext();
  }

  // Holder of the EGL14 context/display/config triple backing a (possibly shared) connection.
  interface EglConnection extends EglBase.EglConnection {
    EGLContext getContext();

    EGLDisplay getDisplay();

    EGLConfig getConfig();
  }
}

// ==== begin file: webrtc_player/android/zlm/src/main/java/org/webrtc/EglBase14Impl.java ====

/*
 * Copyright 2015 The WebRTC project authors.
 * All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.opengl.GLException;
import android.view.Surface;
import androidx.annotation.Nullable;

/**
 * Holds EGL state and utility methods for handling an EGL14 EGLContext, an EGLDisplay,
 * and an EGLSurface.
 */
@SuppressWarnings("ReferenceEquality") // We want to compare to EGL14 constants.
class EglBase14Impl implements EglBase14 {
  private static final String TAG = "EglBase14Impl";
  // Sentinel installed by release() so stale calls fail fast in checkIsNotReleased().
  private static final EglConnection EGL_NO_CONNECTION = new EglConnection();

  private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
  private EglConnection eglConnection;

  /** Wrapper around an EGL14 EGLContext, used for context sharing between EglBase instances. */
  public static class Context implements EglBase14.Context {
    private final EGLContext egl14Context;

    @Override
    public EGLContext getRawContext() {
      return egl14Context;
    }

    @Override
    public long getNativeEglContext() {
      return egl14Context.getNativeHandle();
    }

    public Context(android.opengl.EGLContext eglContext) {
      this.egl14Context = eglContext;
    }
  }

  /**
   * Ref-counted holder of an EGL14 context/display/config triple. The underlying EGL resources
   * are destroyed when the last reference is released.
   */
  public static class EglConnection implements EglBase14.EglConnection {
    private final EGLContext eglContext;
    private final EGLDisplay eglDisplay;
    private final EGLConfig eglConfig;
    private final RefCountDelegate refCountDelegate;
    // Cached current surface so redundant eglMakeCurrent calls can be skipped.
    private EGLSurface currentSurface = EGL14.EGL_NO_SURFACE;

    public EglConnection(EGLContext sharedContext, int[] configAttributes) {
      eglDisplay = getEglDisplay();
      eglConfig = getEglConfig(eglDisplay, configAttributes);
      final int openGlesVersion = EglBase.getOpenGlesVersionFromConfig(configAttributes);
      Logging.d(TAG, "Using OpenGL ES version " + openGlesVersion);
      eglContext = createEglContext(sharedContext, eglDisplay, eglConfig, openGlesVersion);

      // Ref count delegate with release callback.
      refCountDelegate = new RefCountDelegate(() -> {
        synchronized (EglBase.lock) {
          EGL14.eglMakeCurrent(
              eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
          EGL14.eglDestroyContext(eglDisplay, eglContext);
        }
        EGL14.eglReleaseThread();
        EGL14.eglTerminate(eglDisplay);
        currentSurface = EGL14.EGL_NO_SURFACE;
      });
    }

    // Returns a "null" EglConnection. Useful to represent a released instance with default values.
    private EglConnection() {
      eglContext = EGL14.EGL_NO_CONTEXT;
      eglDisplay = EGL14.EGL_NO_DISPLAY;
      eglConfig = null;
      refCountDelegate = new RefCountDelegate(() -> {});
    }

    @Override
    public void retain() {
      refCountDelegate.retain();
    }

    @Override
    public void release() {
      refCountDelegate.release();
    }

    @Override
    public EGLContext getContext() {
      return eglContext;
    }

    @Override
    public EGLDisplay getDisplay() {
      return eglDisplay;
    }

    @Override
    public EGLConfig getConfig() {
      return eglConfig;
    }

    // Makes `eglSurface` current on the calling thread; no-op if it is already current.
    public void makeCurrent(EGLSurface eglSurface) {
      if (EGL14.eglGetCurrentContext() == eglContext && currentSurface == eglSurface) {
        return;
      }

      synchronized (EglBase.lock) {
        if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
          throw new GLException(EGL14.eglGetError(),
              "eglMakeCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
        }
      }
      currentSurface = eglSurface;
    }

    // Detaches the context from the calling thread so it can be made current on another thread.
    public void detachCurrent() {
      synchronized (EglBase.lock) {
        if (!EGL14.eglMakeCurrent(
                eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
          throw new GLException(EGL14.eglGetError(),
              "eglDetachCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
        }
      }
      currentSurface = EGL14.EGL_NO_SURFACE;
    }
  }

  // Create a new context with the specified config type, sharing data with sharedContext.
  // `sharedContext` may be null.
  public EglBase14Impl(EGLContext sharedContext, int[] configAttributes) {
    this.eglConnection = new EglConnection(sharedContext, configAttributes);
  }

  // Create a new EglBase using an existing, possibly externally managed, EglConnection.
  public EglBase14Impl(EglConnection eglConnection) {
    this.eglConnection = eglConnection;
    this.eglConnection.retain();
  }

  // Create EGLSurface from the Android Surface.
  @Override
  public void createSurface(Surface surface) {
    createSurfaceInternal(surface);
  }

  // Create EGLSurface from the Android SurfaceTexture.
  @Override
  public void createSurface(SurfaceTexture surfaceTexture) {
    createSurfaceInternal(surfaceTexture);
  }

  // Create EGLSurface from either Surface or SurfaceTexture.
  private void createSurfaceInternal(Object surface) {
    if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
      throw new IllegalStateException("Input must be either a Surface or SurfaceTexture");
    }
    checkIsNotReleased();
    if (eglSurface != EGL14.EGL_NO_SURFACE) {
      throw new RuntimeException("Already has an EGLSurface");
    }
    int[] surfaceAttribs = {EGL14.EGL_NONE};
    eglSurface = EGL14.eglCreateWindowSurface(
        eglConnection.getDisplay(), eglConnection.getConfig(), surface, surfaceAttribs, 0);
    if (eglSurface == EGL14.EGL_NO_SURFACE) {
      throw new GLException(EGL14.eglGetError(),
          "Failed to create window surface: 0x" + Integer.toHexString(EGL14.eglGetError()));
    }
  }

  // Create dummy 1x1 pixel buffer surface so the context can be made current.
  @Override
  public void createDummyPbufferSurface() {
    createPbufferSurface(1, 1);
  }

  // Creates an off-screen pbuffer surface of the given size (used when no window is available).
  @Override
  public void createPbufferSurface(int width, int height) {
    checkIsNotReleased();
    if (eglSurface != EGL14.EGL_NO_SURFACE) {
      throw new RuntimeException("Already has an EGLSurface");
    }
    int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
    eglSurface = EGL14.eglCreatePbufferSurface(
        eglConnection.getDisplay(), eglConnection.getConfig(), surfaceAttribs, 0);
    if (eglSurface == EGL14.EGL_NO_SURFACE) {
      throw new GLException(EGL14.eglGetError(),
          "Failed to create pixel buffer surface with size " + width + "x" + height + ": 0x"
              + Integer.toHexString(EGL14.eglGetError()));
    }
  }

  // Returns a Context wrapper so this EGL context can be shared with other EglBase instances.
  @Override
  public Context getEglBaseContext() {
    return new Context(eglConnection.getContext());
  }

  @Override
  public boolean hasSurface() {
    return eglSurface != EGL14.EGL_NO_SURFACE;
  }

  // Queries the current surface width in pixels directly from EGL.
  @Override
  public int surfaceWidth() {
    final int[] widthArray = new int[1];
    EGL14.eglQuerySurface(eglConnection.getDisplay(), eglSurface, EGL14.EGL_WIDTH, widthArray, 0);
    return widthArray[0];
  }

  // Queries the current surface height in pixels directly from EGL.
  @Override
  public int surfaceHeight() {
    final int[] heightArray = new int[1];
    EGL14.eglQuerySurface(eglConnection.getDisplay(), eglSurface, EGL14.EGL_HEIGHT, heightArray, 0);
    return heightArray[0];
  }

  // Destroys the current EGLSurface, if any. Safe to call when no surface exists.
  @Override
  public void releaseSurface() {
    if (eglSurface != EGL14.EGL_NO_SURFACE) {
      EGL14.eglDestroySurface(eglConnection.getDisplay(), eglSurface);
      eglSurface = EGL14.EGL_NO_SURFACE;
    }
  }

  // Fails fast if release() has already swapped in the EGL_NO_CONNECTION sentinel.
  private void checkIsNotReleased() {
    if (eglConnection == EGL_NO_CONNECTION) {
      throw new RuntimeException("This object has been released");
    }
  }

  // Releases the surface and drops this instance's reference on the shared EGL connection.
  @Override
  public void release() {
    checkIsNotReleased();
    releaseSurface();
    eglConnection.release();
    eglConnection = EGL_NO_CONNECTION;
  }

  @Override
  public void makeCurrent() {
    checkIsNotReleased();
    if (eglSurface == EGL14.EGL_NO_SURFACE) {
      throw new RuntimeException("No EGLSurface - can't make current");
    }
    eglConnection.makeCurrent(eglSurface);
  }

  // Detach the current EGL context, so that it can be made current on another thread.
  @Override
  public void detachCurrent() {
    eglConnection.detachCurrent();
  }

  @Override
  public void swapBuffers() {
    checkIsNotReleased();
    if (eglSurface == EGL14.EGL_NO_SURFACE) {
      throw new RuntimeException("No EGLSurface - can't swap buffers");
    }
    // EglBase.lock serializes EGL calls that are not thread-safe across all instances.
    synchronized (EglBase.lock) {
      EGL14.eglSwapBuffers(eglConnection.getDisplay(), eglSurface);
    }
  }

  @Override
  public void swapBuffers(long timeStampNs) {
    checkIsNotReleased();
    if (eglSurface == EGL14.EGL_NO_SURFACE) {
      throw new RuntimeException("No EGLSurface - can't swap buffers");
    }
    synchronized (EglBase.lock) {
      // See
      // https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
      EGLExt.eglPresentationTimeANDROID(eglConnection.getDisplay(), eglSurface, timeStampNs);
      EGL14.eglSwapBuffers(eglConnection.getDisplay(), eglSurface);
    }
  }

  // Return an EGLDisplay, or die trying.
  private static EGLDisplay getEglDisplay() {
    EGLDisplay eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
    if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
      throw new GLException(EGL14.eglGetError(),
          "Unable to get EGL14 display: 0x" + Integer.toHexString(EGL14.eglGetError()));
    }
    int[] version = new int[2];
    // Offsets 0 and 1 select the major/minor slots in the single `version` array.
    if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
      throw new GLException(EGL14.eglGetError(),
          "Unable to initialize EGL14: 0x" + Integer.toHexString(EGL14.eglGetError()));
    }
    return eglDisplay;
  }

  // Return an EGLConfig, or die trying.
  private static EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
    EGLConfig[] configs = new EGLConfig[1];
    int[] numConfigs = new int[1];
    if (!EGL14.eglChooseConfig(
            eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
      throw new GLException(EGL14.eglGetError(),
          "eglChooseConfig failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
    }
    if (numConfigs[0] <= 0) {
      throw new RuntimeException("Unable to find any matching EGL config");
    }
    final EGLConfig eglConfig = configs[0];
    if (eglConfig == null) {
      throw new RuntimeException("eglChooseConfig returned null");
    }
    return eglConfig;
  }

  // Return an EGLContext, or die trying. (Upstream comment said "EGLConfig" - copy/paste slip.)
  private static EGLContext createEglContext(@Nullable EGLContext sharedContext,
      EGLDisplay eglDisplay, EGLConfig eglConfig, int openGlesVersion) {
    if (sharedContext != null && sharedContext == EGL14.EGL_NO_CONTEXT) {
      throw new RuntimeException("Invalid sharedContext");
    }
    int[] contextAttributes = {EGL14.EGL_CONTEXT_CLIENT_VERSION, openGlesVersion, EGL14.EGL_NONE};
    EGLContext rootContext = sharedContext == null ? EGL14.EGL_NO_CONTEXT : sharedContext;
    final EGLContext eglContext;
    // Context creation is not thread-safe across EGL implementations; guard globally.
    synchronized (EglBase.lock) {
      eglContext = EGL14.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes, 0);
    }
    if (eglContext == EGL14.EGL_NO_CONTEXT) {
      throw new GLException(EGL14.eglGetError(),
          "Failed to create EGL context: 0x" + Integer.toHexString(EGL14.eglGetError()));
    }
    return eglContext;
  }
}

// ==== begin file: webrtc_player/android/zlm/src/main/java/org/webrtc/EglRenderer.java ====

/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
import android.view.Surface;
import androidx.annotation.GuardedBy;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

/**
 * Implements VideoSink by displaying the video stream on an EGL Surface. This class is intended to
 * be used as a helper class for rendering on SurfaceViews and TextureViews.
 */
public class EglRenderer implements VideoSink {
  private static final String TAG = "EglRenderer";
  private static final long LOG_INTERVAL_SEC = 4;

  /** Callback delivering captured frames as Bitmaps; registered via addFrameListener(). */
  public interface FrameListener { void onFrame(Bitmap frame); }

  /** Callback for clients to be notified about errors encountered during rendering. */
  public static interface ErrorCallback {
    /** Called if GLES20.GL_OUT_OF_MEMORY is encountered during rendering. */
    void onGlOutOfMemory();
  }

  // Immutable bundle of a frame listener and the parameters it was registered with.
  private static class FrameListenerAndParams {
    public final FrameListener listener;
    public final float scale;
    public final RendererCommon.GlDrawer drawer;
    public final boolean applyFpsReduction;

    public FrameListenerAndParams(FrameListener listener, float scale,
        RendererCommon.GlDrawer drawer, boolean applyFpsReduction) {
      this.listener = listener;
      this.scale = scale;
      this.drawer = drawer;
      this.applyFpsReduction = applyFpsReduction;
    }
  }

  // Runnable that lazily creates the EGL surface on the render thread once a surface is set.
  private class EglSurfaceCreation implements Runnable {
    private Object surface;

    // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
    @SuppressWarnings("NoSynchronizedMethodCheck")
    public synchronized void setSurface(Object surface) {
      this.surface = surface;
    }

    @Override
    // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
    @SuppressWarnings("NoSynchronizedMethodCheck")
    public synchronized void run() {
      if (surface != null && eglBase != null && !eglBase.hasSurface()) {
        if (surface instanceof Surface) {
          eglBase.createSurface((Surface) surface);
        } else if (surface instanceof SurfaceTexture) {
          eglBase.createSurface((SurfaceTexture) surface);
        } else {
          throw new IllegalStateException("Invalid surface: " + surface);
        }
        eglBase.makeCurrent();
        // Necessary for YUV frames with odd width.
        GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
      }
    }
  }

  protected final String name;

  // `eglThread` is used for rendering, and is synchronized on `threadLock`.
  private final Object threadLock = new Object();
  @GuardedBy("threadLock") @Nullable private EglThread eglThread;

  // Invoked by EglThread on render failure; drops our reference so later calls no-op.
  private final Runnable eglExceptionCallback = new Runnable() {
    @Override
    public void run() {
      synchronized (threadLock) {
        eglThread = null;
      }
    }
  };

  // NOTE(review): element type restored; the pasted source had the generic parameter stripped.
  private final ArrayList<FrameListenerAndParams> frameListeners = new ArrayList<>();

  private volatile ErrorCallback errorCallback;

  // Variables for fps reduction.
  private final Object fpsReductionLock = new Object();
  // Time for when next frame should be rendered.
  private long nextFrameTimeNs;
  // Minimum duration between frames when fps reduction is active, or -1 if video is completely
  // paused.
  private long minRenderPeriodNs;

  // EGL and GL resources for drawing YUV/OES textures. After initialization, these are only
  // accessed from the render thread.
  @Nullable private EglBase eglBase;
  private final VideoFrameDrawer frameDrawer;
  @Nullable private RendererCommon.GlDrawer drawer;
  private boolean usePresentationTimeStamp;
  private final Matrix drawMatrix = new Matrix();

  // Pending frame to render. Serves as a queue with size 1. Synchronized on `frameLock`.
  private final Object frameLock = new Object();
  @Nullable private VideoFrame pendingFrame;

  // These variables are synchronized on `layoutLock`.
  private final Object layoutLock = new Object();
  private float layoutAspectRatio;
  // If true, mirrors the video stream horizontally.
  private boolean mirrorHorizontally;
  // If true, mirrors the video stream vertically.
  private boolean mirrorVertically;

  // These variables are synchronized on `statisticsLock`.
  private final Object statisticsLock = new Object();
  // Total number of video frames received in renderFrame() call.
  private int framesReceived;
  // Number of video frames dropped by renderFrame() because previous frame has not been rendered
  // yet.
  private int framesDropped;
  // Number of rendered video frames.
  private int framesRendered;
  // Start time for counting these statistics, or 0 if we haven't started measuring yet.
  private long statisticsStartTimeNs;
  // Time in ns spent in renderFrameOnRenderThread() function.
  private long renderTimeNs;
  // Time in ns spent by the render thread in the swapBuffers() function.
  private long renderSwapBufferTimeNs;

  // Used for bitmap capturing.
  private final GlTextureFrameBuffer bitmapTextureFramebuffer =
      new GlTextureFrameBuffer(GLES20.GL_RGBA);

  // Periodically logs render statistics while a render thread is attached.
  private final Runnable logStatisticsRunnable = new Runnable() {
    @Override
    public void run() {
      logStatistics();
      synchronized (threadLock) {
        if (eglThread != null) {
          eglThread.getHandler().removeCallbacks(logStatisticsRunnable);
          eglThread.getHandler().postDelayed(
              logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC));
        }
      }
    }
  };

  private final EglSurfaceCreation eglSurfaceCreationRunnable = new EglSurfaceCreation();

  /**
   * Standard constructor. The name will be included when logging. In order to render something,
   * you must first call init() and createEglSurface.
   */
  public EglRenderer(String name) {
    this(name, new VideoFrameDrawer());
  }

  public EglRenderer(String name, VideoFrameDrawer videoFrameDrawer) {
    this.name = name;
    this.frameDrawer = videoFrameDrawer;
  }

  // Initializes rendering on an existing (possibly shared) EglThread.
  public void init(
      EglThread eglThread, RendererCommon.GlDrawer drawer, boolean usePresentationTimeStamp) {
    synchronized (threadLock) {
      if (this.eglThread != null) {
        throw new IllegalStateException(name + "Already initialized");
      }

      logD("Initializing EglRenderer");
      this.eglThread = eglThread;
      this.drawer = drawer;
      this.usePresentationTimeStamp = usePresentationTimeStamp;

      eglThread.addExceptionCallback(eglExceptionCallback);

      eglBase = eglThread.createEglBaseWithSharedConnection();
      eglThread.getHandler().post(eglSurfaceCreationRunnable);

      final long currentTimeNs = System.nanoTime();
      resetStatistics(currentTimeNs);

      eglThread.getHandler().postDelayed(
          logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC));
    }
  }

  /**
   * Initialize this class, sharing resources with `sharedContext`. The custom `drawer` will be used
   * for drawing frames on the EGLSurface. This class is responsible for calling release() on
   * `drawer`. It is allowed to call init() to reinitialize the renderer after a previous
   * init()/release() cycle. If usePresentationTimeStamp is true, eglPresentationTimeANDROID will be
   * set with the frame timestamps, which specifies desired presentation time and might be useful
   * for e.g. syncing audio and video.
   */
  public void init(@Nullable final EglBase.Context sharedContext, final int[] configAttributes,
      RendererCommon.GlDrawer drawer, boolean usePresentationTimeStamp) {
    EglThread thread =
        EglThread.create(/* releaseMonitor= */ null, sharedContext, configAttributes);
    init(thread, drawer, usePresentationTimeStamp);
  }

  /**
   * Same as above with usePresentationTimeStamp set to false.
   *
   * @see #init(EglBase.Context, int[], RendererCommon.GlDrawer, boolean)
   */
  public void init(@Nullable final EglBase.Context sharedContext, final int[] configAttributes,
      RendererCommon.GlDrawer drawer) {
    init(sharedContext, configAttributes, drawer, /* usePresentationTimeStamp= */ false);
  }

  public void createEglSurface(Surface surface) {
    createEglSurfaceInternal(surface);
  }

  public void createEglSurface(SurfaceTexture surfaceTexture) {
    createEglSurfaceInternal(surfaceTexture);
  }

  // Hands the surface to the render thread, which attaches it lazily (see EglSurfaceCreation).
  private void createEglSurfaceInternal(Object surface) {
    eglSurfaceCreationRunnable.setSurface(surface);
    postToRenderThread(eglSurfaceCreationRunnable);
  }

  /**
   * Block until any pending frame is returned and all GL resources released, even if an interrupt
   * occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function
   * should be called before the Activity is destroyed and the EGLContext is still valid. If you
   * don't call this function, the GL resources might leak.
   */
  public void release() {
    logD("Releasing.");
    final CountDownLatch eglCleanupBarrier = new CountDownLatch(1);
    synchronized (threadLock) {
      if (eglThread == null) {
        logD("Already released");
        return;
      }
      eglThread.getHandler().removeCallbacks(logStatisticsRunnable);
      eglThread.removeExceptionCallback(eglExceptionCallback);

      // Release EGL and GL resources on render thread.
      eglThread.getHandler().postAtFrontOfQueue(() -> {
        // Detach current shader program.
        synchronized (EglBase.lock) {
          GLES20.glUseProgram(/* program= */ 0);
        }
        if (drawer != null) {
          drawer.release();
          drawer = null;
        }
        frameDrawer.release();
        bitmapTextureFramebuffer.release();

        if (eglBase != null) {
          logD("eglBase detach and release.");
          eglBase.detachCurrent();
          eglBase.release();
          eglBase = null;
        }

        frameListeners.clear();
        eglCleanupBarrier.countDown();
      });

      // Don't accept any more frames or messages to the render thread.
      eglThread.release();
      eglThread = null;
    }
    // Make sure the EGL/GL cleanup posted above is executed.
    ThreadUtils.awaitUninterruptibly(eglCleanupBarrier);
    synchronized (frameLock) {
      if (pendingFrame != null) {
        pendingFrame.release();
        pendingFrame = null;
      }
    }
    logD("Releasing done.");
  }

  /**
   * Reset the statistics logged in logStatistics().
   */
  private void resetStatistics(long currentTimeNs) {
    synchronized (statisticsLock) {
      statisticsStartTimeNs = currentTimeNs;
      framesReceived = 0;
      framesDropped = 0;
      framesRendered = 0;
      renderTimeNs = 0;
      renderSwapBufferTimeNs = 0;
    }
  }

  // Logs the render thread's stack trace; useful when debugging a stuck renderer.
  public void printStackTrace() {
    synchronized (threadLock) {
      final Thread renderThread =
          (eglThread == null) ? null : eglThread.getHandler().getLooper().getThread();
      if (renderThread != null) {
        final StackTraceElement[] renderStackTrace = renderThread.getStackTrace();
        if (renderStackTrace.length > 0) {
          logW("EglRenderer stack trace:");
          for (StackTraceElement traceElem : renderStackTrace) {
            logW(traceElem.toString());
          }
        }
      }
    }
  }

  /**
   * Set if the video stream should be mirrored horizontally or not.
   */
  public void setMirror(final boolean mirror) {
    synchronized (layoutLock) {
      this.mirrorHorizontally = mirror;
    }
  }

  /**
   * Set if the video stream should be mirrored vertically or not.
   */
  public void setMirrorVertically(final boolean mirrorVertically) {
    synchronized (layoutLock) {
      this.mirrorVertically = mirrorVertically;
    }
  }

  /**
   * Set layout aspect ratio. This is used to crop frames when rendering to avoid stretched video.
   * Set this to 0 to disable cropping.
   */
  public void setLayoutAspectRatio(float layoutAspectRatio) {
    synchronized (layoutLock) {
      this.layoutAspectRatio = layoutAspectRatio;
    }
  }

  /**
   * Limit render framerate.
   *
   * @param fps Limit render framerate to this value, or use Float.POSITIVE_INFINITY to disable fps
   *     reduction.
   */
  public void setFpsReduction(float fps) {
    synchronized (fpsReductionLock) {
      final long previousRenderPeriodNs = minRenderPeriodNs;
      if (fps <= 0) {
        // Long.MAX_VALUE acts as the "completely paused" sentinel checked in the render loop.
        minRenderPeriodNs = Long.MAX_VALUE;
      } else {
        minRenderPeriodNs = (long) (TimeUnit.SECONDS.toNanos(1) / fps);
      }
      if (minRenderPeriodNs != previousRenderPeriodNs) {
        // Fps reduction changed - reset frame time.
        nextFrameTimeNs = System.nanoTime();
      }
    }
  }

  public void disableFpsReduction() {
    setFpsReduction(Float.POSITIVE_INFINITY /* fps */);
  }

  public void pauseVideo() {
    setFpsReduction(0 /* fps */);
  }

  /**
   * Register a callback to be invoked when a new video frame has been received. This version uses
   * the drawer of the EglRenderer that was passed in init.
   *
   * @param listener The callback to be invoked. The callback will be invoked on the render thread.
   *     It should be lightweight and must not call removeFrameListener.
   * @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
   *     required.
   */
  public void addFrameListener(final FrameListener listener, final float scale) {
    addFrameListener(listener, scale, null, false /* applyFpsReduction */);
  }

  /**
   * Register a callback to be invoked when a new video frame has been received.
   *
   * @param listener The callback to be invoked. The callback will be invoked on the render thread.
   *     It should be lightweight and must not call removeFrameListener.
   * @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
   *     required.
   * @param drawerParam Custom drawer to use for this frame listener or null to use the default one.
   */
  public void addFrameListener(
      final FrameListener listener, final float scale, final RendererCommon.GlDrawer drawerParam) {
    addFrameListener(listener, scale, drawerParam, false /* applyFpsReduction */);
  }

  /**
   * Register a callback to be invoked when a new video frame has been received.
   *
   * @param listener The callback to be invoked. The callback will be invoked on the render thread.
   *     It should be lightweight and must not call removeFrameListener.
   * @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
   *     required.
   * @param drawerParam Custom drawer to use for this frame listener or null to use the default one.
   * @param applyFpsReduction This callback will not be called for frames that have been dropped by
   *     FPS reduction.
   */
  public void addFrameListener(final FrameListener listener, final float scale,
      @Nullable final RendererCommon.GlDrawer drawerParam, final boolean applyFpsReduction) {
    postToRenderThread(() -> {
      final RendererCommon.GlDrawer listenerDrawer = drawerParam == null ? drawer : drawerParam;
      frameListeners.add(
          new FrameListenerAndParams(listener, scale, listenerDrawer, applyFpsReduction));
    });
  }

  /**
   * Remove any pending callback that was added with addFrameListener. If the callback is not in
   * the queue, nothing happens. It is ensured that callback won't be called after this method
   * returns.
   *
   * @param listener The callback to remove.
   */
  public void removeFrameListener(final FrameListener listener) {
    final CountDownLatch latch = new CountDownLatch(1);
    synchronized (threadLock) {
      if (eglThread == null) {
        return;
      }
      if (Thread.currentThread() == eglThread.getHandler().getLooper().getThread()) {
        throw new RuntimeException("removeFrameListener must not be called on the render thread.");
      }
      postToRenderThread(() -> {
        latch.countDown();
        // NOTE(review): iterator element type restored; pasted source had the generics stripped.
        final Iterator<FrameListenerAndParams> iter = frameListeners.iterator();
        while (iter.hasNext()) {
          if (iter.next().listener == listener) {
            iter.remove();
          }
        }
      });
    }
    ThreadUtils.awaitUninterruptibly(latch);
  }

  /** Can be set in order to be notified about errors encountered during rendering. */
  public void setErrorCallback(ErrorCallback errorCallback) {
    this.errorCallback = errorCallback;
  }

  // VideoSink interface.
  @Override
  public void onFrame(VideoFrame frame) {
    synchronized (statisticsLock) {
      ++framesReceived;
    }
    final boolean dropOldFrame;
    synchronized (threadLock) {
      if (eglThread == null) {
        logD("Dropping frame - Not initialized or already released.");
        return;
      }
      synchronized (frameLock) {
        // `pendingFrame` is a one-slot queue: an unrendered frame is replaced by the newer one.
        dropOldFrame = (pendingFrame != null);
        if (dropOldFrame) {
          pendingFrame.release();
        }
        pendingFrame = frame;
        pendingFrame.retain();
        eglThread.getHandler().post(this::renderFrameOnRenderThread);
      }
    }
    if (dropOldFrame) {
      synchronized (statisticsLock) {
        ++framesDropped;
      }
    }
  }

  /**
   * Release EGL surface. This function will block until the EGL surface is released.
   */
  public void releaseEglSurface(final Runnable completionCallback) {
    // Ensure that the render thread is no longer touching the Surface before returning from this
    // function.
    eglSurfaceCreationRunnable.setSurface(null /* surface */);
    synchronized (threadLock) {
      if (eglThread != null) {
        eglThread.getHandler().removeCallbacks(eglSurfaceCreationRunnable);
        eglThread.getHandler().postAtFrontOfQueue(() -> {
          if (eglBase != null) {
            eglBase.detachCurrent();
            eglBase.releaseSurface();
          }
          completionCallback.run();
        });
        return;
      }
    }
    completionCallback.run();
  }

  /**
   * Private helper function to post tasks safely.
   */
  private void postToRenderThread(Runnable runnable) {
    synchronized (threadLock) {
      if (eglThread != null) {
        eglThread.getHandler().post(runnable);
      }
    }
  }

  // Clears the attached surface to the given RGBA color; must run on the render thread.
  private void clearSurfaceOnRenderThread(float r, float g, float b, float a) {
    if (eglBase != null && eglBase.hasSurface()) {
      logD("clearSurface");
      eglBase.makeCurrent();
      GLES20.glClearColor(r, g, b, a);
      GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
      eglBase.swapBuffers();
    }
  }

  /**
   * Post a task to clear the surface to a transparent uniform color.
   */
  public void clearImage() {
    clearImage(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
  }

  /**
   * Post a task to clear the surface to a specific color.
   */
  public void clearImage(final float r, final float g, final float b, final float a) {
    synchronized (threadLock) {
      if (eglThread == null) {
        return;
      }
      eglThread.getHandler().postAtFrontOfQueue(() -> clearSurfaceOnRenderThread(r, g, b, a));
    }
  }

  // Schedules the buffer swap via EglThread and accounts the time spent swapping.
  private void swapBuffersOnRenderThread(final VideoFrame frame, long swapBuffersStartTimeNs) {
    synchronized (threadLock) {
      if (eglThread != null) {
        eglThread.scheduleRenderUpdate(
            runsInline -> {
              if (!runsInline) {
                if (eglBase == null || !eglBase.hasSurface()) {
                  return;
                }
                eglBase.makeCurrent();
              }

              if (usePresentationTimeStamp) {
                eglBase.swapBuffers(frame.getTimestampNs());
              } else {
                eglBase.swapBuffers();
              }

              synchronized (statisticsLock) {
                renderSwapBufferTimeNs += (System.nanoTime() - swapBuffersStartTimeNs);
              }
            });
      }
    }
  }

  /**
   * Renders and releases `pendingFrame`.
   */
  private void renderFrameOnRenderThread() {
    // Fetch and render `pendingFrame`.
+ final VideoFrame frame; + synchronized (frameLock) { + if (pendingFrame == null) { + return; + } + frame = pendingFrame; + pendingFrame = null; + } + if (eglBase == null || !eglBase.hasSurface()) { + logD("Dropping frame - No surface"); + frame.release(); + return; + } + eglBase.makeCurrent(); + + // Check if fps reduction is active. + final boolean shouldRenderFrame; + synchronized (fpsReductionLock) { + if (minRenderPeriodNs == Long.MAX_VALUE) { + // Rendering is paused. + shouldRenderFrame = false; + } else if (minRenderPeriodNs <= 0) { + // FPS reduction is disabled. + shouldRenderFrame = true; + } else { + final long currentTimeNs = System.nanoTime(); + if (currentTimeNs < nextFrameTimeNs) { + logD("Skipping frame rendering - fps reduction is active."); + shouldRenderFrame = false; + } else { + nextFrameTimeNs += minRenderPeriodNs; + // The time for the next frame should always be in the future. + nextFrameTimeNs = Math.max(nextFrameTimeNs, currentTimeNs); + shouldRenderFrame = true; + } + } + } + + final long startTimeNs = System.nanoTime(); + + final float frameAspectRatio = frame.getRotatedWidth() / (float) frame.getRotatedHeight(); + final float drawnAspectRatio; + synchronized (layoutLock) { + drawnAspectRatio = layoutAspectRatio != 0f ? layoutAspectRatio : frameAspectRatio; + } + + final float scaleX; + final float scaleY; + + if (frameAspectRatio > drawnAspectRatio) { + scaleX = drawnAspectRatio / frameAspectRatio; + scaleY = 1f; + } else { + scaleX = 1f; + scaleY = frameAspectRatio / drawnAspectRatio; + } + + drawMatrix.reset(); + drawMatrix.preTranslate(0.5f, 0.5f); + drawMatrix.preScale(mirrorHorizontally ? -1f : 1f, mirrorVertically ? 
-1f : 1f); + drawMatrix.preScale(scaleX, scaleY); + drawMatrix.preTranslate(-0.5f, -0.5f); + + try { + if (shouldRenderFrame) { + GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */); + GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); + frameDrawer.drawFrame(frame, drawer, drawMatrix, 0 /* viewportX */, 0 /* viewportY */, + eglBase.surfaceWidth(), eglBase.surfaceHeight()); + + final long swapBuffersStartTimeNs = System.nanoTime(); + swapBuffersOnRenderThread(frame, swapBuffersStartTimeNs); + + synchronized (statisticsLock) { + ++framesRendered; + renderTimeNs += (swapBuffersStartTimeNs - startTimeNs); + } + } + + notifyCallbacks(frame, shouldRenderFrame); + } catch (GlUtil.GlOutOfMemoryException e) { + logE("Error while drawing frame", e); + final ErrorCallback errorCallback = this.errorCallback; + if (errorCallback != null) { + errorCallback.onGlOutOfMemory(); + } + // Attempt to free up some resources. + drawer.release(); + frameDrawer.release(); + bitmapTextureFramebuffer.release(); + // Continue here on purpose and retry again for next frame. In worst case, this is a + // continuous problem and no more frames will be drawn. + } finally { + frame.release(); + } + } + + private void notifyCallbacks(VideoFrame frame, boolean wasRendered) { + if (frameListeners.isEmpty()) + return; + + drawMatrix.reset(); + drawMatrix.preTranslate(0.5f, 0.5f); + drawMatrix.preScale(mirrorHorizontally ? -1f : 1f, mirrorVertically ? -1f : 1f); + drawMatrix.preScale(1f, -1f); // We want the output to be upside down for Bitmap. 
+ drawMatrix.preTranslate(-0.5f, -0.5f); + + Iterator it = frameListeners.iterator(); + while (it.hasNext()) { + FrameListenerAndParams listenerAndParams = it.next(); + if (!wasRendered && listenerAndParams.applyFpsReduction) { + continue; + } + it.remove(); + + final int scaledWidth = (int) (listenerAndParams.scale * frame.getRotatedWidth()); + final int scaledHeight = (int) (listenerAndParams.scale * frame.getRotatedHeight()); + + if (scaledWidth == 0 || scaledHeight == 0) { + listenerAndParams.listener.onFrame(null); + continue; + } + + bitmapTextureFramebuffer.setSize(scaledWidth, scaledHeight); + + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, bitmapTextureFramebuffer.getFrameBufferId()); + GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, + GLES20.GL_TEXTURE_2D, bitmapTextureFramebuffer.getTextureId(), 0); + + GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */); + GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); + frameDrawer.drawFrame(frame, listenerAndParams.drawer, drawMatrix, 0 /* viewportX */, + 0 /* viewportY */, scaledWidth, scaledHeight); + + final ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(scaledWidth * scaledHeight * 4); + GLES20.glViewport(0, 0, scaledWidth, scaledHeight); + GLES20.glReadPixels( + 0, 0, scaledWidth, scaledHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, bitmapBuffer); + + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); + GlUtil.checkNoGLES2Error("EglRenderer.notifyCallbacks"); + + final Bitmap bitmap = Bitmap.createBitmap(scaledWidth, scaledHeight, Bitmap.Config.ARGB_8888); + bitmap.copyPixelsFromBuffer(bitmapBuffer); + listenerAndParams.listener.onFrame(bitmap); + } + } + + private String averageTimeAsString(long sumTimeNs, int count) { + return (count <= 0) ? 
"NA" : TimeUnit.NANOSECONDS.toMicros(sumTimeNs / count) + " us"; + } + + private void logStatistics() { + final DecimalFormat fpsFormat = new DecimalFormat("#.0"); + final long currentTimeNs = System.nanoTime(); + synchronized (statisticsLock) { + final long elapsedTimeNs = currentTimeNs - statisticsStartTimeNs; + if (elapsedTimeNs <= 0 || (minRenderPeriodNs == Long.MAX_VALUE && framesReceived == 0)) { + return; + } + final float renderFps = framesRendered * TimeUnit.SECONDS.toNanos(1) / (float) elapsedTimeNs; + logD("Duration: " + TimeUnit.NANOSECONDS.toMillis(elapsedTimeNs) + " ms." + + " Frames received: " + framesReceived + "." + + " Dropped: " + framesDropped + "." + + " Rendered: " + framesRendered + "." + + " Render fps: " + fpsFormat.format(renderFps) + "." + + " Average render time: " + averageTimeAsString(renderTimeNs, framesRendered) + "." + + " Average swapBuffer time: " + + averageTimeAsString(renderSwapBufferTimeNs, framesRendered) + "."); + resetStatistics(currentTimeNs); + } + } + + private void logE(String string, Throwable e) { + Logging.e(TAG, name + string, e); + } + + private void logD(String string) { + Logging.d(TAG, name + string); + } + + private void logW(String string) { + Logging.w(TAG, name + string); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/EglThread.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/EglThread.java new file mode 100644 index 00000000..73323d59 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/EglThread.java @@ -0,0 +1,216 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import android.os.Handler; +import android.os.HandlerThread; +import android.os.Looper; +import android.os.Message; +import androidx.annotation.GuardedBy; +import androidx.annotation.Nullable; +import java.util.ArrayList; +import java.util.List; +import org.webrtc.EglBase.EglConnection; + +/** EGL graphics thread that allows multiple clients to share the same underlying EGLContext. */ +public class EglThread implements RenderSynchronizer.Listener { + /** Callback for externally managed reference count. */ + public interface ReleaseMonitor { + /** + * Called by EglThread when a client releases its reference. Returns true when there are no more + * references and resources should be released. + */ + boolean onRelease(EglThread eglThread); + } + + /** Interface for clients to schedule rendering updates that will run synchronized. */ + public interface RenderUpdate { + + /** + * Called by EglThread when the rendering window is open. `runsInline` is true when the update + * is executed directly while the client schedules the update. + */ + void update(boolean runsInline); + } + + public static EglThread create( + @Nullable ReleaseMonitor releaseMonitor, + @Nullable final EglBase.Context sharedContext, + final int[] configAttributes, + @Nullable RenderSynchronizer renderSynchronizer) { + final HandlerThread renderThread = new HandlerThread("EglThread"); + renderThread.start(); + HandlerWithExceptionCallbacks handler = + new HandlerWithExceptionCallbacks(renderThread.getLooper()); + + // Not creating the EGLContext on the thread it will be used on seems to cause issues with + // creating window surfaces on certain devices. So keep the same legacy behavior as EglRenderer + // and create the context on the render thread. + EglConnection eglConnection = ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> { + // If sharedContext is null, then texture frames are disabled. 
This is typically for old + // devices that might not be fully spec compliant, so force EGL 1.0 since EGL 1.4 has + // caused trouble on some weird devices. + if (sharedContext == null) { + return EglConnection.createEgl10(configAttributes); + } else { + return EglConnection.create(sharedContext, configAttributes); + } + }); + + return new EglThread( + releaseMonitor != null ? releaseMonitor : eglThread -> true, + handler, + eglConnection, + renderSynchronizer); + } + + public static EglThread create( + @Nullable ReleaseMonitor releaseMonitor, + @Nullable final EglBase.Context sharedContext, + final int[] configAttributes) { + return create(releaseMonitor, sharedContext, configAttributes, /* renderSynchronizer= */ null); + } + + /** + * Handler that triggers callbacks when an uncaught exception happens when handling a message. + */ + private static class HandlerWithExceptionCallbacks extends Handler { + private final Object callbackLock = new Object(); + @GuardedBy("callbackLock") private final List exceptionCallbacks = new ArrayList<>(); + + public HandlerWithExceptionCallbacks(Looper looper) { + super(looper); + } + + @Override + public void dispatchMessage(Message msg) { + try { + super.dispatchMessage(msg); + } catch (Exception e) { + Logging.e("EglThread", "Exception on EglThread", e); + synchronized (callbackLock) { + for (Runnable callback : exceptionCallbacks) { + callback.run(); + } + } + throw e; + } + } + + public void addExceptionCallback(Runnable callback) { + synchronized (callbackLock) { + exceptionCallbacks.add(callback); + } + } + + public void removeExceptionCallback(Runnable callback) { + synchronized (callbackLock) { + exceptionCallbacks.remove(callback); + } + } + } + + private final ReleaseMonitor releaseMonitor; + private final HandlerWithExceptionCallbacks handler; + private final EglConnection eglConnection; + private final RenderSynchronizer renderSynchronizer; + private final List pendingRenderUpdates = new ArrayList<>(); + private 
boolean renderWindowOpen = true; + + private EglThread( + ReleaseMonitor releaseMonitor, + HandlerWithExceptionCallbacks handler, + EglConnection eglConnection, + RenderSynchronizer renderSynchronizer) { + this.releaseMonitor = releaseMonitor; + this.handler = handler; + this.eglConnection = eglConnection; + this.renderSynchronizer = renderSynchronizer; + if (renderSynchronizer != null) { + renderSynchronizer.registerListener(this); + } + } + + public void release() { + if (!releaseMonitor.onRelease(this)) { + // Thread is still in use, do not release yet. + return; + } + + if (renderSynchronizer != null) { + renderSynchronizer.removeListener(this); + } + + handler.post(eglConnection::release); + handler.getLooper().quitSafely(); + } + + /** + * Creates an EglBase instance with the EglThread's EglConnection. This method can be called on + * any thread, but the returned EglBase instance should only be used on this EglThread's Handler. + */ + public EglBase createEglBaseWithSharedConnection() { + return EglBase.create(eglConnection); + } + + /** + * Returns the Handler to interact with Gl/EGL on. Callers need to make sure that their own + * EglBase is current on the handler before running any graphics operations since the EglThread + * can be shared by multiple clients. + */ + public Handler getHandler() { + return handler; + } + + /** + * Adds a callback that will be called on the EGL thread if there is an exception on the thread. + */ + public void addExceptionCallback(Runnable callback) { + handler.addExceptionCallback(callback); + } + + /** + * Removes a previously added exception callback. + */ + public void removeExceptionCallback(Runnable callback) { + handler.removeExceptionCallback(callback); + } + + /** + * Schedules a render update (like swapBuffers) to be run in sync with other updates on the next + * open render window. If the render window is currently open the update will run immediately. 
+ * This method must be called on the EglThread during a render pass. + */ + public void scheduleRenderUpdate(RenderUpdate update) { + if (renderWindowOpen) { + update.update(/* runsInline = */true); + } else { + pendingRenderUpdates.add(update); + } + } + + @Override + public void onRenderWindowOpen() { + handler.post( + () -> { + renderWindowOpen = true; + for (RenderUpdate update : pendingRenderUpdates) { + update.update(/* runsInline = */false); + } + pendingRenderUpdates.clear(); + }); + } + + @Override + public void onRenderWindowClose() { + handler.post(() -> renderWindowOpen = false); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Empty.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Empty.java new file mode 100644 index 00000000..fe9481e1 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Empty.java @@ -0,0 +1,17 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * Empty class for use in libjingle_peerconnection_java because all targets require at least one + * Java file. + */ +class Empty {} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/EncodedImage.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/EncodedImage.java new file mode 100644 index 00000000..a6eef67d --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/EncodedImage.java @@ -0,0 +1,183 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.nio.ByteBuffer; +import java.util.concurrent.TimeUnit; + +/** + * An encoded frame from a video stream. Used as an input for decoders and as an output for + * encoders. + */ +public class EncodedImage implements RefCounted { + // Must be kept in sync with common_types.h FrameType. + public enum FrameType { + EmptyFrame(0), + VideoFrameKey(3), + VideoFrameDelta(4); + + private final int nativeIndex; + + private FrameType(int nativeIndex) { + this.nativeIndex = nativeIndex; + } + + public int getNative() { + return nativeIndex; + } + + @CalledByNative("FrameType") + static FrameType fromNativeIndex(int nativeIndex) { + for (FrameType type : FrameType.values()) { + if (type.getNative() == nativeIndex) { + return type; + } + } + throw new IllegalArgumentException("Unknown native frame type: " + nativeIndex); + } + } + + private final RefCountDelegate refCountDelegate; + public final ByteBuffer buffer; + public final int encodedWidth; + public final int encodedHeight; + public final long captureTimeMs; // Deprecated + public final long captureTimeNs; + public final FrameType frameType; + public final int rotation; + public final @Nullable Integer qp; + + // TODO(bugs.webrtc.org/9378): Use retain and release from jni code. 
+ @Override + public void retain() { + refCountDelegate.retain(); + } + + @Override + public void release() { + refCountDelegate.release(); + } + + @CalledByNative + private EncodedImage(ByteBuffer buffer, @Nullable Runnable releaseCallback, int encodedWidth, + int encodedHeight, long captureTimeNs, FrameType frameType, int rotation, + @Nullable Integer qp) { + this.buffer = buffer; + this.encodedWidth = encodedWidth; + this.encodedHeight = encodedHeight; + this.captureTimeMs = TimeUnit.NANOSECONDS.toMillis(captureTimeNs); + this.captureTimeNs = captureTimeNs; + this.frameType = frameType; + this.rotation = rotation; + this.qp = qp; + this.refCountDelegate = new RefCountDelegate(releaseCallback); + } + + @CalledByNative + private ByteBuffer getBuffer() { + return buffer; + } + + @CalledByNative + private int getEncodedWidth() { + return encodedWidth; + } + + @CalledByNative + private int getEncodedHeight() { + return encodedHeight; + } + + @CalledByNative + private long getCaptureTimeNs() { + return captureTimeNs; + } + + @CalledByNative + private int getFrameType() { + return frameType.getNative(); + } + + @CalledByNative + private int getRotation() { + return rotation; + } + + @CalledByNative + private @Nullable Integer getQp() { + return qp; + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + private ByteBuffer buffer; + private @Nullable Runnable releaseCallback; + private int encodedWidth; + private int encodedHeight; + private long captureTimeNs; + private EncodedImage.FrameType frameType; + private int rotation; + private @Nullable Integer qp; + + private Builder() {} + + public Builder setBuffer(ByteBuffer buffer, @Nullable Runnable releaseCallback) { + this.buffer = buffer; + this.releaseCallback = releaseCallback; + return this; + } + + public Builder setEncodedWidth(int encodedWidth) { + this.encodedWidth = encodedWidth; + return this; + } + + public Builder setEncodedHeight(int encodedHeight) { + 
this.encodedHeight = encodedHeight; + return this; + } + + @Deprecated + public Builder setCaptureTimeMs(long captureTimeMs) { + this.captureTimeNs = TimeUnit.MILLISECONDS.toNanos(captureTimeMs); + return this; + } + + public Builder setCaptureTimeNs(long captureTimeNs) { + this.captureTimeNs = captureTimeNs; + return this; + } + + public Builder setFrameType(EncodedImage.FrameType frameType) { + this.frameType = frameType; + return this; + } + + public Builder setRotation(int rotation) { + this.rotation = rotation; + return this; + } + + public Builder setQp(@Nullable Integer qp) { + this.qp = qp; + return this; + } + + public EncodedImage createEncodedImage() { + return new EncodedImage(buffer, releaseCallback, encodedWidth, encodedHeight, captureTimeNs, + frameType, rotation, qp); + } + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/FecControllerFactoryFactoryInterface.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/FecControllerFactoryFactoryInterface.java new file mode 100644 index 00000000..6d39390f --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/FecControllerFactoryFactoryInterface.java @@ -0,0 +1,22 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * Factory for creating webrtc::FecControllerFactory instances. + */ +public interface FecControllerFactoryFactoryInterface { + /** + * Dynamically allocates a webrtc::FecControllerFactory instance and returns a pointer to it. + * The caller takes ownership of the object. 
+ */ + public long createNative(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/FileVideoCapturer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/FileVideoCapturer.java new file mode 100644 index 00000000..a00a5e5f --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/FileVideoCapturer.java @@ -0,0 +1,201 @@ +/* + * Copyright 2016 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.content.Context; +import android.os.SystemClock; +import java.io.IOException; +import java.io.RandomAccessFile; +import java.nio.ByteBuffer; +import java.nio.channels.FileChannel; +import java.nio.charset.Charset; +import java.util.Timer; +import java.util.TimerTask; +import java.util.concurrent.TimeUnit; + +public class FileVideoCapturer implements VideoCapturer { + private interface VideoReader { + VideoFrame getNextFrame(); + void close(); + } + + /** + * Read video data from file for the .y4m container. 
+ */ + @SuppressWarnings("StringSplitter") + private static class VideoReaderY4M implements VideoReader { + private static final String TAG = "VideoReaderY4M"; + private static final String Y4M_FRAME_DELIMETER = "FRAME"; + private static final int FRAME_DELIMETER_LENGTH = Y4M_FRAME_DELIMETER.length() + 1; + + private final int frameWidth; + private final int frameHeight; + // First char after header + private final long videoStart; + private final RandomAccessFile mediaFile; + private final FileChannel mediaFileChannel; + + public VideoReaderY4M(String file) throws IOException { + mediaFile = new RandomAccessFile(file, "r"); + mediaFileChannel = mediaFile.getChannel(); + StringBuilder builder = new StringBuilder(); + for (;;) { + int c = mediaFile.read(); + if (c == -1) { + // End of file reached. + throw new RuntimeException("Found end of file before end of header for file: " + file); + } + if (c == '\n') { + // End of header found. + break; + } + builder.append((char) c); + } + videoStart = mediaFileChannel.position(); + String header = builder.toString(); + String[] headerTokens = header.split("[ ]"); + int w = 0; + int h = 0; + String colorSpace = "420"; + for (String tok : headerTokens) { + char c = tok.charAt(0); + switch (c) { + case 'W': + w = Integer.parseInt(tok.substring(1)); + break; + case 'H': + h = Integer.parseInt(tok.substring(1)); + break; + case 'C': + colorSpace = tok.substring(1); + break; + } + } + Logging.d(TAG, "Color space: " + colorSpace); + if (!colorSpace.equals("420") && !colorSpace.equals("420mpeg2")) { + throw new IllegalArgumentException( + "Does not support any other color space than I420 or I420mpeg2"); + } + if ((w % 2) == 1 || (h % 2) == 1) { + throw new IllegalArgumentException("Does not support odd width or height"); + } + frameWidth = w; + frameHeight = h; + Logging.d(TAG, "frame dim: (" + w + ", " + h + ")"); + } + + @Override + public VideoFrame getNextFrame() { + final long captureTimeNs = 
TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime()); + final JavaI420Buffer buffer = JavaI420Buffer.allocate(frameWidth, frameHeight); + final ByteBuffer dataY = buffer.getDataY(); + final ByteBuffer dataU = buffer.getDataU(); + final ByteBuffer dataV = buffer.getDataV(); + final int chromaHeight = (frameHeight + 1) / 2; + final int sizeY = frameHeight * buffer.getStrideY(); + final int sizeU = chromaHeight * buffer.getStrideU(); + final int sizeV = chromaHeight * buffer.getStrideV(); + + try { + ByteBuffer frameDelim = ByteBuffer.allocate(FRAME_DELIMETER_LENGTH); + if (mediaFileChannel.read(frameDelim) < FRAME_DELIMETER_LENGTH) { + // We reach end of file, loop + mediaFileChannel.position(videoStart); + if (mediaFileChannel.read(frameDelim) < FRAME_DELIMETER_LENGTH) { + throw new RuntimeException("Error looping video"); + } + } + String frameDelimStr = new String(frameDelim.array(), Charset.forName("US-ASCII")); + if (!frameDelimStr.equals(Y4M_FRAME_DELIMETER + "\n")) { + throw new RuntimeException( + "Frames should be delimited by FRAME plus newline, found delimter was: '" + + frameDelimStr + "'"); + } + + mediaFileChannel.read(dataY); + mediaFileChannel.read(dataU); + mediaFileChannel.read(dataV); + } catch (IOException e) { + throw new RuntimeException(e); + } + + return new VideoFrame(buffer, 0 /* rotation */, captureTimeNs); + } + + @Override + public void close() { + try { + // Closing a file also closes the channel. 
+ mediaFile.close(); + } catch (IOException e) { + Logging.e(TAG, "Problem closing file", e); + } + } + } + + private final static String TAG = "FileVideoCapturer"; + private final VideoReader videoReader; + private CapturerObserver capturerObserver; + private final Timer timer = new Timer(); + + private final TimerTask tickTask = new TimerTask() { + @Override + public void run() { + tick(); + } + }; + + public FileVideoCapturer(String inputFile) throws IOException { + try { + videoReader = new VideoReaderY4M(inputFile); + } catch (IOException e) { + Logging.d(TAG, "Could not open video file: " + inputFile); + throw e; + } + } + + public void tick() { + VideoFrame videoFrame = videoReader.getNextFrame(); + capturerObserver.onFrameCaptured(videoFrame); + videoFrame.release(); + } + + @Override + public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext, + CapturerObserver capturerObserver) { + this.capturerObserver = capturerObserver; + } + + @Override + public void startCapture(int width, int height, int framerate) { + timer.schedule(tickTask, 0, 1000 / framerate); + } + + @Override + public void stopCapture() throws InterruptedException { + timer.cancel(); + } + + @Override + public void changeCaptureFormat(int width, int height, int framerate) { + // Empty on purpose + } + + @Override + public void dispose() { + videoReader.close(); + } + + @Override + public boolean isScreencast() { + return false; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/FrameDecryptor.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/FrameDecryptor.java new file mode 100644 index 00000000..2932f3d9 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/FrameDecryptor.java @@ -0,0 +1,26 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * The FrameDecryptor interface allows Java API users to provide a + * pointer to their native implementation of the FrameDecryptorInterface. + * FrameDecryptors are extremely performance sensitive as they must process all + * incoming video and audio frames. Due to this reason they should always be + * backed by a native implementation. + * @note Not ready for production use. + */ +public interface FrameDecryptor { + /** + * @return A FrameDecryptorInterface pointer. + */ + long getNativeFrameDecryptor(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/FrameEncryptor.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/FrameEncryptor.java new file mode 100644 index 00000000..bc81223f --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/FrameEncryptor.java @@ -0,0 +1,26 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * The FrameEncryptor interface allows Java API users to provide a pointer to + * their native implementation of the FrameEncryptorInterface. + * FrameEncryptors are extremely performance sensitive as they must process all + * outgoing video and audio frames. Due to this reason they should always be + * backed by a native implementation. + * @note Not ready for production use. + */ +public interface FrameEncryptor { + /** + * @return A FrameEncryptorInterface pointer. 
+ */ + long getNativeFrameEncryptor(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/FramerateBitrateAdjuster.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/FramerateBitrateAdjuster.java new file mode 100644 index 00000000..e28b7b5a --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/FramerateBitrateAdjuster.java @@ -0,0 +1,26 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * BitrateAdjuster that adjusts the bitrate to compensate for changes in the framerate. Used with + * hardware codecs that assume the framerate never changes. + */ +class FramerateBitrateAdjuster extends BaseBitrateAdjuster { + private static final int DEFAULT_FRAMERATE_FPS = 30; + + @Override + public void setTargets(int targetBitrateBps, double targetFramerateFps) { + // Keep frame rate unchanged and adjust bit rate. + this.targetFramerateFps = DEFAULT_FRAMERATE_FPS; + this.targetBitrateBps = (int) (targetBitrateBps * DEFAULT_FRAMERATE_FPS / targetFramerateFps); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/GlGenericDrawer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/GlGenericDrawer.java new file mode 100644 index 00000000..b70a3728 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/GlGenericDrawer.java @@ -0,0 +1,284 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.opengl.GLES11Ext; +import android.opengl.GLES20; +import androidx.annotation.Nullable; +import java.nio.FloatBuffer; +import org.webrtc.GlShader; +import org.webrtc.GlUtil; +import org.webrtc.RendererCommon; + +/** + * Helper class to implement an instance of RendererCommon.GlDrawer that can accept multiple input + * sources (OES, RGB, or YUV) using a generic fragment shader as input. The generic fragment shader + * should sample pixel values from the function "sample" that will be provided by this class and + * provides an abstraction for the input source type (OES, RGB, or YUV). The texture coordinate + * variable name will be "tc" and the texture matrix in the vertex shader will be "tex_mat". The + * simplest possible generic shader that just draws pixel from the frame unmodified looks like: + * void main() { + * gl_FragColor = sample(tc); + * } + * This class covers the cases for most simple shaders and generates the necessary boiler plate. + * Advanced shaders can always implement RendererCommon.GlDrawer directly. + */ +class GlGenericDrawer implements RendererCommon.GlDrawer { + /** + * The different shader types representing different input sources. YUV here represents three + * separate Y, U, V textures. + */ + public static enum ShaderType { OES, RGB, YUV } + + /** + * The shader callbacks is used to customize behavior for a GlDrawer. It provides a hook to set + * uniform variables in the shader before a frame is drawn. + */ + public static interface ShaderCallbacks { + /** + * This callback is called when a new shader has been compiled and created. It will be called + * for the first frame as well as when the shader type is changed. 
This callback can be used to + * do custom initialization of the shader that only needs to happen once. + */ + void onNewShader(GlShader shader); + + /** + * This callback is called before rendering a frame. It can be used to do custom preparation of + * the shader that needs to happen every frame. + */ + void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth, int frameHeight, + int viewportWidth, int viewportHeight); + } + + private static final String INPUT_VERTEX_COORDINATE_NAME = "in_pos"; + private static final String INPUT_TEXTURE_COORDINATE_NAME = "in_tc"; + private static final String TEXTURE_MATRIX_NAME = "tex_mat"; + private static final String DEFAULT_VERTEX_SHADER_STRING = "varying vec2 tc;\n" + + "attribute vec4 in_pos;\n" + + "attribute vec4 in_tc;\n" + + "uniform mat4 tex_mat;\n" + + "void main() {\n" + + " gl_Position = in_pos;\n" + + " tc = (tex_mat * in_tc).xy;\n" + + "}\n"; + + // Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1) + // is top-right. + private static final FloatBuffer FULL_RECTANGLE_BUFFER = GlUtil.createFloatBuffer(new float[] { + -1.0f, -1.0f, // Bottom left. + 1.0f, -1.0f, // Bottom right. + -1.0f, 1.0f, // Top left. + 1.0f, 1.0f, // Top right. + }); + + // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right. + private static final FloatBuffer FULL_RECTANGLE_TEXTURE_BUFFER = + GlUtil.createFloatBuffer(new float[] { + 0.0f, 0.0f, // Bottom left. + 1.0f, 0.0f, // Bottom right. + 0.0f, 1.0f, // Top left. + 1.0f, 1.0f, // Top right. 
+ }); + + static String createFragmentShaderString(String genericFragmentSource, ShaderType shaderType) { + final StringBuilder stringBuilder = new StringBuilder(); + if (shaderType == ShaderType.OES) { + stringBuilder.append("#extension GL_OES_EGL_image_external : require\n"); + } + stringBuilder.append("precision mediump float;\n"); + stringBuilder.append("varying vec2 tc;\n"); + + if (shaderType == ShaderType.YUV) { + stringBuilder.append("uniform sampler2D y_tex;\n"); + stringBuilder.append("uniform sampler2D u_tex;\n"); + stringBuilder.append("uniform sampler2D v_tex;\n"); + + // Add separate function for sampling texture. + // yuv_to_rgb_mat is inverse of the matrix defined in YuvConverter. + stringBuilder.append("vec4 sample(vec2 p) {\n"); + stringBuilder.append(" float y = texture2D(y_tex, p).r * 1.16438;\n"); + stringBuilder.append(" float u = texture2D(u_tex, p).r;\n"); + stringBuilder.append(" float v = texture2D(v_tex, p).r;\n"); + stringBuilder.append(" return vec4(y + 1.59603 * v - 0.874202,\n"); + stringBuilder.append(" y - 0.391762 * u - 0.812968 * v + 0.531668,\n"); + stringBuilder.append(" y + 2.01723 * u - 1.08563, 1);\n"); + stringBuilder.append("}\n"); + stringBuilder.append(genericFragmentSource); + } else { + final String samplerName = shaderType == ShaderType.OES ? "samplerExternalOES" : "sampler2D"; + stringBuilder.append("uniform ").append(samplerName).append(" tex;\n"); + + // Update the sampling function in-place. 
+ stringBuilder.append(genericFragmentSource.replace("sample(", "texture2D(tex, ")); + } + + return stringBuilder.toString(); + } + + private final String genericFragmentSource; + private final String vertexShader; + private final ShaderCallbacks shaderCallbacks; + @Nullable private ShaderType currentShaderType; + @Nullable private GlShader currentShader; + private int inPosLocation; + private int inTcLocation; + private int texMatrixLocation; + + public GlGenericDrawer(String genericFragmentSource, ShaderCallbacks shaderCallbacks) { + this(DEFAULT_VERTEX_SHADER_STRING, genericFragmentSource, shaderCallbacks); + } + + public GlGenericDrawer( + String vertexShader, String genericFragmentSource, ShaderCallbacks shaderCallbacks) { + this.vertexShader = vertexShader; + this.genericFragmentSource = genericFragmentSource; + this.shaderCallbacks = shaderCallbacks; + } + + // Visible for testing. + GlShader createShader(ShaderType shaderType) { + return new GlShader( + vertexShader, createFragmentShaderString(genericFragmentSource, shaderType)); + } + + /** + * Draw an OES texture frame with specified texture transformation matrix. Required resources are + * allocated at the first call to this function. + */ + @Override + public void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight, + int viewportX, int viewportY, int viewportWidth, int viewportHeight) { + prepareShader( + ShaderType.OES, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight); + // Bind the texture. + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId); + // Draw the texture. + GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight); + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); + // Unbind the texture as a precaution. + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); + } + + /** + * Draw a RGB(A) texture frame with specified texture transformation matrix. 
Required resources + * are allocated at the first call to this function. + */ + @Override + public void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight, + int viewportX, int viewportY, int viewportWidth, int viewportHeight) { + prepareShader( + ShaderType.RGB, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight); + // Bind the texture. + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId); + // Draw the texture. + GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight); + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); + // Unbind the texture as a precaution. + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); + } + + /** + * Draw a YUV frame with specified texture transformation matrix. Required resources are allocated + * at the first call to this function. + */ + @Override + public void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight, + int viewportX, int viewportY, int viewportWidth, int viewportHeight) { + prepareShader( + ShaderType.YUV, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight); + // Bind the textures. + for (int i = 0; i < 3; ++i) { + GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]); + } + // Draw the textures. + GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight); + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); + // Unbind the textures as a precaution. + for (int i = 0; i < 3; ++i) { + GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); + } + } + + private void prepareShader(ShaderType shaderType, float[] texMatrix, int frameWidth, + int frameHeight, int viewportWidth, int viewportHeight) { + final GlShader shader; + if (shaderType.equals(currentShaderType)) { + // Same shader type as before, reuse exising shader. 
+ shader = currentShader; + } else { + // Allocate new shader. + currentShaderType = null; + if (currentShader != null) { + currentShader.release(); + currentShader = null; + } + + shader = createShader(shaderType); + currentShaderType = shaderType; + currentShader = shader; + + shader.useProgram(); + // Set input texture units. + if (shaderType == ShaderType.YUV) { + GLES20.glUniform1i(shader.getUniformLocation("y_tex"), 0); + GLES20.glUniform1i(shader.getUniformLocation("u_tex"), 1); + GLES20.glUniform1i(shader.getUniformLocation("v_tex"), 2); + } else { + GLES20.glUniform1i(shader.getUniformLocation("tex"), 0); + } + + GlUtil.checkNoGLES2Error("Create shader"); + shaderCallbacks.onNewShader(shader); + texMatrixLocation = shader.getUniformLocation(TEXTURE_MATRIX_NAME); + inPosLocation = shader.getAttribLocation(INPUT_VERTEX_COORDINATE_NAME); + inTcLocation = shader.getAttribLocation(INPUT_TEXTURE_COORDINATE_NAME); + } + + shader.useProgram(); + + // Upload the vertex coordinates. + GLES20.glEnableVertexAttribArray(inPosLocation); + GLES20.glVertexAttribPointer(inPosLocation, /* size= */ 2, + /* type= */ GLES20.GL_FLOAT, /* normalized= */ false, /* stride= */ 0, + FULL_RECTANGLE_BUFFER); + + // Upload the texture coordinates. + GLES20.glEnableVertexAttribArray(inTcLocation); + GLES20.glVertexAttribPointer(inTcLocation, /* size= */ 2, + /* type= */ GLES20.GL_FLOAT, /* normalized= */ false, /* stride= */ 0, + FULL_RECTANGLE_TEXTURE_BUFFER); + + // Upload the texture transformation matrix. + GLES20.glUniformMatrix4fv( + texMatrixLocation, 1 /* count= */, false /* transpose= */, texMatrix, 0 /* offset= */); + + // Do custom per-frame shader preparation. + shaderCallbacks.onPrepareShader( + shader, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight); + GlUtil.checkNoGLES2Error("Prepare shader"); + } + + /** + * Release all GLES resources. This needs to be done manually, otherwise the resources are leaked. 
+ */ + @Override + public void release() { + if (currentShader != null) { + currentShader.release(); + currentShader = null; + currentShaderType = null; + } + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/GlRectDrawer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/GlRectDrawer.java new file mode 100644 index 00000000..d1fbd1b7 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/GlRectDrawer.java @@ -0,0 +1,31 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** Simplest possible GL shader that just draws frames as opaque quads. */ +public class GlRectDrawer extends GlGenericDrawer { + private static final String FRAGMENT_SHADER = "void main() {\n" + + " gl_FragColor = sample(tc);\n" + + "}\n"; + + private static class ShaderCallbacks implements GlGenericDrawer.ShaderCallbacks { + @Override + public void onNewShader(GlShader shader) {} + + @Override + public void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth, int frameHeight, + int viewportWidth, int viewportHeight) {} + } + + public GlRectDrawer() { + super(FRAGMENT_SHADER, new ShaderCallbacks()); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/GlShader.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/GlShader.java new file mode 100644 index 00000000..7efd8d3a --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/GlShader.java @@ -0,0 +1,131 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.opengl.GLES20; + +import java.nio.FloatBuffer; + +// Helper class for handling OpenGL shaders and shader programs. +public class GlShader { + private static final String TAG = "GlShader"; + + private static int compileShader(int shaderType, String source) { + final int shader = GLES20.glCreateShader(shaderType); + if (shader == 0) { + throw new RuntimeException("glCreateShader() failed. GLES20 error: " + GLES20.glGetError()); + } + GLES20.glShaderSource(shader, source); + GLES20.glCompileShader(shader); + int[] compileStatus = new int[] {GLES20.GL_FALSE}; + GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0); + if (compileStatus[0] != GLES20.GL_TRUE) { + Logging.e( + TAG, "Compile error " + GLES20.glGetShaderInfoLog(shader) + " in shader:\n" + source); + throw new RuntimeException(GLES20.glGetShaderInfoLog(shader)); + } + GlUtil.checkNoGLES2Error("compileShader"); + return shader; + } + + private int program; + + public GlShader(String vertexSource, String fragmentSource) { + final int vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexSource); + final int fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource); + program = GLES20.glCreateProgram(); + if (program == 0) { + throw new RuntimeException("glCreateProgram() failed. 
GLES20 error: " + GLES20.glGetError()); + } + GLES20.glAttachShader(program, vertexShader); + GLES20.glAttachShader(program, fragmentShader); + GLES20.glLinkProgram(program); + int[] linkStatus = new int[] {GLES20.GL_FALSE}; + GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0); + if (linkStatus[0] != GLES20.GL_TRUE) { + Logging.e(TAG, "Could not link program: " + GLES20.glGetProgramInfoLog(program)); + throw new RuntimeException(GLES20.glGetProgramInfoLog(program)); + } + // According to the documentation of glLinkProgram(): + // "After the link operation, applications are free to modify attached shader objects, compile + // attached shader objects, detach shader objects, delete shader objects, and attach additional + // shader objects. None of these operations affects the information log or the program that is + // part of the program object." + // But in practice, detaching shaders from the program seems to break some devices. Deleting the + // shaders are fine however - it will delete them when they are no longer attached to a program. + GLES20.glDeleteShader(vertexShader); + GLES20.glDeleteShader(fragmentShader); + GlUtil.checkNoGLES2Error("Creating GlShader"); + } + + public int getAttribLocation(String label) { + if (program == -1) { + throw new RuntimeException("The program has been released"); + } + int location = GLES20.glGetAttribLocation(program, label); + if (location < 0) { + throw new RuntimeException("Could not locate '" + label + "' in program"); + } + return location; + } + + /** + * Enable and upload a vertex array for attribute `label`. The vertex data is specified in + * `buffer` with `dimension` number of components per vertex. + */ + public void setVertexAttribArray(String label, int dimension, FloatBuffer buffer) { + setVertexAttribArray(label, dimension, 0 /* stride */, buffer); + } + + /** + * Enable and upload a vertex array for attribute `label`. 
The vertex data is specified in + * `buffer` with `dimension` number of components per vertex and specified `stride`. + */ + public void setVertexAttribArray(String label, int dimension, int stride, FloatBuffer buffer) { + if (program == -1) { + throw new RuntimeException("The program has been released"); + } + int location = getAttribLocation(label); + GLES20.glEnableVertexAttribArray(location); + GLES20.glVertexAttribPointer(location, dimension, GLES20.GL_FLOAT, false, stride, buffer); + GlUtil.checkNoGLES2Error("setVertexAttribArray"); + } + + public int getUniformLocation(String label) { + if (program == -1) { + throw new RuntimeException("The program has been released"); + } + int location = GLES20.glGetUniformLocation(program, label); + if (location < 0) { + throw new RuntimeException("Could not locate uniform '" + label + "' in program"); + } + return location; + } + + public void useProgram() { + if (program == -1) { + throw new RuntimeException("The program has been released"); + } + synchronized (EglBase.lock) { + GLES20.glUseProgram(program); + } + GlUtil.checkNoGLES2Error("glUseProgram"); + } + + public void release() { + Logging.d(TAG, "Deleting shader."); + // Delete program, automatically detaching any shaders from it. + if (program != -1) { + GLES20.glDeleteProgram(program); + program = -1; + } + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/GlTextureFrameBuffer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/GlTextureFrameBuffer.java new file mode 100644 index 00000000..b906fe56 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/GlTextureFrameBuffer.java @@ -0,0 +1,122 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.opengl.GLES20; + +/** + * Helper class for handling OpenGL framebuffer with only color attachment and no depth or stencil + * buffer. Intended for simple tasks such as texture copy, texture downscaling, and texture color + * conversion. This class is not thread safe and must be used by a thread with an active GL context. + */ +// TODO(magjed): Add unittests for this class. +public class GlTextureFrameBuffer { + private final int pixelFormat; + private int frameBufferId; + private int textureId; + private int width; + private int height; + + /** + * Generate texture and framebuffer resources. An EGLContext must be bound on the current thread + * when calling this function. The framebuffer is not complete until setSize() is called. + */ + public GlTextureFrameBuffer(int pixelFormat) { + switch (pixelFormat) { + case GLES20.GL_LUMINANCE: + case GLES20.GL_RGB: + case GLES20.GL_RGBA: + this.pixelFormat = pixelFormat; + break; + default: + throw new IllegalArgumentException("Invalid pixel format: " + pixelFormat); + } + this.width = 0; + this.height = 0; + } + + /** + * (Re)allocate texture. Will do nothing if the requested size equals the current size. An + * EGLContext must be bound on the current thread when calling this function. Must be called at + * least once before using the framebuffer. May be called multiple times to change size. + */ + public void setSize(int width, int height) { + if (width <= 0 || height <= 0) { + throw new IllegalArgumentException("Invalid size: " + width + "x" + height); + } + if (width == this.width && height == this.height) { + return; + } + this.width = width; + this.height = height; + // Lazy allocation the first time setSize() is called. 
+ if (textureId == 0) { + textureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D); + } + if (frameBufferId == 0) { + final int frameBuffers[] = new int[1]; + GLES20.glGenFramebuffers(1, frameBuffers, 0); + frameBufferId = frameBuffers[0]; + } + + // Allocate texture. + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId); + GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, pixelFormat, width, height, 0, pixelFormat, + GLES20.GL_UNSIGNED_BYTE, null); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); + GlUtil.checkNoGLES2Error("GlTextureFrameBuffer setSize"); + + // Attach the texture to the framebuffer as color attachment. + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId); + GLES20.glFramebufferTexture2D( + GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureId, 0); + + // Check that the framebuffer is in a good state. + final int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER); + if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) { + throw new IllegalStateException("Framebuffer not complete, status: " + status); + } + + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); + } + + public int getWidth() { + return width; + } + + public int getHeight() { + return height; + } + + /** Gets the OpenGL frame buffer id. This value is only valid after setSize() has been called. */ + public int getFrameBufferId() { + return frameBufferId; + } + + /** Gets the OpenGL texture id. This value is only valid after setSize() has been called. */ + public int getTextureId() { + return textureId; + } + + /** + * Release texture and framebuffer. An EGLContext must be bound on the current thread when calling + * this function. This object should not be used after this call. 
+ */ + public void release() { + GLES20.glDeleteTextures(1, new int[] {textureId}, 0); + textureId = 0; + GLES20.glDeleteFramebuffers(1, new int[] {frameBufferId}, 0); + frameBufferId = 0; + width = 0; + height = 0; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/GlUtil.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/GlUtil.java new file mode 100644 index 00000000..e2dd0c56 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/GlUtil.java @@ -0,0 +1,66 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.opengl.GLES20; +import android.opengl.GLException; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.FloatBuffer; + +/** + * Some OpenGL static utility functions. + */ +public class GlUtil { + private GlUtil() {} + + public static class GlOutOfMemoryException extends GLException { + public GlOutOfMemoryException(int error, String msg) { + super(error, msg); + } + } + + // Assert that no OpenGL ES 2.0 error has been raised. + public static void checkNoGLES2Error(String msg) { + int error = GLES20.glGetError(); + if (error != GLES20.GL_NO_ERROR) { + throw error == GLES20.GL_OUT_OF_MEMORY + ? new GlOutOfMemoryException(error, msg) + : new GLException(error, msg + ": GLES20 error: " + error); + } + } + + public static FloatBuffer createFloatBuffer(float[] coords) { + // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it. 
+ ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * 4); + bb.order(ByteOrder.nativeOrder()); + FloatBuffer fb = bb.asFloatBuffer(); + fb.put(coords); + fb.position(0); + return fb; + } + + /** + * Generate texture with standard parameters. + */ + public static int generateTexture(int target) { + final int textureArray[] = new int[1]; + GLES20.glGenTextures(1, textureArray, 0); + final int textureId = textureArray[0]; + GLES20.glBindTexture(target, textureId); + GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); + GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); + GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); + checkNoGLES2Error("generateTexture"); + return textureId; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/H264Utils.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/H264Utils.java new file mode 100644 index 00000000..abb79c65 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/H264Utils.java @@ -0,0 +1,52 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import java.util.Map; +import java.util.HashMap; + +/** Container for static helper functions related to dealing with H264 codecs. 
*/ +class H264Utils { + public static final String H264_FMTP_PROFILE_LEVEL_ID = "profile-level-id"; + public static final String H264_FMTP_LEVEL_ASYMMETRY_ALLOWED = "level-asymmetry-allowed"; + public static final String H264_FMTP_PACKETIZATION_MODE = "packetization-mode"; + + public static final String H264_PROFILE_CONSTRAINED_BASELINE = "42e0"; + public static final String H264_PROFILE_CONSTRAINED_HIGH = "640c"; + public static final String H264_LEVEL_3_1 = "1f"; // 31 in hex. + public static final String H264_CONSTRAINED_HIGH_3_1 = + H264_PROFILE_CONSTRAINED_HIGH + H264_LEVEL_3_1; + public static final String H264_CONSTRAINED_BASELINE_3_1 = + H264_PROFILE_CONSTRAINED_BASELINE + H264_LEVEL_3_1; + + public static Map<String, String> getDefaultH264Params(boolean isHighProfile) { + final Map<String, String> params = new HashMap<>(); + params.put(VideoCodecInfo.H264_FMTP_LEVEL_ASYMMETRY_ALLOWED, "1"); + params.put(VideoCodecInfo.H264_FMTP_PACKETIZATION_MODE, "1"); + params.put(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID, + isHighProfile ?
VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1 + : VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1); + return params; + } + + public static VideoCodecInfo DEFAULT_H264_BASELINE_PROFILE_CODEC = + new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ false)); + public static VideoCodecInfo DEFAULT_H264_HIGH_PROFILE_CODEC = + new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ true)); + + public static boolean isSameH264Profile( + Map<String, String> params1, Map<String, String> params2) { + return nativeIsSameH264Profile(params1, params2); + } + + private static native boolean nativeIsSameH264Profile( + Map<String, String> params1, Map<String, String> params2); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/HardwareVideoDecoderFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/HardwareVideoDecoderFactory.java new file mode 100644 index 00000000..215598a8 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/HardwareVideoDecoderFactory.java @@ -0,0 +1,57 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.media.MediaCodecInfo; +import androidx.annotation.Nullable; +import java.util.Arrays; + +/** Factory for Android hardware VideoDecoders. */ +public class HardwareVideoDecoderFactory extends MediaCodecVideoDecoderFactory { + private final static Predicate<MediaCodecInfo> defaultAllowedPredicate = + new Predicate<MediaCodecInfo>() { + @Override + public boolean test(MediaCodecInfo arg) { + return MediaCodecUtils.isHardwareAccelerated(arg); + } + }; + + /** Creates a HardwareVideoDecoderFactory that does not use surface textures. */ + @Deprecated // Not removed yet to avoid breaking callers.
+ public HardwareVideoDecoderFactory() { + this(null); + } + + /** + * Creates a HardwareVideoDecoderFactory that supports surface texture rendering. + * + * @param sharedContext The textures generated will be accessible from this context. May be null, + * this disables texture support. + */ + public HardwareVideoDecoderFactory(@Nullable EglBase.Context sharedContext) { + this(sharedContext, /* codecAllowedPredicate= */ null); + } + + /** + * Creates a HardwareVideoDecoderFactory that supports surface texture rendering. + * + * @param sharedContext The textures generated will be accessible from this context. May be null, + * this disables texture support. + * @param codecAllowedPredicate predicate to filter codecs. It is combined with the default + * predicate that only allows hardware codecs. + */ + public HardwareVideoDecoderFactory(@Nullable EglBase.Context sharedContext, + @Nullable Predicate<MediaCodecInfo> codecAllowedPredicate) { + super(sharedContext, + (codecAllowedPredicate == null ? defaultAllowedPredicate + : codecAllowedPredicate.and(defaultAllowedPredicate))); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/HardwareVideoEncoder.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/HardwareVideoEncoder.java new file mode 100644 index 00000000..ad2d1925 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/HardwareVideoEncoder.java @@ -0,0 +1,810 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree.
+ */ + +package org.webrtc; + +import static android.media.MediaCodecInfo.CodecProfileLevel.AVCLevel3; +import static android.media.MediaCodecInfo.CodecProfileLevel.AVCProfileHigh; +import static android.media.MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR; + +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaCodecInfo.CodecCapabilities; +import android.media.MediaFormat; +import android.opengl.GLES20; +import android.os.Build; +import android.os.Bundle; +import android.view.Surface; + +import androidx.annotation.Nullable; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Map; +import java.util.concurrent.BlockingDeque; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.TimeUnit; + +import org.webrtc.ThreadUtils.ThreadChecker; + +/** + * Android hardware video encoder. + */ +class HardwareVideoEncoder implements VideoEncoder { + private static final String TAG = "HardwareVideoEncoder"; + + private static final int MAX_VIDEO_FRAMERATE = 30; + + // See MAX_ENCODER_Q_SIZE in androidmediaencoder.cc. + private static final int MAX_ENCODER_Q_SIZE = 2; + + private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; + private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000; + + // Size of the input frames should be multiple of 16 for the H/W encoder. + private static final int REQUIRED_RESOLUTION_ALIGNMENT = 16; + + /** + * Keeps track of the number of output buffers that have been passed down the pipeline and not yet + * released. We need to wait for this to go down to zero before operations invalidating the output + * buffers, i.e., stop() and getOutputBuffer(). + */ + private static class BusyCount { + private final Object countLock = new Object(); + private int count; + + public void increment() { + synchronized (countLock) { + count++; + } + } + + // This method may be called on an arbitrary thread. 
+ public void decrement() { + synchronized (countLock) { + count--; + if (count == 0) { + countLock.notifyAll(); + } + } + } + + // The increment and waitForZero methods are called on the same thread (deliverEncodedImage, + // running on the output thread). Hence, after waitForZero returns, the count will stay zero + // until the same thread calls increment. + public void waitForZero() { + boolean wasInterrupted = false; + synchronized (countLock) { + while (count > 0) { + try { + countLock.wait(); + } catch (InterruptedException e) { + Logging.e(TAG, "Interrupted while waiting on busy count", e); + wasInterrupted = true; + } + } + } + + if (wasInterrupted) { + Thread.currentThread().interrupt(); + } + } + } + + // --- Initialized on construction. + private final MediaCodecWrapperFactory mediaCodecWrapperFactory; + private final String codecName; + private final VideoCodecMimeType codecType; + private final Integer surfaceColorFormat; + private final Integer yuvColorFormat; + private final Map params; + private final int keyFrameIntervalSec; // Base interval for generating key frames. + // Interval at which to force a key frame. Used to reduce color distortions caused by some + // Qualcomm video encoders. + private final long forcedKeyFrameNs; + private final BitrateAdjuster bitrateAdjuster; + // EGL context shared with the application. Used to access texture inputs. + private final EglBase14.Context sharedContext; + + // Drawer used to draw input textures onto the codec's input surface. + private final GlRectDrawer textureDrawer = new GlRectDrawer(); + private final VideoFrameDrawer videoFrameDrawer = new VideoFrameDrawer(); + // A queue of EncodedImage.Builders that correspond to frames in the codec. These builders are + // pre-populated with all the information that can't be sent through MediaCodec. 
+ private final BlockingDeque outputBuilders = new LinkedBlockingDeque<>(); + + private final ThreadChecker encodeThreadChecker = new ThreadChecker(); + private final ThreadChecker outputThreadChecker = new ThreadChecker(); + private final BusyCount outputBuffersBusyCount = new BusyCount(); + + // --- Set on initialize and immutable until release. + private Callback callback; + private boolean automaticResizeOn; + + // --- Valid and immutable while an encoding session is running. + @Nullable + private MediaCodecWrapper codec; + // Thread that delivers encoded frames to the user callback. + @Nullable + private Thread outputThread; + + // EGL base wrapping the shared texture context. Holds hooks to both the shared context and the + // input surface. Making this base current allows textures from the context to be drawn onto the + // surface. + @Nullable + private EglBase14 textureEglBase; + // Input surface for the codec. The encoder will draw input textures onto this surface. + @Nullable + private Surface textureInputSurface; + + private int width; + private int height; + // Y-plane strides in the encoder's input + private int stride; + // Y-plane slice-height in the encoder's input + private int sliceHeight; + // True if encoder input color format is semi-planar (NV12). + private boolean isSemiPlanar; + // Size of frame for current color format and stride, in bytes. + private int frameSizeBytes; + private boolean useSurfaceMode; + + // --- Only accessed from the encoding thread. + // Presentation timestamp of next frame to encode. + private long nextPresentationTimestampUs; + // Presentation timestamp of the last requested (or forced) key frame. + private long lastKeyFrameNs; + + // --- Only accessed on the output thread. + // Contents of the last observed config frame output by the MediaCodec. Used by H.264. + @Nullable + private ByteBuffer configBuffer; + private int adjustedBitrate; + + // Whether the encoder is running. 
Volatile so that the output thread can watch this value and + // exit when the encoder stops. + private volatile boolean running; + // Any exception thrown during shutdown. The output thread releases the MediaCodec and uses this + // value to send exceptions thrown during release back to the encoder thread. + @Nullable + private volatile Exception shutdownException; + + // True if collection of encoding statistics is enabled. + private boolean isEncodingStatisticsEnabled; + + /** + * Creates a new HardwareVideoEncoder with the given codecName, codecType, colorFormat, key frame + * intervals, and bitrateAdjuster. + * + * @param codecName the hardware codec implementation to use + * @param codecType the type of the given video codec (eg. VP8, VP9, H264, H265, AV1) + * @param surfaceColorFormat color format for surface mode or null if not available + * @param yuvColorFormat color format for bytebuffer mode + * @param keyFrameIntervalSec interval in seconds between key frames; used to initialize the codec + * @param forceKeyFrameIntervalMs interval at which to force a key frame if one is not requested; + * used to reduce distortion caused by some codec implementations + * @param bitrateAdjuster algorithm used to correct codec implementations that do not produce the + * desired bitrates + * @throws IllegalArgumentException if colorFormat is unsupported + */ + public HardwareVideoEncoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName, + VideoCodecMimeType codecType, Integer surfaceColorFormat, Integer yuvColorFormat, + Map params, int keyFrameIntervalSec, int forceKeyFrameIntervalMs, + BitrateAdjuster bitrateAdjuster, EglBase14.Context sharedContext) { + this.mediaCodecWrapperFactory = mediaCodecWrapperFactory; + this.codecName = codecName; + this.codecType = codecType; + this.surfaceColorFormat = surfaceColorFormat; + this.yuvColorFormat = yuvColorFormat; + this.params = params; + this.keyFrameIntervalSec = keyFrameIntervalSec; + 
this.forcedKeyFrameNs = TimeUnit.MILLISECONDS.toNanos(forceKeyFrameIntervalMs); + this.bitrateAdjuster = bitrateAdjuster; + this.sharedContext = sharedContext; + + // Allow construction on a different thread. + encodeThreadChecker.detachThread(); + } + + @Override + public VideoCodecStatus initEncode(Settings settings, Callback callback) { + encodeThreadChecker.checkIsOnValidThread(); + + this.callback = callback; + automaticResizeOn = settings.automaticResizeOn; + + this.width = settings.width; + this.height = settings.height; + useSurfaceMode = canUseSurface(); + + if (settings.startBitrate != 0 && settings.maxFramerate != 0) { + bitrateAdjuster.setTargets(settings.startBitrate * 1000, settings.maxFramerate); + } + adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps(); + + Logging.d(TAG, + "initEncode name: " + codecName + " type: " + codecType + " width: " + width + + " height: " + height + " framerate_fps: " + settings.maxFramerate + + " bitrate_kbps: " + settings.startBitrate + " surface mode: " + useSurfaceMode); + return initEncodeInternal(); + } + + private VideoCodecStatus initEncodeInternal() { + encodeThreadChecker.checkIsOnValidThread(); + + nextPresentationTimestampUs = 0; + lastKeyFrameNs = -1; + + isEncodingStatisticsEnabled = false; + + try { + codec = mediaCodecWrapperFactory.createByCodecName(codecName); + } catch (IOException | IllegalArgumentException e) { + Logging.e(TAG, "Cannot create media encoder " + codecName); + return VideoCodecStatus.FALLBACK_SOFTWARE; + } + + final int colorFormat = useSurfaceMode ? 
surfaceColorFormat : yuvColorFormat; + try { + MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height); + format.setInteger(MediaFormat.KEY_BIT_RATE, adjustedBitrate); + format.setInteger(MediaFormat.KEY_BITRATE_MODE, BITRATE_MODE_CBR); + format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat); + format.setFloat( + MediaFormat.KEY_FRAME_RATE, (float) bitrateAdjuster.getAdjustedFramerateFps()); + format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec); + if (codecType == VideoCodecMimeType.H264) { + String profileLevelId = params.get(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID); + if (profileLevelId == null) { + profileLevelId = VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1; + } + switch (profileLevelId) { + case VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1: + format.setInteger("profile", AVCProfileHigh); + format.setInteger("level", AVCLevel3); + break; + case VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1: + break; + default: + Logging.w(TAG, "Unknown profile level id: " + profileLevelId); + } + } + + if (codecName.equals("c2.google.av1.encoder")) { + // Enable RTC mode in AV1 HW encoder. 
+ format.setInteger("vendor.google-av1enc.encoding-preset.int32.value", 1); + } + + if (isEncodingStatisticsSupported()) { + format.setInteger(MediaFormat.KEY_VIDEO_ENCODING_STATISTICS_LEVEL, + MediaFormat.VIDEO_ENCODING_STATISTICS_LEVEL_1); + isEncodingStatisticsEnabled = true; + } + + Logging.d(TAG, "Format: " + format); + codec.configure( + format, null /* surface */, null /* crypto */, MediaCodec.CONFIGURE_FLAG_ENCODE); + + if (useSurfaceMode) { + textureEglBase = EglBase.createEgl14(sharedContext, EglBase.CONFIG_RECORDABLE); + textureInputSurface = codec.createInputSurface(); + textureEglBase.createSurface(textureInputSurface); + textureEglBase.makeCurrent(); + } + + updateInputFormat(codec.getInputFormat()); + + codec.start(); + } catch (IllegalArgumentException | IllegalStateException e) { + Logging.e(TAG, "initEncodeInternal failed", e); + release(); + return VideoCodecStatus.FALLBACK_SOFTWARE; + } + + running = true; + outputThreadChecker.detachThread(); + outputThread = createOutputThread(); + outputThread.start(); + + return VideoCodecStatus.OK; + } + + @Override + public VideoCodecStatus release() { + encodeThreadChecker.checkIsOnValidThread(); + + final VideoCodecStatus returnValue; + if (outputThread == null) { + returnValue = VideoCodecStatus.OK; + } else { + // The outputThread actually stops and releases the codec once running is false. + running = false; + if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) { + Logging.e(TAG, "Media encoder release timeout"); + returnValue = VideoCodecStatus.TIMEOUT; + } else if (shutdownException != null) { + // Log the exception and turn it into an error. 
+ Logging.e(TAG, "Media encoder release exception", shutdownException); + returnValue = VideoCodecStatus.ERROR; + } else { + returnValue = VideoCodecStatus.OK; + } + } + + textureDrawer.release(); + videoFrameDrawer.release(); + if (textureEglBase != null) { + textureEglBase.release(); + textureEglBase = null; + } + if (textureInputSurface != null) { + textureInputSurface.release(); + textureInputSurface = null; + } + outputBuilders.clear(); + + codec = null; + outputThread = null; + + // Allow changing thread after release. + encodeThreadChecker.detachThread(); + + return returnValue; + } + + @Override + public VideoCodecStatus encode(VideoFrame videoFrame, EncodeInfo encodeInfo) { + encodeThreadChecker.checkIsOnValidThread(); + if (codec == null) { + return VideoCodecStatus.UNINITIALIZED; + } + + final boolean isTextureBuffer = videoFrame.getBuffer() instanceof VideoFrame.TextureBuffer; + + // If input resolution changed, restart the codec with the new resolution. + final int frameWidth = videoFrame.getBuffer().getWidth(); + final int frameHeight = videoFrame.getBuffer().getHeight(); + final boolean shouldUseSurfaceMode = canUseSurface() && isTextureBuffer; + if (frameWidth != width || frameHeight != height || shouldUseSurfaceMode != useSurfaceMode) { + VideoCodecStatus status = resetCodec(frameWidth, frameHeight, shouldUseSurfaceMode); + if (status != VideoCodecStatus.OK) { + return status; + } + } + + if (outputBuilders.size() > MAX_ENCODER_Q_SIZE) { + // Too many frames in the encoder. Drop this frame. + Logging.e(TAG, "Dropped frame, encoder queue full"); + //https://github.com/open-webrtc-toolkit/owt-deps-webrtc/issues/117 + VideoCodecStatus status = resetCodec(frameWidth, frameHeight, shouldUseSurfaceMode); + if (status != VideoCodecStatus.OK) { + return status; + } + return VideoCodecStatus.NO_OUTPUT; // See webrtc bug 2887. 
+ } + + boolean requestedKeyFrame = false; + for (EncodedImage.FrameType frameType : encodeInfo.frameTypes) { + if (frameType == EncodedImage.FrameType.VideoFrameKey) { + requestedKeyFrame = true; + } + } + + if (requestedKeyFrame || shouldForceKeyFrame(videoFrame.getTimestampNs())) { + requestKeyFrame(videoFrame.getTimestampNs()); + } + + EncodedImage.Builder builder = EncodedImage.builder() + .setCaptureTimeNs(videoFrame.getTimestampNs()) + .setEncodedWidth(videoFrame.getBuffer().getWidth()) + .setEncodedHeight(videoFrame.getBuffer().getHeight()) + .setRotation(videoFrame.getRotation()); + outputBuilders.offer(builder); + + long presentationTimestampUs = nextPresentationTimestampUs; + // Round frame duration down to avoid bitrate overshoot. + long frameDurationUs = + (long) (TimeUnit.SECONDS.toMicros(1) / bitrateAdjuster.getAdjustedFramerateFps()); + nextPresentationTimestampUs += frameDurationUs; + + final VideoCodecStatus returnValue; + if (useSurfaceMode) { + returnValue = encodeTextureBuffer(videoFrame, presentationTimestampUs); + } else { + returnValue = encodeByteBuffer(videoFrame, presentationTimestampUs); + } + + // Check if the queue was successful. + if (returnValue != VideoCodecStatus.OK) { + // Keep the output builders in sync with buffers in the codec. + outputBuilders.pollLast(); + } + + return returnValue; + } + + private VideoCodecStatus encodeTextureBuffer( + VideoFrame videoFrame, long presentationTimestampUs) { + encodeThreadChecker.checkIsOnValidThread(); + try { + // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway, + // but it's a workaround for bug webrtc:5147. + GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); + // It is not necessary to release this frame because it doesn't own the buffer. 
+ VideoFrame derotatedFrame = + new VideoFrame(videoFrame.getBuffer(), 0 /* rotation */, videoFrame.getTimestampNs()); + videoFrameDrawer.drawFrame(derotatedFrame, textureDrawer, null /* additionalRenderMatrix */); + textureEglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs)); + } catch (RuntimeException e) { + Logging.e(TAG, "encodeTexture failed", e); + return VideoCodecStatus.ERROR; + } + return VideoCodecStatus.OK; + } + + private VideoCodecStatus encodeByteBuffer(VideoFrame videoFrame, long presentationTimestampUs) { + encodeThreadChecker.checkIsOnValidThread(); + // No timeout. Don't block for an input buffer, drop frames if the encoder falls behind. + int index; + try { + index = codec.dequeueInputBuffer(0 /* timeout */); + } catch (IllegalStateException e) { + Logging.e(TAG, "dequeueInputBuffer failed", e); + return VideoCodecStatus.ERROR; + } + + if (index == -1) { + // Encoder is falling behind. No input buffers available. Drop the frame. + Logging.d(TAG, "Dropped frame, no input buffers available"); + return VideoCodecStatus.NO_OUTPUT; // See webrtc bug 2887. + } + + ByteBuffer buffer; + try { + buffer = codec.getInputBuffer(index); + } catch (IllegalStateException e) { + Logging.e(TAG, "getInputBuffer with index=" + index + " failed", e); + return VideoCodecStatus.ERROR; + } + + if (buffer.capacity() < frameSizeBytes) { + Logging.e(TAG, + "Input buffer size: " + buffer.capacity() + + " is smaller than frame size: " + frameSizeBytes); + return VideoCodecStatus.ERROR; + } + + fillInputBuffer(buffer, videoFrame.getBuffer()); + + try { + codec.queueInputBuffer( + index, 0 /* offset */, frameSizeBytes, presentationTimestampUs, 0 /* flags */); + } catch (IllegalStateException e) { + Logging.e(TAG, "queueInputBuffer failed", e); + // IllegalStateException thrown when the codec is in the wrong state. 
+ return VideoCodecStatus.ERROR; + } + return VideoCodecStatus.OK; + } + + @Override + public VideoCodecStatus setRateAllocation(BitrateAllocation bitrateAllocation, int framerate) { + encodeThreadChecker.checkIsOnValidThread(); + if (framerate > MAX_VIDEO_FRAMERATE) { + framerate = MAX_VIDEO_FRAMERATE; + } + bitrateAdjuster.setTargets(bitrateAllocation.getSum(), framerate); + return VideoCodecStatus.OK; + } + + @Override + public VideoCodecStatus setRates(RateControlParameters rcParameters) { + encodeThreadChecker.checkIsOnValidThread(); + bitrateAdjuster.setTargets(rcParameters.bitrate.getSum(), rcParameters.framerateFps); + return VideoCodecStatus.OK; + } + + @Override + public ScalingSettings getScalingSettings() { + if (automaticResizeOn) { + if (codecType == VideoCodecMimeType.VP8) { + final int kLowVp8QpThreshold = 29; + final int kHighVp8QpThreshold = 95; + return new ScalingSettings(kLowVp8QpThreshold, kHighVp8QpThreshold); + } else if (codecType == VideoCodecMimeType.H264) { + final int kLowH264QpThreshold = 24; + final int kHighH264QpThreshold = 37; + return new ScalingSettings(kLowH264QpThreshold, kHighH264QpThreshold); + } + } + return ScalingSettings.OFF; + } + + @Override + public String getImplementationName() { + return codecName; + } + + @Override + public EncoderInfo getEncoderInfo() { + // Since our MediaCodec is guaranteed to encode 16-pixel-aligned frames only, we set alignment + // value to be 16. Additionally, this encoder produces a single stream. So it should not require + // alignment for all layers. 
+ return new EncoderInfo( + /* requestedResolutionAlignment= */ REQUIRED_RESOLUTION_ALIGNMENT, + /* applyAlignmentToAllSimulcastLayers= */ false); + } + + private VideoCodecStatus resetCodec(int newWidth, int newHeight, boolean newUseSurfaceMode) { + encodeThreadChecker.checkIsOnValidThread(); + VideoCodecStatus status = release(); + if (status != VideoCodecStatus.OK) { + return status; + } + width = newWidth; + height = newHeight; + useSurfaceMode = newUseSurfaceMode; + return initEncodeInternal(); + } + + private boolean shouldForceKeyFrame(long presentationTimestampNs) { + encodeThreadChecker.checkIsOnValidThread(); + return forcedKeyFrameNs > 0 && presentationTimestampNs > lastKeyFrameNs + forcedKeyFrameNs; + } + + private void requestKeyFrame(long presentationTimestampNs) { + encodeThreadChecker.checkIsOnValidThread(); + // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could + // indicate this in queueInputBuffer() below and guarantee _this_ frame + // be encoded as a key frame, but sadly that flag is ignored. Instead, + // we request a key frame "soon". + try { + Bundle b = new Bundle(); + b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0); + codec.setParameters(b); + } catch (IllegalStateException e) { + Logging.e(TAG, "requestKeyFrame failed", e); + return; + } + lastKeyFrameNs = presentationTimestampNs; + } + + private Thread createOutputThread() { + return new Thread() { + @Override + public void run() { + while (running) { + deliverEncodedImage(); + } + releaseCodecOnOutputThread(); + } + }; + } + + // Visible for testing. 
+ protected void deliverEncodedImage() { + outputThreadChecker.checkIsOnValidThread(); + try { + MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); + int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US); + if (index < 0) { + if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + outputBuffersBusyCount.waitForZero(); + } + return; + } + + ByteBuffer outputBuffer = codec.getOutputBuffer(index); + outputBuffer.position(info.offset); + outputBuffer.limit(info.offset + info.size); + + if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { + Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size); + if (info.size > 0 + && (codecType == VideoCodecMimeType.H264 || codecType == VideoCodecMimeType.H265)) { + // In case of H264 and H265 config buffer contains SPS and PPS headers. Presence of these + // headers makes IDR frame a truly keyframe. Some encoders issue IDR frames without SPS + // and PPS. We save config buffer here to prepend it to all IDR frames encoder delivers. + configBuffer = ByteBuffer.allocateDirect(info.size); + configBuffer.put(outputBuffer); + } + return; + } + + bitrateAdjuster.reportEncodedFrame(info.size); + if (adjustedBitrate != bitrateAdjuster.getAdjustedBitrateBps()) { + updateBitrate(); + } + + final boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0; + if (isKeyFrame) { + Logging.d(TAG, "Sync frame generated"); + } + + // Extract QP before releasing output buffer. 
+ Integer qp = null; + if (isEncodingStatisticsEnabled) { + MediaFormat format = codec.getOutputFormat(index); + if (format != null && format.containsKey(MediaFormat.KEY_VIDEO_QP_AVERAGE)) { + qp = format.getInteger(MediaFormat.KEY_VIDEO_QP_AVERAGE); + } + } + + final ByteBuffer frameBuffer; + final Runnable releaseCallback; + if (isKeyFrame && configBuffer != null) { + Logging.d(TAG, + "Prepending config buffer of size " + configBuffer.capacity() + + " to output buffer with offset " + info.offset + ", size " + info.size); + frameBuffer = ByteBuffer.allocateDirect(info.size + configBuffer.capacity()); + configBuffer.rewind(); + frameBuffer.put(configBuffer); + frameBuffer.put(outputBuffer); + frameBuffer.rewind(); + codec.releaseOutputBuffer(index, /* render= */ false); + releaseCallback = null; + } else { + frameBuffer = outputBuffer.slice(); + outputBuffersBusyCount.increment(); + releaseCallback = () -> { + // This callback should not throw any exceptions since + // it may be called on an arbitrary thread. + // Check bug webrtc:11230 for more details. + try { + codec.releaseOutputBuffer(index, /* render= */ false); + } catch (Exception e) { + Logging.e(TAG, "releaseOutputBuffer failed", e); + } + outputBuffersBusyCount.decrement(); + }; + } + + final EncodedImage.FrameType frameType = isKeyFrame ? EncodedImage.FrameType.VideoFrameKey + : EncodedImage.FrameType.VideoFrameDelta; + + EncodedImage.Builder builder = outputBuilders.poll(); + builder.setBuffer(frameBuffer, releaseCallback); + builder.setFrameType(frameType); + builder.setQp(qp); + + EncodedImage encodedImage = builder.createEncodedImage(); + // TODO(mellem): Set codec-specific info. + callback.onEncodedFrame(encodedImage, new CodecSpecificInfo()); + // Note that the callback may have retained the image. 
+ encodedImage.release(); + } catch (IllegalStateException e) { + Logging.e(TAG, "deliverOutput failed", e); + } + } + + private void releaseCodecOnOutputThread() { + outputThreadChecker.checkIsOnValidThread(); + Logging.d(TAG, "Releasing MediaCodec on output thread"); + outputBuffersBusyCount.waitForZero(); + try { + codec.stop(); + } catch (Exception e) { + Logging.e(TAG, "Media encoder stop failed", e); + } + try { + codec.release(); + } catch (Exception e) { + Logging.e(TAG, "Media encoder release failed", e); + // Propagate exceptions caught during release back to the main thread. + shutdownException = e; + } + configBuffer = null; + Logging.d(TAG, "Release on output thread done"); + } + + private VideoCodecStatus updateBitrate() { + outputThreadChecker.checkIsOnValidThread(); + adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps(); + try { + Bundle params = new Bundle(); + params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, adjustedBitrate); + codec.setParameters(params); + return VideoCodecStatus.OK; + } catch (IllegalStateException e) { + Logging.e(TAG, "updateBitrate failed", e); + return VideoCodecStatus.ERROR; + } + } + + private boolean canUseSurface() { + return sharedContext != null && surfaceColorFormat != null; + } + + /** + * Fetches stride and slice height from input media format + */ + private void updateInputFormat(MediaFormat format) { + stride = width; + sliceHeight = height; + + if (format != null) { + if (format.containsKey(MediaFormat.KEY_STRIDE)) { + stride = format.getInteger(MediaFormat.KEY_STRIDE); + stride = Math.max(stride, width); + } + + if (format.containsKey(MediaFormat.KEY_SLICE_HEIGHT)) { + sliceHeight = format.getInteger(MediaFormat.KEY_SLICE_HEIGHT); + sliceHeight = Math.max(sliceHeight, height); + } + } + + isSemiPlanar = isSemiPlanar(yuvColorFormat); + if (isSemiPlanar) { + int chromaHeight = (height + 1) / 2; + frameSizeBytes = sliceHeight * stride + chromaHeight * stride; + } else { + int chromaStride = (stride + 1) 
/ 2; + int chromaSliceHeight = (sliceHeight + 1) / 2; + frameSizeBytes = sliceHeight * stride + chromaSliceHeight * chromaStride * 2; + } + + Logging.d(TAG, + "updateInputFormat format: " + format + " stride: " + stride + + " sliceHeight: " + sliceHeight + " isSemiPlanar: " + isSemiPlanar + + " frameSizeBytes: " + frameSizeBytes); + } + + protected boolean isEncodingStatisticsSupported() { + // WebRTC quality scaler, which adjusts resolution and/or frame rate based on encoded QP, + // expects QP to be in native bitstream range for given codec. Native QP range for VP8 is + // [0, 127] and for VP9 is [0, 255]. MediaCodec VP8 and VP9 encoders (perhaps not all) + // return QP in range [0, 64], which is libvpx API specific range. Due to this mismatch we + // can't use QP feedback from these codecs. + if (codecType == VideoCodecMimeType.VP8 || codecType == VideoCodecMimeType.VP9) { + return false; + } + + MediaCodecInfo codecInfo = codec.getCodecInfo(); + if (codecInfo == null) { + return false; + } + + CodecCapabilities codecCaps = codecInfo.getCapabilitiesForType(codecType.mimeType()); + if (codecCaps == null) { + return false; + } + + return codecCaps.isFeatureSupported(CodecCapabilities.FEATURE_EncodingStatistics); + } + + // Visible for testing. 
+ protected void fillInputBuffer(ByteBuffer buffer, VideoFrame.Buffer frame) { + VideoFrame.I420Buffer i420 = frame.toI420(); + if (isSemiPlanar) { + YuvHelper.I420ToNV12(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(), + i420.getDataV(), i420.getStrideV(), buffer, i420.getWidth(), i420.getHeight(), stride, + sliceHeight); + } else { + YuvHelper.I420Copy(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(), + i420.getDataV(), i420.getStrideV(), buffer, i420.getWidth(), i420.getHeight(), stride, + sliceHeight); + } + i420.release(); + } + + protected boolean isSemiPlanar(int colorFormat) { + switch (colorFormat) { + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar: + return false; + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar: + case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar: + case MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: + return true; + default: + throw new IllegalArgumentException("Unsupported colorFormat: " + colorFormat); + } + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java new file mode 100644 index 00000000..4d103ff9 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java @@ -0,0 +1,280 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import static org.webrtc.MediaCodecUtils.EXYNOS_PREFIX; +import static org.webrtc.MediaCodecUtils.HISI_PREFIX; +import static org.webrtc.MediaCodecUtils.INTEL_PREFIX; +import static org.webrtc.MediaCodecUtils.MTK_PREFIX; +import static org.webrtc.MediaCodecUtils.QCOM_PREFIX; + +import android.media.MediaCodecInfo; +import android.media.MediaCodecList; +import android.os.Build; +import androidx.annotation.Nullable; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +/** Factory for android hardware video encoders. */ +@SuppressWarnings("deprecation") // API 16 requires the use of deprecated methods. +public class HardwareVideoEncoderFactory implements VideoEncoderFactory { + private static final String TAG = "HardwareVideoEncoderFactory"; + + // We don't need periodic keyframes. But some HW encoders, Exynos in particular, fails to + // initialize with value -1 which should disable periodic keyframes according to the spec. Set it + // to 1 hour. + private static final int PERIODIC_KEY_FRAME_INTERVAL_S = 3600; + + // Forced key frame interval - used to reduce color distortions on Qualcomm platforms. + private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS = 15000; + private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS = 20000; + private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS = 15000; + + // List of devices with poor H.264 encoder quality. + // HW H.264 encoder on below devices has poor bitrate control - actual + // bitrates deviates a lot from the target value. 
+ private static final List H264_HW_EXCEPTION_MODELS = + Arrays.asList("SAMSUNG-SGH-I337", "Nexus 7", "Nexus 4"); + + @Nullable private final EglBase14.Context sharedContext; + private final boolean enableIntelVp8Encoder; + private final boolean enableH264HighProfile; + @Nullable private final Predicate codecAllowedPredicate; + + /** + * Creates a HardwareVideoEncoderFactory that supports surface texture encoding. + * + * @param sharedContext The textures generated will be accessible from this context. May be null, + * this disables texture support. + * @param enableIntelVp8Encoder true if Intel's VP8 encoder enabled. + * @param enableH264HighProfile true if H264 High Profile enabled. + */ + public HardwareVideoEncoderFactory( + EglBase.Context sharedContext, boolean enableIntelVp8Encoder, boolean enableH264HighProfile) { + this(sharedContext, enableIntelVp8Encoder, enableH264HighProfile, + /* codecAllowedPredicate= */ null); + } + + /** + * Creates a HardwareVideoEncoderFactory that supports surface texture encoding. + * + * @param sharedContext The textures generated will be accessible from this context. May be null, + * this disables texture support. + * @param enableIntelVp8Encoder true if Intel's VP8 encoder enabled. + * @param enableH264HighProfile true if H264 High Profile enabled. + * @param codecAllowedPredicate optional predicate to filter codecs. All codecs are allowed + * when predicate is not provided. + */ + public HardwareVideoEncoderFactory(EglBase.Context sharedContext, boolean enableIntelVp8Encoder, + boolean enableH264HighProfile, @Nullable Predicate codecAllowedPredicate) { + // Texture mode requires EglBase14. + if (sharedContext instanceof EglBase14.Context) { + this.sharedContext = (EglBase14.Context) sharedContext; + } else { + Logging.w(TAG, "No shared EglBase.Context. 
Encoders will not use texture mode."); + this.sharedContext = null; + } + this.enableIntelVp8Encoder = enableIntelVp8Encoder; + this.enableH264HighProfile = enableH264HighProfile; + this.codecAllowedPredicate = codecAllowedPredicate; + } + + @Deprecated + public HardwareVideoEncoderFactory(boolean enableIntelVp8Encoder, boolean enableH264HighProfile) { + this(null, enableIntelVp8Encoder, enableH264HighProfile); + } + + @Nullable + @Override + public VideoEncoder createEncoder(VideoCodecInfo input) { + VideoCodecMimeType type = VideoCodecMimeType.valueOf(input.getName()); + MediaCodecInfo info = findCodecForType(type); + + if (info == null) { + return null; + } + + String codecName = info.getName(); + String mime = type.mimeType(); + Integer surfaceColorFormat = MediaCodecUtils.selectColorFormat( + MediaCodecUtils.TEXTURE_COLOR_FORMATS, info.getCapabilitiesForType(mime)); + Integer yuvColorFormat = MediaCodecUtils.selectColorFormat( + MediaCodecUtils.ENCODER_COLOR_FORMATS, info.getCapabilitiesForType(mime)); + + if (type == VideoCodecMimeType.H264) { + boolean isHighProfile = H264Utils.isSameH264Profile( + input.params, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true)); + boolean isBaselineProfile = H264Utils.isSameH264Profile( + input.params, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false)); + + if (!isHighProfile && !isBaselineProfile) { + return null; + } + if (isHighProfile && !isH264HighProfileSupported(info)) { + return null; + } + } + + return new HardwareVideoEncoder(new MediaCodecWrapperFactoryImpl(), codecName, type, + surfaceColorFormat, yuvColorFormat, input.params, PERIODIC_KEY_FRAME_INTERVAL_S, + getForcedKeyFrameIntervalMs(type, codecName), createBitrateAdjuster(type, codecName), + sharedContext); + } + + @Override + public VideoCodecInfo[] getSupportedCodecs() { + List supportedCodecInfos = new ArrayList(); + // Generate a list of supported codecs in order of preference: + // VP8, VP9, H264 (high profile), H264 
(baseline profile), AV1 and H265. + for (VideoCodecMimeType type : + new VideoCodecMimeType[] {VideoCodecMimeType.VP8, VideoCodecMimeType.VP9, + VideoCodecMimeType.H264, VideoCodecMimeType.AV1, VideoCodecMimeType.H265}) { + MediaCodecInfo codec = findCodecForType(type); + if (codec != null) { + String name = type.name(); + // TODO(sakal): Always add H264 HP once WebRTC correctly removes codecs that are not + // supported by the decoder. + if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) { + supportedCodecInfos.add(new VideoCodecInfo( + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true))); + } + + supportedCodecInfos.add(new VideoCodecInfo( + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false))); + } + } + + return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]); + } + + private @Nullable MediaCodecInfo findCodecForType(VideoCodecMimeType type) { + for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) { + MediaCodecInfo info = null; + try { + info = MediaCodecList.getCodecInfoAt(i); + } catch (IllegalArgumentException e) { + Logging.e(TAG, "Cannot retrieve encoder codec info", e); + } + + if (info == null || !info.isEncoder()) { + continue; + } + + if (isSupportedCodec(info, type)) { + return info; + } + } + return null; // No support for this type. + } + + // Returns true if the given MediaCodecInfo indicates a supported encoder for the given type. + private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecMimeType type) { + if (!MediaCodecUtils.codecSupportsType(info, type)) { + return false; + } + // Check for a supported color format. 
+ if (MediaCodecUtils.selectColorFormat( + MediaCodecUtils.ENCODER_COLOR_FORMATS, info.getCapabilitiesForType(type.mimeType())) + == null) { + return false; + } + return isHardwareSupportedInCurrentSdk(info, type) && isMediaCodecAllowed(info); + } + + // Returns true if the given MediaCodecInfo indicates a hardware module that is supported on the + // current SDK. + private boolean isHardwareSupportedInCurrentSdk(MediaCodecInfo info, VideoCodecMimeType type) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { + return info.isHardwareAccelerated(); + } + + switch (type) { + case VP8: + return isHardwareSupportedInCurrentSdkVp8(info); + case VP9: + return isHardwareSupportedInCurrentSdkVp9(info); + case H264: + return isHardwareSupportedInCurrentSdkH264(info); + case H265: + case AV1: + return false; + } + return false; + } + + private boolean isHardwareSupportedInCurrentSdkVp8(MediaCodecInfo info) { + String name = info.getName(); + // QCOM Vp8 encoder is always supported. + return name.startsWith(QCOM_PREFIX) + // Exynos VP8 encoder is supported in M or later. + || (name.startsWith(EXYNOS_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) + // Intel Vp8 encoder is always supported, with the intel encoder enabled. + || (name.startsWith(INTEL_PREFIX) && enableIntelVp8Encoder); + } + + private boolean isHardwareSupportedInCurrentSdkVp9(MediaCodecInfo info) { + String name = info.getName(); + return (name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX)) + // Both QCOM and Exynos VP9 encoders are supported in N or later. + && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N; + } + + private boolean isHardwareSupportedInCurrentSdkH264(MediaCodecInfo info) { + // First, H264 hardware might perform poorly on this model. + if (H264_HW_EXCEPTION_MODELS.contains(Build.MODEL)) { + return false; + } + String name = info.getName(); + // QCOM and Exynos H264 encoders are always supported. 
+ return name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX) || name.startsWith(HISI_PREFIX)|| name.startsWith(MTK_PREFIX); + } + + private boolean isMediaCodecAllowed(MediaCodecInfo info) { + if (codecAllowedPredicate == null) { + return true; + } + return codecAllowedPredicate.test(info); + } + + private int getForcedKeyFrameIntervalMs(VideoCodecMimeType type, String codecName) { + if (type == VideoCodecMimeType.VP8 && codecName.startsWith(QCOM_PREFIX)) { + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) { + return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS; + } + if (Build.VERSION.SDK_INT == Build.VERSION_CODES.M) { + return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS; + } + return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS; + } + // Other codecs don't need key frame forcing. + return 0; + } + + private BitrateAdjuster createBitrateAdjuster(VideoCodecMimeType type, String codecName) { + if (codecName.startsWith(EXYNOS_PREFIX)) { + if (type == VideoCodecMimeType.VP8) { + // Exynos VP8 encoders need dynamic bitrate adjustment. + return new DynamicBitrateAdjuster(); + } else { + // Exynos VP9 and H264 encoders need framerate-based bitrate adjustment. + return new FramerateBitrateAdjuster(); + } + } + // Other codecs don't need bitrate adjustment. + return new BaseBitrateAdjuster(); + } + + private boolean isH264HighProfileSupported(MediaCodecInfo info) { + return enableH264HighProfile && Build.VERSION.SDK_INT > Build.VERSION_CODES.M + && info.getName().startsWith(EXYNOS_PREFIX); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Histogram.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Histogram.java new file mode 100644 index 00000000..87798613 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Histogram.java @@ -0,0 +1,44 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * Class for holding the native pointer of a histogram. Since there is no way to destroy a + * histogram, please don't create unnecessary instances of this object. This class is thread safe. + * + * Usage example: + * private static final Histogram someMetricHistogram = + * Histogram.createCounts("WebRTC.Video.SomeMetric", 1, 10000, 50); + * someMetricHistogram.addSample(someVariable); + */ +class Histogram { + private final long handle; + + private Histogram(long handle) { + this.handle = handle; + } + + static public Histogram createCounts(String name, int min, int max, int bucketCount) { + return new Histogram(nativeCreateCounts(name, min, max, bucketCount)); + } + + static public Histogram createEnumeration(String name, int max) { + return new Histogram(nativeCreateEnumeration(name, max)); + } + + public void addSample(int sample) { + nativeAddSample(handle, sample); + } + + private static native long nativeCreateCounts(String name, int min, int max, int bucketCount); + private static native long nativeCreateEnumeration(String name, int max); + private static native void nativeAddSample(long handle, int sample); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/IceCandidate.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/IceCandidate.java new file mode 100644 index 00000000..5f00b2a5 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/IceCandidate.java @@ -0,0 +1,86 @@ +/* + * Copyright 2013 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.util.Arrays; +import org.webrtc.PeerConnection; + +/** + * Representation of a single ICE Candidate, mirroring + * {@code IceCandidateInterface} in the C++ API. + */ +public class IceCandidate { + public final String sdpMid; + public final int sdpMLineIndex; + public final String sdp; + public final String serverUrl; + public final PeerConnection.AdapterType adapterType; + + public IceCandidate(String sdpMid, int sdpMLineIndex, String sdp) { + this.sdpMid = sdpMid; + this.sdpMLineIndex = sdpMLineIndex; + this.sdp = sdp; + this.serverUrl = ""; + this.adapterType = PeerConnection.AdapterType.UNKNOWN; + } + + @CalledByNative + IceCandidate(String sdpMid, int sdpMLineIndex, String sdp, String serverUrl, + PeerConnection.AdapterType adapterType) { + this.sdpMid = sdpMid; + this.sdpMLineIndex = sdpMLineIndex; + this.sdp = sdp; + this.serverUrl = serverUrl; + this.adapterType = adapterType; + } + + @Override + public String toString() { + return sdpMid + ":" + sdpMLineIndex + ":" + sdp + ":" + serverUrl + ":" + + adapterType.toString(); + } + + @CalledByNative + String getSdpMid() { + return sdpMid; + } + + @CalledByNative + String getSdp() { + return sdp; + } + + /** equals() checks sdpMid, sdpMLineIndex, and sdp for equality. 
*/ + @Override + public boolean equals(@Nullable Object object) { + if (!(object instanceof IceCandidate)) { + return false; + } + + IceCandidate that = (IceCandidate) object; + return objectEquals(this.sdpMid, that.sdpMid) && this.sdpMLineIndex == that.sdpMLineIndex + && objectEquals(this.sdp, that.sdp); + } + + @Override + public int hashCode() { + Object[] values = {sdpMid, sdpMLineIndex, sdp}; + return Arrays.hashCode(values); + } + + private static boolean objectEquals(Object o1, Object o2) { + if (o1 == null) { + return o2 == null; + } + return o1.equals(o2); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/IceCandidateErrorEvent.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/IceCandidateErrorEvent.java new file mode 100644 index 00000000..aae9da70 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/IceCandidateErrorEvent.java @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +public final class IceCandidateErrorEvent { + /** The local IP address used to communicate with the STUN or TURN server. */ + public final String address; + /** The port used to communicate with the STUN or TURN server. */ + public final int port; + /** + * The STUN or TURN URL that identifies the STUN or TURN server for which the failure occurred. + */ + public final String url; + /** + * The numeric STUN error code returned by the STUN or TURN server. If no host candidate can reach + * the server, errorCode will be set to the value 701 which is outside the STUN error code range. 
+ * This error is only fired once per server URL while in the RTCIceGatheringState of "gathering". + */ + public final int errorCode; + /** + * The STUN reason text returned by the STUN or TURN server. If the server could not be reached, + * errorText will be set to an implementation-specific value providing details about the error. + */ + public final String errorText; + + @CalledByNative + public IceCandidateErrorEvent( + String address, int port, String url, int errorCode, String errorText) { + this.address = address; + this.port = port; + this.url = url; + this.errorCode = errorCode; + this.errorText = errorText; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/JNILogging.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/JNILogging.java new file mode 100644 index 00000000..f391db61 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/JNILogging.java @@ -0,0 +1,28 @@ +/* + * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import org.webrtc.CalledByNative; +import org.webrtc.Loggable; +import org.webrtc.Logging.Severity; + +class JNILogging { + private final Loggable loggable; + + public JNILogging(Loggable loggable) { + this.loggable = loggable; + } + + @CalledByNative + public void logToInjectable(String message, Integer severity, String tag) { + loggable.onLogMessage(message, Severity.values()[severity], tag); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/JavaI420Buffer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/JavaI420Buffer.java new file mode 100644 index 00000000..322b8f38 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/JavaI420Buffer.java @@ -0,0 +1,200 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.nio.ByteBuffer; +import org.webrtc.VideoFrame.I420Buffer; + +/** Implementation of VideoFrame.I420Buffer backed by Java direct byte buffers. 
*/ +public class JavaI420Buffer implements VideoFrame.I420Buffer { + private final int width; + private final int height; + private final ByteBuffer dataY; + private final ByteBuffer dataU; + private final ByteBuffer dataV; + private final int strideY; + private final int strideU; + private final int strideV; + private final RefCountDelegate refCountDelegate; + + private JavaI420Buffer(int width, int height, ByteBuffer dataY, int strideY, ByteBuffer dataU, + int strideU, ByteBuffer dataV, int strideV, @Nullable Runnable releaseCallback) { + this.width = width; + this.height = height; + this.dataY = dataY; + this.dataU = dataU; + this.dataV = dataV; + this.strideY = strideY; + this.strideU = strideU; + this.strideV = strideV; + this.refCountDelegate = new RefCountDelegate(releaseCallback); + } + + private static void checkCapacity(ByteBuffer data, int width, int height, int stride) { + // The last row does not necessarily need padding. + final int minCapacity = stride * (height - 1) + width; + if (data.capacity() < minCapacity) { + throw new IllegalArgumentException( + "Buffer must be at least " + minCapacity + " bytes, but was " + data.capacity()); + } + } + + /** Wraps existing ByteBuffers into JavaI420Buffer object without copying the contents. */ + public static JavaI420Buffer wrap(int width, int height, ByteBuffer dataY, int strideY, + ByteBuffer dataU, int strideU, ByteBuffer dataV, int strideV, + @Nullable Runnable releaseCallback) { + if (dataY == null || dataU == null || dataV == null) { + throw new IllegalArgumentException("Data buffers cannot be null."); + } + if (!dataY.isDirect() || !dataU.isDirect() || !dataV.isDirect()) { + throw new IllegalArgumentException("Data buffers must be direct byte buffers."); + } + + // Slice the buffers to prevent external modifications to the position / limit of the buffer. + // Note that this doesn't protect the contents of the buffers from modifications. 
+ dataY = dataY.slice(); + dataU = dataU.slice(); + dataV = dataV.slice(); + + final int chromaWidth = (width + 1) / 2; + final int chromaHeight = (height + 1) / 2; + checkCapacity(dataY, width, height, strideY); + checkCapacity(dataU, chromaWidth, chromaHeight, strideU); + checkCapacity(dataV, chromaWidth, chromaHeight, strideV); + + return new JavaI420Buffer( + width, height, dataY, strideY, dataU, strideU, dataV, strideV, releaseCallback); + } + + /** Allocates an empty I420Buffer suitable for an image of the given dimensions. */ + public static JavaI420Buffer allocate(int width, int height) { + int chromaHeight = (height + 1) / 2; + int strideUV = (width + 1) / 2; + int yPos = 0; + int uPos = yPos + width * height; + int vPos = uPos + strideUV * chromaHeight; + + ByteBuffer buffer = + JniCommon.nativeAllocateByteBuffer(width * height + 2 * strideUV * chromaHeight); + + buffer.position(yPos); + buffer.limit(uPos); + ByteBuffer dataY = buffer.slice(); + + buffer.position(uPos); + buffer.limit(vPos); + ByteBuffer dataU = buffer.slice(); + + buffer.position(vPos); + buffer.limit(vPos + strideUV * chromaHeight); + ByteBuffer dataV = buffer.slice(); + + return new JavaI420Buffer(width, height, dataY, width, dataU, strideUV, dataV, strideUV, + () -> { JniCommon.nativeFreeByteBuffer(buffer); }); + } + + @Override + public int getWidth() { + return width; + } + + @Override + public int getHeight() { + return height; + } + + @Override + public ByteBuffer getDataY() { + // Return a slice to prevent relative reads from changing the position. + return dataY.slice(); + } + + @Override + public ByteBuffer getDataU() { + // Return a slice to prevent relative reads from changing the position. + return dataU.slice(); + } + + @Override + public ByteBuffer getDataV() { + // Return a slice to prevent relative reads from changing the position. 
+ return dataV.slice(); + } + + @Override + public int getStrideY() { + return strideY; + } + + @Override + public int getStrideU() { + return strideU; + } + + @Override + public int getStrideV() { + return strideV; + } + + @Override + public I420Buffer toI420() { + retain(); + return this; + } + + @Override + public void retain() { + refCountDelegate.retain(); + } + + @Override + public void release() { + refCountDelegate.release(); + } + + @Override + public VideoFrame.Buffer cropAndScale( + int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) { + return cropAndScaleI420(this, cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight); + } + + public static VideoFrame.Buffer cropAndScaleI420(final I420Buffer buffer, int cropX, int cropY, + int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) { + if (cropWidth == scaleWidth && cropHeight == scaleHeight) { + // No scaling. + ByteBuffer dataY = buffer.getDataY(); + ByteBuffer dataU = buffer.getDataU(); + ByteBuffer dataV = buffer.getDataV(); + + dataY.position(cropX + cropY * buffer.getStrideY()); + dataU.position(cropX / 2 + cropY / 2 * buffer.getStrideU()); + dataV.position(cropX / 2 + cropY / 2 * buffer.getStrideV()); + + buffer.retain(); + return JavaI420Buffer.wrap(scaleWidth, scaleHeight, dataY.slice(), buffer.getStrideY(), + dataU.slice(), buffer.getStrideU(), dataV.slice(), buffer.getStrideV(), buffer::release); + } + + JavaI420Buffer newBuffer = JavaI420Buffer.allocate(scaleWidth, scaleHeight); + nativeCropAndScaleI420(buffer.getDataY(), buffer.getStrideY(), buffer.getDataU(), + buffer.getStrideU(), buffer.getDataV(), buffer.getStrideV(), cropX, cropY, cropWidth, + cropHeight, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(), + newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV(), scaleWidth, + scaleHeight); + return newBuffer; + } + + private static native void nativeCropAndScaleI420(ByteBuffer srcY, int srcStrideY, + 
ByteBuffer srcU, int srcStrideU, ByteBuffer srcV, int srcStrideV, int cropX, int cropY, + int cropWidth, int cropHeight, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU, + int dstStrideU, ByteBuffer dstV, int dstStrideV, int scaleWidth, int scaleHeight); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/JniCommon.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/JniCommon.java new file mode 100644 index 00000000..e1b2e513 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/JniCommon.java @@ -0,0 +1,23 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import java.nio.ByteBuffer; + +/** Class with static JNI helper functions that are used in many places. */ +public class JniCommon { + /** Functions to increment/decrement an rtc::RefCountInterface pointer. */ + public static native void nativeAddRef(long refCountedPointer); + public static native void nativeReleaseRef(long refCountedPointer); + + public static native ByteBuffer nativeAllocateByteBuffer(int size); + public static native void nativeFreeByteBuffer(ByteBuffer buffer); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/JniHelper.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/JniHelper.java new file mode 100644 index 00000000..0d56d5d9 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/JniHelper.java @@ -0,0 +1,48 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import java.io.UnsupportedEncodingException; +import java.util.Map; + +/** + * This class is only used from jni_helper.cc to give some Java functionality that were not possible + * to generate in other ways due to bugs.webrtc.org/8606 and bugs.webrtc.org/8632. + */ +class JniHelper { + // TODO(bugs.webrtc.org/8632): Remove. + @CalledByNative + static byte[] getStringBytes(String s) { + try { + return s.getBytes("ISO-8859-1"); + } catch (UnsupportedEncodingException e) { + throw new RuntimeException("ISO-8859-1 is unsupported"); + } + } + + // TODO(bugs.webrtc.org/8632): Remove. + @CalledByNative + static Object getStringClass() { + return String.class; + } + + // TODO(bugs.webrtc.org/8606): Remove. + @CalledByNative + static Object getKey(Map.Entry entry) { + return entry.getKey(); + } + + // TODO(bugs.webrtc.org/8606): Remove. + @CalledByNative + static Object getValue(Map.Entry entry) { + return entry.getValue(); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/LibaomAv1Encoder.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/LibaomAv1Encoder.java new file mode 100644 index 00000000..569a719f --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/LibaomAv1Encoder.java @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +public class LibaomAv1Encoder extends WrappedNativeVideoEncoder { + @Override + public long createNativeVideoEncoder() { + return nativeCreateEncoder(); + } + + static native long nativeCreateEncoder(); + + @Override + public boolean isHardwareEncoder() { + return false; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp8Decoder.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp8Decoder.java new file mode 100644 index 00000000..54ad0aa1 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp8Decoder.java @@ -0,0 +1,20 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +public class LibvpxVp8Decoder extends WrappedNativeVideoDecoder { + @Override + public long createNativeVideoDecoder() { + return nativeCreateDecoder(); + } + + static native long nativeCreateDecoder(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp8Encoder.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp8Encoder.java new file mode 100644 index 00000000..4be9e52c --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp8Encoder.java @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +public class LibvpxVp8Encoder extends WrappedNativeVideoEncoder { + @Override + public long createNativeVideoEncoder() { + return nativeCreateEncoder(); + } + + static native long nativeCreateEncoder(); + + @Override + public boolean isHardwareEncoder() { + return false; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp9Decoder.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp9Decoder.java new file mode 100644 index 00000000..90a24433 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp9Decoder.java @@ -0,0 +1,22 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +public class LibvpxVp9Decoder extends WrappedNativeVideoDecoder { + @Override + public long createNativeVideoDecoder() { + return nativeCreateDecoder(); + } + + static native long nativeCreateDecoder(); + + static native boolean nativeIsSupported(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp9Encoder.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp9Encoder.java new file mode 100644 index 00000000..1211ae93 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/LibvpxVp9Encoder.java @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +public class LibvpxVp9Encoder extends WrappedNativeVideoEncoder { + @Override + public long createNativeVideoEncoder() { + return nativeCreateEncoder(); + } + + static native long nativeCreateEncoder(); + + @Override + public boolean isHardwareEncoder() { + return false; + } + + static native boolean nativeIsSupported(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Loggable.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Loggable.java new file mode 100644 index 00000000..cd66aa12 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Loggable.java @@ -0,0 +1,22 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import org.webrtc.Logging.Severity; + +/** + * Java interface for WebRTC logging. The default implementation uses webrtc.Logging. + * + * When injected, the Loggable will receive logging from both Java and native. + */ +public interface Loggable { + public void onLogMessage(String message, Severity severity, String tag); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Logging.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Logging.java new file mode 100644 index 00000000..e7a9921f --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Logging.java @@ -0,0 +1,201 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.util.EnumSet; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.webrtc.Loggable; + +/** + * Java wrapper for WebRTC logging. Logging defaults to java.util.logging.Logger, but a custom + * logger implementing the Loggable interface can be injected along with a Severity. All subsequent + * log messages will then be redirected to the injected Loggable, except those with a severity lower + * than the specified severity, which will be discarded. + * + * It is also possible to switch to native logging (rtc::LogMessage) if one of the following static + * functions are called from the app: + * - Logging.enableLogThreads + * - Logging.enableLogTimeStamps + * - Logging.enableLogToDebugOutput + * + * The priority goes: + * 1. Injected loggable + * 2. Native logging + * 3. Fallback logging. + * Only one method will be used at a time. + * + * Injecting a Loggable or using any of the enable... methods requires that the native library is + * loaded, using PeerConnectionFactory.initialize. 
+ */ +public class Logging { + private static final Logger fallbackLogger = createFallbackLogger(); + private static volatile boolean loggingEnabled; + @Nullable private static Loggable loggable; + private static Severity loggableSeverity; + + private static Logger createFallbackLogger() { + final Logger fallbackLogger = Logger.getLogger("org.webrtc.Logging"); + fallbackLogger.setLevel(Level.ALL); + return fallbackLogger; + } + + static void injectLoggable(Loggable injectedLoggable, Severity severity) { + if (injectedLoggable != null) { + loggable = injectedLoggable; + loggableSeverity = severity; + } + } + + static void deleteInjectedLoggable() { + loggable = null; + } + + // TODO(solenberg): Remove once dependent projects updated. + @Deprecated + public enum TraceLevel { + TRACE_NONE(0x0000), + TRACE_STATEINFO(0x0001), + TRACE_WARNING(0x0002), + TRACE_ERROR(0x0004), + TRACE_CRITICAL(0x0008), + TRACE_APICALL(0x0010), + TRACE_DEFAULT(0x00ff), + TRACE_MODULECALL(0x0020), + TRACE_MEMORY(0x0100), + TRACE_TIMER(0x0200), + TRACE_STREAM(0x0400), + TRACE_DEBUG(0x0800), + TRACE_INFO(0x1000), + TRACE_TERSEINFO(0x2000), + TRACE_ALL(0xffff); + + public final int level; + TraceLevel(int level) { + this.level = level; + } + } + + // Keep in sync with webrtc/rtc_base/logging.h:LoggingSeverity. + public enum Severity { LS_VERBOSE, LS_INFO, LS_WARNING, LS_ERROR, LS_NONE } + + public static void enableLogThreads() { + nativeEnableLogThreads(); + } + + public static void enableLogTimeStamps() { + nativeEnableLogTimeStamps(); + } + + // TODO(solenberg): Remove once dependent projects updated. + @Deprecated + public static void enableTracing(String path, EnumSet<TraceLevel> levels) {} + + // Enable diagnostic logging for messages of `severity` to the platform debug + // output. On Android, the output will be directed to Logcat. + // Note: this function starts collecting the output of the RTC_LOG() macros. + // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. 
+ @SuppressWarnings("NoSynchronizedMethodCheck") + public static synchronized void enableLogToDebugOutput(Severity severity) { + if (loggable != null) { + throw new IllegalStateException( + "Logging to native debug output not supported while Loggable is injected. " + + "Delete the Loggable before calling this method."); + } + nativeEnableLogToDebugOutput(severity.ordinal()); + loggingEnabled = true; + } + + public static void log(Severity severity, String tag, String message) { + if (tag == null || message == null) { + throw new IllegalArgumentException("Logging tag or message may not be null."); + } + if (loggable != null) { + // Filter log messages below loggableSeverity. + if (severity.ordinal() < loggableSeverity.ordinal()) { + return; + } + loggable.onLogMessage(message, severity, tag); + return; + } + + // Try native logging if no loggable is injected. + if (loggingEnabled) { + nativeLog(severity.ordinal(), tag, message); + return; + } + + // Fallback to system log. + Level level; + switch (severity) { + case LS_ERROR: + level = Level.SEVERE; + break; + case LS_WARNING: + level = Level.WARNING; + break; + case LS_INFO: + level = Level.INFO; + break; + default: + level = Level.FINE; + break; + } + fallbackLogger.log(level, tag + ": " + message); + } + + public static void d(String tag, String message) { + log(Severity.LS_INFO, tag, message); + } + + public static void e(String tag, String message) { + log(Severity.LS_ERROR, tag, message); + } + + public static void w(String tag, String message) { + log(Severity.LS_WARNING, tag, message); + } + + public static void e(String tag, String message, Throwable e) { + log(Severity.LS_ERROR, tag, message); + log(Severity.LS_ERROR, tag, e.toString()); + log(Severity.LS_ERROR, tag, getStackTraceString(e)); + } + + public static void w(String tag, String message, Throwable e) { + log(Severity.LS_WARNING, tag, message); + log(Severity.LS_WARNING, tag, e.toString()); + log(Severity.LS_WARNING, tag, getStackTraceString(e)); 
+ } + + public static void v(String tag, String message) { + log(Severity.LS_VERBOSE, tag, message); + } + + private static String getStackTraceString(Throwable e) { + if (e == null) { + return ""; + } + + StringWriter sw = new StringWriter(); + PrintWriter pw = new PrintWriter(sw); + e.printStackTrace(pw); + return sw.toString(); + } + + private static native void nativeEnableLogToDebugOutput(int nativeSeverity); + private static native void nativeEnableLogThreads(); + private static native void nativeEnableLogTimeStamps(); + private static native void nativeLog(int severity, String tag, String message); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecUtils.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecUtils.java new file mode 100644 index 00000000..8e7616bc --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecUtils.java @@ -0,0 +1,135 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.annotation.TargetApi; +import android.media.MediaCodecInfo; +import android.media.MediaCodecInfo.CodecCapabilities; +import android.os.Build; +import androidx.annotation.Nullable; +import java.util.HashMap; +import java.util.Map; + +/** Container class for static constants and helpers used with MediaCodec. */ +// We are forced to use the old API because we want to support API level < 21. +@SuppressWarnings("deprecation") +class MediaCodecUtils { + private static final String TAG = "MediaCodecUtils"; + + // Prefixes for supported hardware encoder/decoder component names. 
+ static final String EXYNOS_PREFIX = "OMX.Exynos."; + static final String INTEL_PREFIX = "OMX.Intel."; + static final String NVIDIA_PREFIX = "OMX.Nvidia."; + static final String QCOM_PREFIX = "OMX.qcom."; + static final String HISI_PREFIX = "OMX.hisi."; + + static final String MTK_PREFIX = "OMX.mtk."; + + + static final String[] SOFTWARE_IMPLEMENTATION_PREFIXES = { + "OMX.google.", "OMX.SEC.", "c2.android"}; + + // NV12 color format supported by QCOM codec, but not declared in MediaCodec - + // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h + static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka = 0x7FA30C01; + static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka = 0x7FA30C02; + static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka = 0x7FA30C03; + static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04; + + // Color formats supported by hardware decoder - in order of preference. + static final int[] DECODER_COLOR_FORMATS = new int[] {CodecCapabilities.COLOR_FormatYUV420Planar, + CodecCapabilities.COLOR_FormatYUV420SemiPlanar, + CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar, + MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka, + MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka, + MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka, + MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m}; + + // Color formats supported by hardware encoder - in order of preference. + static final int[] ENCODER_COLOR_FORMATS = { + MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar, + MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar, + MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar, + MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m}; + + // Color formats supported by texture mode encoding - in order of preference. 
+ static final int[] TEXTURE_COLOR_FORMATS = + new int[] {MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface}; + + static @Nullable Integer selectColorFormat( + int[] supportedColorFormats, CodecCapabilities capabilities) { + for (int supportedColorFormat : supportedColorFormats) { + for (int codecColorFormat : capabilities.colorFormats) { + if (codecColorFormat == supportedColorFormat) { + return codecColorFormat; + } + } + } + return null; + } + + static boolean codecSupportsType(MediaCodecInfo info, VideoCodecMimeType type) { + for (String mimeType : info.getSupportedTypes()) { + if (type.mimeType().equals(mimeType)) { + return true; + } + } + return false; + } + + static Map getCodecProperties(VideoCodecMimeType type, boolean highProfile) { + switch (type) { + case VP8: + case VP9: + case AV1: + case H265: + return new HashMap(); + case H264: + return H264Utils.getDefaultH264Params(highProfile); + default: + throw new IllegalArgumentException("Unsupported codec: " + type); + } + } + + static boolean isHardwareAccelerated(MediaCodecInfo info) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { + return isHardwareAcceleratedQOrHigher(info); + } + return !isSoftwareOnly(info); + } + + @TargetApi(29) + private static boolean isHardwareAcceleratedQOrHigher(android.media.MediaCodecInfo codecInfo) { + return codecInfo.isHardwareAccelerated(); + } + + static boolean isSoftwareOnly(android.media.MediaCodecInfo codecInfo) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { + return isSoftwareOnlyQOrHigher(codecInfo); + } + String name = codecInfo.getName(); + for (String prefix : SOFTWARE_IMPLEMENTATION_PREFIXES) { + if (name.startsWith(prefix)) { + return true; + } + } + return false; + } + + @TargetApi(29) + private static boolean isSoftwareOnlyQOrHigher(android.media.MediaCodecInfo codecInfo) { + return codecInfo.isSoftwareOnly(); + } + + private MediaCodecUtils() { + // This class should not be instantiated. 
+ } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecVideoDecoderFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecVideoDecoderFactory.java new file mode 100644 index 00000000..6ef0eb5e --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecVideoDecoderFactory.java @@ -0,0 +1,141 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import static org.webrtc.MediaCodecUtils.EXYNOS_PREFIX; +import static org.webrtc.MediaCodecUtils.HISI_PREFIX; +import static org.webrtc.MediaCodecUtils.QCOM_PREFIX; + +import android.media.MediaCodecInfo; +import android.media.MediaCodecInfo.CodecCapabilities; +import android.media.MediaCodecList; +import android.os.Build; +import androidx.annotation.Nullable; +import java.util.ArrayList; +import java.util.List; + +/** Factory for decoders backed by Android MediaCodec API. */ +@SuppressWarnings("deprecation") // API level 16 requires use of deprecated methods. +class MediaCodecVideoDecoderFactory implements VideoDecoderFactory { + private static final String TAG = "MediaCodecVideoDecoderFactory"; + + private final @Nullable EglBase.Context sharedContext; + private final @Nullable Predicate codecAllowedPredicate; + + /** + * MediaCodecVideoDecoderFactory with support of codecs filtering. + * + * @param sharedContext The textures generated will be accessible from this context. May be null, + * this disables texture support. + * @param codecAllowedPredicate optional predicate to test if codec allowed. All codecs are + * allowed when predicate is not provided. 
+ */ + public MediaCodecVideoDecoderFactory(@Nullable EglBase.Context sharedContext, + @Nullable Predicate codecAllowedPredicate) { + this.sharedContext = sharedContext; + this.codecAllowedPredicate = codecAllowedPredicate; + } + + @Nullable + @Override + public VideoDecoder createDecoder(VideoCodecInfo codecType) { + VideoCodecMimeType type = VideoCodecMimeType.valueOf(codecType.getName()); + MediaCodecInfo info = findCodecForType(type); + + if (info == null) { + return null; + } + + CodecCapabilities capabilities = info.getCapabilitiesForType(type.mimeType()); + return new AndroidVideoDecoder(new MediaCodecWrapperFactoryImpl(), info.getName(), type, + MediaCodecUtils.selectColorFormat(MediaCodecUtils.DECODER_COLOR_FORMATS, capabilities), + sharedContext); + } + + @Override + public VideoCodecInfo[] getSupportedCodecs() { + List supportedCodecInfos = new ArrayList(); + // Generate a list of supported codecs in order of preference: + // VP8, VP9, H264 (high profile), H264 (baseline profile), AV1 and H265. 
+ for (VideoCodecMimeType type : + new VideoCodecMimeType[] {VideoCodecMimeType.VP8, VideoCodecMimeType.VP9, + VideoCodecMimeType.H264, VideoCodecMimeType.AV1, VideoCodecMimeType.H265}) { + MediaCodecInfo codec = findCodecForType(type); + if (codec != null) { + String name = type.name(); + if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) { + supportedCodecInfos.add(new VideoCodecInfo( + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true))); + } + + supportedCodecInfos.add(new VideoCodecInfo( + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false))); + } + } + + return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]); + } + + private @Nullable MediaCodecInfo findCodecForType(VideoCodecMimeType type) { + for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) { + MediaCodecInfo info = null; + try { + info = MediaCodecList.getCodecInfoAt(i); + } catch (IllegalArgumentException e) { + Logging.e(TAG, "Cannot retrieve decoder codec info", e); + } + + if (info == null || info.isEncoder()) { + continue; + } + + if (isSupportedCodec(info, type)) { + return info; + } + } + + return null; // No support for this type. + } + + // Returns true if the given MediaCodecInfo indicates a supported encoder for the given type. + private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecMimeType type) { + if (!MediaCodecUtils.codecSupportsType(info, type)) { + return false; + } + // Check for a supported color format. 
+ if (MediaCodecUtils.selectColorFormat( + MediaCodecUtils.DECODER_COLOR_FORMATS, info.getCapabilitiesForType(type.mimeType())) + == null) { + return false; + } + return isCodecAllowed(info); + } + + private boolean isCodecAllowed(MediaCodecInfo info) { + if (codecAllowedPredicate == null) { + return true; + } + return codecAllowedPredicate.test(info); + } + + private boolean isH264HighProfileSupported(MediaCodecInfo info) { + String name = info.getName(); + // Support H.264 HP decoding on QCOM chips. + if (name.startsWith(QCOM_PREFIX)) { + return true; + } + // Support H.264 HP decoding on Exynos chips for Android M and above. + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && name.startsWith(EXYNOS_PREFIX)) { + return true; + } + return false; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecWrapper.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecWrapper.java new file mode 100644 index 00000000..11e0f58d --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecWrapper.java @@ -0,0 +1,60 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaCrypto; +import android.media.MediaFormat; +import android.os.Bundle; +import android.view.Surface; +import java.nio.ByteBuffer; + +/** + * Subset of methods defined in {@link android.media.MediaCodec} needed by + * {@link HardwareVideoEncoder} and {@link AndroidVideoDecoder}. This interface + * exists to allow mocking and using a fake implementation in tests. 
+ */ +interface MediaCodecWrapper { + void configure(MediaFormat format, Surface surface, MediaCrypto crypto, int flags); + + void start(); + + void flush(); + + void stop(); + + void release(); + + int dequeueInputBuffer(long timeoutUs); + + void queueInputBuffer(int index, int offset, int size, long presentationTimeUs, int flags); + + int dequeueOutputBuffer(MediaCodec.BufferInfo info, long timeoutUs); + + void releaseOutputBuffer(int index, boolean render); + + MediaFormat getInputFormat(); + + MediaFormat getOutputFormat(); + + MediaFormat getOutputFormat(int index); + + ByteBuffer getInputBuffer(int index); + + ByteBuffer getOutputBuffer(int index); + + Surface createInputSurface(); + + void setParameters(Bundle params); + + MediaCodecInfo getCodecInfo(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecWrapperFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecWrapperFactory.java new file mode 100644 index 00000000..2962cb62 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecWrapperFactory.java @@ -0,0 +1,22 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import java.io.IOException; + +interface MediaCodecWrapperFactory { + /** + * Creates a new {@link MediaCodecWrapper} by codec name. + * + *

For additional information see {@link android.media.MediaCodec#createByCodecName}. + */ + MediaCodecWrapper createByCodecName(String name) throws IOException; +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecWrapperFactoryImpl.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecWrapperFactoryImpl.java new file mode 100644 index 00000000..207492f3 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaCodecWrapperFactoryImpl.java @@ -0,0 +1,126 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.media.MediaCodec; +import android.media.MediaCodec.BufferInfo; +import android.media.MediaCodecInfo; +import android.media.MediaCrypto; +import android.media.MediaFormat; +import android.os.Bundle; +import android.view.Surface; +import java.io.IOException; +import java.nio.ByteBuffer; + +/** + * Implementation of MediaCodecWrapperFactory that returns MediaCodecInterfaces wrapping + * {@link android.media.MediaCodec} objects. 
+ */ +class MediaCodecWrapperFactoryImpl implements MediaCodecWrapperFactory { + private static class MediaCodecWrapperImpl implements MediaCodecWrapper { + private final MediaCodec mediaCodec; + + public MediaCodecWrapperImpl(MediaCodec mediaCodec) { + this.mediaCodec = mediaCodec; + } + + @Override + public void configure(MediaFormat format, Surface surface, MediaCrypto crypto, int flags) { + mediaCodec.configure(format, surface, crypto, flags); + } + + @Override + public void start() { + mediaCodec.start(); + } + + @Override + public void flush() { + mediaCodec.flush(); + } + + @Override + public void stop() { + mediaCodec.stop(); + } + + @Override + public void release() { + mediaCodec.release(); + } + + @Override + public int dequeueInputBuffer(long timeoutUs) { + return mediaCodec.dequeueInputBuffer(timeoutUs); + } + + @Override + public void queueInputBuffer( + int index, int offset, int size, long presentationTimeUs, int flags) { + mediaCodec.queueInputBuffer(index, offset, size, presentationTimeUs, flags); + } + + @Override + public int dequeueOutputBuffer(BufferInfo info, long timeoutUs) { + return mediaCodec.dequeueOutputBuffer(info, timeoutUs); + } + + @Override + public void releaseOutputBuffer(int index, boolean render) { + mediaCodec.releaseOutputBuffer(index, render); + } + + @Override + public MediaFormat getInputFormat() { + return mediaCodec.getInputFormat(); + } + + @Override + public MediaFormat getOutputFormat() { + return mediaCodec.getOutputFormat(); + } + + @Override + public MediaFormat getOutputFormat(int index) { + return mediaCodec.getOutputFormat(index); + } + + @Override + public ByteBuffer getInputBuffer(int index) { + return mediaCodec.getInputBuffer(index); + } + + @Override + public ByteBuffer getOutputBuffer(int index) { + return mediaCodec.getOutputBuffer(index); + } + + @Override + public Surface createInputSurface() { + return mediaCodec.createInputSurface(); + } + + @Override + public void setParameters(Bundle params) { + 
mediaCodec.setParameters(params); + } + + @Override + public MediaCodecInfo getCodecInfo() { + return mediaCodec.getCodecInfo(); + } + } + + @Override + public MediaCodecWrapper createByCodecName(String name) throws IOException { + return new MediaCodecWrapperImpl(MediaCodec.createByCodecName(name)); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaConstraints.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaConstraints.java new file mode 100644 index 00000000..bae04e53 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaConstraints.java @@ -0,0 +1,99 @@ +/* + * Copyright 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.util.ArrayList; +import java.util.List; + +/** + * Description of media constraints for {@code MediaStream} and + * {@code PeerConnection}. + */ +public class MediaConstraints { + /** Simple String key/value pair. 
*/ + public static class KeyValuePair { + private final String key; + private final String value; + + public KeyValuePair(String key, String value) { + this.key = key; + this.value = value; + } + + @CalledByNative("KeyValuePair") + public String getKey() { + return key; + } + + @CalledByNative("KeyValuePair") + public String getValue() { + return value; + } + + @Override + public String toString() { + return key + ": " + value; + } + + @Override + public boolean equals(@Nullable Object other) { + if (this == other) { + return true; + } + if (other == null || getClass() != other.getClass()) { + return false; + } + KeyValuePair that = (KeyValuePair) other; + return key.equals(that.key) && value.equals(that.value); + } + + @Override + public int hashCode() { + return key.hashCode() + value.hashCode(); + } + } + + public final List mandatory; + public final List optional; + + public MediaConstraints() { + mandatory = new ArrayList(); + optional = new ArrayList(); + } + + private static String stringifyKeyValuePairList(List list) { + StringBuilder builder = new StringBuilder("["); + for (KeyValuePair pair : list) { + if (builder.length() > 1) { + builder.append(", "); + } + builder.append(pair.toString()); + } + return builder.append("]").toString(); + } + + @Override + public String toString() { + return "mandatory: " + stringifyKeyValuePairList(mandatory) + ", optional: " + + stringifyKeyValuePairList(optional); + } + + @CalledByNative + List getMandatory() { + return mandatory; + } + + @CalledByNative + List getOptional() { + return optional; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaSource.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaSource.java new file mode 100644 index 00000000..9245e3e2 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaSource.java @@ -0,0 +1,74 @@ +/* + * Copyright 2013 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** Java wrapper for a C++ MediaSourceInterface. */ +public class MediaSource { + /** Tracks MediaSourceInterface.SourceState */ + public enum State { + INITIALIZING, + LIVE, + ENDED, + MUTED; + + @CalledByNative("State") + static State fromNativeIndex(int nativeIndex) { + return values()[nativeIndex]; + } + } + + private final RefCountDelegate refCountDelegate; + private long nativeSource; + + public MediaSource(long nativeSource) { + refCountDelegate = new RefCountDelegate(() -> JniCommon.nativeReleaseRef(nativeSource)); + this.nativeSource = nativeSource; + } + + public State state() { + checkMediaSourceExists(); + return nativeGetState(nativeSource); + } + + public void dispose() { + checkMediaSourceExists(); + refCountDelegate.release(); + nativeSource = 0; + } + + /** Returns a pointer to webrtc::MediaSourceInterface. */ + protected long getNativeMediaSource() { + checkMediaSourceExists(); + return nativeSource; + } + + /** + * Runs code in {@code runnable} holding a reference to the media source. If the object has + * already been released, does nothing. 
+ */ + void runWithReference(Runnable runnable) { + if (refCountDelegate.safeRetain()) { + try { + runnable.run(); + } finally { + refCountDelegate.release(); + } + } + } + + private void checkMediaSourceExists() { + if (nativeSource == 0) { + throw new IllegalStateException("MediaSource has been disposed."); + } + } + + private static native State nativeGetState(long pointer); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaStream.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaStream.java new file mode 100644 index 00000000..e530fe5f --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaStream.java @@ -0,0 +1,159 @@ +/* + * Copyright 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; + +/** Java wrapper for a C++ MediaStreamInterface. 
*/ +public class MediaStream { + private static final String TAG = "MediaStream"; + + public final List audioTracks = new ArrayList<>(); + public final List videoTracks = new ArrayList<>(); + public final List preservedVideoTracks = new ArrayList<>(); + private long nativeStream; + + @CalledByNative + public MediaStream(long nativeStream) { + this.nativeStream = nativeStream; + } + + public boolean addTrack(AudioTrack track) { + checkMediaStreamExists(); + if (nativeAddAudioTrackToNativeStream(nativeStream, track.getNativeAudioTrack())) { + audioTracks.add(track); + return true; + } + return false; + } + + public boolean addTrack(VideoTrack track) { + checkMediaStreamExists(); + if (nativeAddVideoTrackToNativeStream(nativeStream, track.getNativeVideoTrack())) { + videoTracks.add(track); + return true; + } + return false; + } + + // Tracks added in addTrack() call will be auto released once MediaStream.dispose() + // is called. If video track need to be preserved after MediaStream is destroyed it + // should be added to MediaStream using addPreservedTrack() call. + public boolean addPreservedTrack(VideoTrack track) { + checkMediaStreamExists(); + if (nativeAddVideoTrackToNativeStream(nativeStream, track.getNativeVideoTrack())) { + preservedVideoTracks.add(track); + return true; + } + return false; + } + + public boolean removeTrack(AudioTrack track) { + checkMediaStreamExists(); + audioTracks.remove(track); + return nativeRemoveAudioTrack(nativeStream, track.getNativeAudioTrack()); + } + + public boolean removeTrack(VideoTrack track) { + checkMediaStreamExists(); + videoTracks.remove(track); + preservedVideoTracks.remove(track); + return nativeRemoveVideoTrack(nativeStream, track.getNativeVideoTrack()); + } + + @CalledByNative + public void dispose() { + checkMediaStreamExists(); + // Remove and release previously added audio and video tracks. 
+ while (!audioTracks.isEmpty()) { + AudioTrack track = audioTracks.get(0 /* index */); + removeTrack(track); + track.dispose(); + } + while (!videoTracks.isEmpty()) { + VideoTrack track = videoTracks.get(0 /* index */); + removeTrack(track); + track.dispose(); + } + // Remove, but do not release preserved video tracks. + while (!preservedVideoTracks.isEmpty()) { + removeTrack(preservedVideoTracks.get(0 /* index */)); + } + JniCommon.nativeReleaseRef(nativeStream); + nativeStream = 0; + } + + public String getId() { + checkMediaStreamExists(); + return nativeGetId(nativeStream); + } + + @Override + public String toString() { + return "[" + getId() + ":A=" + audioTracks.size() + ":V=" + videoTracks.size() + "]"; + } + + @CalledByNative + void addNativeAudioTrack(long nativeTrack) { + audioTracks.add(new AudioTrack(nativeTrack)); + } + + @CalledByNative + void addNativeVideoTrack(long nativeTrack) { + videoTracks.add(new VideoTrack(nativeTrack)); + } + + @CalledByNative + void removeAudioTrack(long nativeTrack) { + removeMediaStreamTrack(audioTracks, nativeTrack); + } + + @CalledByNative + void removeVideoTrack(long nativeTrack) { + removeMediaStreamTrack(videoTracks, nativeTrack); + } + + /** Returns a pointer to webrtc::MediaStreamInterface. 
*/ + long getNativeMediaStream() { + checkMediaStreamExists(); + return nativeStream; + } + + private void checkMediaStreamExists() { + if (nativeStream == 0) { + throw new IllegalStateException("MediaStream has been disposed."); + } + } + + private static void removeMediaStreamTrack( + List tracks, long nativeTrack) { + final Iterator it = tracks.iterator(); + while (it.hasNext()) { + MediaStreamTrack track = it.next(); + if (track.getNativeMediaStreamTrack() == nativeTrack) { + track.dispose(); + it.remove(); + return; + } + } + Logging.e(TAG, "Couldn't not find track"); + } + + private static native boolean nativeAddAudioTrackToNativeStream( + long stream, long nativeAudioTrack); + private static native boolean nativeAddVideoTrackToNativeStream( + long stream, long nativeVideoTrack); + private static native boolean nativeRemoveAudioTrack(long stream, long nativeAudioTrack); + private static native boolean nativeRemoveVideoTrack(long stream, long nativeVideoTrack); + private static native String nativeGetId(long stream); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaStreamTrack.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaStreamTrack.java new file mode 100644 index 00000000..2e4c3e18 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/MediaStreamTrack.java @@ -0,0 +1,129 @@ +/* + * Copyright 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; + +/** Java wrapper for a C++ MediaStreamTrackInterface. 
*/ +public class MediaStreamTrack { + public static final String AUDIO_TRACK_KIND = "audio"; + public static final String VIDEO_TRACK_KIND = "video"; + + /** Tracks MediaStreamTrackInterface.TrackState */ + public enum State { + LIVE, + ENDED; + + @CalledByNative("State") + static State fromNativeIndex(int nativeIndex) { + return values()[nativeIndex]; + } + } + + // Must be kept in sync with cricket::MediaType. + public enum MediaType { + MEDIA_TYPE_AUDIO(0), + MEDIA_TYPE_VIDEO(1); + + private final int nativeIndex; + + private MediaType(int nativeIndex) { + this.nativeIndex = nativeIndex; + } + + @CalledByNative("MediaType") + int getNative() { + return nativeIndex; + } + + @CalledByNative("MediaType") + static MediaType fromNativeIndex(int nativeIndex) { + for (MediaType type : MediaType.values()) { + if (type.getNative() == nativeIndex) { + return type; + } + } + throw new IllegalArgumentException("Unknown native media type: " + nativeIndex); + } + } + + /** Factory method to create an AudioTrack or VideoTrack subclass. 
*/ + static @Nullable MediaStreamTrack createMediaStreamTrack(long nativeTrack) { + if (nativeTrack == 0) { + return null; + } + String trackKind = nativeGetKind(nativeTrack); + if (trackKind.equals(AUDIO_TRACK_KIND)) { + return new AudioTrack(nativeTrack); + } else if (trackKind.equals(VIDEO_TRACK_KIND)) { + return new VideoTrack(nativeTrack); + } else { + return null; + } + } + + private long nativeTrack; + + public MediaStreamTrack(long nativeTrack) { + if (nativeTrack == 0) { + throw new IllegalArgumentException("nativeTrack may not be null"); + } + this.nativeTrack = nativeTrack; + } + + public String id() { + checkMediaStreamTrackExists(); + return nativeGetId(nativeTrack); + } + + public String kind() { + checkMediaStreamTrackExists(); + return nativeGetKind(nativeTrack); + } + + public boolean enabled() { + checkMediaStreamTrackExists(); + return nativeGetEnabled(nativeTrack); + } + + public boolean setEnabled(boolean enable) { + checkMediaStreamTrackExists(); + return nativeSetEnabled(nativeTrack, enable); + } + + public State state() { + checkMediaStreamTrackExists(); + return nativeGetState(nativeTrack); + } + + public void dispose() { + checkMediaStreamTrackExists(); + JniCommon.nativeReleaseRef(nativeTrack); + nativeTrack = 0; + } + + long getNativeMediaStreamTrack() { + checkMediaStreamTrackExists(); + return nativeTrack; + } + + private void checkMediaStreamTrackExists() { + if (nativeTrack == 0) { + throw new IllegalStateException("MediaStreamTrack has been disposed."); + } + } + + private static native String nativeGetId(long track); + private static native String nativeGetKind(long track); + private static native boolean nativeGetEnabled(long track); + private static native boolean nativeSetEnabled(long track, boolean enabled); + private static native State nativeGetState(long track); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Metrics.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Metrics.java new file mode 
100644 index 00000000..25337683 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Metrics.java @@ -0,0 +1,81 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import java.util.HashMap; +import java.util.Map; + +// Java-side of androidmetrics.cc +// +// Rtc histograms can be queried through the API, getAndReset(). +// The returned map holds the name of a histogram and its samples. +// +// Example of `map` with one histogram: +// `name`: "WebRTC.Video.InputFramesPerSecond" +// `min`: 1 +// `max`: 100 +// `bucketCount`: 50 +// `samples`: [30]:1 +// +// Most histograms are not updated frequently (e.g. most video metrics are an +// average over the call and recorded when a stream is removed). +// The metrics can for example be retrieved when a peer connection is closed. +public class Metrics { + private static final String TAG = "Metrics"; + + public final Map map = + new HashMap(); // + + @CalledByNative + Metrics() {} + + /** + * Class holding histogram information. 
+ */ + public static class HistogramInfo { + public final int min; + public final int max; + public final int bucketCount; + public final Map samples = + new HashMap(); // + + @CalledByNative("HistogramInfo") + public HistogramInfo(int min, int max, int bucketCount) { + this.min = min; + this.max = max; + this.bucketCount = bucketCount; + } + + @CalledByNative("HistogramInfo") + public void addSample(int value, int numEvents) { + samples.put(value, numEvents); + } + } + + @CalledByNative + private void add(String name, HistogramInfo info) { + map.put(name, info); + } + + // Enables gathering of metrics (which can be fetched with getAndReset()). + // Must be called before PeerConnectionFactory is created. + public static void enable() { + nativeEnable(); + } + + // Gets and clears native histograms. + public static Metrics getAndReset() { + return nativeGetAndReset(); + } + + private static native void nativeEnable(); + private static native Metrics nativeGetAndReset(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NV12Buffer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NV12Buffer.java new file mode 100644 index 00000000..fe0221d8 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NV12Buffer.java @@ -0,0 +1,73 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.nio.ByteBuffer; + +public class NV12Buffer implements VideoFrame.Buffer { + private final int width; + private final int height; + private final int stride; + private final int sliceHeight; + private final ByteBuffer buffer; + private final RefCountDelegate refCountDelegate; + + public NV12Buffer(int width, int height, int stride, int sliceHeight, ByteBuffer buffer, + @Nullable Runnable releaseCallback) { + this.width = width; + this.height = height; + this.stride = stride; + this.sliceHeight = sliceHeight; + this.buffer = buffer; + this.refCountDelegate = new RefCountDelegate(releaseCallback); + } + + @Override + public int getWidth() { + return width; + } + + @Override + public int getHeight() { + return height; + } + + @Override + public VideoFrame.I420Buffer toI420() { + return (VideoFrame.I420Buffer) cropAndScale(0, 0, width, height, width, height); + } + + @Override + public void retain() { + refCountDelegate.retain(); + } + + @Override + public void release() { + refCountDelegate.release(); + } + + @Override + public VideoFrame.Buffer cropAndScale( + int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) { + JavaI420Buffer newBuffer = JavaI420Buffer.allocate(scaleWidth, scaleHeight); + nativeCropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, buffer, width, + height, stride, sliceHeight, newBuffer.getDataY(), newBuffer.getStrideY(), + newBuffer.getDataU(), newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV()); + return newBuffer; + } + + private static native void nativeCropAndScale(int cropX, int cropY, int cropWidth, int cropHeight, + int scaleWidth, int scaleHeight, ByteBuffer src, int srcWidth, int srcHeight, int srcStride, + int srcSliceHeight, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU, int dstStrideU, + ByteBuffer dstV, int dstStrideV); +} diff --git 
a/webrtc_player/android/zlm/src/main/java/org/webrtc/NV21Buffer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NV21Buffer.java new file mode 100644 index 00000000..0fb1afe7 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NV21Buffer.java @@ -0,0 +1,69 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.nio.ByteBuffer; + +public class NV21Buffer implements VideoFrame.Buffer { + private final byte[] data; + private final int width; + private final int height; + private final RefCountDelegate refCountDelegate; + + public NV21Buffer(byte[] data, int width, int height, @Nullable Runnable releaseCallback) { + this.data = data; + this.width = width; + this.height = height; + this.refCountDelegate = new RefCountDelegate(releaseCallback); + } + + @Override + public int getWidth() { + return width; + } + + @Override + public int getHeight() { + return height; + } + + @Override + public VideoFrame.I420Buffer toI420() { + // Cropping converts the frame to I420. Just crop and scale to the whole image. 
+ return (VideoFrame.I420Buffer) cropAndScale(0 /* cropX */, 0 /* cropY */, width /* cropWidth */, + height /* cropHeight */, width /* scaleWidth */, height /* scaleHeight */); + } + + @Override + public void retain() { + refCountDelegate.retain(); + } + + @Override + public void release() { + refCountDelegate.release(); + } + + @Override + public VideoFrame.Buffer cropAndScale( + int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) { + JavaI420Buffer newBuffer = JavaI420Buffer.allocate(scaleWidth, scaleHeight); + nativeCropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, data, width, + height, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(), + newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV()); + return newBuffer; + } + + private static native void nativeCropAndScale(int cropX, int cropY, int cropWidth, int cropHeight, + int scaleWidth, int scaleHeight, byte[] src, int srcWidth, int srcHeight, ByteBuffer dstY, + int dstStrideY, ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeAndroidVideoTrackSource.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeAndroidVideoTrackSource.java new file mode 100644 index 00000000..d4fba481 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeAndroidVideoTrackSource.java @@ -0,0 +1,99 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import org.webrtc.VideoFrame; +import org.webrtc.VideoProcessor; + +/** + * This class is meant to be a simple layer that only handles the JNI wrapping of a C++ + * AndroidVideoTrackSource, that can easily be mocked out in Java unit tests. Refrain from adding + * any unnecessary logic to this class. + * This class is thred safe and methods can be called from any thread, but if frames A, B, ..., are + * sent to adaptFrame(), the adapted frames adaptedA, adaptedB, ..., needs to be passed in the same + * order to onFrameCaptured(). + */ +class NativeAndroidVideoTrackSource { + // Pointer to webrtc::jni::AndroidVideoTrackSource. + private final long nativeAndroidVideoTrackSource; + + public NativeAndroidVideoTrackSource(long nativeAndroidVideoTrackSource) { + this.nativeAndroidVideoTrackSource = nativeAndroidVideoTrackSource; + } + + /** + * Set the state for the native MediaSourceInterface. Maps boolean to either + * SourceState::kLive or SourceState::kEnded. + */ + public void setState(boolean isLive) { + nativeSetState(nativeAndroidVideoTrackSource, isLive); + } + + /** + * This function should be called before delivering any frame to determine if the frame should be + * dropped or what the cropping and scaling parameters should be. If the return value is null, the + * frame should be dropped, otherwise the frame should be adapted in accordance to the frame + * adaptation parameters before calling onFrameCaptured(). + */ + @Nullable + public VideoProcessor.FrameAdaptationParameters adaptFrame(VideoFrame frame) { + return nativeAdaptFrame(nativeAndroidVideoTrackSource, frame.getBuffer().getWidth(), + frame.getBuffer().getHeight(), frame.getRotation(), frame.getTimestampNs()); + } + + /** + * Pass an adapted frame to the native AndroidVideoTrackSource. Note that adaptFrame() is + * expected to be called first and that the passed frame conforms to those parameters. 
+ */ + public void onFrameCaptured(VideoFrame frame) { + nativeOnFrameCaptured(nativeAndroidVideoTrackSource, frame.getRotation(), + frame.getTimestampNs(), frame.getBuffer()); + } + + /** + * Calling this function will cause frames to be scaled down to the requested resolution. Also, + * frames will be cropped to match the requested aspect ratio, and frames will be dropped to match + * the requested fps. + */ + public void adaptOutputFormat(VideoSource.AspectRatio targetLandscapeAspectRatio, + @Nullable Integer maxLandscapePixelCount, VideoSource.AspectRatio targetPortraitAspectRatio, + @Nullable Integer maxPortraitPixelCount, @Nullable Integer maxFps) { + nativeAdaptOutputFormat(nativeAndroidVideoTrackSource, targetLandscapeAspectRatio.width, + targetLandscapeAspectRatio.height, maxLandscapePixelCount, targetPortraitAspectRatio.width, + targetPortraitAspectRatio.height, maxPortraitPixelCount, maxFps); + } + + public void setIsScreencast(boolean isScreencast) { + nativeSetIsScreencast(nativeAndroidVideoTrackSource, isScreencast); + } + + @CalledByNative + static VideoProcessor.FrameAdaptationParameters createFrameAdaptationParameters(int cropX, + int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight, long timestampNs, + boolean drop) { + return new VideoProcessor.FrameAdaptationParameters( + cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, timestampNs, drop); + } + + private static native void nativeSetIsScreencast( + long nativeAndroidVideoTrackSource, boolean isScreencast); + private static native void nativeSetState(long nativeAndroidVideoTrackSource, boolean isLive); + private static native void nativeAdaptOutputFormat(long nativeAndroidVideoTrackSource, + int landscapeWidth, int landscapeHeight, @Nullable Integer maxLandscapePixelCount, + int portraitWidth, int portraitHeight, @Nullable Integer maxPortraitPixelCount, + @Nullable Integer maxFps); + @Nullable + private static native VideoProcessor.FrameAdaptationParameters 
nativeAdaptFrame( + long nativeAndroidVideoTrackSource, int width, int height, int rotation, long timestampNs); + private static native void nativeOnFrameCaptured( + long nativeAndroidVideoTrackSource, int rotation, long timestampNs, VideoFrame.Buffer buffer); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeCapturerObserver.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeCapturerObserver.java new file mode 100644 index 00000000..c195fb3a --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeCapturerObserver.java @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import org.webrtc.VideoFrame; + +/** + * Used from native api and implements a simple VideoCapturer.CapturerObserver that feeds frames to + * a webrtc::jni::AndroidVideoTrackSource. + */ +class NativeCapturerObserver implements CapturerObserver { + private final NativeAndroidVideoTrackSource nativeAndroidVideoTrackSource; + + @CalledByNative + public NativeCapturerObserver(long nativeSource) { + this.nativeAndroidVideoTrackSource = new NativeAndroidVideoTrackSource(nativeSource); + } + + @Override + public void onCapturerStarted(boolean success) { + nativeAndroidVideoTrackSource.setState(success); + } + + @Override + public void onCapturerStopped() { + nativeAndroidVideoTrackSource.setState(/* isLive= */ false); + } + + @Override + public void onFrameCaptured(VideoFrame frame) { + final VideoProcessor.FrameAdaptationParameters parameters = + nativeAndroidVideoTrackSource.adaptFrame(frame); + if (parameters == null) { + // Drop frame. 
+ return; + } + + final VideoFrame.Buffer adaptedBuffer = + frame.getBuffer().cropAndScale(parameters.cropX, parameters.cropY, parameters.cropWidth, + parameters.cropHeight, parameters.scaleWidth, parameters.scaleHeight); + nativeAndroidVideoTrackSource.onFrameCaptured( + new VideoFrame(adaptedBuffer, frame.getRotation(), parameters.timestampNs)); + adaptedBuffer.release(); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeLibrary.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeLibrary.java new file mode 100644 index 00000000..531c2163 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeLibrary.java @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +class NativeLibrary { + private static String TAG = "NativeLibrary"; + + static class DefaultLoader implements NativeLibraryLoader { + @Override + public boolean load(String name) { + Logging.d(TAG, "Loading library: " + name); + System.loadLibrary(name); + + // Not relevant, but kept for API compatibility. + return true; + } + } + + private static Object lock = new Object(); + private static boolean libraryLoaded; + + /** + * Loads the native library. Clients should call PeerConnectionFactory.initialize. It will call + * this method for them. 
+ */ + static void initialize(NativeLibraryLoader loader, String libraryName) { + synchronized (lock) { + if (libraryLoaded) { + Logging.d(TAG, "Native library has already been loaded."); + return; + } + Logging.d(TAG, "Loading native library: " + libraryName); + libraryLoaded = loader.load(libraryName); + } + } + + /** Returns true if the library has been loaded successfully. */ + static boolean isLoaded() { + synchronized (lock) { + return libraryLoaded; + } + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeLibraryLoader.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeLibraryLoader.java new file mode 100644 index 00000000..8bd7b3b2 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NativeLibraryLoader.java @@ -0,0 +1,24 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * Interface for loading native libraries. A custom loader can be passed to + * PeerConnectionFactory.initialize. + */ +public interface NativeLibraryLoader { + /** + * Loads a native library with the given name. + * + * @return True on success + */ + boolean load(String name); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NativePeerConnectionFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NativePeerConnectionFactory.java new file mode 100644 index 00000000..aeb91e17 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NativePeerConnectionFactory.java @@ -0,0 +1,20 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. 
/** Factory for creating webrtc::jni::OwnedPeerConnection instances. */
public interface NativePeerConnectionFactory {
  /**
   * Creates a new webrtc::jni::OwnedPeerConnection instance and returns a raw pointer to it.
   * The caller takes ownership of the native object.
   */
  long createNativePeerConnection();
}
+ */ + long createNativeNetEqFactory(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkChangeDetector.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkChangeDetector.java new file mode 100644 index 00000000..ed3210e0 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkChangeDetector.java @@ -0,0 +1,122 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.util.List; + +/** Interface for detecting network changes */ +public interface NetworkChangeDetector { + // java equivalent of c++ android_network_monitor.h / NetworkType. + public static enum ConnectionType { + CONNECTION_UNKNOWN, + CONNECTION_ETHERNET, + CONNECTION_WIFI, + CONNECTION_5G, + CONNECTION_4G, + CONNECTION_3G, + CONNECTION_2G, + CONNECTION_UNKNOWN_CELLULAR, + CONNECTION_BLUETOOTH, + CONNECTION_VPN, + CONNECTION_NONE + } + + public static class IPAddress { + public final byte[] address; + + public IPAddress(byte[] address) { + this.address = address; + } + + @CalledByNative("IPAddress") + private byte[] getAddress() { + return address; + } + } + + /** Java version of NetworkMonitor.NetworkInformation */ + public static class NetworkInformation { + public final String name; + public final ConnectionType type; + // Used to specify the underlying network type if the type is CONNECTION_VPN. 
+ public final ConnectionType underlyingTypeForVpn; + public final long handle; + public final IPAddress[] ipAddresses; + + public NetworkInformation(String name, ConnectionType type, ConnectionType underlyingTypeForVpn, + long handle, IPAddress[] addresses) { + this.name = name; + this.type = type; + this.underlyingTypeForVpn = underlyingTypeForVpn; + this.handle = handle; + this.ipAddresses = addresses; + } + + @CalledByNative("NetworkInformation") + private IPAddress[] getIpAddresses() { + return ipAddresses; + } + + @CalledByNative("NetworkInformation") + private ConnectionType getConnectionType() { + return type; + } + + @CalledByNative("NetworkInformation") + private ConnectionType getUnderlyingConnectionTypeForVpn() { + return underlyingTypeForVpn; + } + + @CalledByNative("NetworkInformation") + private long getHandle() { + return handle; + } + + @CalledByNative("NetworkInformation") + private String getName() { + return name; + } + }; + + /** Observer interface by which observer is notified of network changes. */ + public static abstract class Observer { + /** Called when default network changes. */ + public abstract void onConnectionTypeChanged(ConnectionType newConnectionType); + + public abstract void onNetworkConnect(NetworkInformation networkInfo); + + public abstract void onNetworkDisconnect(long networkHandle); + + /** + * Called when network preference change for a (list of) connection type(s). (e.g WIFI) is + * `NOT_PREFERRED` or `NEUTRAL`. + * + *

note: `types` is a list of ConnectionTypes, so that all cellular types can be modified in + * one call. + */ + public abstract void onNetworkPreference( + List types, @NetworkPreference int preference); + + // Add default impl. for down-stream tests. + public String getFieldTrialsString() { + return ""; + } + } + + public ConnectionType getCurrentConnectionType(); + + public boolean supportNetworkCallback(); + + @Nullable public List getActiveNetworkList(); + + public void destroy(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkChangeDetectorFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkChangeDetectorFactory.java new file mode 100644 index 00000000..14e98b23 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkChangeDetectorFactory.java @@ -0,0 +1,17 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.content.Context; + +public interface NetworkChangeDetectorFactory { + public NetworkChangeDetector create(NetworkChangeDetector.Observer observer, Context context); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkControllerFactoryFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkControllerFactoryFactory.java new file mode 100644 index 00000000..75e8fcaa --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkControllerFactoryFactory.java @@ -0,0 +1,20 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** Factory for creating webrtc::NetworkControllerFactory instances. */ +public interface NetworkControllerFactoryFactory { + /** + * Dynamically allocates a webrtc::NetworkControllerFactory instance and returns a pointer to + * it. The caller takes ownership of the object. + */ + public long createNativeNetworkControllerFactory(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkMonitor.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkMonitor.java new file mode 100644 index 00000000..0bc461df --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkMonitor.java @@ -0,0 +1,367 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.content.Context; +import android.os.Build; +import androidx.annotation.Nullable; +import java.util.ArrayList; +import java.util.List; +import org.webrtc.NetworkChangeDetector; + +/** + * Borrowed from Chromium's + * src/net/android/java/src/org/chromium/net/NetworkChangeNotifier.java + * + *

Triggers updates to the underlying network state from OS networking events. + * + *

This class is thread-safe. + */ +public class NetworkMonitor { + /** + * Alerted when the connection type of the network changes. The alert is fired on the UI thread. + */ + public interface NetworkObserver { + public void onConnectionTypeChanged(NetworkChangeDetector.ConnectionType connectionType); + } + + private static final String TAG = "NetworkMonitor"; + + // Lazy initialization holder class idiom for static fields. + private static class InstanceHolder { + // We are storing application context so it is okay. + static final NetworkMonitor instance = new NetworkMonitor(); + } + + // Factory for creating NetworkChangeDetector. + private NetworkChangeDetectorFactory networkChangeDetectorFactory = + new NetworkChangeDetectorFactory() { + @Override + public NetworkChangeDetector create( + NetworkChangeDetector.Observer observer, Context context) { + return new NetworkMonitorAutoDetect(observer, context); + } + }; + + // Native observers of the connection type changes. + private final ArrayList nativeNetworkObservers; + // Java observers of the connection type changes. + private final ArrayList networkObservers; + + private final Object networkChangeDetectorLock = new Object(); + // Object that detects the connection type changes and brings up mobile networks. + @Nullable private NetworkChangeDetector networkChangeDetector; + // Also guarded by autoDetectLock. + private int numObservers; + + private volatile NetworkChangeDetector.ConnectionType currentConnectionType; + + private NetworkMonitor() { + nativeNetworkObservers = new ArrayList(); + networkObservers = new ArrayList(); + numObservers = 0; + currentConnectionType = NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN; + } + + /** + * Set the factory that will be used to create the network change detector. + * Needs to be called before the monitoring is starts. 
+ */ + public void setNetworkChangeDetectorFactory(NetworkChangeDetectorFactory factory) { + assertIsTrue(numObservers == 0); + this.networkChangeDetectorFactory = factory; + } + + // TODO(sakal): Remove once downstream dependencies have been updated. + @Deprecated + public static void init(Context context) {} + + /** Returns the singleton instance. This may be called from native or from Java code. */ + @CalledByNative + public static NetworkMonitor getInstance() { + return InstanceHolder.instance; + } + + private static void assertIsTrue(boolean condition) { + if (!condition) { + throw new AssertionError("Expected to be true"); + } + } + + /** + * Enables auto detection of the network state change and brings up mobile networks for using + * multi-networking. This requires the embedding app have the platform ACCESS_NETWORK_STATE and + * CHANGE_NETWORK_STATE permission. + */ + public void startMonitoring(Context applicationContext, String fieldTrialsString) { + synchronized (networkChangeDetectorLock) { + ++numObservers; + if (networkChangeDetector == null) { + networkChangeDetector = createNetworkChangeDetector(applicationContext, fieldTrialsString); + } + currentConnectionType = networkChangeDetector.getCurrentConnectionType(); + } + } + + /** Deprecated, use startMonitoring with fieldTrialsStringString argument. */ + @Deprecated + public void startMonitoring(Context applicationContext) { + startMonitoring(applicationContext, ""); + } + + /** Deprecated, pass in application context in startMonitoring instead. */ + @Deprecated + public void startMonitoring() { + startMonitoring(ContextUtils.getApplicationContext(), ""); + } + + /** + * Enables auto detection of the network state change and brings up mobile networks for using + * multi-networking. This requires the embedding app have the platform ACCESS_NETWORK_STATE and + * CHANGE_NETWORK_STATE permission. 
+ */ + @CalledByNative + private void startMonitoring( + @Nullable Context applicationContext, long nativeObserver, String fieldTrialsString) { + Logging.d(TAG, + "Start monitoring with native observer " + nativeObserver + + " fieldTrialsString: " + fieldTrialsString); + + startMonitoring( + applicationContext != null ? applicationContext : ContextUtils.getApplicationContext(), + fieldTrialsString); + + synchronized (nativeNetworkObservers) { + nativeNetworkObservers.add(nativeObserver); + } + // The native observer expects a network list update after startMonitoring. + updateObserverActiveNetworkList(nativeObserver); + // currentConnectionType was updated in startMonitoring(). + // Need to notify the native observers here. + notifyObserversOfConnectionTypeChange(currentConnectionType); + } + + /** + * Stop network monitoring. If no one is monitoring networks, destroy and reset + * networkChangeDetector. + */ + public void stopMonitoring() { + synchronized (networkChangeDetectorLock) { + if (--numObservers == 0) { + networkChangeDetector.destroy(); + networkChangeDetector = null; + } + } + } + + @CalledByNative + private void stopMonitoring(long nativeObserver) { + Logging.d(TAG, "Stop monitoring with native observer " + nativeObserver); + stopMonitoring(); + synchronized (nativeNetworkObservers) { + nativeNetworkObservers.remove(nativeObserver); + } + } + + // Returns true if network binding is supported on this platform. 
+ @CalledByNative + private boolean networkBindingSupported() { + synchronized (networkChangeDetectorLock) { + return networkChangeDetector != null && networkChangeDetector.supportNetworkCallback(); + } + } + + @CalledByNative + private static int androidSdkInt() { + return Build.VERSION.SDK_INT; + } + + private NetworkChangeDetector.ConnectionType getCurrentConnectionType() { + return currentConnectionType; + } + + private NetworkChangeDetector createNetworkChangeDetector( + Context appContext, String fieldTrialsString) { + return networkChangeDetectorFactory.create(new NetworkChangeDetector.Observer() { + @Override + public void onConnectionTypeChanged(NetworkChangeDetector.ConnectionType newConnectionType) { + updateCurrentConnectionType(newConnectionType); + } + + @Override + public void onNetworkConnect(NetworkChangeDetector.NetworkInformation networkInfo) { + notifyObserversOfNetworkConnect(networkInfo); + } + + @Override + public void onNetworkDisconnect(long networkHandle) { + notifyObserversOfNetworkDisconnect(networkHandle); + } + + @Override + public void onNetworkPreference( + List types, int preference) { + notifyObserversOfNetworkPreference(types, preference); + } + + @Override + public String getFieldTrialsString() { + return fieldTrialsString; + } + }, appContext); + } + + private void updateCurrentConnectionType(NetworkChangeDetector.ConnectionType newConnectionType) { + currentConnectionType = newConnectionType; + notifyObserversOfConnectionTypeChange(newConnectionType); + } + + /** Alerts all observers of a connection change. */ + private void notifyObserversOfConnectionTypeChange( + NetworkChangeDetector.ConnectionType newConnectionType) { + List nativeObservers = getNativeNetworkObserversSync(); + for (Long nativeObserver : nativeObservers) { + nativeNotifyConnectionTypeChanged(nativeObserver); + } + // This avoids calling external methods while locking on an object. 
+ List javaObservers; + synchronized (networkObservers) { + javaObservers = new ArrayList<>(networkObservers); + } + for (NetworkObserver observer : javaObservers) { + observer.onConnectionTypeChanged(newConnectionType); + } + } + + private void notifyObserversOfNetworkConnect( + NetworkChangeDetector.NetworkInformation networkInfo) { + List nativeObservers = getNativeNetworkObserversSync(); + for (Long nativeObserver : nativeObservers) { + nativeNotifyOfNetworkConnect(nativeObserver, networkInfo); + } + } + + private void notifyObserversOfNetworkDisconnect(long networkHandle) { + List nativeObservers = getNativeNetworkObserversSync(); + for (Long nativeObserver : nativeObservers) { + nativeNotifyOfNetworkDisconnect(nativeObserver, networkHandle); + } + } + + private void notifyObserversOfNetworkPreference( + List types, int preference) { + List nativeObservers = getNativeNetworkObserversSync(); + for (NetworkChangeDetector.ConnectionType type : types) { + for (Long nativeObserver : nativeObservers) { + nativeNotifyOfNetworkPreference(nativeObserver, type, preference); + } + } + } + + private void updateObserverActiveNetworkList(long nativeObserver) { + List networkInfoList; + synchronized (networkChangeDetectorLock) { + networkInfoList = + (networkChangeDetector == null) ? null : networkChangeDetector.getActiveNetworkList(); + } + if (networkInfoList == null) { + return; + } + + NetworkChangeDetector.NetworkInformation[] networkInfos = + new NetworkChangeDetector.NetworkInformation[networkInfoList.size()]; + networkInfos = networkInfoList.toArray(networkInfos); + nativeNotifyOfActiveNetworkList(nativeObserver, networkInfos); + } + + private List getNativeNetworkObserversSync() { + synchronized (nativeNetworkObservers) { + return new ArrayList<>(nativeNetworkObservers); + } + } + + /** + * Adds an observer for any connection type changes. + * + * @deprecated Use getInstance(appContext).addObserver instead. 
+ */ + @Deprecated + public static void addNetworkObserver(NetworkObserver observer) { + getInstance().addObserver(observer); + } + + public void addObserver(NetworkObserver observer) { + synchronized (networkObservers) { + networkObservers.add(observer); + } + } + + /** + * Removes an observer for any connection type changes. + * + * @deprecated Use getInstance(appContext).removeObserver instead. + */ + @Deprecated + public static void removeNetworkObserver(NetworkObserver observer) { + getInstance().removeObserver(observer); + } + + public void removeObserver(NetworkObserver observer) { + synchronized (networkObservers) { + networkObservers.remove(observer); + } + } + + /** Checks if there currently is connectivity. */ + public static boolean isOnline() { + NetworkChangeDetector.ConnectionType connectionType = getInstance().getCurrentConnectionType(); + return connectionType != NetworkChangeDetector.ConnectionType.CONNECTION_NONE; + } + + private native void nativeNotifyConnectionTypeChanged(long nativeAndroidNetworkMonitor); + + private native void nativeNotifyOfNetworkConnect( + long nativeAndroidNetworkMonitor, NetworkChangeDetector.NetworkInformation networkInfo); + + private native void nativeNotifyOfNetworkDisconnect( + long nativeAndroidNetworkMonitor, long networkHandle); + + private native void nativeNotifyOfActiveNetworkList( + long nativeAndroidNetworkMonitor, NetworkChangeDetector.NetworkInformation[] networkInfos); + + private native void nativeNotifyOfNetworkPreference( + long nativeAndroidNetworkMonitor, NetworkChangeDetector.ConnectionType type, int preference); + + // For testing only. + @Nullable + NetworkChangeDetector getNetworkChangeDetector() { + synchronized (networkChangeDetectorLock) { + return networkChangeDetector; + } + } + + // For testing only. + int getNumObservers() { + synchronized (networkChangeDetectorLock) { + return numObservers; + } + } + + // For testing only. 
+ static NetworkMonitorAutoDetect createAndSetAutoDetectForTest( + Context context, String fieldTrialsString) { + NetworkMonitor networkMonitor = getInstance(); + NetworkChangeDetector networkChangeDetector = + networkMonitor.createNetworkChangeDetector(context, fieldTrialsString); + networkMonitor.networkChangeDetector = networkChangeDetector; + return (NetworkMonitorAutoDetect) networkChangeDetector; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkMonitorAutoDetect.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkMonitorAutoDetect.java new file mode 100644 index 00000000..a6f24c28 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkMonitorAutoDetect.java @@ -0,0 +1,901 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import android.annotation.SuppressLint; +import android.content.BroadcastReceiver; +import android.content.Context; +import android.content.Intent; +import android.content.IntentFilter; +import android.net.ConnectivityManager; +import android.net.ConnectivityManager.NetworkCallback; +import android.net.LinkAddress; +import android.net.LinkProperties; +import android.net.Network; +import android.net.NetworkCapabilities; +import android.net.NetworkInfo; +import android.net.NetworkRequest; +import android.net.wifi.WifiInfo; +import android.net.wifi.WifiManager; +import android.net.wifi.p2p.WifiP2pGroup; +import android.net.wifi.p2p.WifiP2pManager; +import android.os.Build; +import android.telephony.TelephonyManager; +import androidx.annotation.GuardedBy; +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; +import androidx.annotation.VisibleForTesting; +import java.net.InetAddress; +import java.net.NetworkInterface; +import java.net.SocketException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +/** + * Borrowed from Chromium's + * src/net/android/java/src/org/chromium/net/NetworkChangeNotifierAutoDetect.java + * + *

Used by the NetworkMonitor to listen to platform changes in connectivity. Note that use of + * this class requires that the app have the platform ACCESS_NETWORK_STATE permission. + */ +public class NetworkMonitorAutoDetect extends BroadcastReceiver implements NetworkChangeDetector { + static class NetworkState { + private final boolean connected; + // Defined from ConnectivityManager.TYPE_XXX for non-mobile; for mobile, it is + // further divided into 2G, 3G, or 4G from the subtype. + private final int type; + // Defined from NetworkInfo.subtype, which is one of the TelephonyManager.NETWORK_TYPE_XXXs. + // Will be useful to find the maximum bandwidth. + private final int subtype; + // When the type is TYPE_VPN, the following two fields specify the similar type and subtype as + // above for the underlying network that is used by the VPN. + private final int underlyingNetworkTypeForVpn; + private final int underlyingNetworkSubtypeForVpn; + + public NetworkState(boolean connected, int type, int subtype, int underlyingNetworkTypeForVpn, + int underlyingNetworkSubtypeForVpn) { + this.connected = connected; + this.type = type; + this.subtype = subtype; + this.underlyingNetworkTypeForVpn = underlyingNetworkTypeForVpn; + this.underlyingNetworkSubtypeForVpn = underlyingNetworkSubtypeForVpn; + } + + public boolean isConnected() { + return connected; + } + + public int getNetworkType() { + return type; + } + + public int getNetworkSubType() { + return subtype; + } + + public int getUnderlyingNetworkTypeForVpn() { + return underlyingNetworkTypeForVpn; + } + + public int getUnderlyingNetworkSubtypeForVpn() { + return underlyingNetworkSubtypeForVpn; + } + } + + @SuppressLint("NewApi") + @VisibleForTesting() + class SimpleNetworkCallback extends NetworkCallback { + @GuardedBy("availableNetworks") final Set availableNetworks; + + SimpleNetworkCallback(Set availableNetworks) { + this.availableNetworks = availableNetworks; + } + + @Override + public void onAvailable(Network network) 
{ + Logging.d(TAG, + "Network" + + " handle: " + networkToNetId(network) + + " becomes available: " + network.toString()); + + synchronized (availableNetworks) { + availableNetworks.add(network); + } + onNetworkChanged(network); + } + + @Override + public void onCapabilitiesChanged(Network network, NetworkCapabilities networkCapabilities) { + // A capabilities change may indicate the ConnectionType has changed, + // so forward the new NetworkInformation along to the observer. + Logging.d(TAG, + "handle: " + networkToNetId(network) + + " capabilities changed: " + networkCapabilities.toString()); + onNetworkChanged(network); + } + + @Override + public void onLinkPropertiesChanged(Network network, LinkProperties linkProperties) { + // A link property change may indicate the IP address changes. + // so forward the new NetworkInformation to the observer. + // + // linkProperties.toString() has PII that cannot be redacted + // very reliably, so do not include in log. + Logging.d(TAG, "handle: " + networkToNetId(network) + " link properties changed"); + onNetworkChanged(network); + } + + @Override + public void onLosing(Network network, int maxMsToLive) { + // Tell the network is going to lose in MaxMsToLive milliseconds. + // We may use this signal later. 
+ Logging.d(TAG, + "Network" + + " handle: " + networkToNetId(network) + ", " + network.toString() + + " is about to lose in " + maxMsToLive + "ms"); + } + + @Override + public void onLost(Network network) { + Logging.d(TAG, + "Network" + + " handle: " + networkToNetId(network) + ", " + network.toString() + + " is disconnected"); + + synchronized (availableNetworks) { + availableNetworks.remove(network); + } + observer.onNetworkDisconnect(networkToNetId(network)); + } + + private void onNetworkChanged(Network network) { + NetworkInformation networkInformation = connectivityManagerDelegate.networkToInfo(network); + if (networkInformation != null) { + observer.onNetworkConnect(networkInformation); + } + } + } + + /** Queries the ConnectivityManager for information about the current connection. */ + static class ConnectivityManagerDelegate { + /** + * Note: In some rare Android systems connectivityManager is null. We handle that + * gracefully below. + */ + @Nullable private final ConnectivityManager connectivityManager; + + /** + * Note: The availableNetworks set is instantiated in NetworkMonitorAutoDetect + * and the instance is mutated by SimpleNetworkCallback. 
+ */ + @NonNull @GuardedBy("availableNetworks") private final Set availableNetworks; + + /** field trials */ + private final boolean getAllNetworksFromCache; + private final boolean requestVPN; + private final boolean includeOtherUidNetworks; + + ConnectivityManagerDelegate( + Context context, Set availableNetworks, String fieldTrialsString) { + this((ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE), + availableNetworks, fieldTrialsString); + } + + @VisibleForTesting + ConnectivityManagerDelegate(ConnectivityManager connectivityManager, + Set availableNetworks, String fieldTrialsString) { + this.connectivityManager = connectivityManager; + this.availableNetworks = availableNetworks; + this.getAllNetworksFromCache = + checkFieldTrial(fieldTrialsString, "getAllNetworksFromCache", false); + this.requestVPN = checkFieldTrial(fieldTrialsString, "requestVPN", false); + this.includeOtherUidNetworks = + checkFieldTrial(fieldTrialsString, "includeOtherUidNetworks", false); + } + + private static boolean checkFieldTrial( + String fieldTrialsString, String key, boolean defaultValue) { + if (fieldTrialsString.contains(key + ":true")) { + return true; + } else if (fieldTrialsString.contains(key + ":false")) { + return false; + } + return defaultValue; + } + + /** + * Returns connection type and status information about the current + * default network. + */ + NetworkState getNetworkState() { + if (connectivityManager == null) { + return new NetworkState(false, -1, -1, -1, -1); + } + return getNetworkState(connectivityManager.getActiveNetworkInfo()); + } + + /** + * Returns connection type and status information about `network`. + * Only callable on Lollipop and newer releases. 
+ */ + @SuppressLint("NewApi") + NetworkState getNetworkState(@Nullable Network network) { + if (network == null || connectivityManager == null) { + return new NetworkState(false, -1, -1, -1, -1); + } + NetworkInfo networkInfo = connectivityManager.getNetworkInfo(network); + if (networkInfo == null) { + Logging.w(TAG, "Couldn't retrieve information from network " + network.toString()); + return new NetworkState(false, -1, -1, -1, -1); + } + // The general logic of handling a VPN in this method is as follows. getNetworkInfo will + // return the info of the network with the same id as in `network` when it is registered via + // ConnectivityManager.registerNetworkAgent in Android. `networkInfo` may or may not indicate + // the type TYPE_VPN if `network` is a VPN. To reliably detect the VPN interface, we need to + // query the network capability as below in the case when networkInfo.getType() is not + // TYPE_VPN. On the other hand when networkInfo.getType() is TYPE_VPN, the only solution so + // far to obtain the underlying network information is to query the active network interface. + // However, the active network interface may not be used for the VPN, for example, if the VPN + // is restricted to WiFi by the implementation but the WiFi interface is currently turned + // off and the active interface is the Cell. Using directly the result from + // getActiveNetworkInfo may thus give the wrong interface information, and one should note + // that getActiveNetworkInfo would return the default network interface if the VPN does not + // specify its underlying networks in the implementation. Therefore, we need further compare + // `network` to the active network. If they are not the same network, we will have to fall + // back to report an unknown network. + + if (networkInfo.getType() != ConnectivityManager.TYPE_VPN) { + // Note that getNetworkCapabilities returns null if the network is unknown. 
+ NetworkCapabilities networkCapabilities = + connectivityManager.getNetworkCapabilities(network); + if (networkCapabilities == null + || !networkCapabilities.hasTransport(NetworkCapabilities.TRANSPORT_VPN)) { + return getNetworkState(networkInfo); + } + // When `network` is in fact a VPN after querying its capability but `networkInfo` is not of + // type TYPE_VPN, `networkInfo` contains the info for the underlying network, and we return + // a NetworkState constructed from it. + return new NetworkState(networkInfo.isConnected(), ConnectivityManager.TYPE_VPN, -1, + networkInfo.getType(), networkInfo.getSubtype()); + } + + // When `networkInfo` is of type TYPE_VPN, which implies `network` is a VPN, we return the + // NetworkState of the active network via getActiveNetworkInfo(), if `network` is the active + // network that supports the VPN. Otherwise, NetworkState of an unknown network with type -1 + // will be returned. + // + // Note that getActiveNetwork and getActiveNetworkInfo return null if no default network is + // currently active. + if (networkInfo.getType() == ConnectivityManager.TYPE_VPN) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M + && network.equals(connectivityManager.getActiveNetwork())) { + // If a VPN network is in place, we can find the underlying network type via querying the + // active network info thanks to + // https://android.googlesource.com/platform/frameworks/base/+/d6a7980d + NetworkInfo underlyingActiveNetworkInfo = connectivityManager.getActiveNetworkInfo(); + // We use the NetworkInfo of the underlying network if it is not of TYPE_VPN itself. 
+ if (underlyingActiveNetworkInfo != null + && underlyingActiveNetworkInfo.getType() != ConnectivityManager.TYPE_VPN) { + return new NetworkState(networkInfo.isConnected(), ConnectivityManager.TYPE_VPN, -1, + underlyingActiveNetworkInfo.getType(), underlyingActiveNetworkInfo.getSubtype()); + } + } + return new NetworkState( + networkInfo.isConnected(), ConnectivityManager.TYPE_VPN, -1, -1, -1); + } + + return getNetworkState(networkInfo); + } + + /** + * Returns connection type and status information gleaned from networkInfo. Note that to obtain + * the complete information about a VPN including the type of the underlying network, one should + * use the above method getNetworkState with a Network object. + */ + private NetworkState getNetworkState(@Nullable NetworkInfo networkInfo) { + if (networkInfo == null || !networkInfo.isConnected()) { + return new NetworkState(false, -1, -1, -1, -1); + } + return new NetworkState(true, networkInfo.getType(), networkInfo.getSubtype(), -1, -1); + } + + /** + * Returns all connected networks. + * Only callable on Lollipop and newer releases. + */ + @SuppressLint("NewApi") + Network[] getAllNetworks() { + if (connectivityManager == null) { + return new Network[0]; + } + + if (supportNetworkCallback() && getAllNetworksFromCache) { + synchronized (availableNetworks) { + return availableNetworks.toArray(new Network[0]); + } + } + + return connectivityManager.getAllNetworks(); + } + + @Nullable + List getActiveNetworkList() { + if (!supportNetworkCallback()) { + return null; + } + ArrayList netInfoList = new ArrayList(); + for (Network network : getAllNetworks()) { + NetworkInformation info = networkToInfo(network); + if (info != null) { + netInfoList.add(info); + } + } + return netInfoList; + } + + /** + * Returns the NetID of the current default network. Returns + * INVALID_NET_ID if no current default network connected. + * Only callable on Lollipop and newer releases. 
+ */ + @SuppressLint("NewApi") + long getDefaultNetId() { + if (!supportNetworkCallback()) { + return INVALID_NET_ID; + } + // Android Lollipop had no API to get the default network; only an + // API to return the NetworkInfo for the default network. To + // determine the default network one can find the network with + // type matching that of the default network. + final NetworkInfo defaultNetworkInfo = connectivityManager.getActiveNetworkInfo(); + if (defaultNetworkInfo == null) { + return INVALID_NET_ID; + } + final Network[] networks = getAllNetworks(); + long defaultNetId = INVALID_NET_ID; + for (Network network : networks) { + if (!hasInternetCapability(network)) { + continue; + } + final NetworkInfo networkInfo = connectivityManager.getNetworkInfo(network); + if (networkInfo != null && networkInfo.getType() == defaultNetworkInfo.getType()) { + // There should not be multiple connected networks of the + // same type. At least as of Android Marshmallow this is + // not supported. If this becomes supported this assertion + // may trigger. At that point we could consider using + // ConnectivityManager.getDefaultNetwork() though this + // may give confusing results with VPNs and is only + // available with Android Marshmallow. + if (defaultNetId != INVALID_NET_ID) { + throw new RuntimeException( + "Multiple connected networks of same type are not supported."); + } + defaultNetId = networkToNetId(network); + } + } + return defaultNetId; + } + + @SuppressLint("NewApi") + private @Nullable NetworkInformation networkToInfo(@Nullable Network network) { + if (network == null || connectivityManager == null) { + return null; + } + LinkProperties linkProperties = connectivityManager.getLinkProperties(network); + // getLinkProperties will return null if the network is unknown. 
+ if (linkProperties == null) { + Logging.w(TAG, "Detected unknown network: " + network.toString()); + return null; + } + if (linkProperties.getInterfaceName() == null) { + Logging.w(TAG, "Null interface name for network " + network.toString()); + return null; + } + + NetworkState networkState = getNetworkState(network); + NetworkChangeDetector.ConnectionType connectionType = getConnectionType(networkState); + if (connectionType == NetworkChangeDetector.ConnectionType.CONNECTION_NONE) { + // This may not be an error. The OS may signal a network event with connection type + // NONE when the network disconnects. + Logging.d(TAG, "Network " + network.toString() + " is disconnected"); + return null; + } + + // Some android device may return a CONNECTION_UNKNOWN_CELLULAR or CONNECTION_UNKNOWN type, + // which appears to be usable. Just log them here. + if (connectionType == NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN + || connectionType == NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN_CELLULAR) { + Logging.d(TAG, "Network " + network.toString() + " connection type is " + connectionType + + " because it has type " + networkState.getNetworkType() + " and subtype " + + networkState.getNetworkSubType()); + } + // NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN if the network is not a VPN or the + // underlying network is + // unknown. + ConnectionType underlyingConnectionTypeForVpn = + getUnderlyingConnectionTypeForVpn(networkState); + + NetworkInformation networkInformation = new NetworkInformation( + linkProperties.getInterfaceName(), connectionType, underlyingConnectionTypeForVpn, + networkToNetId(network), getIPAddresses(linkProperties)); + return networkInformation; + } + + /** + * Returns true if {@code network} can provide Internet access. Can be used to + * ignore specialized networks (e.g. IMS, FOTA). 
+ */ + @SuppressLint("NewApi") + boolean hasInternetCapability(Network network) { + if (connectivityManager == null) { + return false; + } + final NetworkCapabilities capabilities = connectivityManager.getNetworkCapabilities(network); + return capabilities != null + && capabilities.hasCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET); + } + + @SuppressLint("NewApi") + @VisibleForTesting() + NetworkRequest createNetworkRequest() { + // Requests the following capabilities by default: NOT_VPN, NOT_RESTRICTED, TRUSTED + NetworkRequest.Builder builder = + new NetworkRequest.Builder().addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET); + + if (requestVPN) { + builder.removeCapability(NetworkCapabilities.NET_CAPABILITY_NOT_VPN); + } + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S && includeOtherUidNetworks) { + builder.setIncludeOtherUidNetworks(true); + } + return builder.build(); + } + + /** Only callable on Lollipop and newer releases. */ + @SuppressLint("NewApi") + public void registerNetworkCallback(NetworkCallback networkCallback) { + connectivityManager.registerNetworkCallback(createNetworkRequest(), networkCallback); + } + + /** Only callable on Lollipop and newer releases. 
*/ + @SuppressLint("NewApi") + public void requestMobileNetwork(NetworkCallback networkCallback) { + NetworkRequest.Builder builder = new NetworkRequest.Builder(); + builder.addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET) + .addTransportType(NetworkCapabilities.TRANSPORT_CELLULAR); + connectivityManager.requestNetwork(builder.build(), networkCallback); + } + + @SuppressLint("NewApi") + IPAddress[] getIPAddresses(LinkProperties linkProperties) { + IPAddress[] ipAddresses = new IPAddress[linkProperties.getLinkAddresses().size()]; + int i = 0; + for (LinkAddress linkAddress : linkProperties.getLinkAddresses()) { + ipAddresses[i] = new IPAddress(linkAddress.getAddress().getAddress()); + ++i; + } + return ipAddresses; + } + + @SuppressLint("NewApi") + public void releaseCallback(NetworkCallback networkCallback) { + if (supportNetworkCallback()) { + Logging.d(TAG, "Unregister network callback"); + connectivityManager.unregisterNetworkCallback(networkCallback); + } + } + + public boolean supportNetworkCallback() { + return connectivityManager != null; + } + } + + /** Queries the WifiManager for SSID of the current Wifi connection. */ + static class WifiManagerDelegate { + @Nullable private final Context context; + WifiManagerDelegate(Context context) { + this.context = context; + } + + // For testing. + WifiManagerDelegate() { + // All the methods below should be overridden. + context = null; + } + + String getWifiSSID() { + final Intent intent = context.registerReceiver( + null, new IntentFilter(WifiManager.NETWORK_STATE_CHANGED_ACTION)); + if (intent != null) { + final WifiInfo wifiInfo = intent.getParcelableExtra(WifiManager.EXTRA_WIFI_INFO); + if (wifiInfo != null) { + final String ssid = wifiInfo.getSSID(); + if (ssid != null) { + return ssid; + } + } + } + return ""; + } + } + + /** Maintains the information about wifi direct (aka WifiP2p) networks. 
*/ + static class WifiDirectManagerDelegate extends BroadcastReceiver { + // Network "handle" for the Wifi P2p network. We have to bind to the default network id + // (NETWORK_UNSPECIFIED) for these addresses. + private static final int WIFI_P2P_NETWORK_HANDLE = 0; + private final Context context; + private final NetworkChangeDetector.Observer observer; + // Network information about a WifiP2p (aka WiFi-Direct) network, or null if no such network is + // connected. + @Nullable private NetworkInformation wifiP2pNetworkInfo; + + WifiDirectManagerDelegate(NetworkChangeDetector.Observer observer, Context context) { + this.context = context; + this.observer = observer; + IntentFilter intentFilter = new IntentFilter(); + intentFilter.addAction(WifiP2pManager.WIFI_P2P_STATE_CHANGED_ACTION); + intentFilter.addAction(WifiP2pManager.WIFI_P2P_CONNECTION_CHANGED_ACTION); + context.registerReceiver(this, intentFilter); + if (Build.VERSION.SDK_INT > Build.VERSION_CODES.P) { + // Starting with Android Q (10), WIFI_P2P_CONNECTION_CHANGED_ACTION is no longer sticky. + // This means we have to explicitly request WifiP2pGroup info during initialization in order + // to get this data if we are already connected to a Wi-Fi Direct network. 
+ WifiP2pManager manager = + (WifiP2pManager) context.getSystemService(Context.WIFI_P2P_SERVICE); + WifiP2pManager.Channel channel = + manager.initialize(context, context.getMainLooper(), null /* listener */); + manager.requestGroupInfo(channel, wifiP2pGroup -> { onWifiP2pGroupChange(wifiP2pGroup); }); + } + } + + // BroadcastReceiver + @Override + @SuppressLint("InlinedApi") + public void onReceive(Context context, Intent intent) { + if (WifiP2pManager.WIFI_P2P_CONNECTION_CHANGED_ACTION.equals(intent.getAction())) { + WifiP2pGroup wifiP2pGroup = intent.getParcelableExtra(WifiP2pManager.EXTRA_WIFI_P2P_GROUP); + onWifiP2pGroupChange(wifiP2pGroup); + } else if (WifiP2pManager.WIFI_P2P_STATE_CHANGED_ACTION.equals(intent.getAction())) { + int state = intent.getIntExtra(WifiP2pManager.EXTRA_WIFI_STATE, 0 /* default to unknown */); + onWifiP2pStateChange(state); + } + } + + /** Releases the broadcast receiver. */ + public void release() { + context.unregisterReceiver(this); + } + + public List getActiveNetworkList() { + if (wifiP2pNetworkInfo != null) { + return Collections.singletonList(wifiP2pNetworkInfo); + } + + return Collections.emptyList(); + } + + /** Handle a change notification about the wifi p2p group. 
*/ + private void onWifiP2pGroupChange(@Nullable WifiP2pGroup wifiP2pGroup) { + if (wifiP2pGroup == null || wifiP2pGroup.getInterface() == null) { + return; + } + + NetworkInterface wifiP2pInterface; + try { + wifiP2pInterface = NetworkInterface.getByName(wifiP2pGroup.getInterface()); + } catch (SocketException e) { + Logging.e(TAG, "Unable to get WifiP2p network interface", e); + return; + } + + List interfaceAddresses = Collections.list(wifiP2pInterface.getInetAddresses()); + IPAddress[] ipAddresses = new IPAddress[interfaceAddresses.size()]; + for (int i = 0; i < interfaceAddresses.size(); ++i) { + ipAddresses[i] = new IPAddress(interfaceAddresses.get(i).getAddress()); + } + + wifiP2pNetworkInfo = new NetworkInformation(wifiP2pGroup.getInterface(), + NetworkChangeDetector.ConnectionType.CONNECTION_WIFI, + NetworkChangeDetector.ConnectionType.CONNECTION_NONE, WIFI_P2P_NETWORK_HANDLE, + ipAddresses); + observer.onNetworkConnect(wifiP2pNetworkInfo); + } + + /** Handle a state change notification about wifi p2p. */ + private void onWifiP2pStateChange(int state) { + if (state == WifiP2pManager.WIFI_P2P_STATE_DISABLED) { + wifiP2pNetworkInfo = null; + observer.onNetworkDisconnect(WIFI_P2P_NETWORK_HANDLE); + } + } + } + + private static final long INVALID_NET_ID = -1; + private static final String TAG = "NetworkMonitorAutoDetect"; + + // Observer for the connection type change. + private final NetworkChangeDetector.Observer observer; + private final IntentFilter intentFilter; + private final Context context; + // Used to request mobile network. It does not do anything except for keeping + // the callback for releasing the request. + @Nullable private final NetworkCallback mobileNetworkCallback; + // Used to receive updates on all networks. + @Nullable private final NetworkCallback allNetworkCallback; + // connectivityManagerDelegate and wifiManagerDelegate are only non-final for testing. 
+ private ConnectivityManagerDelegate connectivityManagerDelegate; + private WifiManagerDelegate wifiManagerDelegate; + private WifiDirectManagerDelegate wifiDirectManagerDelegate; + private static boolean includeWifiDirect; + + @GuardedBy("availableNetworks") final Set availableNetworks = new HashSet<>(); + + private boolean isRegistered; + private NetworkChangeDetector.ConnectionType connectionType; + private String wifiSSID; + + /** Constructs a NetworkMonitorAutoDetect. Should only be called on UI thread. */ + @SuppressLint("NewApi") + public NetworkMonitorAutoDetect(NetworkChangeDetector.Observer observer, Context context) { + this.observer = observer; + this.context = context; + String fieldTrialsString = observer.getFieldTrialsString(); + connectivityManagerDelegate = + new ConnectivityManagerDelegate(context, availableNetworks, fieldTrialsString); + wifiManagerDelegate = new WifiManagerDelegate(context); + + final NetworkState networkState = connectivityManagerDelegate.getNetworkState(); + connectionType = getConnectionType(networkState); + wifiSSID = getWifiSSID(networkState); + intentFilter = new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION); + + if (includeWifiDirect) { + wifiDirectManagerDelegate = new WifiDirectManagerDelegate(observer, context); + } + + registerReceiver(); + if (connectivityManagerDelegate.supportNetworkCallback()) { + // On Android 6.0.0, the WRITE_SETTINGS permission is necessary for + // requestNetwork, so it will fail. This was fixed in Android 6.0.1. 
+ NetworkCallback tempNetworkCallback = new NetworkCallback(); + try { + connectivityManagerDelegate.requestMobileNetwork(tempNetworkCallback); + } catch (java.lang.SecurityException e) { + Logging.w(TAG, "Unable to obtain permission to request a cellular network."); + tempNetworkCallback = null; + } + mobileNetworkCallback = tempNetworkCallback; + allNetworkCallback = new SimpleNetworkCallback(availableNetworks); + connectivityManagerDelegate.registerNetworkCallback(allNetworkCallback); + } else { + mobileNetworkCallback = null; + allNetworkCallback = null; + } + } + + /** Enables WifiDirectManager. */ + public static void setIncludeWifiDirect(boolean enable) { + includeWifiDirect = enable; + } + + @Override + public boolean supportNetworkCallback() { + return connectivityManagerDelegate.supportNetworkCallback(); + } + + /** + * Allows overriding the ConnectivityManagerDelegate for tests. + */ + void setConnectivityManagerDelegateForTests(ConnectivityManagerDelegate delegate) { + connectivityManagerDelegate = delegate; + } + + /** + * Allows overriding the WifiManagerDelegate for tests. + */ + void setWifiManagerDelegateForTests(WifiManagerDelegate delegate) { + wifiManagerDelegate = delegate; + } + + /** + * Returns whether the object has registered to receive network connectivity intents. + * Visible for testing. 
+ */ + boolean isReceiverRegisteredForTesting() { + return isRegistered; + } + + @Override + @Nullable + public List getActiveNetworkList() { + List connectivityManagerList = + connectivityManagerDelegate.getActiveNetworkList(); + if (connectivityManagerList == null) { + return null; + } + ArrayList result = + new ArrayList(connectivityManagerList); + if (wifiDirectManagerDelegate != null) { + result.addAll(wifiDirectManagerDelegate.getActiveNetworkList()); + } + return result; + } + + @Override + public void destroy() { + if (allNetworkCallback != null) { + connectivityManagerDelegate.releaseCallback(allNetworkCallback); + } + if (mobileNetworkCallback != null) { + connectivityManagerDelegate.releaseCallback(mobileNetworkCallback); + } + if (wifiDirectManagerDelegate != null) { + wifiDirectManagerDelegate.release(); + } + unregisterReceiver(); + } + + /** + * Registers a BroadcastReceiver in the given context. + */ + private void registerReceiver() { + if (isRegistered) + return; + + isRegistered = true; + context.registerReceiver(this, intentFilter); + } + + /** + * Unregisters the BroadcastReceiver in the given context. + */ + private void unregisterReceiver() { + if (!isRegistered) + return; + + isRegistered = false; + context.unregisterReceiver(this); + } + + public NetworkState getCurrentNetworkState() { + return connectivityManagerDelegate.getNetworkState(); + } + + /** + * Returns NetID of device's current default connected network used for + * communication. + * Only implemented on Lollipop and newer releases, returns INVALID_NET_ID + * when not implemented. 
+ */ + public long getDefaultNetId() { + return connectivityManagerDelegate.getDefaultNetId(); + } + + private static NetworkChangeDetector.ConnectionType getConnectionType( + boolean isConnected, int networkType, int networkSubtype) { + if (!isConnected) { + return NetworkChangeDetector.ConnectionType.CONNECTION_NONE; + } + + switch (networkType) { + case ConnectivityManager.TYPE_ETHERNET: + return NetworkChangeDetector.ConnectionType.CONNECTION_ETHERNET; + case ConnectivityManager.TYPE_WIFI: + return NetworkChangeDetector.ConnectionType.CONNECTION_WIFI; + case ConnectivityManager.TYPE_WIMAX: + return NetworkChangeDetector.ConnectionType.CONNECTION_4G; + case ConnectivityManager.TYPE_BLUETOOTH: + return NetworkChangeDetector.ConnectionType.CONNECTION_BLUETOOTH; + case ConnectivityManager.TYPE_MOBILE: + case ConnectivityManager.TYPE_MOBILE_DUN: + case ConnectivityManager.TYPE_MOBILE_HIPRI: + // Use information from TelephonyManager to classify the connection. + switch (networkSubtype) { + case TelephonyManager.NETWORK_TYPE_GPRS: + case TelephonyManager.NETWORK_TYPE_EDGE: + case TelephonyManager.NETWORK_TYPE_CDMA: + case TelephonyManager.NETWORK_TYPE_1xRTT: + case TelephonyManager.NETWORK_TYPE_IDEN: + case TelephonyManager.NETWORK_TYPE_GSM: + return NetworkChangeDetector.ConnectionType.CONNECTION_2G; + case TelephonyManager.NETWORK_TYPE_UMTS: + case TelephonyManager.NETWORK_TYPE_EVDO_0: + case TelephonyManager.NETWORK_TYPE_EVDO_A: + case TelephonyManager.NETWORK_TYPE_HSDPA: + case TelephonyManager.NETWORK_TYPE_HSUPA: + case TelephonyManager.NETWORK_TYPE_HSPA: + case TelephonyManager.NETWORK_TYPE_EVDO_B: + case TelephonyManager.NETWORK_TYPE_EHRPD: + case TelephonyManager.NETWORK_TYPE_HSPAP: + case TelephonyManager.NETWORK_TYPE_TD_SCDMA: + return NetworkChangeDetector.ConnectionType.CONNECTION_3G; + case TelephonyManager.NETWORK_TYPE_LTE: + case TelephonyManager.NETWORK_TYPE_IWLAN: + return NetworkChangeDetector.ConnectionType.CONNECTION_4G; + case 
TelephonyManager.NETWORK_TYPE_NR: + return NetworkChangeDetector.ConnectionType.CONNECTION_5G; + default: + return NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN_CELLULAR; + } + case ConnectivityManager.TYPE_VPN: + return NetworkChangeDetector.ConnectionType.CONNECTION_VPN; + default: + return NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN; + } + } + + public static NetworkChangeDetector.ConnectionType getConnectionType(NetworkState networkState) { + return getConnectionType(networkState.isConnected(), networkState.getNetworkType(), + networkState.getNetworkSubType()); + } + + @Override + public NetworkChangeDetector.ConnectionType getCurrentConnectionType() { + return getConnectionType(getCurrentNetworkState()); + } + + private static NetworkChangeDetector.ConnectionType getUnderlyingConnectionTypeForVpn( + NetworkState networkState) { + if (networkState.getNetworkType() != ConnectivityManager.TYPE_VPN) { + return NetworkChangeDetector.ConnectionType.CONNECTION_NONE; + } + return getConnectionType(networkState.isConnected(), + networkState.getUnderlyingNetworkTypeForVpn(), + networkState.getUnderlyingNetworkSubtypeForVpn()); + } + + private String getWifiSSID(NetworkState networkState) { + if (getConnectionType(networkState) != NetworkChangeDetector.ConnectionType.CONNECTION_WIFI) + return ""; + return wifiManagerDelegate.getWifiSSID(); + } + + // BroadcastReceiver + @Override + public void onReceive(Context context, Intent intent) { + final NetworkState networkState = getCurrentNetworkState(); + if (ConnectivityManager.CONNECTIVITY_ACTION.equals(intent.getAction())) { + connectionTypeChanged(networkState); + } + } + + private void connectionTypeChanged(NetworkState networkState) { + NetworkChangeDetector.ConnectionType newConnectionType = getConnectionType(networkState); + String newWifiSSID = getWifiSSID(networkState); + if (newConnectionType == connectionType && newWifiSSID.equals(wifiSSID)) + return; + + connectionType = newConnectionType; + 
wifiSSID = newWifiSSID; + Logging.d(TAG, "Network connectivity changed, type is: " + connectionType); + observer.onConnectionTypeChanged(newConnectionType); + } + + /** + * Extracts NetID of network on Lollipop and NetworkHandle (which is mungled + * NetID) on Marshmallow and newer releases. Only available on Lollipop and + * newer releases. Returns long since getNetworkHandle returns long. + */ + @SuppressLint("NewApi") + private static long networkToNetId(Network network) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + return network.getNetworkHandle(); + } + + // NOTE(honghaiz): This depends on Android framework implementation details. + // These details cannot change because Lollipop has been released. + return Integer.parseInt(network.toString()); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkPreference.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkPreference.java new file mode 100644 index 00000000..b96ad89c --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkPreference.java @@ -0,0 +1,11 @@ + +// IntelliJ API Decompiler stub source generated from a class file +// Implementation of methods is not available + +package org.webrtc; + +@java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.SOURCE) +public @interface NetworkPreference { + int NEUTRAL = 0; + int NOT_PREFERRED = -1; +} \ No newline at end of file diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkStatePredictorFactoryFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkStatePredictorFactoryFactory.java new file mode 100644 index 00000000..bf965bcb --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/NetworkStatePredictorFactoryFactory.java @@ -0,0 +1,20 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** Factory for creating webrtc::NetworkStatePredictorFactory instances. */ +public interface NetworkStatePredictorFactoryFactory { + /** + * Dynamically allocates a webrtc::NetworkStatePredictorFactory instance and returns a pointer to + * it. The caller takes ownership of the object. + */ + public long createNativeNetworkStatePredictorFactory(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/OWNERS b/webrtc_player/android/zlm/src/main/java/org/webrtc/OWNERS new file mode 100644 index 00000000..109bea27 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/OWNERS @@ -0,0 +1,2 @@ +magjed@webrtc.org +xalep@webrtc.org diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/PeerConnection.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/PeerConnection.java new file mode 100644 index 00000000..d530bc2c --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/PeerConnection.java @@ -0,0 +1,1316 @@ +/* + * Copyright 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.webrtc.CandidatePairChangeEvent; +import org.webrtc.DataChannel; +import org.webrtc.MediaStreamTrack; +import org.webrtc.RtpTransceiver; + +/** + * Java-land version of the PeerConnection APIs; wraps the C++ API + * http://www.webrtc.org/reference/native-apis, which in turn is inspired by the + * JS APIs: http://dev.w3.org/2011/webrtc/editor/webrtc.html and + * http://www.w3.org/TR/mediacapture-streams/ + */ +public class PeerConnection { + /** Tracks PeerConnectionInterface::IceGatheringState */ + public enum IceGatheringState { + NEW, + GATHERING, + COMPLETE; + + @CalledByNative("IceGatheringState") + static IceGatheringState fromNativeIndex(int nativeIndex) { + return values()[nativeIndex]; + } + } + + /** Tracks PeerConnectionInterface::IceConnectionState */ + public enum IceConnectionState { + NEW, + CHECKING, + CONNECTED, + COMPLETED, + FAILED, + DISCONNECTED, + CLOSED; + + @CalledByNative("IceConnectionState") + static IceConnectionState fromNativeIndex(int nativeIndex) { + return values()[nativeIndex]; + } + } + + /** Tracks PeerConnectionInterface::PeerConnectionState */ + public enum PeerConnectionState { + NEW, + CONNECTING, + CONNECTED, + DISCONNECTED, + FAILED, + CLOSED; + + @CalledByNative("PeerConnectionState") + static PeerConnectionState fromNativeIndex(int nativeIndex) { + return values()[nativeIndex]; + } + } + + /** Tracks PeerConnectionInterface::TlsCertPolicy */ + public enum TlsCertPolicy { + TLS_CERT_POLICY_SECURE, + TLS_CERT_POLICY_INSECURE_NO_CHECK, + } + + /** Tracks PeerConnectionInterface::SignalingState */ + public enum SignalingState { + STABLE, + HAVE_LOCAL_OFFER, + HAVE_LOCAL_PRANSWER, + HAVE_REMOTE_OFFER, + HAVE_REMOTE_PRANSWER, + CLOSED; + + @CalledByNative("SignalingState") + static 
SignalingState fromNativeIndex(int nativeIndex) { + return values()[nativeIndex]; + } + } + + /** Java version of PeerConnectionObserver. */ + public static interface Observer { + /** Triggered when the SignalingState changes. */ + @CalledByNative("Observer") void onSignalingChange(SignalingState newState); + + /** Triggered when the IceConnectionState changes. */ + @CalledByNative("Observer") void onIceConnectionChange(IceConnectionState newState); + + /* Triggered when the standard-compliant state transition of IceConnectionState happens. */ + @CalledByNative("Observer") + default void onStandardizedIceConnectionChange(IceConnectionState newState) {} + + /** Triggered when the PeerConnectionState changes. */ + @CalledByNative("Observer") + default void onConnectionChange(PeerConnectionState newState) {} + + /** Triggered when the ICE connection receiving status changes. */ + @CalledByNative("Observer") void onIceConnectionReceivingChange(boolean receiving); + + /** Triggered when the IceGatheringState changes. */ + @CalledByNative("Observer") void onIceGatheringChange(IceGatheringState newState); + + /** Triggered when a new ICE candidate has been found. */ + @CalledByNative("Observer") void onIceCandidate(IceCandidate candidate); + + /** Triggered when gathering of an ICE candidate failed. */ + default @CalledByNative("Observer") void onIceCandidateError(IceCandidateErrorEvent event) {} + + /** Triggered when some ICE candidates have been removed. */ + @CalledByNative("Observer") void onIceCandidatesRemoved(IceCandidate[] candidates); + + /** Triggered when the ICE candidate pair is changed. */ + @CalledByNative("Observer") + default void onSelectedCandidatePairChanged(CandidatePairChangeEvent event) {} + + /** Triggered when media is received on a new stream from remote peer. */ + @CalledByNative("Observer") void onAddStream(MediaStream stream); + + /** Triggered when a remote peer close a stream. 
*/ + @CalledByNative("Observer") void onRemoveStream(MediaStream stream); + + /** Triggered when a remote peer opens a DataChannel. */ + @CalledByNative("Observer") void onDataChannel(DataChannel dataChannel); + + /** Triggered when renegotiation is necessary. */ + @CalledByNative("Observer") void onRenegotiationNeeded(); + + /** + * Triggered when a new track is signaled by the remote peer, as a result of + * setRemoteDescription. + */ + @CalledByNative("Observer") + default void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams){}; + + /** + * Triggered when a previously added remote track is removed by the remote + * peer, as a result of setRemoteDescription. + */ + @CalledByNative("Observer") default void onRemoveTrack(RtpReceiver receiver){}; + + /** + * Triggered when the signaling from SetRemoteDescription indicates that a transceiver + * will be receiving media from a remote endpoint. This is only called if UNIFIED_PLAN + * semantics are specified. The transceiver will be disposed automatically. + */ + @CalledByNative("Observer") default void onTrack(RtpTransceiver transceiver){}; + } + + /** Java version of PeerConnectionInterface.IceServer. */ + public static class IceServer { + // List of URIs associated with this server. Valid formats are described + // in RFC7064 and RFC7065, and more may be added in the future. The "host" + // part of the URI may contain either an IP address or a hostname. + @Deprecated public final String uri; + public final List urls; + public final String username; + public final String password; + public final TlsCertPolicy tlsCertPolicy; + + // If the URIs in `urls` only contain IP addresses, this field can be used + // to indicate the hostname, which may be necessary for TLS (using the SNI + // extension). If `urls` itself contains the hostname, this isn't + // necessary. + public final String hostname; + + // List of protocols to be used in the TLS ALPN extension. 
+ public final List tlsAlpnProtocols; + + // List of elliptic curves to be used in the TLS elliptic curves extension. + // Only curve names supported by OpenSSL should be used (eg. "P-256","X25519"). + public final List tlsEllipticCurves; + + /** Convenience constructor for STUN servers. */ + @Deprecated + public IceServer(String uri) { + this(uri, "", ""); + } + + @Deprecated + public IceServer(String uri, String username, String password) { + this(uri, username, password, TlsCertPolicy.TLS_CERT_POLICY_SECURE); + } + + @Deprecated + public IceServer(String uri, String username, String password, TlsCertPolicy tlsCertPolicy) { + this(uri, username, password, tlsCertPolicy, ""); + } + + @Deprecated + public IceServer(String uri, String username, String password, TlsCertPolicy tlsCertPolicy, + String hostname) { + this(uri, Collections.singletonList(uri), username, password, tlsCertPolicy, hostname, null, + null); + } + + private IceServer(String uri, List urls, String username, String password, + TlsCertPolicy tlsCertPolicy, String hostname, List tlsAlpnProtocols, + List tlsEllipticCurves) { + if (uri == null || urls == null || urls.isEmpty()) { + throw new IllegalArgumentException("uri == null || urls == null || urls.isEmpty()"); + } + for (String it : urls) { + if (it == null) { + throw new IllegalArgumentException("urls element is null: " + urls); + } + } + if (username == null) { + throw new IllegalArgumentException("username == null"); + } + if (password == null) { + throw new IllegalArgumentException("password == null"); + } + if (hostname == null) { + throw new IllegalArgumentException("hostname == null"); + } + this.uri = uri; + this.urls = urls; + this.username = username; + this.password = password; + this.tlsCertPolicy = tlsCertPolicy; + this.hostname = hostname; + this.tlsAlpnProtocols = tlsAlpnProtocols; + this.tlsEllipticCurves = tlsEllipticCurves; + } + + @Override + public String toString() { + return urls + " [" + username + ":" + password + "] [" + 
tlsCertPolicy + "] [" + hostname + + "] [" + tlsAlpnProtocols + "] [" + tlsEllipticCurves + "]"; + } + + @Override + public boolean equals(@Nullable Object obj) { + if (obj == null) { + return false; + } + if (obj == this) { + return true; + } + if (!(obj instanceof IceServer)) { + return false; + } + IceServer other = (IceServer) obj; + return (uri.equals(other.uri) && urls.equals(other.urls) && username.equals(other.username) + && password.equals(other.password) && tlsCertPolicy.equals(other.tlsCertPolicy) + && hostname.equals(other.hostname) && tlsAlpnProtocols.equals(other.tlsAlpnProtocols) + && tlsEllipticCurves.equals(other.tlsEllipticCurves)); + } + + @Override + public int hashCode() { + Object[] values = {uri, urls, username, password, tlsCertPolicy, hostname, tlsAlpnProtocols, + tlsEllipticCurves}; + return Arrays.hashCode(values); + } + + public static Builder builder(String uri) { + return new Builder(Collections.singletonList(uri)); + } + + public static Builder builder(List urls) { + return new Builder(urls); + } + + public static class Builder { + @Nullable private final List urls; + private String username = ""; + private String password = ""; + private TlsCertPolicy tlsCertPolicy = TlsCertPolicy.TLS_CERT_POLICY_SECURE; + private String hostname = ""; + private List tlsAlpnProtocols; + private List tlsEllipticCurves; + + private Builder(List urls) { + if (urls == null || urls.isEmpty()) { + throw new IllegalArgumentException("urls == null || urls.isEmpty(): " + urls); + } + this.urls = urls; + } + + public Builder setUsername(String username) { + this.username = username; + return this; + } + + public Builder setPassword(String password) { + this.password = password; + return this; + } + + public Builder setTlsCertPolicy(TlsCertPolicy tlsCertPolicy) { + this.tlsCertPolicy = tlsCertPolicy; + return this; + } + + public Builder setHostname(String hostname) { + this.hostname = hostname; + return this; + } + + public Builder setTlsAlpnProtocols(List 
tlsAlpnProtocols) { + this.tlsAlpnProtocols = tlsAlpnProtocols; + return this; + } + + public Builder setTlsEllipticCurves(List tlsEllipticCurves) { + this.tlsEllipticCurves = tlsEllipticCurves; + return this; + } + + public IceServer createIceServer() { + return new IceServer(urls.get(0), urls, username, password, tlsCertPolicy, hostname, + tlsAlpnProtocols, tlsEllipticCurves); + } + } + + @Nullable + @CalledByNative("IceServer") + List getUrls() { + return urls; + } + + @Nullable + @CalledByNative("IceServer") + String getUsername() { + return username; + } + + @Nullable + @CalledByNative("IceServer") + String getPassword() { + return password; + } + + @CalledByNative("IceServer") + TlsCertPolicy getTlsCertPolicy() { + return tlsCertPolicy; + } + + @Nullable + @CalledByNative("IceServer") + String getHostname() { + return hostname; + } + + @CalledByNative("IceServer") + List getTlsAlpnProtocols() { + return tlsAlpnProtocols; + } + + @CalledByNative("IceServer") + List getTlsEllipticCurves() { + return tlsEllipticCurves; + } + } + + /** Java version of PeerConnectionInterface.IceTransportsType */ + public enum IceTransportsType { NONE, RELAY, NOHOST, ALL } + + /** Java version of PeerConnectionInterface.BundlePolicy */ + public enum BundlePolicy { BALANCED, MAXBUNDLE, MAXCOMPAT } + + /** Java version of PeerConnectionInterface.RtcpMuxPolicy */ + public enum RtcpMuxPolicy { NEGOTIATE, REQUIRE } + + /** Java version of PeerConnectionInterface.TcpCandidatePolicy */ + public enum TcpCandidatePolicy { ENABLED, DISABLED } + + /** Java version of PeerConnectionInterface.CandidateNetworkPolicy */ + public enum CandidateNetworkPolicy { ALL, LOW_COST } + + // Keep in sync with webrtc/rtc_base/network_constants.h. 
+ public enum AdapterType { + UNKNOWN(0), + ETHERNET(1 << 0), + WIFI(1 << 1), + CELLULAR(1 << 2), + VPN(1 << 3), + LOOPBACK(1 << 4), + ADAPTER_TYPE_ANY(1 << 5), + CELLULAR_2G(1 << 6), + CELLULAR_3G(1 << 7), + CELLULAR_4G(1 << 8), + CELLULAR_5G(1 << 9); + + public final Integer bitMask; + private AdapterType(Integer bitMask) { + this.bitMask = bitMask; + } + private static final Map BY_BITMASK = new HashMap<>(); + static { + for (AdapterType t : values()) { + BY_BITMASK.put(t.bitMask, t); + } + } + + @Nullable + @CalledByNative("AdapterType") + static AdapterType fromNativeIndex(int nativeIndex) { + return BY_BITMASK.get(nativeIndex); + } + } + + /** Java version of rtc::KeyType */ + public enum KeyType { RSA, ECDSA } + + /** Java version of PeerConnectionInterface.ContinualGatheringPolicy */ + public enum ContinualGatheringPolicy { GATHER_ONCE, GATHER_CONTINUALLY } + + /** Java version of webrtc::PortPrunePolicy */ + public enum PortPrunePolicy { + NO_PRUNE, // Do not prune turn port. + PRUNE_BASED_ON_PRIORITY, // Prune turn port based the priority on the same network + KEEP_FIRST_READY // Keep the first ready port and prune the rest on the same network. + } + + /** + * Java version of webrtc::SdpSemantics. + * + * Configure the SDP semantics used by this PeerConnection. By default, this + * is UNIFIED_PLAN which is compliant to the WebRTC 1.0 specification. It is + * possible to overrwite this to the deprecated PLAN_B SDP format, but note + * that PLAN_B will be deleted at some future date, see + * https://crbug.com/webrtc/13528. + * + * UNIFIED_PLAN will cause PeerConnection to create offers and answers with + * multiple m= sections where each m= section maps to one RtpSender and one + * RtpReceiver (an RtpTransceiver), either both audio or both video. This + * will also cause PeerConnection to ignore all but the first a=ssrc lines + * that form a Plan B stream. 
+ * + * PLAN_B will cause PeerConnection to create offers and answers with at most + * one audio and one video m= section with multiple RtpSenders and + * RtpReceivers specified as multiple a=ssrc lines within the section. This + * will also cause PeerConnection to ignore all but the first m= section of + * the same media type. + */ + public enum SdpSemantics { + // TODO(https://crbug.com/webrtc/13528): Remove support for PLAN_B. + @Deprecated PLAN_B, + UNIFIED_PLAN + } + + /** Java version of PeerConnectionInterface.RTCConfiguration */ + // TODO(qingsi): Resolve the naming inconsistency of fields with/without units. + public static class RTCConfiguration { + public IceTransportsType iceTransportsType; + public List iceServers; + public BundlePolicy bundlePolicy; + @Nullable public RtcCertificatePem certificate; + public RtcpMuxPolicy rtcpMuxPolicy; + public TcpCandidatePolicy tcpCandidatePolicy; + public CandidateNetworkPolicy candidateNetworkPolicy; + public int audioJitterBufferMaxPackets; + public boolean audioJitterBufferFastAccelerate; + public int iceConnectionReceivingTimeout; + public int iceBackupCandidatePairPingInterval; + public KeyType keyType; + public ContinualGatheringPolicy continualGatheringPolicy; + public int iceCandidatePoolSize; + @Deprecated // by the turnPortPrunePolicy. See bugs.webrtc.org/11026 + public boolean pruneTurnPorts; + public PortPrunePolicy turnPortPrunePolicy; + public boolean presumeWritableWhenFullyRelayed; + public boolean surfaceIceCandidatesOnIceTransportTypeChanged; + // The following fields define intervals in milliseconds at which ICE + // connectivity checks are sent. + // + // We consider ICE is "strongly connected" for an agent when there is at + // least one candidate pair that currently succeeds in connectivity check + // from its direction i.e. 
sending a ping and receives a ping response, AND + // all candidate pairs have sent a minimum number of pings for connectivity + // (this number is implementation-specific). Otherwise, ICE is considered in + // "weak connectivity". + // + // Note that the above notion of strong and weak connectivity is not defined + // in RFC 5245, and they apply to our current ICE implementation only. + // + // 1) iceCheckIntervalStrongConnectivityMs defines the interval applied to + // ALL candidate pairs when ICE is strongly connected, + // 2) iceCheckIntervalWeakConnectivityMs defines the counterpart for ALL + // pairs when ICE is weakly connected, and + // 3) iceCheckMinInterval defines the minimal interval (equivalently the + // maximum rate) that overrides the above two intervals when either of them + // is less. + @Nullable public Integer iceCheckIntervalStrongConnectivityMs; + @Nullable public Integer iceCheckIntervalWeakConnectivityMs; + @Nullable public Integer iceCheckMinInterval; + // The time period in milliseconds for which a candidate pair must wait for response to + // connectivitiy checks before it becomes unwritable. + @Nullable public Integer iceUnwritableTimeMs; + // The minimum number of connectivity checks that a candidate pair must sent without receiving + // response before it becomes unwritable. + @Nullable public Integer iceUnwritableMinChecks; + // The interval in milliseconds at which STUN candidates will resend STUN binding requests + // to keep NAT bindings open. + // The default value in the implementation is used if this field is null. + @Nullable public Integer stunCandidateKeepaliveIntervalMs; + // The interval in milliseconds of pings sent when the connection is stable and writable. + // The default value in the implementation is used if this field is null. 
+ @Nullable public Integer stableWritableConnectionPingIntervalMs; + public boolean disableIPv6OnWifi; + // By default, PeerConnection will use a limited number of IPv6 network + // interfaces, in order to avoid too many ICE candidate pairs being created + // and delaying ICE completion. + // + // Can be set to Integer.MAX_VALUE to effectively disable the limit. + public int maxIPv6Networks; + + // These values will be overridden by MediaStream constraints if deprecated constraints-based + // create peerconnection interface is used. + public boolean enableDscp; + public boolean enableCpuOveruseDetection; + public boolean suspendBelowMinBitrate; + @Nullable public Integer screencastMinBitrate; + // Use "Unknown" to represent no preference of adapter types, not the + // preference of adapters of unknown types. + public AdapterType networkPreference; + public SdpSemantics sdpSemantics; + + // This is an optional wrapper for the C++ webrtc::TurnCustomizer. + @Nullable public TurnCustomizer turnCustomizer; + + // Actively reset the SRTP parameters whenever the DTLS transports underneath are reset for + // every offer/answer negotiation.This is only intended to be a workaround for crbug.com/835958 + public boolean activeResetSrtpParams; + + /** + * Defines advanced optional cryptographic settings related to SRTP and + * frame encryption for native WebRTC. Setting this will overwrite any + * options set through the PeerConnectionFactory (which is deprecated). + */ + @Nullable public CryptoOptions cryptoOptions; + + /** + * An optional string that if set will be attached to the + * TURN_ALLOCATE_REQUEST which can be used to correlate client + * logs with backend logs + */ + @Nullable public String turnLoggingId; + + /** + * Allow implicit rollback of local description when remote description + * conflicts with local description. 
+ * See: https://w3c.github.io/webrtc-pc/#dom-peerconnection-setremotedescription + */ + public boolean enableImplicitRollback; + + /** + * Control if "a=extmap-allow-mixed" is included in the offer. + * See: https://www.chromestatus.com/feature/6269234631933952 + */ + public boolean offerExtmapAllowMixed; + + // TODO(deadbeef): Instead of duplicating the defaults here, we should do + // something to pick up the defaults from C++. The Objective-C equivalent + // of RTCConfiguration does that. + public RTCConfiguration(List iceServers) { + iceTransportsType = IceTransportsType.ALL; + bundlePolicy = BundlePolicy.BALANCED; + rtcpMuxPolicy = RtcpMuxPolicy.REQUIRE; + tcpCandidatePolicy = TcpCandidatePolicy.ENABLED; + candidateNetworkPolicy = CandidateNetworkPolicy.ALL; + this.iceServers = iceServers; + audioJitterBufferMaxPackets = 50; + audioJitterBufferFastAccelerate = false; + iceConnectionReceivingTimeout = -1; + iceBackupCandidatePairPingInterval = -1; + keyType = KeyType.ECDSA; + continualGatheringPolicy = ContinualGatheringPolicy.GATHER_ONCE; + iceCandidatePoolSize = 0; + pruneTurnPorts = false; + turnPortPrunePolicy = PortPrunePolicy.NO_PRUNE; + presumeWritableWhenFullyRelayed = false; + surfaceIceCandidatesOnIceTransportTypeChanged = false; + iceCheckIntervalStrongConnectivityMs = null; + iceCheckIntervalWeakConnectivityMs = null; + iceCheckMinInterval = null; + iceUnwritableTimeMs = null; + iceUnwritableMinChecks = null; + stunCandidateKeepaliveIntervalMs = null; + stableWritableConnectionPingIntervalMs = null; + disableIPv6OnWifi = false; + maxIPv6Networks = 5; + enableDscp = false; + enableCpuOveruseDetection = true; + suspendBelowMinBitrate = false; + screencastMinBitrate = null; + networkPreference = AdapterType.UNKNOWN; + sdpSemantics = SdpSemantics.UNIFIED_PLAN; + activeResetSrtpParams = false; + cryptoOptions = null; + turnLoggingId = null; + enableImplicitRollback = false; + offerExtmapAllowMixed = true; + } + + @CalledByNative("RTCConfiguration") + 
IceTransportsType getIceTransportsType() { + return iceTransportsType; + } + + @CalledByNative("RTCConfiguration") + List getIceServers() { + return iceServers; + } + + @CalledByNative("RTCConfiguration") + BundlePolicy getBundlePolicy() { + return bundlePolicy; + } + + @CalledByNative("RTCConfiguration") + PortPrunePolicy getTurnPortPrunePolicy() { + return turnPortPrunePolicy; + } + + @Nullable + @CalledByNative("RTCConfiguration") + RtcCertificatePem getCertificate() { + return certificate; + } + + @CalledByNative("RTCConfiguration") + RtcpMuxPolicy getRtcpMuxPolicy() { + return rtcpMuxPolicy; + } + + @CalledByNative("RTCConfiguration") + TcpCandidatePolicy getTcpCandidatePolicy() { + return tcpCandidatePolicy; + } + + @CalledByNative("RTCConfiguration") + CandidateNetworkPolicy getCandidateNetworkPolicy() { + return candidateNetworkPolicy; + } + + @CalledByNative("RTCConfiguration") + int getAudioJitterBufferMaxPackets() { + return audioJitterBufferMaxPackets; + } + + @CalledByNative("RTCConfiguration") + boolean getAudioJitterBufferFastAccelerate() { + return audioJitterBufferFastAccelerate; + } + + @CalledByNative("RTCConfiguration") + int getIceConnectionReceivingTimeout() { + return iceConnectionReceivingTimeout; + } + + @CalledByNative("RTCConfiguration") + int getIceBackupCandidatePairPingInterval() { + return iceBackupCandidatePairPingInterval; + } + + @CalledByNative("RTCConfiguration") + KeyType getKeyType() { + return keyType; + } + + @CalledByNative("RTCConfiguration") + ContinualGatheringPolicy getContinualGatheringPolicy() { + return continualGatheringPolicy; + } + + @CalledByNative("RTCConfiguration") + int getIceCandidatePoolSize() { + return iceCandidatePoolSize; + } + + @CalledByNative("RTCConfiguration") + boolean getPruneTurnPorts() { + return pruneTurnPorts; + } + + @CalledByNative("RTCConfiguration") + boolean getPresumeWritableWhenFullyRelayed() { + return presumeWritableWhenFullyRelayed; + } + + @CalledByNative("RTCConfiguration") + 
boolean getSurfaceIceCandidatesOnIceTransportTypeChanged() { + return surfaceIceCandidatesOnIceTransportTypeChanged; + } + + @Nullable + @CalledByNative("RTCConfiguration") + Integer getIceCheckIntervalStrongConnectivity() { + return iceCheckIntervalStrongConnectivityMs; + } + + @Nullable + @CalledByNative("RTCConfiguration") + Integer getIceCheckIntervalWeakConnectivity() { + return iceCheckIntervalWeakConnectivityMs; + } + + @Nullable + @CalledByNative("RTCConfiguration") + Integer getIceCheckMinInterval() { + return iceCheckMinInterval; + } + + @Nullable + @CalledByNative("RTCConfiguration") + Integer getIceUnwritableTimeout() { + return iceUnwritableTimeMs; + } + + @Nullable + @CalledByNative("RTCConfiguration") + Integer getIceUnwritableMinChecks() { + return iceUnwritableMinChecks; + } + + @Nullable + @CalledByNative("RTCConfiguration") + Integer getStunCandidateKeepaliveInterval() { + return stunCandidateKeepaliveIntervalMs; + } + + @Nullable + @CalledByNative("RTCConfiguration") + Integer getStableWritableConnectionPingIntervalMs() { + return stableWritableConnectionPingIntervalMs; + } + + @CalledByNative("RTCConfiguration") + boolean getDisableIPv6OnWifi() { + return disableIPv6OnWifi; + } + + @CalledByNative("RTCConfiguration") + int getMaxIPv6Networks() { + return maxIPv6Networks; + } + + @Nullable + @CalledByNative("RTCConfiguration") + TurnCustomizer getTurnCustomizer() { + return turnCustomizer; + } + + @CalledByNative("RTCConfiguration") + boolean getEnableDscp() { + return enableDscp; + } + + @CalledByNative("RTCConfiguration") + boolean getEnableCpuOveruseDetection() { + return enableCpuOveruseDetection; + } + + @CalledByNative("RTCConfiguration") + boolean getSuspendBelowMinBitrate() { + return suspendBelowMinBitrate; + } + + @Nullable + @CalledByNative("RTCConfiguration") + Integer getScreencastMinBitrate() { + return screencastMinBitrate; + } + + @CalledByNative("RTCConfiguration") + AdapterType getNetworkPreference() { + return 
networkPreference; + } + + @CalledByNative("RTCConfiguration") + SdpSemantics getSdpSemantics() { + return sdpSemantics; + } + + @CalledByNative("RTCConfiguration") + boolean getActiveResetSrtpParams() { + return activeResetSrtpParams; + } + + @Nullable + @CalledByNative("RTCConfiguration") + CryptoOptions getCryptoOptions() { + return cryptoOptions; + } + + @Nullable + @CalledByNative("RTCConfiguration") + String getTurnLoggingId() { + return turnLoggingId; + } + + @CalledByNative("RTCConfiguration") + boolean getEnableImplicitRollback() { + return enableImplicitRollback; + } + + @CalledByNative("RTCConfiguration") + boolean getOfferExtmapAllowMixed() { + return offerExtmapAllowMixed; + } + }; + + private final List localStreams = new ArrayList<>(); + private final long nativePeerConnection; + private List senders = new ArrayList<>(); + private List receivers = new ArrayList<>(); + private List transceivers = new ArrayList<>(); + + /** + * Wraps a PeerConnection created by the factory. Can be used by clients that want to implement + * their PeerConnection creation in JNI. + */ + public PeerConnection(NativePeerConnectionFactory factory) { + this(factory.createNativePeerConnection()); + } + + PeerConnection(long nativePeerConnection) { + this.nativePeerConnection = nativePeerConnection; + } + + // JsepInterface. 
+ public SessionDescription getLocalDescription() { + return nativeGetLocalDescription(); + } + + public SessionDescription getRemoteDescription() { + return nativeGetRemoteDescription(); + } + + public RtcCertificatePem getCertificate() { + return nativeGetCertificate(); + } + + public DataChannel createDataChannel(String label, DataChannel.Init init) { + return nativeCreateDataChannel(label, init); + } + + public void createOffer(SdpObserver observer, MediaConstraints constraints) { + nativeCreateOffer(observer, constraints); + } + + public void createAnswer(SdpObserver observer, MediaConstraints constraints) { + nativeCreateAnswer(observer, constraints); + } + + public void setLocalDescription(SdpObserver observer) { + nativeSetLocalDescriptionAutomatically(observer); + } + + public void setLocalDescription(SdpObserver observer, SessionDescription sdp) { + nativeSetLocalDescription(observer, sdp); + } + + public void setRemoteDescription(SdpObserver observer, SessionDescription sdp) { + nativeSetRemoteDescription(observer, sdp); + } + + /** + * Tells the PeerConnection that ICE should be restarted. + */ + public void restartIce() { + nativeRestartIce(); + } + + /** + * Enables/disables playout of received audio streams. Enabled by default. + * + * Note that even if playout is enabled, streams will only be played out if + * the appropriate SDP is also applied. The main purpose of this API is to + * be able to control the exact time when audio playout starts. + */ + public void setAudioPlayout(boolean playout) { + nativeSetAudioPlayout(playout); + } + + /** + * Enables/disables recording of transmitted audio streams. Enabled by default. + * + * Note that even if recording is enabled, streams will only be recorded if + * the appropriate SDP is also applied. The main purpose of this API is to + * be able to control the exact time when audio recording starts. 
+ */ + public void setAudioRecording(boolean recording) { + nativeSetAudioRecording(recording); + } + + public boolean setConfiguration(RTCConfiguration config) { + return nativeSetConfiguration(config); + } + + public boolean addIceCandidate(IceCandidate candidate) { + return nativeAddIceCandidate(candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp); + } + + public void addIceCandidate(IceCandidate candidate, AddIceObserver observer) { + nativeAddIceCandidateWithObserver( + candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp, observer); + } + + public boolean removeIceCandidates(final IceCandidate[] candidates) { + return nativeRemoveIceCandidates(candidates); + } + + /** + * Adds a new MediaStream to be sent on this peer connection. + * Note: This method is not supported with SdpSemantics.UNIFIED_PLAN. Please + * use addTrack instead. + */ + public boolean addStream(MediaStream stream) { + boolean ret = nativeAddLocalStream(stream.getNativeMediaStream()); + if (!ret) { + return false; + } + localStreams.add(stream); + return true; + } + + /** + * Removes the given media stream from this peer connection. + * This method is not supported with SdpSemantics.UNIFIED_PLAN. Please use + * removeTrack instead. + */ + public void removeStream(MediaStream stream) { + nativeRemoveLocalStream(stream.getNativeMediaStream()); + localStreams.remove(stream); + } + + /** + * Creates an RtpSender without a track. + * + *

This method allows an application to cause the PeerConnection to negotiate + * sending/receiving a specific media type, but without having a track to + * send yet. + * + *

When the application does want to begin sending a track, it can call + * RtpSender.setTrack, which doesn't require any additional SDP negotiation. + * + *

Example use: + *

+   * {@code
+   * audioSender = pc.createSender("audio", "stream1");
+   * videoSender = pc.createSender("video", "stream1");
+   * // Do normal SDP offer/answer, which will kick off ICE/DTLS and negotiate
+   * // media parameters....
+   * // Later, when the endpoint is ready to actually begin sending:
+   * audioSender.setTrack(audioTrack, false);
+   * videoSender.setTrack(videoTrack, false);
+   * }
+   * 
+ *

Note: This corresponds most closely to "addTransceiver" in the official + * WebRTC API, in that it creates a sender without a track. It was + * implemented before addTransceiver because it provides useful + * functionality, and properly implementing transceivers would have required + * a great deal more work. + * + *

Note: This is only available with SdpSemantics.PLAN_B specified. Please use + * addTransceiver instead. + * + * @param kind Corresponds to MediaStreamTrack kinds (must be "audio" or + * "video"). + * @param stream_id The ID of the MediaStream that this sender's track will + * be associated with when SDP is applied to the remote + * PeerConnection. If createSender is used to create an + * audio and video sender that should be synchronized, they + * should use the same stream ID. + * @return A new RtpSender object if successful, or null otherwise. + */ + public RtpSender createSender(String kind, String stream_id) { + RtpSender newSender = nativeCreateSender(kind, stream_id); + if (newSender != null) { + senders.add(newSender); + } + return newSender; + } + + /** + * Gets all RtpSenders associated with this peer connection. + * Note that calling getSenders will dispose of the senders previously + * returned. + */ + public List getSenders() { + for (RtpSender sender : senders) { + sender.dispose(); + } + senders = nativeGetSenders(); + return Collections.unmodifiableList(senders); + } + + /** + * Gets all RtpReceivers associated with this peer connection. + * Note that calling getReceivers will dispose of the receivers previously + * returned. + */ + public List getReceivers() { + for (RtpReceiver receiver : receivers) { + receiver.dispose(); + } + receivers = nativeGetReceivers(); + return Collections.unmodifiableList(receivers); + } + + /** + * Gets all RtpTransceivers associated with this peer connection. + * Note that calling getTransceivers will dispose of the transceivers previously + * returned. + * Note: This is only available with SdpSemantics.UNIFIED_PLAN specified. 
+ */ + public List getTransceivers() { + for (RtpTransceiver transceiver : transceivers) { + transceiver.dispose(); + } + transceivers = nativeGetTransceivers(); + return Collections.unmodifiableList(transceivers); + } + + /** + * Adds a new media stream track to be sent on this peer connection, and returns + * the newly created RtpSender. If streamIds are specified, the RtpSender will + * be associated with the streams specified in the streamIds list. + * + * @throws IllegalStateException if an error accors in C++ addTrack. + * An error can occur if: + * - A sender already exists for the track. + * - The peer connection is closed. + */ + public RtpSender addTrack(MediaStreamTrack track) { + return addTrack(track, Collections.emptyList()); + } + + public RtpSender addTrack(MediaStreamTrack track, List streamIds) { + if (track == null || streamIds == null) { + throw new NullPointerException("No MediaStreamTrack specified in addTrack."); + } + RtpSender newSender = nativeAddTrack(track.getNativeMediaStreamTrack(), streamIds); + if (newSender == null) { + throw new IllegalStateException("C++ addTrack failed."); + } + senders.add(newSender); + return newSender; + } + + /** + * Stops sending media from sender. The sender will still appear in getSenders. Future + * calls to createOffer will mark the m section for the corresponding transceiver as + * receive only or inactive, as defined in JSEP. Returns true on success. + */ + public boolean removeTrack(RtpSender sender) { + if (sender == null) { + throw new NullPointerException("No RtpSender specified for removeTrack."); + } + return nativeRemoveTrack(sender.getNativeRtpSender()); + } + + /** + * Creates a new RtpTransceiver and adds it to the set of transceivers. Adding a + * transceiver will cause future calls to CreateOffer to add a media description + * for the corresponding transceiver. + * + *

The initial value of `mid` in the returned transceiver is null. Setting a + * new session description may change it to a non-null value. + * + *

https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-addtransceiver + * + *

If a MediaStreamTrack is specified then a transceiver will be added with a + * sender set to transmit the given track. The kind + * of the transceiver (and sender/receiver) will be derived from the kind of + * the track. + * + *

If MediaType is specified then a transceiver will be added based upon that type. + * This can be either MEDIA_TYPE_AUDIO or MEDIA_TYPE_VIDEO. + * + *

Optionally, an RtpTransceiverInit structure can be specified to configure + * the transceiver from construction. If not specified, the transceiver will + * default to having a direction of kSendRecv and not be part of any streams. + * + *

Note: These methods are only available with SdpSemantics.UNIFIED_PLAN specified. + * @throws IllegalStateException if an error accors in C++ addTransceiver + */ + public RtpTransceiver addTransceiver(MediaStreamTrack track) { + return addTransceiver(track, new RtpTransceiver.RtpTransceiverInit()); + } + + public RtpTransceiver addTransceiver( + MediaStreamTrack track, @Nullable RtpTransceiver.RtpTransceiverInit init) { + if (track == null) { + throw new NullPointerException("No MediaStreamTrack specified for addTransceiver."); + } + if (init == null) { + init = new RtpTransceiver.RtpTransceiverInit(); + } + RtpTransceiver newTransceiver = + nativeAddTransceiverWithTrack(track.getNativeMediaStreamTrack(), init); + if (newTransceiver == null) { + throw new IllegalStateException("C++ addTransceiver failed."); + } + transceivers.add(newTransceiver); + return newTransceiver; + } + + public RtpTransceiver addTransceiver(MediaStreamTrack.MediaType mediaType) { + return addTransceiver(mediaType, new RtpTransceiver.RtpTransceiverInit()); + } + + public RtpTransceiver addTransceiver( + MediaStreamTrack.MediaType mediaType, @Nullable RtpTransceiver.RtpTransceiverInit init) { + if (mediaType == null) { + throw new NullPointerException("No MediaType specified for addTransceiver."); + } + if (init == null) { + init = new RtpTransceiver.RtpTransceiverInit(); + } + RtpTransceiver newTransceiver = nativeAddTransceiverOfType(mediaType, init); + if (newTransceiver == null) { + throw new IllegalStateException("C++ addTransceiver failed."); + } + transceivers.add(newTransceiver); + return newTransceiver; + } + + // Older, non-standard implementation of getStats. + @Deprecated + public boolean getStats(StatsObserver observer, @Nullable MediaStreamTrack track) { + return nativeOldGetStats(observer, (track == null) ? 0 : track.getNativeMediaStreamTrack()); + } + + /** + * Gets stats using the new stats collection API, see webrtc/api/stats/. 
These + * will replace old stats collection API when the new API has matured enough. + */ + public void getStats(RTCStatsCollectorCallback callback) { + nativeNewGetStats(callback); + } + + /** + * Gets stats using the new stats collection API, see webrtc/api/stats/. These + * will replace old stats collection API when the new API has matured enough. + */ + public void getStats(RtpSender sender, RTCStatsCollectorCallback callback) { + nativeNewGetStatsSender(sender.getNativeRtpSender(), callback); + } + + /** + * Gets stats using the new stats collection API, see webrtc/api/stats/. These + * will replace old stats collection API when the new API has matured enough. + */ + public void getStats(RtpReceiver receiver, RTCStatsCollectorCallback callback) { + nativeNewGetStatsReceiver(receiver.getNativeRtpReceiver(), callback); + } + + /** + * Limits the bandwidth allocated for all RTP streams sent by this + * PeerConnection. Pass null to leave a value unchanged. + */ + public boolean setBitrate(Integer min, Integer current, Integer max) { + return nativeSetBitrate(min, current, max); + } + + /** + * Starts recording an RTC event log. + * + * Ownership of the file is transfered to the native code. If an RTC event + * log is already being recorded, it will be stopped and a new one will start + * using the provided file. Logging will continue until the stopRtcEventLog + * function is called. The max_size_bytes argument is ignored, it is added + * for future use. + */ + public boolean startRtcEventLog(int file_descriptor, int max_size_bytes) { + return nativeStartRtcEventLog(file_descriptor, max_size_bytes); + } + + /** + * Stops recording an RTC event log. If no RTC event log is currently being + * recorded, this call will have no effect. + */ + public void stopRtcEventLog() { + nativeStopRtcEventLog(); + } + + // TODO(fischman): add support for DTMF-related methods once that API + // stabilizes. 
+ public SignalingState signalingState() { + return nativeSignalingState(); + } + + public IceConnectionState iceConnectionState() { + return nativeIceConnectionState(); + } + + public PeerConnectionState connectionState() { + return nativeConnectionState(); + } + + public IceGatheringState iceGatheringState() { + return nativeIceGatheringState(); + } + + public void close() { + nativeClose(); + } + + /** + * Free native resources associated with this PeerConnection instance. + * + * This method removes a reference count from the C++ PeerConnection object, + * which should result in it being destroyed. It also calls equivalent + * "dispose" methods on the Java objects attached to this PeerConnection + * (streams, senders, receivers), such that their associated C++ objects + * will also be destroyed. + * + *

Note that this method cannot be safely called from an observer callback + * (PeerConnection.Observer, DataChannel.Observer, etc.). If you want to, for + * example, destroy the PeerConnection after an "ICE failed" callback, you + * must do this asynchronously (in other words, unwind the stack first). See + * bug + * 3721 for more details. + */ + public void dispose() { + close(); + for (MediaStream stream : localStreams) { + nativeRemoveLocalStream(stream.getNativeMediaStream()); + stream.dispose(); + } + localStreams.clear(); + for (RtpSender sender : senders) { + sender.dispose(); + } + senders.clear(); + for (RtpReceiver receiver : receivers) { + receiver.dispose(); + } + for (RtpTransceiver transceiver : transceivers) { + transceiver.dispose(); + } + transceivers.clear(); + receivers.clear(); + nativeFreeOwnedPeerConnection(nativePeerConnection); + } + + /** Returns a pointer to the native webrtc::PeerConnectionInterface. */ + public long getNativePeerConnection() { + return nativeGetNativePeerConnection(); + } + + @CalledByNative + long getNativeOwnedPeerConnection() { + return nativePeerConnection; + } + + public static long createNativePeerConnectionObserver(Observer observer) { + return nativeCreatePeerConnectionObserver(observer); + } + + private native long nativeGetNativePeerConnection(); + private native SessionDescription nativeGetLocalDescription(); + private native SessionDescription nativeGetRemoteDescription(); + private native RtcCertificatePem nativeGetCertificate(); + private native DataChannel nativeCreateDataChannel(String label, DataChannel.Init init); + private native void nativeCreateOffer(SdpObserver observer, MediaConstraints constraints); + private native void nativeCreateAnswer(SdpObserver observer, MediaConstraints constraints); + private native void nativeSetLocalDescriptionAutomatically(SdpObserver observer); + private native void nativeSetLocalDescription(SdpObserver observer, SessionDescription sdp); + private native void 
nativeSetRemoteDescription(SdpObserver observer, SessionDescription sdp); + private native void nativeRestartIce(); + private native void nativeSetAudioPlayout(boolean playout); + private native void nativeSetAudioRecording(boolean recording); + private native boolean nativeSetBitrate(Integer min, Integer current, Integer max); + private native SignalingState nativeSignalingState(); + private native IceConnectionState nativeIceConnectionState(); + private native PeerConnectionState nativeConnectionState(); + private native IceGatheringState nativeIceGatheringState(); + private native void nativeClose(); + private static native long nativeCreatePeerConnectionObserver(Observer observer); + private static native void nativeFreeOwnedPeerConnection(long ownedPeerConnection); + private native boolean nativeSetConfiguration(RTCConfiguration config); + private native boolean nativeAddIceCandidate( + String sdpMid, int sdpMLineIndex, String iceCandidateSdp); + private native void nativeAddIceCandidateWithObserver( + String sdpMid, int sdpMLineIndex, String iceCandidateSdp, AddIceObserver observer); + private native boolean nativeRemoveIceCandidates(final IceCandidate[] candidates); + private native boolean nativeAddLocalStream(long stream); + private native void nativeRemoveLocalStream(long stream); + private native boolean nativeOldGetStats(StatsObserver observer, long nativeTrack); + private native void nativeNewGetStats(RTCStatsCollectorCallback callback); + private native void nativeNewGetStatsSender(long sender, RTCStatsCollectorCallback callback); + private native void nativeNewGetStatsReceiver(long receiver, RTCStatsCollectorCallback callback); + private native RtpSender nativeCreateSender(String kind, String stream_id); + private native List nativeGetSenders(); + private native List nativeGetReceivers(); + private native List nativeGetTransceivers(); + private native RtpSender nativeAddTrack(long track, List streamIds); + private native boolean 
nativeRemoveTrack(long sender); + private native RtpTransceiver nativeAddTransceiverWithTrack( + long track, RtpTransceiver.RtpTransceiverInit init); + private native RtpTransceiver nativeAddTransceiverOfType( + MediaStreamTrack.MediaType mediaType, RtpTransceiver.RtpTransceiverInit init); + private native boolean nativeStartRtcEventLog(int file_descriptor, int max_size_bytes); + private native void nativeStopRtcEventLog(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/PeerConnectionDependencies.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/PeerConnectionDependencies.java new file mode 100644 index 00000000..ac6c94bb --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/PeerConnectionDependencies.java @@ -0,0 +1,65 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; + +/** + * PeerConnectionDependencies holds all PeerConnection dependencies that are + * applied per PeerConnection. A dependency is distinct from a configuration + * as it defines significant executable code that can be provided by a user of + * the API. + */ +public final class PeerConnectionDependencies { + // Mandatory dependencies. + private final PeerConnection.Observer observer; + + // Optional fields. 
+ private final SSLCertificateVerifier sslCertificateVerifier; + + public static class Builder { + private PeerConnection.Observer observer; + private SSLCertificateVerifier sslCertificateVerifier; + + private Builder(PeerConnection.Observer observer) { + this.observer = observer; + } + + public Builder setSSLCertificateVerifier(SSLCertificateVerifier sslCertificateVerifier) { + this.sslCertificateVerifier = sslCertificateVerifier; + return this; + } + + // Observer is a required dependency and so is forced in the construction of the object. + public PeerConnectionDependencies createPeerConnectionDependencies() { + return new PeerConnectionDependencies(observer, sslCertificateVerifier); + } + } + + public static Builder builder(PeerConnection.Observer observer) { + return new Builder(observer); + } + + PeerConnection.Observer getObserver() { + return observer; + } + + @Nullable + SSLCertificateVerifier getSSLCertificateVerifier() { + return sslCertificateVerifier; + } + + private PeerConnectionDependencies( + PeerConnection.Observer observer, SSLCertificateVerifier sslCertificateVerifier) { + this.observer = observer; + this.sslCertificateVerifier = sslCertificateVerifier; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/PeerConnectionFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/PeerConnectionFactory.java new file mode 100644 index 00000000..c46718fd --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/PeerConnectionFactory.java @@ -0,0 +1,634 @@ +/* + * Copyright 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import android.content.Context; +import android.os.Process; +import androidx.annotation.Nullable; +import java.util.List; +import org.webrtc.Logging.Severity; +import org.webrtc.MediaStreamTrack; +import org.webrtc.PeerConnection; +import org.webrtc.RtpCapabilities; +import org.webrtc.audio.AudioDeviceModule; +import org.webrtc.audio.JavaAudioDeviceModule; + +/** + * Java wrapper for a C++ PeerConnectionFactoryInterface. Main entry point to + * the PeerConnection API for clients. + */ +public class PeerConnectionFactory { + public static final String TRIAL_ENABLED = "Enabled"; + @Deprecated public static final String VIDEO_FRAME_EMIT_TRIAL = "VideoFrameEmit"; + + private static final String TAG = "PeerConnectionFactory"; + private static final String VIDEO_CAPTURER_THREAD_NAME = "VideoCapturerThread"; + + /** Helper class holding both Java and C++ thread info. */ + private static class ThreadInfo { + final Thread thread; + final int tid; + + public static ThreadInfo getCurrent() { + return new ThreadInfo(Thread.currentThread(), Process.myTid()); + } + + private ThreadInfo(Thread thread, int tid) { + this.thread = thread; + this.tid = tid; + } + } + + private static volatile boolean internalTracerInitialized; + + // Remove these once deprecated static printStackTrace() is gone. 
+ @Nullable private static ThreadInfo staticNetworkThread; + @Nullable private static ThreadInfo staticWorkerThread; + @Nullable private static ThreadInfo staticSignalingThread; + + private long nativeFactory; + @Nullable private volatile ThreadInfo networkThread; + @Nullable private volatile ThreadInfo workerThread; + @Nullable private volatile ThreadInfo signalingThread; + + public static class InitializationOptions { + final Context applicationContext; + final String fieldTrials; + final boolean enableInternalTracer; + final NativeLibraryLoader nativeLibraryLoader; + final String nativeLibraryName; + @Nullable Loggable loggable; + @Nullable Severity loggableSeverity; + + private InitializationOptions(Context applicationContext, String fieldTrials, + boolean enableInternalTracer, NativeLibraryLoader nativeLibraryLoader, + String nativeLibraryName, @Nullable Loggable loggable, + @Nullable Severity loggableSeverity) { + this.applicationContext = applicationContext; + this.fieldTrials = fieldTrials; + this.enableInternalTracer = enableInternalTracer; + this.nativeLibraryLoader = nativeLibraryLoader; + this.nativeLibraryName = nativeLibraryName; + this.loggable = loggable; + this.loggableSeverity = loggableSeverity; + } + + public static Builder builder(Context applicationContext) { + return new Builder(applicationContext); + } + + public static class Builder { + private final Context applicationContext; + private String fieldTrials = ""; + private boolean enableInternalTracer; + private NativeLibraryLoader nativeLibraryLoader = new NativeLibrary.DefaultLoader(); + private String nativeLibraryName = "jingle_peerconnection_so"; + @Nullable private Loggable loggable; + @Nullable private Severity loggableSeverity; + + Builder(Context applicationContext) { + this.applicationContext = applicationContext; + } + + public Builder setFieldTrials(String fieldTrials) { + this.fieldTrials = fieldTrials; + return this; + } + + public Builder setEnableInternalTracer(boolean 
enableInternalTracer) { + this.enableInternalTracer = enableInternalTracer; + return this; + } + + public Builder setNativeLibraryLoader(NativeLibraryLoader nativeLibraryLoader) { + this.nativeLibraryLoader = nativeLibraryLoader; + return this; + } + + public Builder setNativeLibraryName(String nativeLibraryName) { + this.nativeLibraryName = nativeLibraryName; + return this; + } + + public Builder setInjectableLogger(Loggable loggable, Severity severity) { + this.loggable = loggable; + this.loggableSeverity = severity; + return this; + } + + public PeerConnectionFactory.InitializationOptions createInitializationOptions() { + return new PeerConnectionFactory.InitializationOptions(applicationContext, fieldTrials, + enableInternalTracer, nativeLibraryLoader, nativeLibraryName, loggable, + loggableSeverity); + } + } + } + + public static class Options { + // Keep in sync with webrtc/rtc_base/network.h! + // + // These bit fields are defined for `networkIgnoreMask` below. + public static final int ADAPTER_TYPE_UNKNOWN = 0; + public static final int ADAPTER_TYPE_ETHERNET = 1 << 0; + public static final int ADAPTER_TYPE_WIFI = 1 << 1; + public static final int ADAPTER_TYPE_CELLULAR = 1 << 2; + public static final int ADAPTER_TYPE_VPN = 1 << 3; + public static final int ADAPTER_TYPE_LOOPBACK = 1 << 4; + public static final int ADAPTER_TYPE_ANY = 1 << 5; + + public int networkIgnoreMask; + public boolean disableEncryption; + public boolean disableNetworkMonitor; + + @CalledByNative("Options") + int getNetworkIgnoreMask() { + return networkIgnoreMask; + } + + @CalledByNative("Options") + boolean getDisableEncryption() { + return disableEncryption; + } + + @CalledByNative("Options") + boolean getDisableNetworkMonitor() { + return disableNetworkMonitor; + } + } + + public static class Builder { + @Nullable private Options options; + @Nullable private AudioDeviceModule audioDeviceModule; + private AudioEncoderFactoryFactory audioEncoderFactoryFactory = + new 
BuiltinAudioEncoderFactoryFactory(); + private AudioDecoderFactoryFactory audioDecoderFactoryFactory = + new BuiltinAudioDecoderFactoryFactory(); + @Nullable private VideoEncoderFactory videoEncoderFactory; + @Nullable private VideoDecoderFactory videoDecoderFactory; + @Nullable private AudioProcessingFactory audioProcessingFactory; + @Nullable private FecControllerFactoryFactoryInterface fecControllerFactoryFactory; + @Nullable private NetworkControllerFactoryFactory networkControllerFactoryFactory; + @Nullable private NetworkStatePredictorFactoryFactory networkStatePredictorFactoryFactory; + @Nullable private NetEqFactoryFactory neteqFactoryFactory; + + private Builder() {} + + public Builder setOptions(Options options) { + this.options = options; + return this; + } + + public Builder setAudioDeviceModule(AudioDeviceModule audioDeviceModule) { + this.audioDeviceModule = audioDeviceModule; + return this; + } + + public Builder setAudioEncoderFactoryFactory( + AudioEncoderFactoryFactory audioEncoderFactoryFactory) { + if (audioEncoderFactoryFactory == null) { + throw new IllegalArgumentException( + "PeerConnectionFactory.Builder does not accept a null AudioEncoderFactoryFactory."); + } + this.audioEncoderFactoryFactory = audioEncoderFactoryFactory; + return this; + } + + public Builder setAudioDecoderFactoryFactory( + AudioDecoderFactoryFactory audioDecoderFactoryFactory) { + if (audioDecoderFactoryFactory == null) { + throw new IllegalArgumentException( + "PeerConnectionFactory.Builder does not accept a null AudioDecoderFactoryFactory."); + } + this.audioDecoderFactoryFactory = audioDecoderFactoryFactory; + return this; + } + + public Builder setVideoEncoderFactory(VideoEncoderFactory videoEncoderFactory) { + this.videoEncoderFactory = videoEncoderFactory; + return this; + } + + public Builder setVideoDecoderFactory(VideoDecoderFactory videoDecoderFactory) { + this.videoDecoderFactory = videoDecoderFactory; + return this; + } + + public Builder 
setAudioProcessingFactory(AudioProcessingFactory audioProcessingFactory) { + if (audioProcessingFactory == null) { + throw new NullPointerException( + "PeerConnectionFactory builder does not accept a null AudioProcessingFactory."); + } + this.audioProcessingFactory = audioProcessingFactory; + return this; + } + + public Builder setFecControllerFactoryFactoryInterface( + FecControllerFactoryFactoryInterface fecControllerFactoryFactory) { + this.fecControllerFactoryFactory = fecControllerFactoryFactory; + return this; + } + + public Builder setNetworkControllerFactoryFactory( + NetworkControllerFactoryFactory networkControllerFactoryFactory) { + this.networkControllerFactoryFactory = networkControllerFactoryFactory; + return this; + } + + public Builder setNetworkStatePredictorFactoryFactory( + NetworkStatePredictorFactoryFactory networkStatePredictorFactoryFactory) { + this.networkStatePredictorFactoryFactory = networkStatePredictorFactoryFactory; + return this; + } + + /** + * Sets a NetEqFactoryFactory for the PeerConnectionFactory. When using a + * custom NetEqFactoryFactory, the AudioDecoderFactoryFactory will be set + * to null. The AudioDecoderFactoryFactory should be wrapped in the + * NetEqFactoryFactory. 
+ */ + public Builder setNetEqFactoryFactory(NetEqFactoryFactory neteqFactoryFactory) { + this.neteqFactoryFactory = neteqFactoryFactory; + return this; + } + + public PeerConnectionFactory createPeerConnectionFactory() { + checkInitializeHasBeenCalled(); + if (audioDeviceModule == null) { + audioDeviceModule = JavaAudioDeviceModule.builder(ContextUtils.getApplicationContext()) + .createAudioDeviceModule(); + } + return nativeCreatePeerConnectionFactory(ContextUtils.getApplicationContext(), options, + audioDeviceModule.getNativeAudioDeviceModulePointer(), + audioEncoderFactoryFactory.createNativeAudioEncoderFactory(), + audioDecoderFactoryFactory.createNativeAudioDecoderFactory(), videoEncoderFactory, + videoDecoderFactory, + audioProcessingFactory == null ? 0 : audioProcessingFactory.createNative(), + fecControllerFactoryFactory == null ? 0 : fecControllerFactoryFactory.createNative(), + networkControllerFactoryFactory == null + ? 0 + : networkControllerFactoryFactory.createNativeNetworkControllerFactory(), + networkStatePredictorFactoryFactory == null + ? 0 + : networkStatePredictorFactoryFactory.createNativeNetworkStatePredictorFactory(), + neteqFactoryFactory == null ? 0 : neteqFactoryFactory.createNativeNetEqFactory()); + } + } + + public static Builder builder() { + return new Builder(); + } + + /** + * Loads and initializes WebRTC. This must be called at least once before creating a + * PeerConnectionFactory. Replaces all the old initialization methods. Must not be called while + * a PeerConnectionFactory is alive. 
+ */ + public static void initialize(InitializationOptions options) { + ContextUtils.initialize(options.applicationContext); + NativeLibrary.initialize(options.nativeLibraryLoader, options.nativeLibraryName); + nativeInitializeAndroidGlobals(); + nativeInitializeFieldTrials(options.fieldTrials); + if (options.enableInternalTracer && !internalTracerInitialized) { + initializeInternalTracer(); + } + if (options.loggable != null) { + Logging.injectLoggable(options.loggable, options.loggableSeverity); + nativeInjectLoggable(new JNILogging(options.loggable), options.loggableSeverity.ordinal()); + } else { + Logging.d(TAG, + "PeerConnectionFactory was initialized without an injected Loggable. " + + "Any existing Loggable will be deleted."); + Logging.deleteInjectedLoggable(); + nativeDeleteLoggable(); + } + } + + private static void checkInitializeHasBeenCalled() { + if (!NativeLibrary.isLoaded() || ContextUtils.getApplicationContext() == null) { + throw new IllegalStateException( + "PeerConnectionFactory.initialize was not called before creating a " + + "PeerConnectionFactory."); + } + } + + private static void initializeInternalTracer() { + internalTracerInitialized = true; + nativeInitializeInternalTracer(); + } + + public static void shutdownInternalTracer() { + internalTracerInitialized = false; + nativeShutdownInternalTracer(); + } + + // Field trial initialization. Must be called before PeerConnectionFactory + // is created. + // Deprecated, use PeerConnectionFactory.initialize instead. + @Deprecated + public static void initializeFieldTrials(String fieldTrialsInitString) { + nativeInitializeFieldTrials(fieldTrialsInitString); + } + + // Wrapper of webrtc::field_trial::FindFullName. Develop the feature with default behaviour off. 
+ // Example usage: + // if (PeerConnectionFactory.fieldTrialsFindFullName("WebRTCExperiment").equals("Enabled")) { + // method1(); + // } else { + // method2(); + // } + public static String fieldTrialsFindFullName(String name) { + return NativeLibrary.isLoaded() ? nativeFindFieldTrialsFullName(name) : ""; + } + // Start/stop internal capturing of internal tracing. + public static boolean startInternalTracingCapture(String tracingFilename) { + return nativeStartInternalTracingCapture(tracingFilename); + } + + public static void stopInternalTracingCapture() { + nativeStopInternalTracingCapture(); + } + + @CalledByNative + PeerConnectionFactory(long nativeFactory) { + checkInitializeHasBeenCalled(); + if (nativeFactory == 0) { + throw new RuntimeException("Failed to initialize PeerConnectionFactory!"); + } + this.nativeFactory = nativeFactory; + } + + /** + * Internal helper function to pass the parameters down into the native JNI bridge. + */ + @Nullable + PeerConnection createPeerConnectionInternal(PeerConnection.RTCConfiguration rtcConfig, + MediaConstraints constraints, PeerConnection.Observer observer, + SSLCertificateVerifier sslCertificateVerifier) { + checkPeerConnectionFactoryExists(); + long nativeObserver = PeerConnection.createNativePeerConnectionObserver(observer); + if (nativeObserver == 0) { + return null; + } + long nativePeerConnection = nativeCreatePeerConnection( + nativeFactory, rtcConfig, constraints, nativeObserver, sslCertificateVerifier); + if (nativePeerConnection == 0) { + return null; + } + return new PeerConnection(nativePeerConnection); + } + + /** + * Deprecated. PeerConnection constraints are deprecated. Supply values in rtcConfig struct + * instead and use the method without constraints in the signature. 
+ */ + @Nullable + @Deprecated + public PeerConnection createPeerConnection(PeerConnection.RTCConfiguration rtcConfig, + MediaConstraints constraints, PeerConnection.Observer observer) { + return createPeerConnectionInternal( + rtcConfig, constraints, observer, /* sslCertificateVerifier= */ null); + } + + /** + * Deprecated. PeerConnection constraints are deprecated. Supply values in rtcConfig struct + * instead and use the method without constraints in the signature. + */ + @Nullable + @Deprecated + public PeerConnection createPeerConnection(List iceServers, + MediaConstraints constraints, PeerConnection.Observer observer) { + PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers); + rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN; + return createPeerConnection(rtcConfig, constraints, observer); + } + + @Nullable + public PeerConnection createPeerConnection( + List iceServers, PeerConnection.Observer observer) { + PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers); + rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN; + return createPeerConnection(rtcConfig, observer); + } + + @Nullable + public PeerConnection createPeerConnection( + PeerConnection.RTCConfiguration rtcConfig, PeerConnection.Observer observer) { + return createPeerConnection(rtcConfig, null /* constraints */, observer); + } + + @Nullable + public PeerConnection createPeerConnection( + PeerConnection.RTCConfiguration rtcConfig, PeerConnectionDependencies dependencies) { + return createPeerConnectionInternal(rtcConfig, null /* constraints */, + dependencies.getObserver(), dependencies.getSSLCertificateVerifier()); + } + + public MediaStream createLocalMediaStream(String label) { + checkPeerConnectionFactoryExists(); + return new MediaStream(nativeCreateLocalMediaStream(nativeFactory, label)); + } + + /** + * Create video source with given parameters. 
If alignTimestamps is false, the caller is + * responsible for aligning the frame timestamps to rtc::TimeNanos(). This can be used to achieve + * higher accuracy if there is a big delay between frame creation and frames being delivered to + * the returned video source. If alignTimestamps is true, timestamps will be aligned to + * rtc::TimeNanos() when they arrive to the returned video source. + */ + public VideoSource createVideoSource(boolean isScreencast, boolean alignTimestamps) { + checkPeerConnectionFactoryExists(); + return new VideoSource(nativeCreateVideoSource(nativeFactory, isScreencast, alignTimestamps)); + } + + /** + * Same as above with alignTimestamps set to true. + * + * @see #createVideoSource(boolean, boolean) + */ + public VideoSource createVideoSource(boolean isScreencast) { + return createVideoSource(isScreencast, /* alignTimestamps= */ true); + } + + public VideoTrack createVideoTrack(String id, VideoSource source) { + checkPeerConnectionFactoryExists(); + return new VideoTrack( + nativeCreateVideoTrack(nativeFactory, id, source.getNativeVideoTrackSource())); + } + + public AudioSource createAudioSource(MediaConstraints constraints) { + checkPeerConnectionFactoryExists(); + return new AudioSource(nativeCreateAudioSource(nativeFactory, constraints)); + } + + public AudioTrack createAudioTrack(String id, AudioSource source) { + checkPeerConnectionFactoryExists(); + return new AudioTrack(nativeCreateAudioTrack(nativeFactory, id, source.getNativeAudioSource())); + } + + public RtpCapabilities getRtpReceiverCapabilities(MediaStreamTrack.MediaType mediaType) { + checkPeerConnectionFactoryExists(); + return nativeGetRtpReceiverCapabilities(nativeFactory, mediaType); + } + + public RtpCapabilities getRtpSenderCapabilities(MediaStreamTrack.MediaType mediaType) { + checkPeerConnectionFactoryExists(); + return nativeGetRtpSenderCapabilities(nativeFactory, mediaType); + } + + // Starts recording an AEC dump. 
Ownership of the file is transferred to the + // native code. If an AEC dump is already in progress, it will be stopped and + // a new one will start using the provided file. + public boolean startAecDump(int file_descriptor, int filesize_limit_bytes) { + checkPeerConnectionFactoryExists(); + return nativeStartAecDump(nativeFactory, file_descriptor, filesize_limit_bytes); + } + + // Stops recording an AEC dump. If no AEC dump is currently being recorded, + // this call will have no effect. + public void stopAecDump() { + checkPeerConnectionFactoryExists(); + nativeStopAecDump(nativeFactory); + } + + public void dispose() { + checkPeerConnectionFactoryExists(); + nativeFreeFactory(nativeFactory); + networkThread = null; + workerThread = null; + signalingThread = null; + nativeFactory = 0; + } + + /** Returns a pointer to the native webrtc::PeerConnectionFactoryInterface. */ + public long getNativePeerConnectionFactory() { + checkPeerConnectionFactoryExists(); + return nativeGetNativePeerConnectionFactory(nativeFactory); + } + + /** Returns a pointer to the native OwnedFactoryAndThreads object */ + public long getNativeOwnedFactoryAndThreads() { + checkPeerConnectionFactoryExists(); + return nativeFactory; + } + + private void checkPeerConnectionFactoryExists() { + if (nativeFactory == 0) { + throw new IllegalStateException("PeerConnectionFactory has been disposed."); + } + } + + private static void printStackTrace( + @Nullable ThreadInfo threadInfo, boolean printNativeStackTrace) { + if (threadInfo == null) { + // Thread callbacks have not been completed yet, ignore call. 
+ return; + } + final String threadName = threadInfo.thread.getName(); + StackTraceElement[] stackTraces = threadInfo.thread.getStackTrace(); + if (stackTraces.length > 0) { + Logging.w(TAG, threadName + " stacktrace:"); + for (StackTraceElement stackTrace : stackTraces) { + Logging.w(TAG, stackTrace.toString()); + } + } + if (printNativeStackTrace) { + // Imitate output from debuggerd/tombstone so that stack trace can easily be symbolized with + // ndk-stack. + Logging.w(TAG, "*** *** *** *** *** *** *** *** *** *** *** *** *** *** *** ***"); + Logging.w(TAG, + "pid: " + Process.myPid() + ", tid: " + threadInfo.tid + ", name: " + threadName + + " >>> WebRTC <<<"); + nativePrintStackTrace(threadInfo.tid); + } + } + + /** Deprecated, use non-static version instead. */ + @Deprecated + public static void printStackTraces() { + printStackTrace(staticNetworkThread, /* printNativeStackTrace= */ false); + printStackTrace(staticWorkerThread, /* printNativeStackTrace= */ false); + printStackTrace(staticSignalingThread, /* printNativeStackTrace= */ false); + } + + /** + * Print the Java stack traces for the critical threads used by PeerConnectionFactory, namely; + * signaling thread, worker thread, and network thread. If printNativeStackTraces is true, also + * attempt to print the C++ stack traces for these threads. 
+ */ + public void printInternalStackTraces(boolean printNativeStackTraces) { + printStackTrace(signalingThread, printNativeStackTraces); + printStackTrace(workerThread, printNativeStackTraces); + printStackTrace(networkThread, printNativeStackTraces); + } + + @CalledByNative + private void onNetworkThreadReady() { + networkThread = ThreadInfo.getCurrent(); + staticNetworkThread = networkThread; + Logging.d(TAG, "onNetworkThreadReady"); + } + + @CalledByNative + private void onWorkerThreadReady() { + workerThread = ThreadInfo.getCurrent(); + staticWorkerThread = workerThread; + Logging.d(TAG, "onWorkerThreadReady"); + } + + @CalledByNative + private void onSignalingThreadReady() { + signalingThread = ThreadInfo.getCurrent(); + staticSignalingThread = signalingThread; + Logging.d(TAG, "onSignalingThreadReady"); + } + + // Must be called at least once before creating a PeerConnectionFactory + // (for example, at application startup time). + private static native void nativeInitializeAndroidGlobals(); + private static native void nativeInitializeFieldTrials(String fieldTrialsInitString); + private static native String nativeFindFieldTrialsFullName(String name); + private static native void nativeInitializeInternalTracer(); + // Internal tracing shutdown, called to prevent resource leaks. Must be called after + // PeerConnectionFactory is gone to prevent races with code performing tracing. 
+ private static native void nativeShutdownInternalTracer(); + private static native boolean nativeStartInternalTracingCapture(String tracingFilename); + private static native void nativeStopInternalTracingCapture(); + + private static native PeerConnectionFactory nativeCreatePeerConnectionFactory(Context context, + Options options, long nativeAudioDeviceModule, long audioEncoderFactory, + long audioDecoderFactory, VideoEncoderFactory encoderFactory, + VideoDecoderFactory decoderFactory, long nativeAudioProcessor, + long nativeFecControllerFactory, long nativeNetworkControllerFactory, + long nativeNetworkStatePredictorFactory, long neteqFactory); + + private static native long nativeCreatePeerConnection(long factory, + PeerConnection.RTCConfiguration rtcConfig, MediaConstraints constraints, long nativeObserver, + SSLCertificateVerifier sslCertificateVerifier); + private static native long nativeCreateLocalMediaStream(long factory, String label); + private static native long nativeCreateVideoSource( + long factory, boolean is_screencast, boolean alignTimestamps); + private static native long nativeCreateVideoTrack( + long factory, String id, long nativeVideoSource); + private static native long nativeCreateAudioSource(long factory, MediaConstraints constraints); + private static native long nativeCreateAudioTrack(long factory, String id, long nativeSource); + private static native boolean nativeStartAecDump( + long factory, int file_descriptor, int filesize_limit_bytes); + private static native void nativeStopAecDump(long factory); + private static native void nativeFreeFactory(long factory); + private static native long nativeGetNativePeerConnectionFactory(long factory); + private static native void nativeInjectLoggable(JNILogging jniLogging, int severity); + private static native void nativeDeleteLoggable(); + private static native void nativePrintStackTrace(int tid); + private static native RtpCapabilities nativeGetRtpSenderCapabilities( + long factory, 
MediaStreamTrack.MediaType mediaType); + private static native RtpCapabilities nativeGetRtpReceiverCapabilities( + long factory, MediaStreamTrack.MediaType mediaType); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/PlatformSoftwareVideoDecoderFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/PlatformSoftwareVideoDecoderFactory.java new file mode 100644 index 00000000..caca5e58 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/PlatformSoftwareVideoDecoderFactory.java @@ -0,0 +1,39 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.media.MediaCodecInfo; +import androidx.annotation.Nullable; +import java.util.Arrays; + +/** Factory for Android platform software VideoDecoders. */ +public class PlatformSoftwareVideoDecoderFactory extends MediaCodecVideoDecoderFactory { + /** + * Default allowed predicate. + */ + private static final Predicate defaultAllowedPredicate = + new Predicate() { + @Override + public boolean test(MediaCodecInfo arg) { + return MediaCodecUtils.isSoftwareOnly(arg); + } + }; + + /** + * Creates a PlatformSoftwareVideoDecoderFactory that supports surface texture rendering. + * + * @param sharedContext The textures generated will be accessible from this context. May be null, + * this disables texture support. 
+ */ + public PlatformSoftwareVideoDecoderFactory(@Nullable EglBase.Context sharedContext) { + super(sharedContext, defaultAllowedPredicate); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Predicate.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Predicate.java new file mode 100644 index 00000000..50e69750 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Predicate.java @@ -0,0 +1,73 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * Represents a predicate (boolean-valued function) of one argument. + */ +public interface Predicate { + /** + * Evaluates this predicate on the given argument. + * + * @param arg the input argument + * @return true if the input argument matches the predicate, otherwise false + */ + boolean test(T arg); + + /** + * Returns a composed predicate that represents a short-circuiting logical OR of this predicate + * and another. When evaluating the composed predicate, if this predicate is true, then the other + * predicate is not evaluated. + * + * @param other a predicate that will be logically-ORed with this predicate + * @return a composed predicate that represents the short-circuiting logical OR of this predicate + * and the other predicate + */ + default Predicate or(Predicate other) { + return new Predicate() { + @Override + public boolean test(T arg) { + return Predicate.this.test(arg) || other.test(arg); + } + }; + } + + /** + * Returns a composed predicate that represents a short-circuiting logical AND of this predicate + * and another. 
+ * + * @param other a predicate that will be logically-ANDed with this predicate + * @return a composed predicate that represents the short-circuiting logical AND of this predicate + * and the other predicate + */ + default Predicate and(Predicate other) { + return new Predicate() { + @Override + public boolean test(T arg) { + return Predicate.this.test(arg) && other.test(arg); + } + }; + } + + /** + * Returns a predicate that represents the logical negation of this predicate. + * + * @return a predicate that represents the logical negation of this predicate + */ + default Predicate negate() { + return new Predicate() { + @Override + public boolean test(T arg) { + return !Predicate.this.test(arg); + } + }; + } +} \ No newline at end of file diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/Priority.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/Priority.java new file mode 100644 index 00000000..a858cc61 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/Priority.java @@ -0,0 +1,13 @@ + +// IntelliJ API Decompiler stub source generated from a class file +// Implementation of methods is not available + +package org.webrtc; + +@java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.SOURCE) +public @interface Priority { + int VERY_LOW = 0; + int LOW = 1; + int MEDIUM = 2; + int HIGH = 3; +} \ No newline at end of file diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/RTCStats.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/RTCStats.java new file mode 100644 index 00000000..eaa28de1 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/RTCStats.java @@ -0,0 +1,114 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import java.util.Map; + +/** + * Java version of webrtc::RTCStats. Represents an RTCStats object, as + * described in https://w3c.github.io/webrtc-stats/. The `id`, `timestampUs` + * and `type` accessors have the same meaning for this class as for the + * RTCStats dictionary. Each RTCStatsReport produced by getStats contains + * multiple RTCStats objects; one for each underlying object (codec, stream, + * transport, etc.) that was inspected to produce the stats. + */ +public class RTCStats { + private final long timestampUs; + private final String type; + private final String id; + private final Map members; + + public RTCStats(long timestampUs, String type, String id, Map members) { + this.timestampUs = timestampUs; + this.type = type; + this.id = id; + this.members = members; + } + + // Timestamp in microseconds. + public double getTimestampUs() { + return timestampUs; + } + + // Equivalent to RTCStatsType in the stats spec. Indicates the type of the + // object that was inspected to produce the stats. + public String getType() { + return type; + } + + // Unique ID representing this stats object. May be referred to by members of + // other stats objects. + public String getId() { + return id; + } + + /** + * Returns map of member names to values. Returns as an ordered map so that + * the stats object can be serialized with a consistent ordering. 
+ * + * Values will be one of the following objects: + * - Boolean + * - Integer (for 32-bit signed integers) + * - Long (for 32-bit unsigned and 64-bit signed integers) + * - BigInteger (for 64-bit unsigned integers) + * - Double + * - String + * - The array form of any of the above (e.g., Integer[]) + * - Map of String keys to BigInteger / Double values + */ + public Map getMembers() { + return members; + } + + @Override + public String toString() { + StringBuilder builder = new StringBuilder(); + builder.append("{ timestampUs: ") + .append(timestampUs) + .append(", type: ") + .append(type) + .append(", id: ") + .append(id); + boolean first = true; + for (Map.Entry entry : members.entrySet()) { + builder.append(", ").append(entry.getKey()).append(": "); + appendValue(builder, entry.getValue()); + } + builder.append(" }"); + return builder.toString(); + } + + private static void appendValue(StringBuilder builder, Object value) { + if (value instanceof Object[]) { + Object[] arrayValue = (Object[]) value; + builder.append('['); + for (int i = 0; i < arrayValue.length; ++i) { + if (i != 0) { + builder.append(", "); + } + appendValue(builder, arrayValue[i]); + } + builder.append(']'); + } else if (value instanceof String) { + // Enclose strings in quotes to make it clear they're strings. + builder.append('"').append(value).append('"'); + } else { + builder.append(value); + } + } + + // TODO(bugs.webrtc.org/8557) Use ctor directly with full Map type. 
+ @SuppressWarnings("unchecked") + @CalledByNative + static RTCStats create(long timestampUs, String type, String id, Map members) { + return new RTCStats(timestampUs, type, id, members); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/RTCStatsCollectorCallback.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/RTCStatsCollectorCallback.java new file mode 100644 index 00000000..dc8902c9 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/RTCStatsCollectorCallback.java @@ -0,0 +1,17 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** Interface for receiving stats reports (see webrtc::RTCStatsCollectorCallback). */ +public interface RTCStatsCollectorCallback { + /** Called when the stats report is ready. */ + @CalledByNative public void onStatsDelivered(RTCStatsReport report); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/RTCStatsReport.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/RTCStatsReport.java new file mode 100644 index 00000000..d4d90db1 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/RTCStatsReport.java @@ -0,0 +1,62 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import java.util.Map; + +/** + * Java version of webrtc::RTCStatsReport. Each RTCStatsReport produced by + * getStats contains multiple RTCStats objects; one for each underlying object + * (codec, stream, transport, etc.) that was inspected to produce the stats. + */ +public class RTCStatsReport { + private final long timestampUs; + private final Map stats; + + public RTCStatsReport(long timestampUs, Map stats) { + this.timestampUs = timestampUs; + this.stats = stats; + } + + // Timestamp in microseconds. + public double getTimestampUs() { + return timestampUs; + } + + // Map of stats object IDs to stats objects. Can be used to easily look up + // other stats objects, when they refer to each other by ID. + public Map getStatsMap() { + return stats; + } + + @Override + public String toString() { + StringBuilder builder = new StringBuilder(); + builder.append("{ timestampUs: ").append(timestampUs).append(", stats: [\n"); + boolean first = true; + for (RTCStats stat : stats.values()) { + if (!first) { + builder.append(",\n"); + } + builder.append(stat); + first = false; + } + builder.append(" ] }"); + return builder.toString(); + } + + // TODO(bugs.webrtc.org/8557) Use ctor directly with full Map type. + @SuppressWarnings("unchecked") + @CalledByNative + private static RTCStatsReport create(long timestampUs, Map stats) { + return new RTCStatsReport(timestampUs, stats); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/RefCountDelegate.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/RefCountDelegate.java new file mode 100644 index 00000000..b9210d26 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/RefCountDelegate.java @@ -0,0 +1,63 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * Implementation of RefCounted that executes a Runnable once the ref count reaches zero. + */ +class RefCountDelegate implements RefCounted { + private final AtomicInteger refCount = new AtomicInteger(1); + private final @Nullable Runnable releaseCallback; + + /** + * @param releaseCallback Callback that will be executed once the ref count reaches zero. + */ + public RefCountDelegate(@Nullable Runnable releaseCallback) { + this.releaseCallback = releaseCallback; + } + + @Override + public void retain() { + int updated_count = refCount.incrementAndGet(); + if (updated_count < 2) { + throw new IllegalStateException("retain() called on an object with refcount < 1"); + } + } + + @Override + public void release() { + int updated_count = refCount.decrementAndGet(); + if (updated_count < 0) { + throw new IllegalStateException("release() called on an object with refcount < 1"); + } + if (updated_count == 0 && releaseCallback != null) { + releaseCallback.run(); + } + } + + /** + * Tries to retain the object. Can be used in scenarios where it is unknown if the object has + * already been released. Returns true if successful or false if the object was already released. 
+ */ + boolean safeRetain() { + int currentRefCount = refCount.get(); + while (currentRefCount != 0) { + if (refCount.weakCompareAndSet(currentRefCount, currentRefCount + 1)) { + return true; + } + currentRefCount = refCount.get(); + } + return false; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/RefCounted.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/RefCounted.java new file mode 100644 index 00000000..0c1c3bf1 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/RefCounted.java @@ -0,0 +1,28 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * Interface for ref counted objects in WebRTC. These objects have significant resources that need + * to be freed when they are no longer in use. Each object starts with ref count of one when + * created. If a reference is passed as a parameter to a method, the caller has ownership of the + * object by default - calling release is not necessary unless retain is called. + */ +public interface RefCounted { + /** Increases ref count by one. */ + @CalledByNative void retain(); + + /** + * Decreases ref count by one. When the ref count reaches zero, resources related to the object + * will be freed. 
+ */ + @CalledByNative void release(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/RenderSynchronizer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/RenderSynchronizer.java new file mode 100644 index 00000000..c89f798c --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/RenderSynchronizer.java @@ -0,0 +1,127 @@ +/* + * Copyright 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.os.Build.VERSION; +import android.os.Build.VERSION_CODES; +import android.os.Handler; +import android.os.Looper; +import android.os.Trace; +import android.view.Choreographer; +import androidx.annotation.GuardedBy; +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.TimeUnit; + +/** + * Class to synchronize rendering updates with display refresh cycles and save power by blocking + * updates that exceeds the target frame rate. + */ +public final class RenderSynchronizer { + + /** Interface for listening to render window updates. 
*/ + public interface Listener { + void onRenderWindowOpen(); + + void onRenderWindowClose(); + } + + private static final String TAG = "RenderSynchronizer"; + private static final float DEFAULT_TARGET_FPS = 30f; + private final Object lock = new Object(); + private final List listeners = new CopyOnWriteArrayList<>(); + private final long targetFrameIntervalNanos; + private final Handler mainThreadHandler; + private Choreographer choreographer; + + @GuardedBy("lock") + private boolean isListening; + + private boolean renderWindowOpen; + private long lastRefreshTimeNanos; + private long lastOpenedTimeNanos; + + public RenderSynchronizer(float targetFrameRateFps) { + this.targetFrameIntervalNanos = Math.round(TimeUnit.SECONDS.toNanos(1) / targetFrameRateFps); + this.mainThreadHandler = new Handler(Looper.getMainLooper()); + mainThreadHandler.post(() -> this.choreographer = Choreographer.getInstance()); + Logging.d(TAG, "Created"); + } + + public RenderSynchronizer() { + this(DEFAULT_TARGET_FPS); + } + + public void registerListener(Listener listener) { + listeners.add(listener); + + synchronized (lock) { + if (!isListening) { + Logging.d(TAG, "First listener, subscribing to frame callbacks"); + isListening = true; + mainThreadHandler.post( + () -> choreographer.postFrameCallback(this::onDisplayRefreshCycleBegin)); + } + } + } + + public void removeListener(Listener listener) { + listeners.remove(listener); + } + + private void onDisplayRefreshCycleBegin(long refreshTimeNanos) { + synchronized (lock) { + if (listeners.isEmpty()) { + Logging.d(TAG, "No listeners, unsubscribing to frame callbacks"); + isListening = false; + return; + } + } + choreographer.postFrameCallback(this::onDisplayRefreshCycleBegin); + + long lastOpenDeltaNanos = refreshTimeNanos - lastOpenedTimeNanos; + long refreshDeltaNanos = refreshTimeNanos - lastRefreshTimeNanos; + lastRefreshTimeNanos = refreshTimeNanos; + + // Make a greedy choice whether to open (or keep open) the render window. 
If the current time + // since the render window was last opened is closer to the target than what we predict it would + // be in the next refresh cycle then we open the window. + if (Math.abs(lastOpenDeltaNanos - targetFrameIntervalNanos) + < Math.abs(lastOpenDeltaNanos - targetFrameIntervalNanos + refreshDeltaNanos)) { + lastOpenedTimeNanos = refreshTimeNanos; + openRenderWindow(); + } else if (renderWindowOpen) { + closeRenderWindow(); + } + } + + private void traceRenderWindowChange() { + if (VERSION.SDK_INT >= VERSION_CODES.Q) { + Trace.setCounter("RenderWindow", renderWindowOpen ? 1 : 0); + } + } + + private void openRenderWindow() { + renderWindowOpen = true; + traceRenderWindowChange(); + for (Listener listener : listeners) { + listener.onRenderWindowOpen(); + } + } + + private void closeRenderWindow() { + renderWindowOpen = false; + traceRenderWindowChange(); + for (Listener listener : listeners) { + listener.onRenderWindowClose(); + } + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/RendererCommon.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/RendererCommon.java new file mode 100644 index 00000000..b97901c6 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/RendererCommon.java @@ -0,0 +1,259 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.graphics.Point; +import android.opengl.Matrix; +import android.view.View; + +/** + * Static helper functions for renderer implementations. + */ +public class RendererCommon { + /** Interface for reporting rendering events. 
*/ + public static interface RendererEvents { + /** + * Callback fired once first frame is rendered. + */ + public void onFirstFrameRendered(); + + /** + * Callback fired when rendered frame resolution or rotation has changed. + */ + public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation); + } + + /** + * Interface for rendering frames on an EGLSurface with specified viewport location. Rotation, + * mirror, and cropping is specified using a 4x4 texture coordinate transform matrix. The frame + * input can either be an OES texture, RGB texture, or YUV textures in I420 format. The function + * release() must be called manually to free the resources held by this object. + */ + public static interface GlDrawer { + /** + * Functions for drawing frames with different sources. The rendering surface target is + * implied by the current EGL context of the calling thread and requires no explicit argument. + * The coordinates specify the viewport location on the surface target. + */ + void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight, + int viewportX, int viewportY, int viewportWidth, int viewportHeight); + void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight, int viewportX, + int viewportY, int viewportWidth, int viewportHeight); + void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight, + int viewportX, int viewportY, int viewportWidth, int viewportHeight); + + /** + * Release all GL resources. This needs to be done manually, otherwise resources may leak. + */ + void release(); + } + + /** + * Helper class for determining layout size based on layout requirements, scaling type, and video + * aspect ratio. + */ + public static class VideoLayoutMeasure { + // The scaling type determines how the video will fill the allowed layout area in measure(). 
It + // can be specified separately for the case when video has matched orientation with layout size + // and when there is an orientation mismatch. + private float visibleFractionMatchOrientation = + convertScalingTypeToVisibleFraction(ScalingType.SCALE_ASPECT_BALANCED); + private float visibleFractionMismatchOrientation = + convertScalingTypeToVisibleFraction(ScalingType.SCALE_ASPECT_BALANCED); + + public void setScalingType(ScalingType scalingType) { + setScalingType(/* scalingTypeMatchOrientation= */ scalingType, + /* scalingTypeMismatchOrientation= */ scalingType); + } + + public void setScalingType( + ScalingType scalingTypeMatchOrientation, ScalingType scalingTypeMismatchOrientation) { + this.visibleFractionMatchOrientation = + convertScalingTypeToVisibleFraction(scalingTypeMatchOrientation); + this.visibleFractionMismatchOrientation = + convertScalingTypeToVisibleFraction(scalingTypeMismatchOrientation); + } + + public void setVisibleFraction( + float visibleFractionMatchOrientation, float visibleFractionMismatchOrientation) { + this.visibleFractionMatchOrientation = visibleFractionMatchOrientation; + this.visibleFractionMismatchOrientation = visibleFractionMismatchOrientation; + } + + public Point measure(int widthSpec, int heightSpec, int frameWidth, int frameHeight) { + // Calculate max allowed layout size. + final int maxWidth = View.getDefaultSize(Integer.MAX_VALUE, widthSpec); + final int maxHeight = View.getDefaultSize(Integer.MAX_VALUE, heightSpec); + if (frameWidth == 0 || frameHeight == 0 || maxWidth == 0 || maxHeight == 0) { + return new Point(maxWidth, maxHeight); + } + // Calculate desired display size based on scaling type, video aspect ratio, + // and maximum layout size. + final float frameAspect = frameWidth / (float) frameHeight; + final float displayAspect = maxWidth / (float) maxHeight; + final float visibleFraction = (frameAspect > 1.0f) == (displayAspect > 1.0f) + ? 
visibleFractionMatchOrientation + : visibleFractionMismatchOrientation; + final Point layoutSize = getDisplaySize(visibleFraction, frameAspect, maxWidth, maxHeight); + + // If the measure specification is forcing a specific size - yield. + if (View.MeasureSpec.getMode(widthSpec) == View.MeasureSpec.EXACTLY) { + layoutSize.x = maxWidth; + } + if (View.MeasureSpec.getMode(heightSpec) == View.MeasureSpec.EXACTLY) { + layoutSize.y = maxHeight; + } + return layoutSize; + } + } + + // Types of video scaling: + // SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by + // maintaining the aspect ratio (black borders may be displayed). + // SCALE_ASPECT_FILL - video frame is scaled to fill the size of the view by + // maintaining the aspect ratio. Some portion of the video frame may be + // clipped. + // SCALE_ASPECT_BALANCED - Compromise between FIT and FILL. Video frame will fill as much as + // possible of the view while maintaining aspect ratio, under the constraint that at least + // `BALANCED_VISIBLE_FRACTION` of the frame content will be shown. + public static enum ScalingType { SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_ASPECT_BALANCED } + // The minimum fraction of the frame content that will be shown for `SCALE_ASPECT_BALANCED`. + // This limits excessive cropping when adjusting display size. + private static float BALANCED_VISIBLE_FRACTION = 0.5625f; + + /** + * Returns layout transformation matrix that applies an optional mirror effect and compensates + * for video vs display aspect ratio. + */ + public static float[] getLayoutMatrix( + boolean mirror, float videoAspectRatio, float displayAspectRatio) { + float scaleX = 1; + float scaleY = 1; + // Scale X or Y dimension so that video and display size have same aspect ratio. + if (displayAspectRatio > videoAspectRatio) { + scaleY = videoAspectRatio / displayAspectRatio; + } else { + scaleX = displayAspectRatio / videoAspectRatio; + } + // Apply optional horizontal flip. 
+ if (mirror) { + scaleX *= -1; + } + final float matrix[] = new float[16]; + Matrix.setIdentityM(matrix, 0); + Matrix.scaleM(matrix, 0, scaleX, scaleY, 1); + adjustOrigin(matrix); + return matrix; + } + + /** Converts a float[16] matrix array to android.graphics.Matrix. */ + public static android.graphics.Matrix convertMatrixToAndroidGraphicsMatrix(float[] matrix4x4) { + // clang-format off + float[] values = { + matrix4x4[0 * 4 + 0], matrix4x4[1 * 4 + 0], matrix4x4[3 * 4 + 0], + matrix4x4[0 * 4 + 1], matrix4x4[1 * 4 + 1], matrix4x4[3 * 4 + 1], + matrix4x4[0 * 4 + 3], matrix4x4[1 * 4 + 3], matrix4x4[3 * 4 + 3], + }; + // clang-format on + + android.graphics.Matrix matrix = new android.graphics.Matrix(); + matrix.setValues(values); + return matrix; + } + + /** Converts android.graphics.Matrix to a float[16] matrix array. */ + public static float[] convertMatrixFromAndroidGraphicsMatrix(android.graphics.Matrix matrix) { + float[] values = new float[9]; + matrix.getValues(values); + + // The android.graphics.Matrix looks like this: + // [x1 y1 w1] + // [x2 y2 w2] + // [x3 y3 w3] + // We want to contruct a matrix that looks like this: + // [x1 y1 0 w1] + // [x2 y2 0 w2] + // [ 0 0 1 0] + // [x3 y3 0 w3] + // Since it is stored in column-major order, it looks like this: + // [x1 x2 0 x3 + // y1 y2 0 y3 + // 0 0 1 0 + // w1 w2 0 w3] + // clang-format off + float[] matrix4x4 = { + values[0 * 3 + 0], values[1 * 3 + 0], 0, values[2 * 3 + 0], + values[0 * 3 + 1], values[1 * 3 + 1], 0, values[2 * 3 + 1], + 0, 0, 1, 0, + values[0 * 3 + 2], values[1 * 3 + 2], 0, values[2 * 3 + 2], + }; + // clang-format on + return matrix4x4; + } + + /** + * Calculate display size based on scaling type, video aspect ratio, and maximum display size. 
+ */ + public static Point getDisplaySize( + ScalingType scalingType, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) { + return getDisplaySize(convertScalingTypeToVisibleFraction(scalingType), videoAspectRatio, + maxDisplayWidth, maxDisplayHeight); + } + + /** + * Move `matrix` transformation origin to (0.5, 0.5). This is the origin for texture coordinates + * that are in the range 0 to 1. + */ + private static void adjustOrigin(float[] matrix) { + // Note that OpenGL is using column-major order. + // Pre translate with -0.5 to move coordinates to range [-0.5, 0.5]. + matrix[12] -= 0.5f * (matrix[0] + matrix[4]); + matrix[13] -= 0.5f * (matrix[1] + matrix[5]); + // Post translate with 0.5 to move coordinates to range [0, 1]. + matrix[12] += 0.5f; + matrix[13] += 0.5f; + } + + /** + * Each scaling type has a one-to-one correspondence to a numeric minimum fraction of the video + * that must remain visible. + */ + private static float convertScalingTypeToVisibleFraction(ScalingType scalingType) { + switch (scalingType) { + case SCALE_ASPECT_FIT: + return 1.0f; + case SCALE_ASPECT_FILL: + return 0.0f; + case SCALE_ASPECT_BALANCED: + return BALANCED_VISIBLE_FRACTION; + default: + throw new IllegalArgumentException(); + } + } + + /** + * Calculate display size based on minimum fraction of the video that must remain visible, + * video aspect ratio, and maximum display size. + */ + public static Point getDisplaySize( + float minVisibleFraction, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) { + // If there is no constraint on the amount of cropping, fill the allowed display area. + if (minVisibleFraction == 0 || videoAspectRatio == 0) { + return new Point(maxDisplayWidth, maxDisplayHeight); + } + // Each dimension is constrained on max display size and how much we are allowed to crop. 
+ final int width = Math.min( + maxDisplayWidth, Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio)); + final int height = Math.min( + maxDisplayHeight, Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio)); + return new Point(width, height); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/RtcCertificatePem.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/RtcCertificatePem.java new file mode 100644 index 00000000..6070135b --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/RtcCertificatePem.java @@ -0,0 +1,75 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import org.webrtc.PeerConnection; + +/** + * Easily storable/serializable version of a native C++ RTCCertificatePEM. + */ +public class RtcCertificatePem { + /** PEM string representation of the private key. */ + public final String privateKey; + /** PEM string representation of the certificate. */ + public final String certificate; + /** Default expiration time of 30 days. */ + private static final long DEFAULT_EXPIRY = 60 * 60 * 24 * 30; + + /** Instantiate an RtcCertificatePem object from stored strings. */ + @CalledByNative + public RtcCertificatePem(String privateKey, String certificate) { + this.privateKey = privateKey; + this.certificate = certificate; + } + + @CalledByNative + String getPrivateKey() { + return privateKey; + } + + @CalledByNative + String getCertificate() { + return certificate; + } + + /** + * Generate a new RtcCertificatePem with the default settings of KeyType = ECDSA and + * expires = 30 days. 
+ */ + public static RtcCertificatePem generateCertificate() { + return nativeGenerateCertificate(PeerConnection.KeyType.ECDSA, DEFAULT_EXPIRY); + } + + /** + * Generate a new RtcCertificatePem with a custom KeyType and the default setting of + * expires = 30 days. + */ + public static RtcCertificatePem generateCertificate(PeerConnection.KeyType keyType) { + return nativeGenerateCertificate(keyType, DEFAULT_EXPIRY); + } + + /** + * Generate a new RtcCertificatePem with a custom expires and the default setting of + * KeyType = ECDSA. + */ + public static RtcCertificatePem generateCertificate(long expires) { + return nativeGenerateCertificate(PeerConnection.KeyType.ECDSA, expires); + } + + /** Generate a new RtcCertificatePem with a custom KeyType and a custom expires. */ + public static RtcCertificatePem generateCertificate( + PeerConnection.KeyType keyType, long expires) { + return nativeGenerateCertificate(keyType, expires); + } + + private static native RtcCertificatePem nativeGenerateCertificate( + PeerConnection.KeyType keyType, long expires); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/RtpCapabilities.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/RtpCapabilities.java new file mode 100644 index 00000000..02d17042 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/RtpCapabilities.java @@ -0,0 +1,125 @@ +/* + * Copyright 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.util.List; +import java.util.Map; +import org.webrtc.MediaStreamTrack; + +public class RtpCapabilities { + public static class CodecCapability { + public int preferredPayloadType; + // Name used to identify the codec. Equivalent to MIME subtype. + public String name; + // The media type of this codec. Equivalent to MIME top-level type. + public MediaStreamTrack.MediaType kind; + // Clock rate in Hertz. + public Integer clockRate; + // The number of audio channels used. Set to null for video codecs. + public Integer numChannels; + // The "format specific parameters" field from the "a=fmtp" line in the SDP + public Map parameters; + // The MIME type of the codec. This is a convenience field. + public String mimeType; + + public CodecCapability() {} + + @CalledByNative("CodecCapability") + CodecCapability(int preferredPayloadType, String name, MediaStreamTrack.MediaType kind, + Integer clockRate, Integer numChannels, String mimeType, Map parameters) { + this.preferredPayloadType = preferredPayloadType; + this.name = name; + this.kind = kind; + this.clockRate = clockRate; + this.numChannels = numChannels; + this.parameters = parameters; + this.mimeType = mimeType; + } + + @CalledByNative("CodecCapability") + int getPreferredPayloadType() { + return preferredPayloadType; + } + + @CalledByNative("CodecCapability") + String getName() { + return name; + } + + @CalledByNative("CodecCapability") + MediaStreamTrack.MediaType getKind() { + return kind; + } + + @CalledByNative("CodecCapability") + Integer getClockRate() { + return clockRate; + } + + @CalledByNative("CodecCapability") + Integer getNumChannels() { + return numChannels; + } + + @CalledByNative("CodecCapability") + Map getParameters() { + return parameters; + } + } + + public static class HeaderExtensionCapability { + private final String uri; + private final int preferredId; + private final boolean preferredEncrypted; + + 
@CalledByNative("HeaderExtensionCapability") + HeaderExtensionCapability(String uri, int preferredId, boolean preferredEncrypted) { + this.uri = uri; + this.preferredId = preferredId; + this.preferredEncrypted = preferredEncrypted; + } + + @CalledByNative("HeaderExtensionCapability") + public String getUri() { + return uri; + } + + @CalledByNative("HeaderExtensionCapability") + public int getPreferredId() { + return preferredId; + } + + @CalledByNative("HeaderExtensionCapability") + public boolean getPreferredEncrypted() { + return preferredEncrypted; + } + } + + public List codecs; + public List headerExtensions; + + @CalledByNative + RtpCapabilities(List codecs, List headerExtensions) { + this.headerExtensions = headerExtensions; + this.codecs = codecs; + } + + @CalledByNative + public List getHeaderExtensions() { + return headerExtensions; + } + + @CalledByNative + List getCodecs() { + return codecs; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/RtpParameters.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/RtpParameters.java new file mode 100644 index 00000000..9ca83116 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/RtpParameters.java @@ -0,0 +1,340 @@ +/* + * Copyright 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.lang.Double; +import java.lang.String; +import java.util.List; +import java.util.Map; +import org.webrtc.MediaStreamTrack; + +/** + * The parameters for an {@code RtpSender}, as defined in + * http://w3c.github.io/webrtc-pc/#rtcrtpsender-interface. 
+ * + * Note: These structures use nullable Integer/etc. types because in the + * future, they may be used to construct ORTC RtpSender/RtpReceivers, in + * which case "null" will be used to represent "choose the implementation + * default value". + */ +public class RtpParameters { + public enum DegradationPreference { + /** Does not degrade resolution or framerate. */ + DISABLED, + /** Degrade resolution in order to maintain framerate. */ + MAINTAIN_FRAMERATE, + /** Degrade framerate in order to maintain resolution. */ + MAINTAIN_RESOLUTION, + /** Degrade a balance of framerate and resolution. */ + BALANCED; + + @CalledByNative("DegradationPreference") + static DegradationPreference fromNativeIndex(int nativeIndex) { + return values()[nativeIndex]; + } + } + + public static class Encoding { + // If non-null, this represents the RID that identifies this encoding layer. + // RIDs are used to identify layers in simulcast. + @Nullable public String rid; + // Set to true to cause this encoding to be sent, and false for it not to + // be sent. + public boolean active = true; + // The relative bitrate priority of this encoding. Currently this is + // implemented for the entire RTP sender by using the value of the first + // encoding parameter. + // See: https://w3c.github.io/webrtc-priority/#enumdef-rtcprioritytype + // "very-low" = 0.5 + // "low" = 1.0 + // "medium" = 2.0 + // "high" = 4.0 + public double bitratePriority = 1.0; + // The relative DiffServ Code Point priority for this encoding, allowing + // packets to be marked relatively higher or lower without affecting + // bandwidth allocations. + @Priority public int networkPriority = Priority.LOW; + // If non-null, this represents the Transport Independent Application + // Specific maximum bandwidth defined in RFC3890. If null, there is no + // maximum bitrate. + @Nullable public Integer maxBitrateBps; + // The minimum bitrate in bps for video. 
+ @Nullable public Integer minBitrateBps; + // The max framerate in fps for video. + @Nullable public Integer maxFramerate; + // The number of temporal layers for video. + @Nullable public Integer numTemporalLayers; + // If non-null, scale the width and height down by this factor for video. If null, + // implementation default scaling factor will be used. + @Nullable public Double scaleResolutionDownBy; + // SSRC to be used by this encoding. + // Can't be changed between getParameters/setParameters. + public Long ssrc; + // Set to true to allow dynamic frame length changes for audio: + // https://w3c.github.io/webrtc-extensions/#dom-rtcrtpencodingparameters-adaptiveptime + public boolean adaptiveAudioPacketTime; + + // This constructor is useful for creating simulcast layers. + public Encoding(String rid, boolean active, Double scaleResolutionDownBy) { + this.rid = rid; + this.active = active; + this.scaleResolutionDownBy = scaleResolutionDownBy; + } + + @CalledByNative("Encoding") + Encoding(String rid, boolean active, double bitratePriority, @Priority int networkPriority, + Integer maxBitrateBps, Integer minBitrateBps, Integer maxFramerate, + Integer numTemporalLayers, Double scaleResolutionDownBy, Long ssrc, + boolean adaptiveAudioPacketTime) { + this.rid = rid; + this.active = active; + this.bitratePriority = bitratePriority; + this.networkPriority = networkPriority; + this.maxBitrateBps = maxBitrateBps; + this.minBitrateBps = minBitrateBps; + this.maxFramerate = maxFramerate; + this.numTemporalLayers = numTemporalLayers; + this.scaleResolutionDownBy = scaleResolutionDownBy; + this.ssrc = ssrc; + this.adaptiveAudioPacketTime = adaptiveAudioPacketTime; + } + + @Nullable + @CalledByNative("Encoding") + String getRid() { + return rid; + } + + @CalledByNative("Encoding") + boolean getActive() { + return active; + } + + @CalledByNative("Encoding") + double getBitratePriority() { + return bitratePriority; + } + + @CalledByNative("Encoding") + @Priority + int 
getNetworkPriority() { + return networkPriority; + } + + @Nullable + @CalledByNative("Encoding") + Integer getMaxBitrateBps() { + return maxBitrateBps; + } + + @Nullable + @CalledByNative("Encoding") + Integer getMinBitrateBps() { + return minBitrateBps; + } + + @Nullable + @CalledByNative("Encoding") + Integer getMaxFramerate() { + return maxFramerate; + } + + @Nullable + @CalledByNative("Encoding") + Integer getNumTemporalLayers() { + return numTemporalLayers; + } + + @Nullable + @CalledByNative("Encoding") + Double getScaleResolutionDownBy() { + return scaleResolutionDownBy; + } + + @CalledByNative("Encoding") + Long getSsrc() { + return ssrc; + } + + @CalledByNative("Encoding") + boolean getAdaptivePTime() { + return adaptiveAudioPacketTime; + } + } + + public static class Codec { + // Payload type used to identify this codec in RTP packets. + public int payloadType; + // Name used to identify the codec. Equivalent to MIME subtype. + public String name; + // The media type of this codec. Equivalent to MIME top-level type. + MediaStreamTrack.MediaType kind; + // Clock rate in Hertz. + public Integer clockRate; + // The number of audio channels used. Set to null for video codecs. 
+ public Integer numChannels; + // The "format specific parameters" field from the "a=fmtp" line in the SDP + public Map parameters; + + @CalledByNative("Codec") + Codec(int payloadType, String name, MediaStreamTrack.MediaType kind, Integer clockRate, + Integer numChannels, Map parameters) { + this.payloadType = payloadType; + this.name = name; + this.kind = kind; + this.clockRate = clockRate; + this.numChannels = numChannels; + this.parameters = parameters; + } + + @CalledByNative("Codec") + int getPayloadType() { + return payloadType; + } + + @CalledByNative("Codec") + String getName() { + return name; + } + + @CalledByNative("Codec") + MediaStreamTrack.MediaType getKind() { + return kind; + } + + @CalledByNative("Codec") + Integer getClockRate() { + return clockRate; + } + + @CalledByNative("Codec") + Integer getNumChannels() { + return numChannels; + } + + @CalledByNative("Codec") + Map getParameters() { + return parameters; + } + } + + public static class Rtcp { + /** The Canonical Name used by RTCP */ + private final String cname; + /** Whether reduced size RTCP is configured or compound RTCP */ + private final boolean reducedSize; + + @CalledByNative("Rtcp") + Rtcp(String cname, boolean reducedSize) { + this.cname = cname; + this.reducedSize = reducedSize; + } + + @CalledByNative("Rtcp") + public String getCname() { + return cname; + } + + @CalledByNative("Rtcp") + public boolean getReducedSize() { + return reducedSize; + } + } + + public static class HeaderExtension { + /** The URI of the RTP header extension, as defined in RFC5285. */ + private final String uri; + /** The value put in the RTP packet to identify the header extension. */ + private final int id; + /** Whether the header extension is encrypted or not. 
*/ + private final boolean encrypted; + + @CalledByNative("HeaderExtension") + HeaderExtension(String uri, int id, boolean encrypted) { + this.uri = uri; + this.id = id; + this.encrypted = encrypted; + } + + @CalledByNative("HeaderExtension") + public String getUri() { + return uri; + } + + @CalledByNative("HeaderExtension") + public int getId() { + return id; + } + + @CalledByNative("HeaderExtension") + public boolean getEncrypted() { + return encrypted; + } + } + + public final String transactionId; + + /** + * When bandwidth is constrained and the RtpSender needs to choose between degrading resolution or + * degrading framerate, degradationPreference indicates which is preferred. + */ + @Nullable public DegradationPreference degradationPreference; + + private final Rtcp rtcp; + + private final List headerExtensions; + + public final List encodings; + + public final List codecs; + + @CalledByNative + RtpParameters(String transactionId, DegradationPreference degradationPreference, Rtcp rtcp, + List headerExtensions, List encodings, List codecs) { + this.transactionId = transactionId; + this.degradationPreference = degradationPreference; + this.rtcp = rtcp; + this.headerExtensions = headerExtensions; + this.encodings = encodings; + this.codecs = codecs; + } + + @CalledByNative + String getTransactionId() { + return transactionId; + } + + @CalledByNative + DegradationPreference getDegradationPreference() { + return degradationPreference; + } + + @CalledByNative + public Rtcp getRtcp() { + return rtcp; + } + + @CalledByNative + public List getHeaderExtensions() { + return headerExtensions; + } + + @CalledByNative + List getEncodings() { + return encodings; + } + + @CalledByNative + List getCodecs() { + return codecs; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/RtpReceiver.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/RtpReceiver.java new file mode 100644 index 00000000..c3cff3dd --- /dev/null +++ 
b/webrtc_player/android/zlm/src/main/java/org/webrtc/RtpReceiver.java @@ -0,0 +1,98 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import org.webrtc.MediaStreamTrack; + +/** Java wrapper for a C++ RtpReceiverInterface. */ +public class RtpReceiver { + /** Java wrapper for a C++ RtpReceiverObserverInterface*/ + public static interface Observer { + // Called when the first audio or video packet is received. + @CalledByNative("Observer") + public void onFirstPacketReceived(MediaStreamTrack.MediaType media_type); + } + + private long nativeRtpReceiver; + private long nativeObserver; + + @Nullable private MediaStreamTrack cachedTrack; + + @CalledByNative + public RtpReceiver(long nativeRtpReceiver) { + this.nativeRtpReceiver = nativeRtpReceiver; + long nativeTrack = nativeGetTrack(nativeRtpReceiver); + cachedTrack = MediaStreamTrack.createMediaStreamTrack(nativeTrack); + } + + @Nullable + public MediaStreamTrack track() { + return cachedTrack; + } + + public RtpParameters getParameters() { + checkRtpReceiverExists(); + return nativeGetParameters(nativeRtpReceiver); + } + + public String id() { + checkRtpReceiverExists(); + return nativeGetId(nativeRtpReceiver); + } + + /** Returns a pointer to webrtc::RtpReceiverInterface. 
*/ + long getNativeRtpReceiver() { + checkRtpReceiverExists(); + return nativeRtpReceiver; + } + + @CalledByNative + public void dispose() { + checkRtpReceiverExists(); + cachedTrack.dispose(); + if (nativeObserver != 0) { + nativeUnsetObserver(nativeRtpReceiver, nativeObserver); + nativeObserver = 0; + } + JniCommon.nativeReleaseRef(nativeRtpReceiver); + nativeRtpReceiver = 0; + } + + public void SetObserver(Observer observer) { + checkRtpReceiverExists(); + // Unset the existing one before setting a new one. + if (nativeObserver != 0) { + nativeUnsetObserver(nativeRtpReceiver, nativeObserver); + } + nativeObserver = nativeSetObserver(nativeRtpReceiver, observer); + } + + public void setFrameDecryptor(FrameDecryptor frameDecryptor) { + checkRtpReceiverExists(); + nativeSetFrameDecryptor(nativeRtpReceiver, frameDecryptor.getNativeFrameDecryptor()); + } + + private void checkRtpReceiverExists() { + if (nativeRtpReceiver == 0) { + throw new IllegalStateException("RtpReceiver has been disposed."); + } + } + + // This should increment the reference count of the track. + // Will be released in dispose(). + private static native long nativeGetTrack(long rtpReceiver); + private static native RtpParameters nativeGetParameters(long rtpReceiver); + private static native String nativeGetId(long rtpReceiver); + private static native long nativeSetObserver(long rtpReceiver, Observer observer); + private static native void nativeUnsetObserver(long rtpReceiver, long nativeObserver); + private static native void nativeSetFrameDecryptor(long rtpReceiver, long nativeFrameDecryptor); +}; diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/RtpSender.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/RtpSender.java new file mode 100644 index 00000000..2d0bc6c1 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/RtpSender.java @@ -0,0 +1,153 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. 
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import androidx.annotation.Nullable;
import java.util.List;

/** Java wrapper for a C++ RtpSenderInterface. */
public class RtpSender {
  // Pointer to the native webrtc::RtpSenderInterface; 0 once dispose() has run.
  private long nativeRtpSender;

  // Java-side cache of the currently attached track so track() avoids a JNI
  // round trip.
  @Nullable private MediaStreamTrack cachedTrack;
  // Whether this sender owns cachedTrack and must dispose it; updated by
  // setTrack() according to its takeOwnership argument.
  private boolean ownsTrack = true;
  // Non-null only for audio senders; created once in the constructor.
  private final @Nullable DtmfSender dtmfSender;

  @CalledByNative
  public RtpSender(long nativeRtpSender) {
    this.nativeRtpSender = nativeRtpSender;
    long nativeTrack = nativeGetTrack(nativeRtpSender);
    cachedTrack = MediaStreamTrack.createMediaStreamTrack(nativeTrack);

    if (nativeGetMediaType(nativeRtpSender).equalsIgnoreCase(MediaStreamTrack.AUDIO_TRACK_KIND)) {
      long nativeDtmfSender = nativeGetDtmfSender(nativeRtpSender);
      dtmfSender = (nativeDtmfSender != 0) ? new DtmfSender(nativeDtmfSender) : null;
    } else {
      dtmfSender = null;
    }
  }

  /**
   * Starts sending a new track, without requiring additional SDP negotiation.
   * <p>
   * Note: This is equivalent to replaceTrack in the official WebRTC API. It
   * was just implemented before the standards group settled on a name.
   *
   * @param takeOwnership If true, the RtpSender takes ownership of the track
   *                      from the caller, and will auto-dispose of it when no
   *                      longer needed. `takeOwnership` should only be used if
   *                      the caller owns the track; it is not appropriate when
   *                      the track is owned by, for example, another RtpSender
   *                      or a MediaStream.
   * @return true on success and false on failure.
   */
  public boolean setTrack(@Nullable MediaStreamTrack track, boolean takeOwnership) {
    checkRtpSenderExists();
    if (!nativeSetTrack(nativeRtpSender, (track == null) ? 0 : track.getNativeMediaStreamTrack())) {
      return false;
    }
    // Only swap the cache after the native call succeeded, so a failed
    // setTrack leaves the previous track (and its ownership) intact.
    if (cachedTrack != null && ownsTrack) {
      cachedTrack.dispose();
    }
    cachedTrack = track;
    ownsTrack = takeOwnership;
    return true;
  }

  /** Returns the currently attached track, or null if none. */
  @Nullable
  public MediaStreamTrack track() {
    return cachedTrack;
  }

  /** Sets the IDs of the media streams this sender is associated with. */
  public void setStreams(List<String> streamIds) {
    checkRtpSenderExists();
    nativeSetStreams(nativeRtpSender, streamIds);
  }

  /** Returns the IDs of the media streams this sender is associated with. */
  public List<String> getStreams() {
    checkRtpSenderExists();
    return nativeGetStreams(nativeRtpSender);
  }

  public boolean setParameters(RtpParameters parameters) {
    checkRtpSenderExists();
    return nativeSetParameters(nativeRtpSender, parameters);
  }

  public RtpParameters getParameters() {
    checkRtpSenderExists();
    return nativeGetParameters(nativeRtpSender);
  }

  public String id() {
    checkRtpSenderExists();
    return nativeGetId(nativeRtpSender);
  }

  /** Returns the DTMF sender for audio senders, or null for video senders. */
  @Nullable
  public DtmfSender dtmf() {
    return dtmfSender;
  }

  public void setFrameEncryptor(FrameEncryptor frameEncryptor) {
    checkRtpSenderExists();
    nativeSetFrameEncryptor(nativeRtpSender, frameEncryptor.getNativeFrameEncryptor());
  }

  /**
   * Releases the native sender and any owned Java-side resources (the DTMF
   * sender and, if owned, the cached track). After this call, methods that
   * touch the native sender throw IllegalStateException.
   */
  public void dispose() {
    checkRtpSenderExists();
    if (dtmfSender != null) {
      dtmfSender.dispose();
    }
    if (cachedTrack != null && ownsTrack) {
      cachedTrack.dispose();
    }
    JniCommon.nativeReleaseRef(nativeRtpSender);
    nativeRtpSender = 0;
  }

  /** Returns a pointer to webrtc::RtpSenderInterface. */
  long getNativeRtpSender() {
    checkRtpSenderExists();
    return nativeRtpSender;
  }

  // Guard used by every native-touching method.
  private void checkRtpSenderExists() {
    if (nativeRtpSender == 0) {
      throw new IllegalStateException("RtpSender has been disposed.");
    }
  }

  private static native boolean nativeSetTrack(long rtpSender, long nativeTrack);

  // This should increment the reference count of the track.
  // Will be released in dispose() or setTrack().
  private static native long nativeGetTrack(long rtpSender);

  private static native void nativeSetStreams(long rtpSender, List<String> streamIds);

  private static native List<String> nativeGetStreams(long rtpSender);

  // This should increment the reference count of the DTMF sender.
  // Will be released in dispose().
  private static native long nativeGetDtmfSender(long rtpSender);

  private static native boolean nativeSetParameters(long rtpSender, RtpParameters parameters);

  private static native RtpParameters nativeGetParameters(long rtpSender);

  private static native String nativeGetId(long rtpSender);

  private static native void nativeSetFrameEncryptor(long rtpSender, long nativeFrameEncryptor);

  private static native String nativeGetMediaType(long rtpSender);
}
/*
 * Copyright 2018 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Java wrapper for a C++ RtpTransceiverInterface.
 *
 * <p>The RTCRtpTransceiver maps to the RTCRtpTransceiver defined by the WebRTC
 * specification. A transceiver represents a combination of an RTCRtpSender
 * and an RTCRtpReceiver that share a common mid. As defined in JSEP, an
 * RTCRtpTransceiver is said to be associated with a media description if its
 * mid property is non-nil; otherwise, it is said to be disassociated.
 * JSEP: https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-24
 *
 * <p>Note that RTCRtpTransceivers are only supported when using
 * RTCPeerConnection with Unified Plan SDP.
 *
 * <p>WebRTC specification for RTCRtpTransceiver, the JavaScript analog:
 * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver
 */
public class RtpTransceiver {
  /** Java version of webrtc::RtpTransceiverDirection - the ordering must be kept in sync. */
  public enum RtpTransceiverDirection {
    SEND_RECV(0),
    SEND_ONLY(1),
    RECV_ONLY(2),
    INACTIVE(3),
    STOPPED(4);

    private final int nativeIndex;

    private RtpTransceiverDirection(int nativeIndex) {
      this.nativeIndex = nativeIndex;
    }

    @CalledByNative("RtpTransceiverDirection")
    int getNativeIndex() {
      return nativeIndex;
    }

    @CalledByNative("RtpTransceiverDirection")
    static RtpTransceiverDirection fromNativeIndex(int nativeIndex) {
      for (RtpTransceiverDirection type : RtpTransceiverDirection.values()) {
        if (type.getNativeIndex() == nativeIndex) {
          return type;
        }
      }
      // Fixed message typo ("Uknown") and added the missing separator before
      // the offending index value.
      throw new IllegalArgumentException(
          "Unknown native RtpTransceiverDirection type: " + nativeIndex);
    }
  }

  /**
   * Tracks webrtc::RtpTransceiverInit. https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiverinit
   * A structure for initializing an RtpTransceiver in a call to addTransceiver.
   * Note: This does not contain a list of encoding parameters, because they are currently
   * not being used natively.
   */
  public static final class RtpTransceiverInit {
    private final RtpTransceiverDirection direction;
    private final List<String> streamIds;
    private final List<RtpParameters.Encoding> sendEncodings;

    public RtpTransceiverInit() {
      this(RtpTransceiverDirection.SEND_RECV);
    }

    public RtpTransceiverInit(RtpTransceiverDirection direction) {
      this(direction, Collections.emptyList(), Collections.emptyList());
    }

    public RtpTransceiverInit(RtpTransceiverDirection direction, List<String> streamIds) {
      this(direction, streamIds, Collections.emptyList());
    }

    public RtpTransceiverInit(RtpTransceiverDirection direction, List<String> streamIds,
        List<RtpParameters.Encoding> sendEncodings) {
      this.direction = direction;
      // Defensive copies: the init object must not alias caller-owned lists.
      this.streamIds = new ArrayList<>(streamIds);
      this.sendEncodings = new ArrayList<>(sendEncodings);
    }

    @CalledByNative("RtpTransceiverInit")
    int getDirectionNativeIndex() {
      return direction.getNativeIndex();
    }

    @CalledByNative("RtpTransceiverInit")
    List<String> getStreamIds() {
      return new ArrayList<>(this.streamIds);
    }

    @CalledByNative("RtpTransceiverInit")
    List<RtpParameters.Encoding> getSendEncodings() {
      return new ArrayList<>(this.sendEncodings);
    }
  }

  // Pointer to the native webrtc::RtpTransceiverInterface; 0 after dispose().
  private long nativeRtpTransceiver;
  // Sender/receiver wrappers are created once in the constructor and disposed
  // together with this transceiver.
  private RtpSender cachedSender;
  private RtpReceiver cachedReceiver;

  @CalledByNative
  protected RtpTransceiver(long nativeRtpTransceiver) {
    this.nativeRtpTransceiver = nativeRtpTransceiver;
    cachedSender = nativeGetSender(nativeRtpTransceiver);
    cachedReceiver = nativeGetReceiver(nativeRtpTransceiver);
  }

  /**
   * Media type of the transceiver. Any sender(s)/receiver(s) will have this
   * type as well.
   */
  public MediaStreamTrack.MediaType getMediaType() {
    checkRtpTransceiverExists();
    return nativeGetMediaType(nativeRtpTransceiver);
  }

  /**
   * The mid attribute is the mid negotiated and present in the local and
   * remote descriptions. Before negotiation is complete, the mid value may be
   * null. After rollbacks, the value may change from a non-null value to null.
   * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-mid
   */
  public String getMid() {
    checkRtpTransceiverExists();
    return nativeGetMid(nativeRtpTransceiver);
  }

  /**
   * The sender attribute exposes the RtpSender corresponding to the RTP media
   * that may be sent with the transceiver's mid. The sender is always present,
   * regardless of the direction of media.
   * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-sender
   */
  public RtpSender getSender() {
    return cachedSender;
  }

  /**
   * The receiver attribute exposes the RtpReceiver corresponding to the RTP
   * media that may be received with the transceiver's mid. The receiver is
   * always present, regardless of the direction of media.
   * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-receiver
   */
  public RtpReceiver getReceiver() {
    return cachedReceiver;
  }

  /**
   * The stopped attribute indicates that the sender of this transceiver will no
   * longer send, and that the receiver will no longer receive. It is true if
   * either stop has been called or if setting the local or remote description
   * has caused the RtpTransceiver to be stopped.
   * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-stopped
   */
  public boolean isStopped() {
    checkRtpTransceiverExists();
    return nativeStopped(nativeRtpTransceiver);
  }

  /**
   * The direction attribute indicates the preferred direction of this
   * transceiver, which will be used in calls to CreateOffer and CreateAnswer.
   * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction
   */
  public RtpTransceiverDirection getDirection() {
    checkRtpTransceiverExists();
    return nativeDirection(nativeRtpTransceiver);
  }

  /**
   * The current_direction attribute indicates the current direction negotiated
   * for this transceiver. If this transceiver has never been represented in an
   * offer/answer exchange, or if the transceiver is stopped, the value is null.
   * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-currentdirection
   */
  public RtpTransceiverDirection getCurrentDirection() {
    checkRtpTransceiverExists();
    return nativeCurrentDirection(nativeRtpTransceiver);
  }

  /**
   * Sets the preferred direction of this transceiver. An update of
   * directionality does not take effect immediately. Instead, future calls to
   * CreateOffer and CreateAnswer mark the corresponding media descriptions as
   * sendrecv, sendonly, recvonly, or inactive.
   * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction
   *
   * @return true if the native call accepted the new direction.
   */
  public boolean setDirection(RtpTransceiverDirection rtpTransceiverDirection) {
    checkRtpTransceiverExists();
    return nativeSetDirection(nativeRtpTransceiver, rtpTransceiverDirection);
  }

  /**
   * The Stop method will for the time being call the StopInternal method.
   * After a migration procedure, stop() will be equivalent to StopStandard.
   */
  public void stop() {
    checkRtpTransceiverExists();
    nativeStopInternal(nativeRtpTransceiver);
  }

  public void setCodecPreferences(List<RtpCodecCapability> codecs) {
    checkRtpTransceiverExists();
    nativeSetCodecPreferences(nativeRtpTransceiver, codecs);
  }

  /**
   * The StopInternal method stops the RtpTransceiver, like Stop, but goes
   * immediately to Stopped state.
   */
  public void stopInternal() {
    checkRtpTransceiverExists();
    nativeStopInternal(nativeRtpTransceiver);
  }

  /**
   * The StopStandard method irreversibly stops the RtpTransceiver. The sender
   * of this transceiver will no longer send, the receiver will no longer
   * receive.
   *
   * <p>The transceiver will enter Stopping state and signal NegotiationNeeded.
   * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-stop
   */
  public void stopStandard() {
    checkRtpTransceiverExists();
    nativeStopStandard(nativeRtpTransceiver);
  }

  /** Releases the sender, receiver and the native transceiver reference. */
  @CalledByNative
  public void dispose() {
    checkRtpTransceiverExists();
    cachedSender.dispose();
    cachedReceiver.dispose();
    JniCommon.nativeReleaseRef(nativeRtpTransceiver);
    nativeRtpTransceiver = 0;
  }

  private void checkRtpTransceiverExists() {
    if (nativeRtpTransceiver == 0) {
      throw new IllegalStateException("RtpTransceiver has been disposed.");
    }
  }

  private static native MediaStreamTrack.MediaType nativeGetMediaType(long rtpTransceiver);
  private static native String nativeGetMid(long rtpTransceiver);
  private static native RtpSender nativeGetSender(long rtpTransceiver);
  private static native RtpReceiver nativeGetReceiver(long rtpTransceiver);
  private static native boolean nativeStopped(long rtpTransceiver);
  private static native RtpTransceiverDirection nativeDirection(long rtpTransceiver);
  private static native RtpTransceiverDirection nativeCurrentDirection(long rtpTransceiver);
  private static native void nativeStopInternal(long rtpTransceiver);
  private static native void nativeStopStandard(long rtpTransceiver);
  private static native boolean nativeSetDirection(
      long rtpTransceiver, RtpTransceiverDirection rtpTransceiverDirection);
  private static native void nativeSetCodecPreferences(
      long rtpTransceiver, List<RtpCodecCapability> codecs);
}
/*
 * Copyright 2018 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

/**
 * The SSLCertificateVerifier interface allows API users to provide custom
 * logic to verify certificates.
 */
public interface SSLCertificateVerifier {
  /**
   * Implementations of verify allow applications to provide custom logic for
   * verifying certificates. This is not required by default and should be used
   * with care.
   *
   * @param certificate A byte array containing a DER encoded X509 certificate.
   * @return True if the certificate is verified and trusted else false.
   */
  // NOTE(review): invoked from native code (@CalledByNative); presumably this
  // must be fast and non-blocking — confirm threading expectations before
  // doing network lookups here.
  @CalledByNative boolean verify(byte[] certificate);
}
+ */ + +package org.webrtc; + +import android.app.Activity; +import android.content.Context; +import android.content.Intent; +import android.hardware.display.DisplayManager; +import android.hardware.display.VirtualDisplay; +import android.media.projection.MediaProjection; +import android.media.projection.MediaProjectionManager; +import android.os.Build.VERSION; +import android.os.Build.VERSION_CODES; +import android.view.Surface; +import androidx.annotation.Nullable; + +/** + * An implementation of VideoCapturer to capture the screen content as a video stream. + * Capturing is done by {@code MediaProjection} on a {@code SurfaceTexture}. We interact with this + * {@code SurfaceTexture} using a {@code SurfaceTextureHelper}. + * The {@code SurfaceTextureHelper} is created by the native code and passed to this capturer in + * {@code VideoCapturer.initialize()}. On receiving a new frame, this capturer passes it + * as a texture to the native code via {@code CapturerObserver.onFrameCaptured()}. This takes + * place on the HandlerThread of the given {@code SurfaceTextureHelper}. When done with each frame, + * the native code returns the buffer to the {@code SurfaceTextureHelper} to be used for new + * frames. At any time, at most one frame is being processed. + */ +public class ScreenCapturerAndroid implements VideoCapturer, VideoSink { + private static final int DISPLAY_FLAGS = + DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION; + // DPI for VirtualDisplay, does not seem to matter for us. 
+ private static final int VIRTUAL_DISPLAY_DPI = 400; + + private final Intent mediaProjectionPermissionResultData; + private final MediaProjection.Callback mediaProjectionCallback; + + private int width; + private int height; + @Nullable private VirtualDisplay virtualDisplay; + @Nullable private SurfaceTextureHelper surfaceTextureHelper; + @Nullable private CapturerObserver capturerObserver; + private long numCapturedFrames; + @Nullable private MediaProjection mediaProjection; + private boolean isDisposed; + @Nullable private MediaProjectionManager mediaProjectionManager; + + /** + * Constructs a new Screen Capturer. + * + * @param mediaProjectionPermissionResultData the result data of MediaProjection permission + * activity; the calling app must validate that result code is Activity.RESULT_OK before + * calling this method. + * @param mediaProjectionCallback MediaProjection callback to implement application specific + * logic in events such as when the user revokes a previously granted capture permission. + **/ + public ScreenCapturerAndroid(Intent mediaProjectionPermissionResultData, + MediaProjection.Callback mediaProjectionCallback) { + this.mediaProjectionPermissionResultData = mediaProjectionPermissionResultData; + this.mediaProjectionCallback = mediaProjectionCallback; + } + + private void checkNotDisposed() { + if (isDisposed) { + throw new RuntimeException("capturer is disposed."); + } + } + + @Nullable + public MediaProjection getMediaProjection() { + return mediaProjection; + } + + @Override + // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. 
+ @SuppressWarnings("NoSynchronizedMethodCheck") + public synchronized void initialize(final SurfaceTextureHelper surfaceTextureHelper, + final Context applicationContext, final CapturerObserver capturerObserver) { + checkNotDisposed(); + + if (capturerObserver == null) { + throw new RuntimeException("capturerObserver not set."); + } + this.capturerObserver = capturerObserver; + + if (surfaceTextureHelper == null) { + throw new RuntimeException("surfaceTextureHelper not set."); + } + this.surfaceTextureHelper = surfaceTextureHelper; + + mediaProjectionManager = (MediaProjectionManager) applicationContext.getSystemService( + Context.MEDIA_PROJECTION_SERVICE); + } + + @Override + // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. + @SuppressWarnings("NoSynchronizedMethodCheck") + public synchronized void startCapture( + final int width, final int height, final int ignoredFramerate) { + checkNotDisposed(); + + this.width = width; + this.height = height; + + mediaProjection = mediaProjectionManager.getMediaProjection( + Activity.RESULT_OK, mediaProjectionPermissionResultData); + + // Let MediaProjection callback use the SurfaceTextureHelper thread. + mediaProjection.registerCallback(mediaProjectionCallback, surfaceTextureHelper.getHandler()); + + updateVirtualDisplay(); + capturerObserver.onCapturerStarted(true); + surfaceTextureHelper.startListening(ScreenCapturerAndroid.this); + } + + @Override + // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. 
+ @SuppressWarnings("NoSynchronizedMethodCheck") + public synchronized void stopCapture() { + checkNotDisposed(); + ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() { + @Override + public void run() { + surfaceTextureHelper.stopListening(); + capturerObserver.onCapturerStopped(); + + if (virtualDisplay != null) { + virtualDisplay.release(); + virtualDisplay = null; + } + + if (mediaProjection != null) { + // Unregister the callback before stopping, otherwise the callback recursively + // calls this method. + mediaProjection.unregisterCallback(mediaProjectionCallback); + mediaProjection.stop(); + mediaProjection = null; + } + } + }); + } + + @Override + // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. + @SuppressWarnings("NoSynchronizedMethodCheck") + public synchronized void dispose() { + isDisposed = true; + } + + /** + * Changes output video format. This method can be used to scale the output + * video, or to change orientation when the captured screen is rotated for example. + * + * @param width new output video width + * @param height new output video height + * @param ignoredFramerate ignored + */ + @Override + // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. + @SuppressWarnings("NoSynchronizedMethodCheck") + public synchronized void changeCaptureFormat( + final int width, final int height, final int ignoredFramerate) { + checkNotDisposed(); + + this.width = width; + this.height = height; + + if (virtualDisplay == null) { + // Capturer is stopped, the virtual display will be created in startCapture(). + return; + } + + // Create a new virtual display on the surfaceTextureHelper thread to avoid interference + // with frame processing, which happens on the same thread (we serialize events by running + // them on the same thread). 
+ ThreadUtils.invokeAtFrontUninterruptibly( + surfaceTextureHelper.getHandler(), this::updateVirtualDisplay); + } + + private void updateVirtualDisplay() { + surfaceTextureHelper.setTextureSize(width, height); + // Before Android S (12), resizing the virtual display can cause the captured screen to be + // scaled incorrectly, so keep the behavior of recreating the virtual display prior to Android + // S. + if (virtualDisplay == null || VERSION.SDK_INT < VERSION_CODES.S) { + createVirtualDisplay(); + } else { + virtualDisplay.resize(width, height, VIRTUAL_DISPLAY_DPI); + virtualDisplay.setSurface(new Surface(surfaceTextureHelper.getSurfaceTexture())); + } + } + private void createVirtualDisplay() { + if (virtualDisplay != null) { + virtualDisplay.release(); + } + virtualDisplay = mediaProjection.createVirtualDisplay("WebRTC_ScreenCapture", width, height, + VIRTUAL_DISPLAY_DPI, DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()), + null /* callback */, null /* callback handler */); + } + + // This is called on the internal looper thread of {@Code SurfaceTextureHelper}. + @Override + public void onFrame(VideoFrame frame) { + numCapturedFrames++; + capturerObserver.onFrameCaptured(frame); + } + + @Override + public boolean isScreencast() { + return true; + } + + public long getNumCapturedFrames() { + return numCapturedFrames; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/SdpObserver.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/SdpObserver.java new file mode 100644 index 00000000..afa99bc5 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/SdpObserver.java @@ -0,0 +1,26 @@ +/* + * Copyright 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
/*
 * Copyright 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

/** Interface for observing SDP-related events. */
// NOTE(review): all methods are invoked from native code (@CalledByNative);
// the calling thread is not specified here — confirm before touching UI state
// directly in implementations.
public interface SdpObserver {
  /** Called on success of Create{Offer,Answer}(). */
  @CalledByNative void onCreateSuccess(SessionDescription sdp);

  /** Called on success of Set{Local,Remote}Description(). */
  @CalledByNative void onSetSuccess();

  /** Called on error of Create{Offer,Answer}(). */
  @CalledByNative void onCreateFailure(String error);

  /** Called on error of Set{Local,Remote}Description(). */
  @CalledByNative void onSetFailure(String error);
}
/*
 * Copyright 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import java.util.Locale;

/**
 * Description of an RFC 4566 Session.
 * SDPs are passed as serialized Strings in Java-land and are materialized
 * to SessionDescriptionInterface as appropriate in the JNI layer.
 */
public class SessionDescription {
  /** Java-land enum version of SessionDescriptionInterface's type() string. */
  public static enum Type {
    OFFER,
    PRANSWER,
    ANSWER,
    ROLLBACK;

    /** Lower-case form of the type name (Locale.US, locale-independent). */
    public String canonicalForm() {
      return name().toLowerCase(Locale.US);
    }

    // Looked up by name from native code; do not rename.
    @CalledByNative("Type")
    public static Type fromCanonicalForm(String canonical) {
      return Type.valueOf(Type.class, canonical.toUpperCase(Locale.US));
    }
  }

  // Immutable: type and the serialized SDP blob are set once in the constructor.
  public final Type type;
  public final String description;

  @CalledByNative
  public SessionDescription(Type type, String description) {
    this.type = type;
    this.description = description;
  }

  // Accessor used by the JNI layer; keep the name stable.
  @CalledByNative
  String getDescription() {
    return description;
  }

  // Accessor used by the JNI layer; keep the name stable.
  @CalledByNative
  String getTypeInCanonicalForm() {
    return type.canonicalForm();
  }
}
+ */ +public class Size { + public int width; + public int height; + + public Size(int width, int height) { + this.width = width; + this.height = height; + } + + @Override + public String toString() { + return width + "x" + height; + } + + @Override + public boolean equals(Object other) { + if (!(other instanceof Size)) { + return false; + } + final Size otherSize = (Size) other; + return width == otherSize.width && height == otherSize.height; + } + + @Override + public int hashCode() { + // Use prime close to 2^16 to avoid collisions for normal values less than 2^16. + return 1 + 65537 * width + height; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/SoftwareVideoDecoderFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/SoftwareVideoDecoderFactory.java new file mode 100644 index 00000000..2ac42e83 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/SoftwareVideoDecoderFactory.java @@ -0,0 +1,53 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
/*
 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import androidx.annotation.Nullable;
import java.util.List;

/** Factory producing the libwebrtc built-in software video decoders. */
public class SoftwareVideoDecoderFactory implements VideoDecoderFactory {
  private static final String TAG = "SoftwareVideoDecoderFactory";

  // Pointer to the native factory; created once in the constructor and lives
  // for the lifetime of this object (no dispose method is exposed).
  private final long nativeFactory;

  public SoftwareVideoDecoderFactory() {
    this.nativeFactory = nativeCreateFactory();
  }

  /**
   * Creates a decoder for the given codec.
   *
   * @param info the codec to decode.
   * @return a wrapper around the native software decoder, or null if the
   *     format is not supported by this factory (a warning is logged).
   */
  @Nullable
  @Override
  public VideoDecoder createDecoder(VideoCodecInfo info) {
    long nativeDecoder = nativeCreateDecoder(nativeFactory, info);
    if (nativeDecoder == 0) {
      Logging.w(TAG, "Trying to create decoder for unsupported format. " + info);
      return null;
    }

    return new WrappedNativeVideoDecoder() {
      @Override
      public long createNativeVideoDecoder() {
        return nativeDecoder;
      }
    };
  }

  /** Returns the codecs this software factory can decode. */
  @Override
  public VideoCodecInfo[] getSupportedCodecs() {
    return nativeGetSupportedCodecs(nativeFactory).toArray(new VideoCodecInfo[0]);
  }

  private static native long nativeCreateFactory();

  private static native long nativeCreateDecoder(long factory, VideoCodecInfo videoCodecInfo);

  private static native List<VideoCodecInfo> nativeGetSupportedCodecs(long factory);
}
/*
 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import androidx.annotation.Nullable;
import java.util.List;

/** Factory producing the libwebrtc built-in software video encoders. */
public class SoftwareVideoEncoderFactory implements VideoEncoderFactory {
  private static final String TAG = "SoftwareVideoEncoderFactory";

  // Pointer to the native factory; created once in the constructor and lives
  // for the lifetime of this object (no dispose method is exposed).
  private final long nativeFactory;

  public SoftwareVideoEncoderFactory() {
    this.nativeFactory = nativeCreateFactory();
  }

  /**
   * Creates an encoder for the given codec.
   *
   * @param info the codec to encode.
   * @return a wrapper around the native software encoder, or null if the
   *     format is not supported by this factory (a warning is logged).
   */
  @Nullable
  @Override
  public VideoEncoder createEncoder(VideoCodecInfo info) {
    long nativeEncoder = nativeCreateEncoder(nativeFactory, info);
    if (nativeEncoder == 0) {
      Logging.w(TAG, "Trying to create encoder for unsupported format. " + info);
      return null;
    }

    return new WrappedNativeVideoEncoder() {
      @Override
      public long createNativeVideoEncoder() {
        return nativeEncoder;
      }

      @Override
      public boolean isHardwareEncoder() {
        // Software factory by definition.
        return false;
      }
    };
  }

  /** Returns the codecs this software factory can encode. */
  @Override
  public VideoCodecInfo[] getSupportedCodecs() {
    return nativeGetSupportedCodecs(nativeFactory).toArray(new VideoCodecInfo[0]);
  }

  private static native long nativeCreateFactory();

  private static native long nativeCreateEncoder(long factory, VideoCodecInfo videoCodecInfo);

  private static native List<VideoCodecInfo> nativeGetSupportedCodecs(long factory);
}
+ */ + +package org.webrtc; + +/** Interface for observing Stats reports (see webrtc::StatsObservers). */ +public interface StatsObserver { + /** Called when the reports are ready.*/ + @CalledByNative public void onComplete(StatsReport[] reports); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/StatsReport.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/StatsReport.java new file mode 100644 index 00000000..b8f1cf87 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/StatsReport.java @@ -0,0 +1,63 @@ +/* + * Copyright 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** Java version of webrtc::StatsReport. */ +public class StatsReport { + /** Java version of webrtc::StatsReport::Value. */ + public static class Value { + public final String name; + public final String value; + + @CalledByNative("Value") + public Value(String name, String value) { + this.name = name; + this.value = value; + } + + @Override + public String toString() { + StringBuilder builder = new StringBuilder(); + builder.append("[").append(name).append(": ").append(value).append("]"); + return builder.toString(); + } + } + + public final String id; + public final String type; + // Time since 1970-01-01T00:00:00Z in milliseconds. 
+ public final double timestamp; + public final Value[] values; + + @CalledByNative + public StatsReport(String id, String type, double timestamp, Value[] values) { + this.id = id; + this.type = type; + this.timestamp = timestamp; + this.values = values; + } + + @Override + public String toString() { + StringBuilder builder = new StringBuilder(); + builder.append("id: ") + .append(id) + .append(", type: ") + .append(type) + .append(", timestamp: ") + .append(timestamp) + .append(", values: "); + for (int i = 0; i < values.length; ++i) { + builder.append(values[i].toString()).append(", "); + } + return builder.toString(); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/SurfaceEglRenderer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/SurfaceEglRenderer.java new file mode 100644 index 00000000..6cba3f47 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/SurfaceEglRenderer.java @@ -0,0 +1,160 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.view.SurfaceHolder; +import java.util.concurrent.CountDownLatch; + +/** + * Display the video stream on a Surface. + * renderFrame() is asynchronous to avoid blocking the calling thread. + * This class is thread safe and handles access from potentially three different threads: + * Interaction from the main app in init, release and setMirror. + * Interaction from C++ rtc::VideoSinkInterface in renderFrame. + * Interaction from SurfaceHolder lifecycle in surfaceCreated, surfaceChanged, and surfaceDestroyed. 
+ */ +public class SurfaceEglRenderer extends EglRenderer implements SurfaceHolder.Callback { + private static final String TAG = "SurfaceEglRenderer"; + + // Callback for reporting renderer events. Read-only after initialization so no lock required. + private RendererCommon.RendererEvents rendererEvents; + + private final Object layoutLock = new Object(); + private boolean isRenderingPaused; + private boolean isFirstFrameRendered; + private int rotatedFrameWidth; + private int rotatedFrameHeight; + private int frameRotation; + + /** + * In order to render something, you must first call init(). + */ + public SurfaceEglRenderer(String name) { + super(name); + } + + /** + * Initialize this class, sharing resources with `sharedContext`. The custom `drawer` will be used + * for drawing frames on the EGLSurface. This class is responsible for calling release() on + * `drawer`. It is allowed to call init() to reinitialize the renderer after a previous + * init()/release() cycle. + */ + public void init(final EglBase.Context sharedContext, + RendererCommon.RendererEvents rendererEvents, final int[] configAttributes, + RendererCommon.GlDrawer drawer) { + ThreadUtils.checkIsOnMainThread(); + this.rendererEvents = rendererEvents; + synchronized (layoutLock) { + isFirstFrameRendered = false; + rotatedFrameWidth = 0; + rotatedFrameHeight = 0; + frameRotation = 0; + } + super.init(sharedContext, configAttributes, drawer); + } + + @Override + public void init(final EglBase.Context sharedContext, final int[] configAttributes, + RendererCommon.GlDrawer drawer) { + init(sharedContext, null /* rendererEvents */, configAttributes, drawer); + } + + /** + * Limit render framerate. + * + * @param fps Limit render framerate to this value, or use Float.POSITIVE_INFINITY to disable fps + * reduction. 
+ */ + @Override + public void setFpsReduction(float fps) { + synchronized (layoutLock) { + isRenderingPaused = fps == 0f; + } + super.setFpsReduction(fps); + } + + @Override + public void disableFpsReduction() { + synchronized (layoutLock) { + isRenderingPaused = false; + } + super.disableFpsReduction(); + } + + @Override + public void pauseVideo() { + synchronized (layoutLock) { + isRenderingPaused = true; + } + super.pauseVideo(); + } + + // VideoSink interface. + @Override + public void onFrame(VideoFrame frame) { + updateFrameDimensionsAndReportEvents(frame); + super.onFrame(frame); + } + + // SurfaceHolder.Callback interface. + @Override + public void surfaceCreated(final SurfaceHolder holder) { + ThreadUtils.checkIsOnMainThread(); + createEglSurface(holder.getSurface()); + } + + @Override + public void surfaceDestroyed(SurfaceHolder holder) { + ThreadUtils.checkIsOnMainThread(); + final CountDownLatch completionLatch = new CountDownLatch(1); + releaseEglSurface(completionLatch::countDown); + ThreadUtils.awaitUninterruptibly(completionLatch); + } + + @Override + public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { + ThreadUtils.checkIsOnMainThread(); + logD("surfaceChanged: format: " + format + " size: " + width + "x" + height); + } + + // Update frame dimensions and report any changes to `rendererEvents`. 
+ private void updateFrameDimensionsAndReportEvents(VideoFrame frame) { + synchronized (layoutLock) { + if (isRenderingPaused) { + return; + } + if (!isFirstFrameRendered) { + isFirstFrameRendered = true; + logD("Reporting first rendered frame."); + if (rendererEvents != null) { + rendererEvents.onFirstFrameRendered(); + } + } + if (rotatedFrameWidth != frame.getRotatedWidth() + || rotatedFrameHeight != frame.getRotatedHeight() + || frameRotation != frame.getRotation()) { + logD("Reporting frame resolution changed to " + frame.getBuffer().getWidth() + "x" + + frame.getBuffer().getHeight() + " with rotation " + frame.getRotation()); + if (rendererEvents != null) { + rendererEvents.onFrameResolutionChanged( + frame.getBuffer().getWidth(), frame.getBuffer().getHeight(), frame.getRotation()); + } + rotatedFrameWidth = frame.getRotatedWidth(); + rotatedFrameHeight = frame.getRotatedHeight(); + frameRotation = frame.getRotation(); + } + } + } + + private void logD(String string) { + Logging.d(TAG, name + ": " + string); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/SurfaceTextureHelper.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/SurfaceTextureHelper.java new file mode 100644 index 00000000..3ea22736 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/SurfaceTextureHelper.java @@ -0,0 +1,390 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import android.annotation.TargetApi; +import android.graphics.SurfaceTexture; +import android.opengl.GLES11Ext; +import android.opengl.GLES20; +import android.os.Build; +import android.os.Handler; +import android.os.HandlerThread; +import androidx.annotation.Nullable; +import java.util.concurrent.Callable; +import org.webrtc.EglBase.Context; +import org.webrtc.TextureBufferImpl.RefCountMonitor; +import org.webrtc.VideoFrame.TextureBuffer; + +/** + * Helper class for using a SurfaceTexture to create WebRTC VideoFrames. In order to create WebRTC + * VideoFrames, render onto the SurfaceTexture. The frames will be delivered to the listener. Only + * one texture frame can be in flight at once, so the frame must be released in order to receive a + * new frame. Call stopListening() to stop receiveing new frames. Call dispose to release all + * resources once the texture frame is released. + */ +public class SurfaceTextureHelper { + /** + * Interface for monitoring texture buffers created from this SurfaceTexture. Since only one + * texture buffer can exist at a time, this can be used to monitor for stuck frames. + */ + public interface FrameRefMonitor { + /** A new frame was created. New frames start with ref count of 1. */ + void onNewBuffer(TextureBuffer textureBuffer); + /** Ref count of the frame was incremented by the calling thread. */ + void onRetainBuffer(TextureBuffer textureBuffer); + /** Ref count of the frame was decremented by the calling thread. */ + void onReleaseBuffer(TextureBuffer textureBuffer); + /** Frame was destroyed (ref count reached 0). */ + void onDestroyBuffer(TextureBuffer textureBuffer); + } + + private static final String TAG = "SurfaceTextureHelper"; + /** + * Construct a new SurfaceTextureHelper sharing OpenGL resources with `sharedContext`. A dedicated + * thread and handler is created for handling the SurfaceTexture. May return null if EGL fails to + * initialize a pixel buffer surface and make it current. 
If alignTimestamps is true, the frame + * timestamps will be aligned to rtc::TimeNanos(). If frame timestamps are aligned to + * rtc::TimeNanos() there is no need for aligning timestamps again in + * PeerConnectionFactory.createVideoSource(). This makes the timestamps more accurate and + * closer to actual creation time. + */ + public static SurfaceTextureHelper create(final String threadName, + final EglBase.Context sharedContext, boolean alignTimestamps, final YuvConverter yuvConverter, + FrameRefMonitor frameRefMonitor) { + final HandlerThread thread = new HandlerThread(threadName); + thread.start(); + final Handler handler = new Handler(thread.getLooper()); + + // The onFrameAvailable() callback will be executed on the SurfaceTexture ctor thread. See: + // http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/graphics/SurfaceTexture.java#195. + // Therefore, in order to control the callback thread on API lvl < 21, the SurfaceTextureHelper + // is constructed on the `handler` thread. + return ThreadUtils.invokeAtFrontUninterruptibly(handler, new Callable() { + @Nullable + @Override + public SurfaceTextureHelper call() { + try { + return new SurfaceTextureHelper( + sharedContext, handler, alignTimestamps, yuvConverter, frameRefMonitor); + } catch (RuntimeException e) { + Logging.e(TAG, threadName + " create failure", e); + return null; + } + } + }); + } + + /** + * Same as above with alignTimestamps set to false and yuvConverter set to new YuvConverter. + * + * @see #create(String, EglBase.Context, boolean, YuvConverter, FrameRefMonitor) + */ + public static SurfaceTextureHelper create( + final String threadName, final EglBase.Context sharedContext) { + return create(threadName, sharedContext, /* alignTimestamps= */ false, new YuvConverter(), + /*frameRefMonitor=*/null); + } + + /** + * Same as above with yuvConverter set to new YuvConverter. 
+ * + * @see #create(String, EglBase.Context, boolean, YuvConverter, FrameRefMonitor) + */ + public static SurfaceTextureHelper create( + final String threadName, final EglBase.Context sharedContext, boolean alignTimestamps) { + return create( + threadName, sharedContext, alignTimestamps, new YuvConverter(), /*frameRefMonitor=*/null); + } + + /** + * Create a SurfaceTextureHelper without frame ref monitor. + * + * @see #create(String, EglBase.Context, boolean, YuvConverter, FrameRefMonitor) + */ + public static SurfaceTextureHelper create(final String threadName, + final EglBase.Context sharedContext, boolean alignTimestamps, YuvConverter yuvConverter) { + return create( + threadName, sharedContext, alignTimestamps, yuvConverter, /*frameRefMonitor=*/null); + } + + private final RefCountMonitor textureRefCountMonitor = new RefCountMonitor() { + @Override + public void onRetain(TextureBufferImpl textureBuffer) { + if (frameRefMonitor != null) { + frameRefMonitor.onRetainBuffer(textureBuffer); + } + } + + @Override + public void onRelease(TextureBufferImpl textureBuffer) { + if (frameRefMonitor != null) { + frameRefMonitor.onReleaseBuffer(textureBuffer); + } + } + + @Override + public void onDestroy(TextureBufferImpl textureBuffer) { + returnTextureFrame(); + if (frameRefMonitor != null) { + frameRefMonitor.onDestroyBuffer(textureBuffer); + } + } + }; + + private final Handler handler; + private final EglBase eglBase; + private final SurfaceTexture surfaceTexture; + private final int oesTextureId; + private final YuvConverter yuvConverter; + @Nullable private final TimestampAligner timestampAligner; + private final FrameRefMonitor frameRefMonitor; + + // These variables are only accessed from the `handler` thread. + @Nullable private VideoSink listener; + // The possible states of this class. 
+ private boolean hasPendingTexture; + private volatile boolean isTextureInUse; + private boolean isQuitting; + private int frameRotation; + private int textureWidth; + private int textureHeight; + // `pendingListener` is set in setListener() and the runnable is posted to the handler thread. + // setListener() is not allowed to be called again before stopListening(), so this is thread safe. + @Nullable private VideoSink pendingListener; + final Runnable setListenerRunnable = new Runnable() { + @Override + public void run() { + Logging.d(TAG, "Setting listener to " + pendingListener); + listener = pendingListener; + pendingListener = null; + // May have a pending frame from the previous capture session - drop it. + if (hasPendingTexture) { + // Calling updateTexImage() is neccessary in order to receive new frames. + updateTexImage(); + hasPendingTexture = false; + } + } + }; + + private SurfaceTextureHelper(Context sharedContext, Handler handler, boolean alignTimestamps, + YuvConverter yuvConverter, FrameRefMonitor frameRefMonitor) { + if (handler.getLooper().getThread() != Thread.currentThread()) { + throw new IllegalStateException("SurfaceTextureHelper must be created on the handler thread"); + } + this.handler = handler; + this.timestampAligner = alignTimestamps ? new TimestampAligner() : null; + this.yuvConverter = yuvConverter; + this.frameRefMonitor = frameRefMonitor; + + eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER); + try { + // Both these statements have been observed to fail on rare occasions, see BUG=webrtc:5682. + eglBase.createDummyPbufferSurface(); + eglBase.makeCurrent(); + } catch (RuntimeException e) { + // Clean up before rethrowing the exception. 
+ eglBase.release(); + handler.getLooper().quit(); + throw e; + } + + oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES); + surfaceTexture = new SurfaceTexture(oesTextureId); + surfaceTexture.setOnFrameAvailableListener(st -> { + if (hasPendingTexture) { + Logging.d(TAG, "A frame is already pending, dropping frame."); + } + + hasPendingTexture = true; + tryDeliverTextureFrame(); + }, handler); + } + + /** + * Start to stream textures to the given `listener`. If you need to change listener, you need to + * call stopListening() first. + */ + public void startListening(final VideoSink listener) { + if (this.listener != null || this.pendingListener != null) { + throw new IllegalStateException("SurfaceTextureHelper listener has already been set."); + } + this.pendingListener = listener; + handler.post(setListenerRunnable); + } + + /** + * Stop listening. The listener set in startListening() is guaranteded to not receive any more + * onFrame() callbacks after this function returns. + */ + public void stopListening() { + Logging.d(TAG, "stopListening()"); + handler.removeCallbacks(setListenerRunnable); + ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> { + listener = null; + pendingListener = null; + }); + } + + /** + * Use this function to set the texture size. Note, do not call setDefaultBufferSize() yourself + * since this class needs to be aware of the texture size. + */ + public void setTextureSize(int textureWidth, int textureHeight) { + if (textureWidth <= 0) { + throw new IllegalArgumentException("Texture width must be positive, but was " + textureWidth); + } + if (textureHeight <= 0) { + throw new IllegalArgumentException( + "Texture height must be positive, but was " + textureHeight); + } + surfaceTexture.setDefaultBufferSize(textureWidth, textureHeight); + handler.post(() -> { + this.textureWidth = textureWidth; + this.textureHeight = textureHeight; + tryDeliverTextureFrame(); + }); + } + + /** + * Forces a frame to be produced. 
If no new frame is available, the last frame is sent to the + * listener again. + */ + public void forceFrame() { + handler.post(() -> { + hasPendingTexture = true; + tryDeliverTextureFrame(); + }); + } + + /** Set the rotation of the delivered frames. */ + public void setFrameRotation(int rotation) { + handler.post(() -> this.frameRotation = rotation); + } + + /** + * Retrieve the underlying SurfaceTexture. The SurfaceTexture should be passed in to a video + * producer such as a camera or decoder. + */ + public SurfaceTexture getSurfaceTexture() { + return surfaceTexture; + } + + /** Retrieve the handler that calls onFrame(). This handler is valid until dispose() is called. */ + public Handler getHandler() { + return handler; + } + + /** + * This function is called when the texture frame is released. Only one texture frame can be in + * flight at once, so this function must be called before a new frame is delivered. + */ + private void returnTextureFrame() { + handler.post(() -> { + isTextureInUse = false; + if (isQuitting) { + release(); + } else { + tryDeliverTextureFrame(); + } + }); + } + + public boolean isTextureInUse() { + return isTextureInUse; + } + + /** + * Call disconnect() to stop receiving frames. OpenGL resources are released and the handler is + * stopped when the texture frame has been released. You are guaranteed to not receive any more + * onFrame() after this function returns. + */ + public void dispose() { + Logging.d(TAG, "dispose()"); + ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> { + isQuitting = true; + if (!isTextureInUse) { + release(); + } + }); + } + + /** + * Posts to the correct thread to convert `textureBuffer` to I420. + * + * @deprecated Use toI420() instead. 
+ */ + @Deprecated + public VideoFrame.I420Buffer textureToYuv(final TextureBuffer textureBuffer) { + return textureBuffer.toI420(); + } + + private void updateTexImage() { + // SurfaceTexture.updateTexImage apparently can compete and deadlock with eglSwapBuffers, + // as observed on Nexus 5. Therefore, synchronize it with the EGL functions. + // See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info. + synchronized (EglBase.lock) { + surfaceTexture.updateTexImage(); + } + } + + private void tryDeliverTextureFrame() { + if (handler.getLooper().getThread() != Thread.currentThread()) { + throw new IllegalStateException("Wrong thread."); + } + if (isQuitting || !hasPendingTexture || isTextureInUse || listener == null) { + return; + } + if (textureWidth == 0 || textureHeight == 0) { + // Information about the resolution needs to be provided by a call to setTextureSize() before + // frames are produced. + Logging.w(TAG, "Texture size has not been set."); + return; + } + isTextureInUse = true; + hasPendingTexture = false; + + updateTexImage(); + + final float[] transformMatrix = new float[16]; + surfaceTexture.getTransformMatrix(transformMatrix); + long timestampNs = surfaceTexture.getTimestamp(); + if (timestampAligner != null) { + timestampNs = timestampAligner.translateTimestamp(timestampNs); + } + final VideoFrame.TextureBuffer buffer = + new TextureBufferImpl(textureWidth, textureHeight, TextureBuffer.Type.OES, oesTextureId, + RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix), handler, + yuvConverter, textureRefCountMonitor); + if (frameRefMonitor != null) { + frameRefMonitor.onNewBuffer(buffer); + } + final VideoFrame frame = new VideoFrame(buffer, frameRotation, timestampNs); + listener.onFrame(frame); + frame.release(); + } + + private void release() { + if (handler.getLooper().getThread() != Thread.currentThread()) { + throw new IllegalStateException("Wrong thread."); + } + if (isTextureInUse || !isQuitting) { + throw new 
IllegalStateException("Unexpected release."); + } + yuvConverter.release(); + GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0); + surfaceTexture.release(); + eglBase.release(); + handler.getLooper().quit(); + if (timestampAligner != null) { + timestampAligner.dispose(); + } + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/SurfaceViewRenderer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/SurfaceViewRenderer.java new file mode 100644 index 00000000..6c9140ab --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/SurfaceViewRenderer.java @@ -0,0 +1,300 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.content.Context; +import android.content.res.Resources.NotFoundException; +import android.graphics.Point; +import android.os.Looper; +import android.util.AttributeSet; +import android.view.SurfaceHolder; +import android.view.SurfaceView; + +/** + * Display the video stream on a SurfaceView. + */ +public class SurfaceViewRenderer extends SurfaceView + implements SurfaceHolder.Callback, VideoSink, RendererCommon.RendererEvents { + private static final String TAG = "SurfaceViewRenderer"; + + // Cached resource name. + private final String resourceName; + private final RendererCommon.VideoLayoutMeasure videoLayoutMeasure = + new RendererCommon.VideoLayoutMeasure(); + private final SurfaceEglRenderer eglRenderer; + + // Callback for reporting renderer events. Read-only after initialization so no lock required. + private RendererCommon.RendererEvents rendererEvents; + + // Accessed only on the main thread. 
+ private int rotatedFrameWidth; + private int rotatedFrameHeight; + private boolean enableFixedSize; + private int surfaceWidth; + private int surfaceHeight; + + /** + * Standard View constructor. In order to render something, you must first call init(). + */ + public SurfaceViewRenderer(Context context) { + super(context); + this.resourceName = getResourceName(); + eglRenderer = new SurfaceEglRenderer(resourceName); + getHolder().addCallback(this); + getHolder().addCallback(eglRenderer); + } + + /** + * Standard View constructor. In order to render something, you must first call init(). + */ + public SurfaceViewRenderer(Context context, AttributeSet attrs) { + super(context, attrs); + this.resourceName = getResourceName(); + eglRenderer = new SurfaceEglRenderer(resourceName); + getHolder().addCallback(this); + getHolder().addCallback(eglRenderer); + } + + /** + * Initialize this class, sharing resources with `sharedContext`. It is allowed to call init() to + * reinitialize the renderer after a previous init()/release() cycle. + */ + public void init(EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) { + init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer()); + } + + /** + * Initialize this class, sharing resources with `sharedContext`. The custom `drawer` will be used + * for drawing frames on the EGLSurface. This class is responsible for calling release() on + * `drawer`. It is allowed to call init() to reinitialize the renderer after a previous + * init()/release() cycle. 
+ */ + public void init(final EglBase.Context sharedContext, + RendererCommon.RendererEvents rendererEvents, final int[] configAttributes, + RendererCommon.GlDrawer drawer) { + ThreadUtils.checkIsOnMainThread(); + this.rendererEvents = rendererEvents; + rotatedFrameWidth = 0; + rotatedFrameHeight = 0; + eglRenderer.init(sharedContext, this /* rendererEvents */, configAttributes, drawer); + } + + /** + * Block until any pending frame is returned and all GL resources released, even if an interrupt + * occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function + * should be called before the Activity is destroyed and the EGLContext is still valid. If you + * don't call this function, the GL resources might leak. + */ + public void release() { + eglRenderer.release(); + } + + /** + * Register a callback to be invoked when a new video frame has been received. + * + * @param listener The callback to be invoked. The callback will be invoked on the render thread. + * It should be lightweight and must not call removeFrameListener. + * @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is + * required. + * @param drawer Custom drawer to use for this frame listener. + */ + public void addFrameListener( + EglRenderer.FrameListener listener, float scale, RendererCommon.GlDrawer drawerParam) { + eglRenderer.addFrameListener(listener, scale, drawerParam); + } + + /** + * Register a callback to be invoked when a new video frame has been received. This version uses + * the drawer of the EglRenderer that was passed in init. + * + * @param listener The callback to be invoked. The callback will be invoked on the render thread. + * It should be lightweight and must not call removeFrameListener. + * @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is + * required. 
+ */ + public void addFrameListener(EglRenderer.FrameListener listener, float scale) { + eglRenderer.addFrameListener(listener, scale); + } + + public void removeFrameListener(EglRenderer.FrameListener listener) { + eglRenderer.removeFrameListener(listener); + } + + /** + * Enables fixed size for the surface. This provides better performance but might be buggy on some + * devices. By default this is turned off. + */ + public void setEnableHardwareScaler(boolean enabled) { + ThreadUtils.checkIsOnMainThread(); + enableFixedSize = enabled; + updateSurfaceSize(); + } + + /** + * Set if the video stream should be mirrored or not. + */ + public void setMirror(final boolean mirror) { + eglRenderer.setMirror(mirror); + } + + /** + * Set how the video will fill the allowed layout area. + */ + public void setScalingType(RendererCommon.ScalingType scalingType) { + ThreadUtils.checkIsOnMainThread(); + videoLayoutMeasure.setScalingType(scalingType); + requestLayout(); + } + + public void setScalingType(RendererCommon.ScalingType scalingTypeMatchOrientation, + RendererCommon.ScalingType scalingTypeMismatchOrientation) { + ThreadUtils.checkIsOnMainThread(); + videoLayoutMeasure.setScalingType(scalingTypeMatchOrientation, scalingTypeMismatchOrientation); + requestLayout(); + } + + /** + * Limit render framerate. + * + * @param fps Limit render framerate to this value, or use Float.POSITIVE_INFINITY to disable fps + * reduction. + */ + public void setFpsReduction(float fps) { + eglRenderer.setFpsReduction(fps); + } + + public void disableFpsReduction() { + eglRenderer.disableFpsReduction(); + } + + public void pauseVideo() { + eglRenderer.pauseVideo(); + } + + // VideoSink interface. + @Override + public void onFrame(VideoFrame frame) { + eglRenderer.onFrame(frame); + } + + // View layout interface. 
+ @Override + protected void onMeasure(int widthSpec, int heightSpec) { + ThreadUtils.checkIsOnMainThread(); + Point size = + videoLayoutMeasure.measure(widthSpec, heightSpec, rotatedFrameWidth, rotatedFrameHeight); + setMeasuredDimension(size.x, size.y); + logD("onMeasure(). New size: " + size.x + "x" + size.y); + } + + @Override + protected void onLayout(boolean changed, int left, int top, int right, int bottom) { + ThreadUtils.checkIsOnMainThread(); + eglRenderer.setLayoutAspectRatio((right - left) / (float) (bottom - top)); + updateSurfaceSize(); + } + + private void updateSurfaceSize() { + ThreadUtils.checkIsOnMainThread(); + if (enableFixedSize && rotatedFrameWidth != 0 && rotatedFrameHeight != 0 && getWidth() != 0 + && getHeight() != 0) { + final float layoutAspectRatio = getWidth() / (float) getHeight(); + final float frameAspectRatio = rotatedFrameWidth / (float) rotatedFrameHeight; + final int drawnFrameWidth; + final int drawnFrameHeight; + if (frameAspectRatio > layoutAspectRatio) { + drawnFrameWidth = (int) (rotatedFrameHeight * layoutAspectRatio); + drawnFrameHeight = rotatedFrameHeight; + } else { + drawnFrameWidth = rotatedFrameWidth; + drawnFrameHeight = (int) (rotatedFrameWidth / layoutAspectRatio); + } + // Aspect ratio of the drawn frame and the view is the same. + final int width = Math.min(getWidth(), drawnFrameWidth); + final int height = Math.min(getHeight(), drawnFrameHeight); + logD("updateSurfaceSize. Layout size: " + getWidth() + "x" + getHeight() + ", frame size: " + + rotatedFrameWidth + "x" + rotatedFrameHeight + ", requested surface size: " + width + + "x" + height + ", old surface size: " + surfaceWidth + "x" + surfaceHeight); + if (width != surfaceWidth || height != surfaceHeight) { + surfaceWidth = width; + surfaceHeight = height; + getHolder().setFixedSize(width, height); + } + } else { + surfaceWidth = surfaceHeight = 0; + getHolder().setSizeFromLayout(); + } + } + + // SurfaceHolder.Callback interface. 
+ @Override + public void surfaceCreated(final SurfaceHolder holder) { + ThreadUtils.checkIsOnMainThread(); + surfaceWidth = surfaceHeight = 0; + updateSurfaceSize(); + } + + @Override + public void surfaceDestroyed(SurfaceHolder holder) {} + + @Override + public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {} + + private String getResourceName() { + try { + return getResources().getResourceEntryName(getId()); + } catch (NotFoundException e) { + return ""; + } + } + + /** + * Post a task to clear the SurfaceView to a transparent uniform color. + */ + public void clearImage() { + eglRenderer.clearImage(); + } + + @Override + public void onFirstFrameRendered() { + if (rendererEvents != null) { + rendererEvents.onFirstFrameRendered(); + } + } + + @Override + public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation) { + if (rendererEvents != null) { + rendererEvents.onFrameResolutionChanged(videoWidth, videoHeight, rotation); + } + int rotatedWidth = rotation == 0 || rotation == 180 ? videoWidth : videoHeight; + int rotatedHeight = rotation == 0 || rotation == 180 ? videoHeight : videoWidth; + // run immediately if possible for ui thread tests + postOrRun(() -> { + rotatedFrameWidth = rotatedWidth; + rotatedFrameHeight = rotatedHeight; + updateSurfaceSize(); + requestLayout(); + }); + } + + private void postOrRun(Runnable r) { + if (Thread.currentThread() == Looper.getMainLooper().getThread()) { + r.run(); + } else { + post(r); + } + } + + private void logD(String string) { + Logging.d(TAG, resourceName + ": " + string); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/TextureBufferImpl.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/TextureBufferImpl.java new file mode 100644 index 00000000..7e2bed84 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/TextureBufferImpl.java @@ -0,0 +1,197 @@ +/* + * Copyright 2017 The WebRTC project authors. 
All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.graphics.Matrix; +import android.os.Handler; +import androidx.annotation.Nullable; + +/** + * Android texture buffer that glues together the necessary information together with a generic + * release callback. ToI420() is implemented by providing a Handler and a YuvConverter. + */ +public class TextureBufferImpl implements VideoFrame.TextureBuffer { + interface RefCountMonitor { + void onRetain(TextureBufferImpl textureBuffer); + void onRelease(TextureBufferImpl textureBuffer); + void onDestroy(TextureBufferImpl textureBuffer); + } + + // This is the full resolution the texture has in memory after applying the transformation matrix + // that might include cropping. This resolution is useful to know when sampling the texture to + // avoid downscaling artifacts. + private final int unscaledWidth; + private final int unscaledHeight; + // This is the resolution that has been applied after cropAndScale(). 
+ private final int width; + private final int height; + private final Type type; + private final int id; + private final Matrix transformMatrix; + private final Handler toI420Handler; + private final YuvConverter yuvConverter; + private final RefCountDelegate refCountDelegate; + private final RefCountMonitor refCountMonitor; + + public TextureBufferImpl(int width, int height, Type type, int id, Matrix transformMatrix, + Handler toI420Handler, YuvConverter yuvConverter, @Nullable Runnable releaseCallback) { + this(width, height, width, height, type, id, transformMatrix, toI420Handler, yuvConverter, + new RefCountMonitor() { + @Override + public void onRetain(TextureBufferImpl textureBuffer) {} + + @Override + public void onRelease(TextureBufferImpl textureBuffer) {} + + @Override + public void onDestroy(TextureBufferImpl textureBuffer) { + if (releaseCallback != null) { + releaseCallback.run(); + } + } + }); + } + + TextureBufferImpl(int width, int height, Type type, int id, Matrix transformMatrix, + Handler toI420Handler, YuvConverter yuvConverter, RefCountMonitor refCountMonitor) { + this(width, height, width, height, type, id, transformMatrix, toI420Handler, yuvConverter, + refCountMonitor); + } + + private TextureBufferImpl(int unscaledWidth, int unscaledHeight, int width, int height, Type type, + int id, Matrix transformMatrix, Handler toI420Handler, YuvConverter yuvConverter, + RefCountMonitor refCountMonitor) { + this.unscaledWidth = unscaledWidth; + this.unscaledHeight = unscaledHeight; + this.width = width; + this.height = height; + this.type = type; + this.id = id; + this.transformMatrix = transformMatrix; + this.toI420Handler = toI420Handler; + this.yuvConverter = yuvConverter; + this.refCountDelegate = new RefCountDelegate(() -> refCountMonitor.onDestroy(this)); + this.refCountMonitor = refCountMonitor; + } + + @Override + public VideoFrame.TextureBuffer.Type getType() { + return type; + } + + @Override + public int getTextureId() { + return id; + } + + 
@Override + public Matrix getTransformMatrix() { + return transformMatrix; + } + + @Override + public int getWidth() { + return width; + } + + @Override + public int getHeight() { + return height; + } + + @Override + public VideoFrame.I420Buffer toI420() { + return ThreadUtils.invokeAtFrontUninterruptibly( + toI420Handler, () -> yuvConverter.convert(this)); + } + + @Override + public void retain() { + refCountMonitor.onRetain(this); + refCountDelegate.retain(); + } + + @Override + public void release() { + refCountMonitor.onRelease(this); + refCountDelegate.release(); + } + + @Override + public VideoFrame.Buffer cropAndScale( + int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) { + final Matrix cropAndScaleMatrix = new Matrix(); + // In WebRTC, Y=0 is the top row, while in OpenGL Y=0 is the bottom row. This means that the Y + // direction is effectively reversed. + final int cropYFromBottom = height - (cropY + cropHeight); + cropAndScaleMatrix.preTranslate(cropX / (float) width, cropYFromBottom / (float) height); + cropAndScaleMatrix.preScale(cropWidth / (float) width, cropHeight / (float) height); + + return applyTransformMatrix(cropAndScaleMatrix, + (int) Math.round(unscaledWidth * cropWidth / (float) width), + (int) Math.round(unscaledHeight * cropHeight / (float) height), scaleWidth, scaleHeight); + } + + @Override + public int getUnscaledWidth() { + return unscaledWidth; + } + + @Override + public int getUnscaledHeight() { + return unscaledHeight; + } + + public Handler getToI420Handler() { + return toI420Handler; + } + + public YuvConverter getYuvConverter() { + return yuvConverter; + } + + /** + * Create a new TextureBufferImpl with an applied transform matrix and a new size. The + * existing buffer is unchanged. The given transform matrix is applied first when texture + * coordinates are still in the unmodified [0, 1] range. 
+ */ + @Override + public TextureBufferImpl applyTransformMatrix( + Matrix transformMatrix, int newWidth, int newHeight) { + return applyTransformMatrix(transformMatrix, /* unscaledWidth= */ newWidth, + /* unscaledHeight= */ newHeight, /* scaledWidth= */ newWidth, + /* scaledHeight= */ newHeight); + } + + private TextureBufferImpl applyTransformMatrix(Matrix transformMatrix, int unscaledWidth, + int unscaledHeight, int scaledWidth, int scaledHeight) { + final Matrix newMatrix = new Matrix(this.transformMatrix); + newMatrix.preConcat(transformMatrix); + retain(); + return new TextureBufferImpl(unscaledWidth, unscaledHeight, scaledWidth, scaledHeight, type, id, + newMatrix, toI420Handler, yuvConverter, new RefCountMonitor() { + @Override + public void onRetain(TextureBufferImpl textureBuffer) { + refCountMonitor.onRetain(TextureBufferImpl.this); + } + + @Override + public void onRelease(TextureBufferImpl textureBuffer) { + refCountMonitor.onRelease(TextureBufferImpl.this); + } + + @Override + public void onDestroy(TextureBufferImpl textureBuffer) { + release(); + } + }); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/ThreadUtils.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/ThreadUtils.java new file mode 100644 index 00000000..0c502b1b --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/ThreadUtils.java @@ -0,0 +1,212 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import android.os.Handler; +import android.os.Looper; +import android.os.SystemClock; +import androidx.annotation.Nullable; +import java.util.concurrent.Callable; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +public class ThreadUtils { + /** + * Utility class to be used for checking that a method is called on the correct thread. + */ + public static class ThreadChecker { + @Nullable private Thread thread = Thread.currentThread(); + + public void checkIsOnValidThread() { + if (thread == null) { + thread = Thread.currentThread(); + } + if (Thread.currentThread() != thread) { + throw new IllegalStateException("Wrong thread"); + } + } + + public void detachThread() { + thread = null; + } + } + + /** + * Throws exception if called from other than main thread. + */ + public static void checkIsOnMainThread() { + if (Thread.currentThread() != Looper.getMainLooper().getThread()) { + throw new IllegalStateException("Not on main thread!"); + } + } + + /** + * Utility interface to be used with executeUninterruptibly() to wait for blocking operations + * to complete without getting interrupted.. + */ + public interface BlockingOperation { void run() throws InterruptedException; } + + /** + * Utility method to make sure a blocking operation is executed to completion without getting + * interrupted. This should be used in cases where the operation is waiting for some critical + * work, e.g. cleanup, that must complete before returning. If the thread is interrupted during + * the blocking operation, this function will re-run the operation until completion, and only then + * re-interrupt the thread. + */ + public static void executeUninterruptibly(BlockingOperation operation) { + boolean wasInterrupted = false; + while (true) { + try { + operation.run(); + break; + } catch (InterruptedException e) { + // Someone is asking us to return early at our convenience. 
We can't cancel this operation, + // but we should preserve the information and pass it along. + wasInterrupted = true; + } + } + // Pass interruption information along. + if (wasInterrupted) { + Thread.currentThread().interrupt(); + } + } + + public static boolean joinUninterruptibly(final Thread thread, long timeoutMs) { + final long startTimeMs = SystemClock.elapsedRealtime(); + long timeRemainingMs = timeoutMs; + boolean wasInterrupted = false; + while (timeRemainingMs > 0) { + try { + thread.join(timeRemainingMs); + break; + } catch (InterruptedException e) { + // Someone is asking us to return early at our convenience. We can't cancel this operation, + // but we should preserve the information and pass it along. + wasInterrupted = true; + final long elapsedTimeMs = SystemClock.elapsedRealtime() - startTimeMs; + timeRemainingMs = timeoutMs - elapsedTimeMs; + } + } + // Pass interruption information along. + if (wasInterrupted) { + Thread.currentThread().interrupt(); + } + return !thread.isAlive(); + } + + public static void joinUninterruptibly(final Thread thread) { + executeUninterruptibly(new BlockingOperation() { + @Override + public void run() throws InterruptedException { + thread.join(); + } + }); + } + + public static void awaitUninterruptibly(final CountDownLatch latch) { + executeUninterruptibly(new BlockingOperation() { + @Override + public void run() throws InterruptedException { + latch.await(); + } + }); + } + + public static boolean awaitUninterruptibly(CountDownLatch barrier, long timeoutMs) { + final long startTimeMs = SystemClock.elapsedRealtime(); + long timeRemainingMs = timeoutMs; + boolean wasInterrupted = false; + boolean result = false; + do { + try { + result = barrier.await(timeRemainingMs, TimeUnit.MILLISECONDS); + break; + } catch (InterruptedException e) { + // Someone is asking us to return early at our convenience. We can't cancel this operation, + // but we should preserve the information and pass it along. 
+ wasInterrupted = true; + final long elapsedTimeMs = SystemClock.elapsedRealtime() - startTimeMs; + timeRemainingMs = timeoutMs - elapsedTimeMs; + } + } while (timeRemainingMs > 0); + // Pass interruption information along. + if (wasInterrupted) { + Thread.currentThread().interrupt(); + } + return result; + } + + /** + * Post `callable` to `handler` and wait for the result. + */ + public static <V> V invokeAtFrontUninterruptibly( + final Handler handler, final Callable<V> callable) { + if (handler.getLooper().getThread() == Thread.currentThread()) { + try { + return callable.call(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + // Place-holder classes that are assignable inside nested class. + class CaughtException { + Exception e; + } + class Result { + public V value; + } + final Result result = new Result(); + final CaughtException caughtException = new CaughtException(); + final CountDownLatch barrier = new CountDownLatch(1); + handler.post(new Runnable() { + @Override + public void run() { + try { + result.value = callable.call(); + } catch (Exception e) { + caughtException.e = e; + } + barrier.countDown(); + } + }); + awaitUninterruptibly(barrier); + // Re-throw any runtime exception caught inside the other thread. Since this is an invoke, add + // stack trace for the waiting thread as well. + if (caughtException.e != null) { + final RuntimeException runtimeException = new RuntimeException(caughtException.e); + runtimeException.setStackTrace( + concatStackTraces(caughtException.e.getStackTrace(), runtimeException.getStackTrace())); + throw runtimeException; + } + return result.value; + } + + /** + * Post `runner` to `handler`, at the front, and wait for completion. 
+ */ + public static void invokeAtFrontUninterruptibly(final Handler handler, final Runnable runner) { + invokeAtFrontUninterruptibly(handler, new Callable<Void>() { + @Override + public Void call() { + runner.run(); + return null; + } + }); + } + + static StackTraceElement[] concatStackTraces( + StackTraceElement[] inner, StackTraceElement[] outer) { + final StackTraceElement[] combined = new StackTraceElement[inner.length + outer.length]; + System.arraycopy(inner, 0, combined, 0, inner.length); + System.arraycopy(outer, 0, combined, inner.length, outer.length); + return combined; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/TimestampAligner.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/TimestampAligner.java new file mode 100644 index 00000000..d96c9395 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/TimestampAligner.java @@ -0,0 +1,59 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * The TimestampAligner class helps translating camera timestamps into the same timescale as is + * used by rtc::TimeNanos(). Some cameras have built in timestamping which is more accurate than + * reading the system clock, but using a different epoch and unknown clock drift. Frame timestamps + * in webrtc should use rtc::TimeNanos (system monotonic time), and this class provides a filter + * which lets us use the rtc::TimeNanos timescale, and at the same time take advantage of higher + * accuracy of the camera clock. This class is a wrapper on top of rtc::TimestampAligner. 
+ */ +public class TimestampAligner { + /** + * Wrapper around rtc::TimeNanos(). This is normally same as System.nanoTime(), but call this + * function to be safe. + */ + public static long getRtcTimeNanos() { + return nativeRtcTimeNanos(); + } + + private volatile long nativeTimestampAligner = nativeCreateTimestampAligner(); + + /** + * Translates camera timestamps to the same timescale as is used by rtc::TimeNanos(). + * `cameraTimeNs` is assumed to be accurate, but with an unknown epoch and clock drift. Returns + * the translated timestamp. + */ + public long translateTimestamp(long cameraTimeNs) { + checkNativeAlignerExists(); + return nativeTranslateTimestamp(nativeTimestampAligner, cameraTimeNs); + } + + /** Dispose native timestamp aligner. */ + public void dispose() { + checkNativeAlignerExists(); + nativeReleaseTimestampAligner(nativeTimestampAligner); + nativeTimestampAligner = 0; + } + + private void checkNativeAlignerExists() { + if (nativeTimestampAligner == 0) { + throw new IllegalStateException("TimestampAligner has been disposed."); + } + } + + private static native long nativeRtcTimeNanos(); + private static native long nativeCreateTimestampAligner(); + private static native void nativeReleaseTimestampAligner(long timestampAligner); + private static native long nativeTranslateTimestamp(long timestampAligner, long cameraTimeNs); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/TurnCustomizer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/TurnCustomizer.java new file mode 100644 index 00000000..41bedb7d --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/TurnCustomizer.java @@ -0,0 +1,41 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** Java wrapper for a C++ TurnCustomizer. */ +public class TurnCustomizer { + private long nativeTurnCustomizer; + + public TurnCustomizer(long nativeTurnCustomizer) { + this.nativeTurnCustomizer = nativeTurnCustomizer; + } + + public void dispose() { + checkTurnCustomizerExists(); + nativeFreeTurnCustomizer(nativeTurnCustomizer); + nativeTurnCustomizer = 0; + } + + private static native void nativeFreeTurnCustomizer(long turnCustomizer); + + /** Return a pointer to webrtc::TurnCustomizer. */ + @CalledByNative + long getNativeTurnCustomizer() { + checkTurnCustomizerExists(); + return nativeTurnCustomizer; + } + + private void checkTurnCustomizerExists() { + if (nativeTurnCustomizer == 0) { + throw new IllegalStateException("TurnCustomizer has been disposed."); + } + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoCapturer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoCapturer.java new file mode 100644 index 00000000..67eb7ab0 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoCapturer.java @@ -0,0 +1,53 @@ +/* + * Copyright 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.content.Context; + +// Base interface for all VideoCapturers to implement. +public interface VideoCapturer { + /** + * This function is used to initialize the camera thread, the android application context, and the + * capture observer. It will be called only once and before any startCapture() request. 
The + * camera thread is guaranteed to be valid until dispose() is called. If the VideoCapturer wants + * to deliver texture frames, it should do this by rendering on the SurfaceTexture in + * {@code surfaceTextureHelper}, register itself as a listener, and forward the frames to + * CapturerObserver.onFrameCaptured(). The caller still has ownership of {@code + * surfaceTextureHelper} and is responsible for making sure surfaceTextureHelper.dispose() is + * called. This also means that the caller can reuse the SurfaceTextureHelper to initialize a new + * VideoCapturer once the previous VideoCapturer has been disposed. + */ + void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext, + CapturerObserver capturerObserver); + + /** + * Start capturing frames in a format that is as close as possible to {@code width x height} and + * {@code framerate}. + */ + void startCapture(int width, int height, int framerate); + + /** + * Stop capturing. This function should block until capture is actually stopped. + */ + void stopCapture() throws InterruptedException; + + void changeCaptureFormat(int width, int height, int framerate); + + /** + * Perform any final cleanup here. No more capturing will be done after this call. + */ + void dispose(); + + /** + * @return true if-and-only-if this is a screen capturer. + */ + boolean isScreencast(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoCodecInfo.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoCodecInfo.java new file mode 100644 index 00000000..4f97cf74 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoCodecInfo.java @@ -0,0 +1,86 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.util.Arrays; +import java.util.Locale; +import java.util.Map; + +/** + * Represent a video codec as encoded in SDP. + */ +public class VideoCodecInfo { + // Keys for H264 VideoCodecInfo properties. + public static final String H264_FMTP_PROFILE_LEVEL_ID = "profile-level-id"; + public static final String H264_FMTP_LEVEL_ASYMMETRY_ALLOWED = "level-asymmetry-allowed"; + public static final String H264_FMTP_PACKETIZATION_MODE = "packetization-mode"; + + public static final String H264_PROFILE_CONSTRAINED_BASELINE = "42e0"; + public static final String H264_PROFILE_CONSTRAINED_HIGH = "640c"; + public static final String H264_LEVEL_3_1 = "1f"; // 31 in hex. + public static final String H264_CONSTRAINED_HIGH_3_1 = + H264_PROFILE_CONSTRAINED_HIGH + H264_LEVEL_3_1; + public static final String H264_CONSTRAINED_BASELINE_3_1 = + H264_PROFILE_CONSTRAINED_BASELINE + H264_LEVEL_3_1; + + public final String name; + public final Map<String, String> params; + @Deprecated public final int payload; + + @CalledByNative + public VideoCodecInfo(String name, Map<String, String> params) { + this.payload = 0; + this.name = name; + this.params = params; + } + + @Deprecated + public VideoCodecInfo(int payload, String name, Map<String, String> params) { + this.payload = payload; + this.name = name; + this.params = params; + } + + @Override + public boolean equals(@Nullable Object obj) { + if (obj == null) + return false; + if (obj == this) + return true; + if (!(obj instanceof VideoCodecInfo)) + return false; + + VideoCodecInfo otherInfo = (VideoCodecInfo) obj; + return name.equalsIgnoreCase(otherInfo.name) && params.equals(otherInfo.params); + } + + @Override + public int hashCode() { + Object[] values = {name.toUpperCase(Locale.ROOT), params}; + return Arrays.hashCode(values); + } + + @Override + public String toString() { + return "VideoCodec{" + name + " " 
+ params + "}"; + } + + @CalledByNative + String getName() { + return name; + } + + @CalledByNative + Map<String, String> getParams() { + return params; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoCodecMimeType.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoCodecMimeType.java new file mode 100644 index 00000000..5538f320 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoCodecMimeType.java @@ -0,0 +1,30 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** Enumeration of supported video codec types. */ +enum VideoCodecMimeType { + VP8("video/x-vnd.on2.vp8"), + VP9("video/x-vnd.on2.vp9"), + H264("video/avc"), + AV1("video/av01"), + H265("video/hevc"); + + private final String mimeType; + + private VideoCodecMimeType(String mimeType) { + this.mimeType = mimeType; + } + + String mimeType() { + return mimeType; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoCodecStatus.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoCodecStatus.java new file mode 100644 index 00000000..670d2558 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoCodecStatus.java @@ -0,0 +1,42 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +/** + * Status codes reported by video encoding/decoding components. This should be kept in sync with + * video_error_codes.h. + */ +public enum VideoCodecStatus { + TARGET_BITRATE_OVERSHOOT(5), + REQUEST_SLI(2), + NO_OUTPUT(1), + OK(0), + ERROR(-1), + LEVEL_EXCEEDED(-2), + MEMORY(-3), + ERR_PARAMETER(-4), + ERR_SIZE(-5), + TIMEOUT(-6), + UNINITIALIZED(-7), + ERR_REQUEST_SLI(-12), + FALLBACK_SOFTWARE(-13); + + private final int number; + + private VideoCodecStatus(int number) { + this.number = number; + } + + @CalledByNative + public int getNumber() { + return number; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoCodecType.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoCodecType.java new file mode 100644 index 00000000..37928dbf --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoCodecType.java @@ -0,0 +1,19 @@ +// +// Source code recreated from a .class file by IntelliJ IDEA +// (powered by FernFlower decompiler) +// + +package org.webrtc; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; + +@Retention(RetentionPolicy.SOURCE) +public @interface VideoCodecType { + int VIDEO_CODEC_GENERIC = 0; + int VIDEO_CODEC_VP8 = 1; + int VIDEO_CODEC_VP9 = 2; + int VIDEO_CODEC_AV1 = 3; + int VIDEO_CODEC_H264 = 4; + int VIDEO_CODEC_MULTIPLEX = 5; +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoDecoder.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoDecoder.java new file mode 100644 index 00000000..a80fa4fe --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoDecoder.java @@ -0,0 +1,94 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * Interface for a video decoder that can be used in WebRTC. All calls to the class will be made on + * a single decoding thread. + */ +public interface VideoDecoder { + /** Settings passed to the decoder by WebRTC. */ + public class Settings { + public final int numberOfCores; + public final int width; + public final int height; + + @CalledByNative("Settings") + public Settings(int numberOfCores, int width, int height) { + this.numberOfCores = numberOfCores; + this.width = width; + this.height = height; + } + } + + /** Additional info for decoding. */ + public class DecodeInfo { + public final boolean isMissingFrames; + public final long renderTimeMs; + + public DecodeInfo(boolean isMissingFrames, long renderTimeMs) { + this.isMissingFrames = isMissingFrames; + this.renderTimeMs = renderTimeMs; + } + } + + public interface Callback { + /** + * Call to return a decoded frame. Can be called on any thread. + * + * @param frame Decoded frame + * @param decodeTimeMs Time it took to decode the frame in milliseconds or null if not available + * @param qp QP value of the decoded frame or null if not available + */ + void onDecodedFrame(VideoFrame frame, Integer decodeTimeMs, Integer qp); + } + + /** + * The decoder implementation backing this interface is either 1) a Java + * decoder (e.g., an Android platform decoder), or alternatively 2) a native + * decoder (e.g., a software decoder or a C++ decoder adapter). + * + * For case 1), createNativeVideoDecoder() should return zero. + * In this case, we expect the native library to call the decoder through + * JNI using the Java interface declared below. + * + * For case 2), createNativeVideoDecoder() should return a non-zero value. 
+ * In this case, we expect the native library to treat the returned value as + * a raw pointer of type webrtc::VideoDecoder* (ownership is transferred to + * the caller). The native library should then directly call the + * webrtc::VideoDecoder interface without going through JNI. All calls to + * the Java interface methods declared below should thus throw an + * UnsupportedOperationException. + */ + @CalledByNative + default long createNativeVideoDecoder() { + return 0; + } + + /** + * Initializes the decoding process with specified settings. Will be called on the decoding thread + * before any decode calls. + */ + @CalledByNative VideoCodecStatus initDecode(Settings settings, Callback decodeCallback); + /** + * Called when the decoder is no longer needed. Any more calls to decode will not be made. + */ + @CalledByNative VideoCodecStatus release(); + /** + * Request the decoder to decode a frame. + */ + @CalledByNative VideoCodecStatus decode(EncodedImage frame, DecodeInfo info); + /** + * Should return a descriptive name for the implementation. Gets called once and cached. May be + * called from arbitrary thread. + */ + @CalledByNative String getImplementationName(); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoDecoderFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoDecoderFactory.java new file mode 100644 index 00000000..8b25516e --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoDecoderFactory.java @@ -0,0 +1,30 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import androidx.annotation.Nullable; + +/** Factory for creating VideoDecoders. */ +public interface VideoDecoderFactory { + /** + * Creates a VideoDecoder for the given codec. Supports the same codecs supported by + * VideoEncoderFactory. + */ + @Nullable @CalledByNative VideoDecoder createDecoder(VideoCodecInfo info); + + /** + * Enumerates the list of supported video codecs. + */ + @CalledByNative + default VideoCodecInfo[] getSupportedCodecs() { + return new VideoCodecInfo[0]; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoDecoderFallback.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoDecoderFallback.java new file mode 100644 index 00000000..ddfa3ecd --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoDecoderFallback.java @@ -0,0 +1,31 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * A combined video decoder that falls back on a secondary decoder if the primary decoder fails. 
+ */ +public class VideoDecoderFallback extends WrappedNativeVideoDecoder { + private final VideoDecoder fallback; + private final VideoDecoder primary; + + public VideoDecoderFallback(VideoDecoder fallback, VideoDecoder primary) { + this.fallback = fallback; + this.primary = primary; + } + + @Override + public long createNativeVideoDecoder() { + return nativeCreateDecoder(fallback, primary); + } + + private static native long nativeCreateDecoder(VideoDecoder fallback, VideoDecoder primary); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoDecoderWrapper.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoDecoderWrapper.java new file mode 100644 index 00000000..2aae0416 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoDecoderWrapper.java @@ -0,0 +1,27 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import org.webrtc.VideoDecoder; + +/** + * This class contains the Java glue code for JNI generation of VideoDecoder. 
+ */ +class VideoDecoderWrapper { + @CalledByNative + static VideoDecoder.Callback createDecoderCallback(final long nativeDecoder) { + return (VideoFrame frame, Integer decodeTimeMs, + Integer qp) -> nativeOnDecodedFrame(nativeDecoder, frame, decodeTimeMs, qp); + } + + private static native void nativeOnDecodedFrame( + long nativeVideoDecoderWrapper, VideoFrame frame, Integer decodeTimeMs, Integer qp); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoEncoder.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoEncoder.java new file mode 100644 index 00000000..0d8cf830 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoEncoder.java @@ -0,0 +1,385 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import org.webrtc.EncodedImage; + +/** + * Interface for a video encoder that can be used with WebRTC. All calls will be made on the + * encoding thread. The encoder may be constructed on a different thread and changing thread after + * calling release is allowed. + */ +public interface VideoEncoder { + /** Settings passed to the encoder by WebRTC. */ + public class Settings { + public final int numberOfCores; + public final int width; + public final int height; + public final int startBitrate; // Kilobits per second. + public final int maxFramerate; + public final int numberOfSimulcastStreams; + public final boolean automaticResizeOn; + public final Capabilities capabilities; + + // TODO(bugs.webrtc.org/10720): Remove. 
+ @Deprecated + public Settings(int numberOfCores, int width, int height, int startBitrate, int maxFramerate, + int numberOfSimulcastStreams, boolean automaticResizeOn) { + this(numberOfCores, width, height, startBitrate, maxFramerate, numberOfSimulcastStreams, + automaticResizeOn, new VideoEncoder.Capabilities(false /* lossNotification */)); + } + + @CalledByNative("Settings") + public Settings(int numberOfCores, int width, int height, int startBitrate, int maxFramerate, + int numberOfSimulcastStreams, boolean automaticResizeOn, Capabilities capabilities) { + this.numberOfCores = numberOfCores; + this.width = width; + this.height = height; + this.startBitrate = startBitrate; + this.maxFramerate = maxFramerate; + this.numberOfSimulcastStreams = numberOfSimulcastStreams; + this.automaticResizeOn = automaticResizeOn; + this.capabilities = capabilities; + } + } + + /** Capabilities (loss notification, etc.) passed to the encoder by WebRTC. */ + public class Capabilities { + /** + * The remote side has support for the loss notification RTCP feedback message format, and will + * be sending these feedback messages if necessary. + */ + public final boolean lossNotification; + + @CalledByNative("Capabilities") + public Capabilities(boolean lossNotification) { + this.lossNotification = lossNotification; + } + } + + /** Additional info for encoding. */ + public class EncodeInfo { + public final EncodedImage.FrameType[] frameTypes; + + @CalledByNative("EncodeInfo") + public EncodeInfo(EncodedImage.FrameType[] frameTypes) { + this.frameTypes = frameTypes; + } + } + + // TODO(sakal): Add values to these classes as necessary. + /** Codec specific information about the encoded frame. 
*/ + public class CodecSpecificInfo {} + + public class CodecSpecificInfoVP8 extends CodecSpecificInfo {} + + public class CodecSpecificInfoVP9 extends CodecSpecificInfo {} + + public class CodecSpecificInfoH264 extends CodecSpecificInfo {} + + public class CodecSpecificInfoAV1 extends CodecSpecificInfo {} + + /** + * Represents bitrate allocated for an encoder to produce frames. Bitrate can be divided between + * spatial and temporal layers. + */ + public class BitrateAllocation { + // First index is the spatial layer and second the temporal layer. + public final int[][] bitratesBbs; + + /** + * Initializes the allocation with a two dimensional array of bitrates. The first index of the + * array is the spatial layer and the second index in the temporal layer. + */ + @CalledByNative("BitrateAllocation") + public BitrateAllocation(int[][] bitratesBbs) { + this.bitratesBbs = bitratesBbs; + } + + /** + * Gets the total bitrate allocated for all layers. + */ + public int getSum() { + int sum = 0; + for (int[] spatialLayer : bitratesBbs) { + for (int bitrate : spatialLayer) { + sum += bitrate; + } + } + return sum; + } + } + + /** Settings for WebRTC quality based scaling. */ + public class ScalingSettings { + public final boolean on; + @Nullable public final Integer low; + @Nullable public final Integer high; + + /** + * Settings to disable quality based scaling. + */ + public static final ScalingSettings OFF = new ScalingSettings(); + + /** + * Creates settings to enable quality based scaling. + * + * @param low Average QP at which to scale up the resolution. + * @param high Average QP at which to scale down the resolution. + */ + public ScalingSettings(int low, int high) { + this.on = true; + this.low = low; + this.high = high; + } + + private ScalingSettings() { + this.on = false; + this.low = null; + this.high = null; + } + + // TODO(bugs.webrtc.org/8830): Below constructors are deprecated. 
+ // Default thresholds are going away, so thresholds have to be set + // when scaling is on. + /** + * Creates quality based scaling setting. + * + * @param on True if quality scaling is turned on. + */ + @Deprecated + public ScalingSettings(boolean on) { + this.on = on; + this.low = null; + this.high = null; + } + + /** + * Creates quality based scaling settings with custom thresholds. + * + * @param on True if quality scaling is turned on. + * @param low Average QP at which to scale up the resolution. + * @param high Average QP at which to scale down the resolution. + */ + @Deprecated + public ScalingSettings(boolean on, int low, int high) { + this.on = on; + this.low = low; + this.high = high; + } + + @Override + public String toString() { + return on ? "[ " + low + ", " + high + " ]" : "OFF"; + } + } + + /** + * Bitrate limits for resolution. + */ + public class ResolutionBitrateLimits { + /** + * Maximum size of video frame, in pixels, the bitrate limits are intended for. + */ + public final int frameSizePixels; + + /** + * Recommended minimum bitrate to start encoding. + */ + public final int minStartBitrateBps; + + /** + * Recommended minimum bitrate. + */ + public final int minBitrateBps; + + /** + * Recommended maximum bitrate. 
+ */ + public final int maxBitrateBps; + + public ResolutionBitrateLimits( + int frameSizePixels, int minStartBitrateBps, int minBitrateBps, int maxBitrateBps) { + this.frameSizePixels = frameSizePixels; + this.minStartBitrateBps = minStartBitrateBps; + this.minBitrateBps = minBitrateBps; + this.maxBitrateBps = maxBitrateBps; + } + + @CalledByNative("ResolutionBitrateLimits") + public int getFrameSizePixels() { + return frameSizePixels; + } + + @CalledByNative("ResolutionBitrateLimits") + public int getMinStartBitrateBps() { + return minStartBitrateBps; + } + + @CalledByNative("ResolutionBitrateLimits") + public int getMinBitrateBps() { + return minBitrateBps; + } + + @CalledByNative("ResolutionBitrateLimits") + public int getMaxBitrateBps() { + return maxBitrateBps; + } + } + + /** Rate control parameters. */ + public class RateControlParameters { + /** + * Adjusted target bitrate, per spatial/temporal layer. May be lower or higher than the target + * depending on encoder behaviour. + */ + public final BitrateAllocation bitrate; + + /** + * Target framerate, in fps. A value <= 0.0 is invalid and should be interpreted as framerate + * target not available. In this case the encoder should fall back to the max framerate + * specified in `codec_settings` of the last InitEncode() call. + */ + public final double framerateFps; + + @CalledByNative("RateControlParameters") + public RateControlParameters(BitrateAllocation bitrate, double framerateFps) { + this.bitrate = bitrate; + this.framerateFps = framerateFps; + } + } + + /** + * Metadata about the Encoder. + */ + public class EncoderInfo { + /** + * The width and height of the incoming video frames should be divisible by + * |requested_resolution_alignment| + */ + public final int requestedResolutionAlignment; + + /** + * Same as above but if true, each simulcast layer should also be divisible by + * |requested_resolution_alignment|. 
+ */ + public final boolean applyAlignmentToAllSimulcastLayers; + + public EncoderInfo( + int requestedResolutionAlignment, boolean applyAlignmentToAllSimulcastLayers) { + this.requestedResolutionAlignment = requestedResolutionAlignment; + this.applyAlignmentToAllSimulcastLayers = applyAlignmentToAllSimulcastLayers; + } + + @CalledByNative("EncoderInfo") + public int getRequestedResolutionAlignment() { + return requestedResolutionAlignment; + } + + @CalledByNative("EncoderInfo") + public boolean getApplyAlignmentToAllSimulcastLayers() { + return applyAlignmentToAllSimulcastLayers; + } + } + + public interface Callback { + /** + * Old encoders assume that the byte buffer held by `frame` is not accessed after the call to + * this method returns. If the pipeline downstream needs to hold on to the buffer, it then has + * to make its own copy. We want to move to a model where no copying is needed, and instead use + * retain()/release() to signal to the encoder when it is safe to reuse the buffer. + * + * Over the transition, implementations of this class should use the maybeRetain() method if + * they want to keep a reference to the buffer, and fall back to copying if that method returns + * false. + */ + void onEncodedFrame(EncodedImage frame, CodecSpecificInfo info); + } + + /** + * The encoder implementation backing this interface is either 1) a Java + * encoder (e.g., an Android platform encoder), or alternatively 2) a native + * encoder (e.g., a software encoder or a C++ encoder adapter). + * + * For case 1), createNativeVideoEncoder() should return zero. + * In this case, we expect the native library to call the encoder through + * JNI using the Java interface declared below. + * + * For case 2), createNativeVideoEncoder() should return a non-zero value. + * In this case, we expect the native library to treat the returned value as + * a raw pointer of type webrtc::VideoEncoder* (ownership is transferred to + * the caller). 
The native library should then directly call the + * webrtc::VideoEncoder interface without going through JNI. All calls to + * the Java interface methods declared below should thus throw an + * UnsupportedOperationException. + */ + @CalledByNative + default long createNativeVideoEncoder() { + return 0; + } + + /** + * Returns true if the encoder is backed by hardware. + */ + @CalledByNative + default boolean isHardwareEncoder() { + return true; + } + + /** + * Initializes the encoding process. Call before any calls to encode. + */ + @CalledByNative VideoCodecStatus initEncode(Settings settings, Callback encodeCallback); + + /** + * Releases the encoder. No more calls to encode will be made after this call. + */ + @CalledByNative VideoCodecStatus release(); + + /** + * Requests the encoder to encode a frame. + */ + @CalledByNative VideoCodecStatus encode(VideoFrame frame, EncodeInfo info); + + /** Sets the bitrate allocation and the target framerate for the encoder. */ + VideoCodecStatus setRateAllocation(BitrateAllocation allocation, int framerate); + + /** Sets the bitrate allocation and the target framerate for the encoder. */ + default @CalledByNative VideoCodecStatus setRates(RateControlParameters rcParameters) { + // Round frame rate up to avoid overshoots. + int framerateFps = (int) Math.ceil(rcParameters.framerateFps); + return setRateAllocation(rcParameters.bitrate, framerateFps); + } + + /** Any encoder that wants to use WebRTC provided quality scaler must implement this method. */ + @CalledByNative ScalingSettings getScalingSettings(); + + /** Returns the list of bitrate limits. */ + @CalledByNative + default ResolutionBitrateLimits[] getResolutionBitrateLimits() { + // TODO(ssilkin): Update downstream projects and remove default implementation. + ResolutionBitrateLimits bitrate_limits[] = {}; + return bitrate_limits; + } + + /** + * Should return a descriptive name for the implementation. Gets called once and cached. 
May be + * called from arbitrary thread. + */ + @CalledByNative String getImplementationName(); + + @CalledByNative + default EncoderInfo getEncoderInfo() { + return new EncoderInfo( + /* requestedResolutionAlignment= */ 1, /* applyAlignmentToAllSimulcastLayers= */ false); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoEncoderFactory.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoEncoderFactory.java new file mode 100644 index 00000000..2a46662d --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoEncoderFactory.java @@ -0,0 +1,72 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; + +/** Factory for creating VideoEncoders. */ +public interface VideoEncoderFactory { + public interface VideoEncoderSelector { + /** Called with the VideoCodecInfo of the currently used encoder. */ + @CalledByNative("VideoEncoderSelector") void onCurrentEncoder(VideoCodecInfo info); + + /** + * Called with the current available bitrate. Returns null if the encoder selector prefers to + * keep the current encoder or a VideoCodecInfo if a new encoder is preferred. + */ + @Nullable @CalledByNative("VideoEncoderSelector") VideoCodecInfo onAvailableBitrate(int kbps); + + /** + * Called every time the encoder input resolution change. Returns null if the encoder selector + * prefers to keep the current encoder or a VideoCodecInfo if a new encoder is preferred. 
+ */ + @Nullable + @CalledByNative("VideoEncoderSelector") + default VideoCodecInfo onResolutionChange(int widht, int height) { + return null; + } + + /** + * Called when the currently used encoder signal itself as broken. Returns null if the encoder + * selector prefers to keep the current encoder or a VideoCodecInfo if a new encoder is + * preferred. + */ + @Nullable @CalledByNative("VideoEncoderSelector") VideoCodecInfo onEncoderBroken(); + } + + /** Creates an encoder for the given video codec. */ + @Nullable @CalledByNative VideoEncoder createEncoder(VideoCodecInfo info); + + /** + * Enumerates the list of supported video codecs. This method will only be called once and the + * result will be cached. + */ + @CalledByNative VideoCodecInfo[] getSupportedCodecs(); + + /** + * Enumerates the list of supported video codecs that can also be tagged with + * implementation information. This method will only be called once and the + * result will be cached. + */ + @CalledByNative + default VideoCodecInfo[] getImplementations() { + return getSupportedCodecs(); + } + + /** + * Returns a VideoEncoderSelector if implemented by the VideoEncoderFactory, + * null otherwise. + */ + @CalledByNative + default VideoEncoderSelector getEncoderSelector() { + return null; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoEncoderFallback.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoEncoderFallback.java new file mode 100644 index 00000000..fa36b7c9 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoEncoderFallback.java @@ -0,0 +1,36 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * A combined video encoder that falls back on a secondary encoder if the primary encoder fails. + */ +public class VideoEncoderFallback extends WrappedNativeVideoEncoder { + private final VideoEncoder fallback; + private final VideoEncoder primary; + + public VideoEncoderFallback(VideoEncoder fallback, VideoEncoder primary) { + this.fallback = fallback; + this.primary = primary; + } + + @Override + public long createNativeVideoEncoder() { + return nativeCreateEncoder(fallback, primary); + } + + @Override + public boolean isHardwareEncoder() { + return primary.isHardwareEncoder(); + } + + private static native long nativeCreateEncoder(VideoEncoder fallback, VideoEncoder primary); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoEncoderWrapper.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoEncoderWrapper.java new file mode 100644 index 00000000..b5485d4e --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoEncoderWrapper.java @@ -0,0 +1,46 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +// Explicit imports necessary for JNI generation. +import androidx.annotation.Nullable; +import org.webrtc.VideoEncoder; + +/** + * This class contains the Java glue code for JNI generation of VideoEncoder. 
+ */ +class VideoEncoderWrapper { + @CalledByNative + static boolean getScalingSettingsOn(VideoEncoder.ScalingSettings scalingSettings) { + return scalingSettings.on; + } + + @Nullable + @CalledByNative + static Integer getScalingSettingsLow(VideoEncoder.ScalingSettings scalingSettings) { + return scalingSettings.low; + } + + @Nullable + @CalledByNative + static Integer getScalingSettingsHigh(VideoEncoder.ScalingSettings scalingSettings) { + return scalingSettings.high; + } + + @CalledByNative + static VideoEncoder.Callback createEncoderCallback(final long nativeEncoder) { + return (EncodedImage frame, + VideoEncoder.CodecSpecificInfo info) -> nativeOnEncodedFrame(nativeEncoder, frame); + } + + private static native void nativeOnEncodedFrame( + long nativeVideoEncoderWrapper, EncodedImage frame); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoFileRenderer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoFileRenderer.java new file mode 100644 index 00000000..aef80304 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoFileRenderer.java @@ -0,0 +1,162 @@ +/* + * Copyright 2016 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.os.Handler; +import android.os.HandlerThread; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.charset.Charset; +import java.util.concurrent.CountDownLatch; + +/** + * Can be used to save the video frames to file. 
+ */ +public class VideoFileRenderer implements VideoSink { + private static final String TAG = "VideoFileRenderer"; + + private final HandlerThread renderThread; + private final Handler renderThreadHandler; + private final HandlerThread fileThread; + private final Handler fileThreadHandler; + private final FileOutputStream videoOutFile; + private final String outputFileName; + private final int outputFileWidth; + private final int outputFileHeight; + private final int outputFrameSize; + private final ByteBuffer outputFrameBuffer; + private EglBase eglBase; + private YuvConverter yuvConverter; + private int frameCount; + + public VideoFileRenderer(String outputFile, int outputFileWidth, int outputFileHeight, + final EglBase.Context sharedContext) throws IOException { + if ((outputFileWidth % 2) == 1 || (outputFileHeight % 2) == 1) { + throw new IllegalArgumentException("Does not support uneven width or height"); + } + + this.outputFileName = outputFile; + this.outputFileWidth = outputFileWidth; + this.outputFileHeight = outputFileHeight; + + outputFrameSize = outputFileWidth * outputFileHeight * 3 / 2; + outputFrameBuffer = ByteBuffer.allocateDirect(outputFrameSize); + + videoOutFile = new FileOutputStream(outputFile); + videoOutFile.write( + ("YUV4MPEG2 C420 W" + outputFileWidth + " H" + outputFileHeight + " Ip F30:1 A1:1\n") + .getBytes(Charset.forName("US-ASCII"))); + + renderThread = new HandlerThread(TAG + "RenderThread"); + renderThread.start(); + renderThreadHandler = new Handler(renderThread.getLooper()); + + fileThread = new HandlerThread(TAG + "FileThread"); + fileThread.start(); + fileThreadHandler = new Handler(fileThread.getLooper()); + + ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, new Runnable() { + @Override + public void run() { + eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER); + eglBase.createDummyPbufferSurface(); + eglBase.makeCurrent(); + yuvConverter = new YuvConverter(); + } + }); + } + + @Override + 
public void onFrame(VideoFrame frame) { + frame.retain(); + renderThreadHandler.post(() -> renderFrameOnRenderThread(frame)); + } + + private void renderFrameOnRenderThread(VideoFrame frame) { + final VideoFrame.Buffer buffer = frame.getBuffer(); + + // If the frame is rotated, it will be applied after cropAndScale. Therefore, if the frame is + // rotated by 90 degrees, swap width and height. + final int targetWidth = frame.getRotation() % 180 == 0 ? outputFileWidth : outputFileHeight; + final int targetHeight = frame.getRotation() % 180 == 0 ? outputFileHeight : outputFileWidth; + + final float frameAspectRatio = (float) buffer.getWidth() / (float) buffer.getHeight(); + final float fileAspectRatio = (float) targetWidth / (float) targetHeight; + + // Calculate cropping to equalize the aspect ratio. + int cropWidth = buffer.getWidth(); + int cropHeight = buffer.getHeight(); + if (fileAspectRatio > frameAspectRatio) { + cropHeight = (int) (cropHeight * (frameAspectRatio / fileAspectRatio)); + } else { + cropWidth = (int) (cropWidth * (fileAspectRatio / frameAspectRatio)); + } + + final int cropX = (buffer.getWidth() - cropWidth) / 2; + final int cropY = (buffer.getHeight() - cropHeight) / 2; + + final VideoFrame.Buffer scaledBuffer = + buffer.cropAndScale(cropX, cropY, cropWidth, cropHeight, targetWidth, targetHeight); + frame.release(); + + final VideoFrame.I420Buffer i420 = scaledBuffer.toI420(); + scaledBuffer.release(); + + fileThreadHandler.post(() -> { + YuvHelper.I420Rotate(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(), + i420.getDataV(), i420.getStrideV(), outputFrameBuffer, i420.getWidth(), i420.getHeight(), + frame.getRotation()); + i420.release(); + + try { + videoOutFile.write("FRAME\n".getBytes(Charset.forName("US-ASCII"))); + videoOutFile.write( + outputFrameBuffer.array(), outputFrameBuffer.arrayOffset(), outputFrameSize); + } catch (IOException e) { + throw new RuntimeException("Error writing video to disk", e); + } + 
frameCount++; + }); + } + + /** + * Release all resources. All already posted frames will be rendered first. + */ + public void release() { + final CountDownLatch cleanupBarrier = new CountDownLatch(1); + renderThreadHandler.post(() -> { + yuvConverter.release(); + eglBase.release(); + renderThread.quit(); + cleanupBarrier.countDown(); + }); + ThreadUtils.awaitUninterruptibly(cleanupBarrier); + fileThreadHandler.post(() -> { + try { + videoOutFile.close(); + Logging.d(TAG, + "Video written to disk as " + outputFileName + ". The number of frames is " + frameCount + + " and the dimensions of the frames are " + outputFileWidth + "x" + + outputFileHeight + "."); + } catch (IOException e) { + throw new RuntimeException("Error closing output file", e); + } + fileThread.quit(); + }); + try { + fileThread.join(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + Logging.e(TAG, "Interrupted while waiting for the write to disk to complete.", e); + } + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoFrame.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoFrame.java new file mode 100644 index 00000000..443a0315 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoFrame.java @@ -0,0 +1,234 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.graphics.Matrix; +import android.opengl.GLES11Ext; +import android.opengl.GLES20; +import androidx.annotation.Nullable; +import java.nio.ByteBuffer; + +/** + * Java version of webrtc::VideoFrame and webrtc::VideoFrameBuffer. 
A difference from the C++ + * version is that no explicit tag is used, and clients are expected to use 'instanceof' to find the + * right subclass of the buffer. This allows clients to create custom VideoFrame.Buffer in + * arbitrary format in their custom VideoSources, and then cast it back to the correct subclass in + * their custom VideoSinks. All implementations must also implement the toI420() function, + * converting from the underlying representation if necessary. I420 is the most widely accepted + * format and serves as a fallback for video sinks that can only handle I420, e.g. the internal + * WebRTC software encoders. + */ +public class VideoFrame implements RefCounted { + /** + * Implements image storage medium. Might be for example an OpenGL texture or a memory region + * containing I420-data. + * + *

Reference counting is needed since a video buffer can be shared between multiple VideoSinks, + * and the buffer needs to be returned to the VideoSource as soon as all references are gone. + */ + public interface Buffer extends RefCounted { + /** + * Representation of the underlying buffer. Currently, only NATIVE and I420 are supported. + */ + @CalledByNative("Buffer") + @VideoFrameBufferType + default int getBufferType() { + return VideoFrameBufferType.NATIVE; + } + + /** + * Resolution of the buffer in pixels. + */ + @CalledByNative("Buffer") int getWidth(); + @CalledByNative("Buffer") int getHeight(); + + /** + * Returns a memory-backed frame in I420 format. If the pixel data is in another format, a + * conversion will take place. All implementations must provide a fallback to I420 for + * compatibility with e.g. the internal WebRTC software encoders. + * + *

Conversion may fail, for example if reading the pixel data from a texture fails. If the + * conversion fails, null is returned. + */ + @Nullable @CalledByNative("Buffer") I420Buffer toI420(); + + @Override @CalledByNative("Buffer") void retain(); + @Override @CalledByNative("Buffer") void release(); + + /** + * Crops a region defined by `cropx`, `cropY`, `cropWidth` and `cropHeight`. Scales it to size + * `scaleWidth` x `scaleHeight`. + */ + @CalledByNative("Buffer") + Buffer cropAndScale( + int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight); + } + + /** + * Interface for I420 buffers. + */ + public interface I420Buffer extends Buffer { + @Override + default int getBufferType() { + return VideoFrameBufferType.I420; + } + + /** + * Returns a direct ByteBuffer containing Y-plane data. The buffer capacity is at least + * getStrideY() * getHeight() bytes. The position of the returned buffer is ignored and must + * be 0. Callers may mutate the ByteBuffer (eg. through relative-read operations), so + * implementations must return a new ByteBuffer or slice for each call. + */ + @CalledByNative("I420Buffer") ByteBuffer getDataY(); + /** + * Returns a direct ByteBuffer containing U-plane data. The buffer capacity is at least + * getStrideU() * ((getHeight() + 1) / 2) bytes. The position of the returned buffer is ignored + * and must be 0. Callers may mutate the ByteBuffer (eg. through relative-read operations), so + * implementations must return a new ByteBuffer or slice for each call. + */ + @CalledByNative("I420Buffer") ByteBuffer getDataU(); + /** + * Returns a direct ByteBuffer containing V-plane data. The buffer capacity is at least + * getStrideV() * ((getHeight() + 1) / 2) bytes. The position of the returned buffer is ignored + * and must be 0. Callers may mutate the ByteBuffer (eg. through relative-read operations), so + * implementations must return a new ByteBuffer or slice for each call. 
+ */ + @CalledByNative("I420Buffer") ByteBuffer getDataV(); + + @CalledByNative("I420Buffer") int getStrideY(); + @CalledByNative("I420Buffer") int getStrideU(); + @CalledByNative("I420Buffer") int getStrideV(); + } + + /** + * Interface for buffers that are stored as a single texture, either in OES or RGB format. + */ + public interface TextureBuffer extends Buffer { + enum Type { + OES(GLES11Ext.GL_TEXTURE_EXTERNAL_OES), + RGB(GLES20.GL_TEXTURE_2D); + + private final int glTarget; + + private Type(final int glTarget) { + this.glTarget = glTarget; + } + + public int getGlTarget() { + return glTarget; + } + } + + Type getType(); + int getTextureId(); + + /** + * Retrieve the transform matrix associated with the frame. This transform matrix maps 2D + * homogeneous coordinates of the form (s, t, 1) with s and t in the inclusive range [0, 1] to + * the coordinate that should be used to sample that location from the buffer. + */ + Matrix getTransformMatrix(); + + /** + * Create a new TextureBufferImpl with an applied transform matrix and a new size. The existing + * buffer is unchanged. The given transform matrix is applied first when texture coordinates are + * still in the unmodified [0, 1] range. + */ + default TextureBuffer applyTransformMatrix( + Matrix transformMatrix, int newWidth, int newHeight) { + throw new UnsupportedOperationException("Not implemented"); + } + + /** + * Returns the width of the texture in memory. This should only be used for downscaling, and you + * should still respect the width from getWidth(). + */ + default public int getUnscaledWidth() { + return getWidth(); + } + + /** + * Returns the height of the texture in memory. This should only be used for downscaling, and + * you should still respect the height from getHeight(). 
+ */ + default public int getUnscaledHeight() { + return getHeight(); + } + } + + private final Buffer buffer; + private final int rotation; + private final long timestampNs; + + /** + * Constructs a new VideoFrame backed by the given {@code buffer}. + * + * @note Ownership of the buffer object is tranferred to the new VideoFrame. + */ + @CalledByNative + public VideoFrame(Buffer buffer, int rotation, long timestampNs) { + if (buffer == null) { + throw new IllegalArgumentException("buffer not allowed to be null"); + } + if (rotation % 90 != 0) { + throw new IllegalArgumentException("rotation must be a multiple of 90"); + } + this.buffer = buffer; + this.rotation = rotation; + this.timestampNs = timestampNs; + } + + @CalledByNative + public Buffer getBuffer() { + return buffer; + } + + /** + * Rotation of the frame in degrees. + */ + @CalledByNative + public int getRotation() { + return rotation; + } + + /** + * Timestamp of the frame in nano seconds. + */ + @CalledByNative + public long getTimestampNs() { + return timestampNs; + } + + public int getRotatedWidth() { + if (rotation % 180 == 0) { + return buffer.getWidth(); + } + return buffer.getHeight(); + } + + public int getRotatedHeight() { + if (rotation % 180 == 0) { + return buffer.getHeight(); + } + return buffer.getWidth(); + } + + @Override + public void retain() { + buffer.retain(); + } + + @Override + @CalledByNative + public void release() { + buffer.release(); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoFrameBufferType.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoFrameBufferType.java new file mode 100644 index 00000000..0895c78f --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoFrameBufferType.java @@ -0,0 +1,18 @@ + +// IntelliJ API Decompiler stub source generated from a class file +// Implementation of methods is not available + +package org.webrtc; + 
/**
 * Pixel-format discriminator for video frame buffers.
 *
 * <p>Mirrors the native {@code webrtc::VideoFrameBuffer::Type} enumeration. The integer values
 * are part of the JNI contract and must stay in sync with the native definitions — do not
 * renumber or reorder them.
 */
@java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.SOURCE)
public @interface VideoFrameBufferType {
  int NATIVE = 0;
  int I420 = 1;
  int I420A = 2;
  int I422 = 3;
  int I444 = 4;
  int I010 = 5;
  int I210 = 6;
  int I410 = 7;
  int NV12 = 8;
}
(M = renderMatrix * + * transformationMatrix) + */ + public static void drawTexture(RendererCommon.GlDrawer drawer, VideoFrame.TextureBuffer buffer, + Matrix renderMatrix, int frameWidth, int frameHeight, int viewportX, int viewportY, + int viewportWidth, int viewportHeight) { + Matrix finalMatrix = new Matrix(buffer.getTransformMatrix()); + finalMatrix.preConcat(renderMatrix); + float[] finalGlMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(finalMatrix); + switch (buffer.getType()) { + case OES: + drawer.drawOes(buffer.getTextureId(), finalGlMatrix, frameWidth, frameHeight, viewportX, + viewportY, viewportWidth, viewportHeight); + break; + case RGB: + drawer.drawRgb(buffer.getTextureId(), finalGlMatrix, frameWidth, frameHeight, viewportX, + viewportY, viewportWidth, viewportHeight); + break; + default: + throw new RuntimeException("Unknown texture type."); + } + } + + /** + * Helper class for uploading YUV bytebuffer frames to textures that handles stride > width. This + * class keeps an internal ByteBuffer to avoid unnecessary allocations for intermediate copies. + */ + private static class YuvUploader { + // Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width. + // TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader + // that handles stride and compare performance with intermediate copy. + @Nullable private ByteBuffer copyBuffer; + @Nullable private int[] yuvTextures; + + /** + * Upload `planes` into OpenGL textures, taking stride into consideration. + * + * @return Array of three texture indices corresponding to Y-, U-, and V-plane respectively. + */ + @Nullable + public int[] uploadYuvData(int width, int height, int[] strides, ByteBuffer[] planes) { + final int[] planeWidths = new int[] {width, width / 2, width / 2}; + final int[] planeHeights = new int[] {height, height / 2, height / 2}; + // Make a first pass to see if we need a temporary copy buffer. 
+ int copyCapacityNeeded = 0; + for (int i = 0; i < 3; ++i) { + if (strides[i] > planeWidths[i]) { + copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidths[i] * planeHeights[i]); + } + } + // Allocate copy buffer if necessary. + if (copyCapacityNeeded > 0 + && (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) { + copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded); + } + // Make sure YUV textures are allocated. + if (yuvTextures == null) { + yuvTextures = new int[3]; + for (int i = 0; i < 3; i++) { + yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D); + } + } + // Upload each plane. + for (int i = 0; i < 3; ++i) { + GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]); + // GLES only accepts packed data, i.e. stride == planeWidth. + final ByteBuffer packedByteBuffer; + if (strides[i] == planeWidths[i]) { + // Input is packed already. + packedByteBuffer = planes[i]; + } else { + YuvHelper.copyPlane( + planes[i], strides[i], copyBuffer, planeWidths[i], planeWidths[i], planeHeights[i]); + packedByteBuffer = copyBuffer; + } + GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i], + planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer); + } + return yuvTextures; + } + + @Nullable + public int[] uploadFromBuffer(VideoFrame.I420Buffer buffer) { + int[] strides = {buffer.getStrideY(), buffer.getStrideU(), buffer.getStrideV()}; + ByteBuffer[] planes = {buffer.getDataY(), buffer.getDataU(), buffer.getDataV()}; + return uploadYuvData(buffer.getWidth(), buffer.getHeight(), strides, planes); + } + + @Nullable + public int[] getYuvTextures() { + return yuvTextures; + } + + /** + * Releases cached resources. Uploader can still be used and the resources will be reallocated + * on first use. 
+ */ + public void release() { + copyBuffer = null; + if (yuvTextures != null) { + GLES20.glDeleteTextures(3, yuvTextures, 0); + yuvTextures = null; + } + } + } + + private static int distance(float x0, float y0, float x1, float y1) { + return (int) Math.round(Math.hypot(x1 - x0, y1 - y0)); + } + + // These points are used to calculate the size of the part of the frame we are rendering. + final static float[] srcPoints = + new float[] {0f /* x0 */, 0f /* y0 */, 1f /* x1 */, 0f /* y1 */, 0f /* x2 */, 1f /* y2 */}; + private final float[] dstPoints = new float[6]; + private final Point renderSize = new Point(); + private int renderWidth; + private int renderHeight; + + // Calculate the frame size after `renderMatrix` is applied. Stores the output in member variables + // `renderWidth` and `renderHeight` to avoid allocations since this function is called for every + // frame. + private void calculateTransformedRenderSize( + int frameWidth, int frameHeight, @Nullable Matrix renderMatrix) { + if (renderMatrix == null) { + renderWidth = frameWidth; + renderHeight = frameHeight; + return; + } + // Transform the texture coordinates (in the range [0, 1]) according to `renderMatrix`. + renderMatrix.mapPoints(dstPoints, srcPoints); + + // Multiply with the width and height to get the positions in terms of pixels. + for (int i = 0; i < 3; ++i) { + dstPoints[i * 2 + 0] *= frameWidth; + dstPoints[i * 2 + 1] *= frameHeight; + } + + // Get the length of the sides of the transformed rectangle in terms of pixels. + renderWidth = distance(dstPoints[0], dstPoints[1], dstPoints[2], dstPoints[3]); + renderHeight = distance(dstPoints[0], dstPoints[1], dstPoints[4], dstPoints[5]); + } + + private final YuvUploader yuvUploader = new YuvUploader(); + // This variable will only be used for checking reference equality and is used for caching I420 + // textures. 
+ @Nullable private VideoFrame lastI420Frame; + private final Matrix renderMatrix = new Matrix(); + + public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer) { + drawFrame(frame, drawer, null /* additionalRenderMatrix */); + } + + public void drawFrame( + VideoFrame frame, RendererCommon.GlDrawer drawer, Matrix additionalRenderMatrix) { + drawFrame(frame, drawer, additionalRenderMatrix, 0 /* viewportX */, 0 /* viewportY */, + frame.getRotatedWidth(), frame.getRotatedHeight()); + } + + public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer, + @Nullable Matrix additionalRenderMatrix, int viewportX, int viewportY, int viewportWidth, + int viewportHeight) { + final int width = frame.getRotatedWidth(); + final int height = frame.getRotatedHeight(); + calculateTransformedRenderSize(width, height, additionalRenderMatrix); + if (renderWidth <= 0 || renderHeight <= 0) { + Logging.w(TAG, "Illegal frame size: " + renderWidth + "x" + renderHeight); + return; + } + + final boolean isTextureFrame = frame.getBuffer() instanceof VideoFrame.TextureBuffer; + renderMatrix.reset(); + renderMatrix.preTranslate(0.5f, 0.5f); + if (!isTextureFrame) { + renderMatrix.preScale(1f, -1f); // I420-frames are upside down + } + renderMatrix.preRotate(frame.getRotation()); + renderMatrix.preTranslate(-0.5f, -0.5f); + if (additionalRenderMatrix != null) { + renderMatrix.preConcat(additionalRenderMatrix); + } + + if (isTextureFrame) { + lastI420Frame = null; + drawTexture(drawer, (VideoFrame.TextureBuffer) frame.getBuffer(), renderMatrix, renderWidth, + renderHeight, viewportX, viewportY, viewportWidth, viewportHeight); + } else { + // Only upload the I420 data to textures once per frame, if we are called multiple times + // with the same frame. 
+ if (frame != lastI420Frame) { + lastI420Frame = frame; + final VideoFrame.I420Buffer i420Buffer = frame.getBuffer().toI420(); + yuvUploader.uploadFromBuffer(i420Buffer); + i420Buffer.release(); + } + + drawer.drawYuv(yuvUploader.getYuvTextures(), + RendererCommon.convertMatrixFromAndroidGraphicsMatrix(renderMatrix), renderWidth, + renderHeight, viewportX, viewportY, viewportWidth, viewportHeight); + } + } + + public VideoFrame.Buffer prepareBufferForViewportSize( + VideoFrame.Buffer buffer, int width, int height) { + buffer.retain(); + return buffer; + } + + public void release() { + yuvUploader.release(); + lastI420Frame = null; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoProcessor.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoProcessor.java new file mode 100644 index 00000000..c39a55c2 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoProcessor.java @@ -0,0 +1,76 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; + +/** + * Lightweight abstraction for an object that can receive video frames, process them, and pass them + * on to another object. This object is also allowed to observe capturer start/stop. 
+ */ +public interface VideoProcessor extends CapturerObserver { + public static class FrameAdaptationParameters { + public final int cropX; + public final int cropY; + public final int cropWidth; + public final int cropHeight; + public final int scaleWidth; + public final int scaleHeight; + public final long timestampNs; + public final boolean drop; + + public FrameAdaptationParameters(int cropX, int cropY, int cropWidth, int cropHeight, + int scaleWidth, int scaleHeight, long timestampNs, boolean drop) { + this.cropX = cropX; + this.cropY = cropY; + this.cropWidth = cropWidth; + this.cropHeight = cropHeight; + this.scaleWidth = scaleWidth; + this.scaleHeight = scaleHeight; + this.timestampNs = timestampNs; + this.drop = drop; + } + } + + /** + * This is a chance to access an unadapted frame. The default implementation applies the + * adaptation and forwards the frame to {@link #onFrameCaptured(VideoFrame)}. + */ + default void onFrameCaptured(VideoFrame frame, FrameAdaptationParameters parameters) { + VideoFrame adaptedFrame = applyFrameAdaptationParameters(frame, parameters); + if (adaptedFrame != null) { + onFrameCaptured(adaptedFrame); + adaptedFrame.release(); + } + } + + /** + * Set the sink that receives the output from this processor. Null can be passed in to unregister + * a sink. + */ + void setSink(@Nullable VideoSink sink); + + /** + * Applies the frame adaptation parameters to a frame. Returns null if the frame is meant to be + * dropped. Returns a new frame. The caller is responsible for releasing the returned frame. 
+ */ + public static @Nullable VideoFrame applyFrameAdaptationParameters( + VideoFrame frame, FrameAdaptationParameters parameters) { + if (parameters.drop) { + return null; + } + + final VideoFrame.Buffer adaptedBuffer = + frame.getBuffer().cropAndScale(parameters.cropX, parameters.cropY, parameters.cropWidth, + parameters.cropHeight, parameters.scaleWidth, parameters.scaleHeight); + return new VideoFrame(adaptedBuffer, frame.getRotation(), parameters.timestampNs); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoSink.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoSink.java new file mode 100644 index 00000000..5a0a6c71 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoSink.java @@ -0,0 +1,23 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * Java version of rtc::VideoSinkInterface. + */ +public interface VideoSink { + /** + * Implementations should call frame.retain() if they need to hold a reference to the frame after + * this function returns. Each call to retain() should be followed by a call to frame.release() + * when the reference is no longer needed. + */ + @CalledByNative void onFrame(VideoFrame frame); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoSource.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoSource.java new file mode 100644 index 00000000..2e22d1a2 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoSource.java @@ -0,0 +1,162 @@ +/* + * Copyright 2013 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; + +/** + * Java wrapper of native AndroidVideoTrackSource. + */ +public class VideoSource extends MediaSource { + /** Simple aspect ratio clas for use in constraining output format. */ + public static class AspectRatio { + public static final AspectRatio UNDEFINED = new AspectRatio(/* width= */ 0, /* height= */ 0); + + public final int width; + public final int height; + + public AspectRatio(int width, int height) { + this.width = width; + this.height = height; + } + } + + private final NativeAndroidVideoTrackSource nativeAndroidVideoTrackSource; + private final Object videoProcessorLock = new Object(); + @Nullable private VideoProcessor videoProcessor; + private boolean isCapturerRunning; + + private final CapturerObserver capturerObserver = new CapturerObserver() { + @Override + public void onCapturerStarted(boolean success) { + nativeAndroidVideoTrackSource.setState(success); + synchronized (videoProcessorLock) { + isCapturerRunning = success; + if (videoProcessor != null) { + videoProcessor.onCapturerStarted(success); + } + } + } + + @Override + public void onCapturerStopped() { + nativeAndroidVideoTrackSource.setState(/* isLive= */ false); + synchronized (videoProcessorLock) { + isCapturerRunning = false; + if (videoProcessor != null) { + videoProcessor.onCapturerStopped(); + } + } + } + + @Override + public void onFrameCaptured(VideoFrame frame) { + final VideoProcessor.FrameAdaptationParameters parameters = + nativeAndroidVideoTrackSource.adaptFrame(frame); + synchronized (videoProcessorLock) { + if (videoProcessor != null) { + 
videoProcessor.onFrameCaptured(frame, parameters); + return; + } + } + + VideoFrame adaptedFrame = VideoProcessor.applyFrameAdaptationParameters(frame, parameters); + if (adaptedFrame != null) { + nativeAndroidVideoTrackSource.onFrameCaptured(adaptedFrame); + adaptedFrame.release(); + } + } + }; + + public VideoSource(long nativeSource) { + super(nativeSource); + this.nativeAndroidVideoTrackSource = new NativeAndroidVideoTrackSource(nativeSource); + } + + /** + * Calling this function will cause frames to be scaled down to the requested resolution. Also, + * frames will be cropped to match the requested aspect ratio, and frames will be dropped to match + * the requested fps. The requested aspect ratio is orientation agnostic and will be adjusted to + * maintain the input orientation, so it doesn't matter if e.g. 1280x720 or 720x1280 is requested. + */ + public void adaptOutputFormat(int width, int height, int fps) { + final int maxSide = Math.max(width, height); + final int minSide = Math.min(width, height); + adaptOutputFormat(maxSide, minSide, minSide, maxSide, fps); + } + + /** + * Same as above, but allows setting two different target resolutions depending on incoming + * frame orientation. This gives more fine-grained control and can e.g. be used to force landscape + * video to be cropped to portrait video. + */ + public void adaptOutputFormat( + int landscapeWidth, int landscapeHeight, int portraitWidth, int portraitHeight, int fps) { + adaptOutputFormat(new AspectRatio(landscapeWidth, landscapeHeight), + /* maxLandscapePixelCount= */ landscapeWidth * landscapeHeight, + new AspectRatio(portraitWidth, portraitHeight), + /* maxPortraitPixelCount= */ portraitWidth * portraitHeight, fps); + } + + /** Same as above, with even more control as each constraint is optional. 
*/ + public void adaptOutputFormat(AspectRatio targetLandscapeAspectRatio, + @Nullable Integer maxLandscapePixelCount, AspectRatio targetPortraitAspectRatio, + @Nullable Integer maxPortraitPixelCount, @Nullable Integer maxFps) { + nativeAndroidVideoTrackSource.adaptOutputFormat(targetLandscapeAspectRatio, + maxLandscapePixelCount, targetPortraitAspectRatio, maxPortraitPixelCount, maxFps); + } + + public void setIsScreencast(boolean isScreencast) { + nativeAndroidVideoTrackSource.setIsScreencast(isScreencast); + } + + /** + * Hook for injecting a custom video processor before frames are passed onto WebRTC. The frames + * will be cropped and scaled depending on CPU and network conditions before they are passed to + * the video processor. Frames will be delivered to the video processor on the same thread they + * are passed to this object. The video processor is allowed to deliver the processed frames + * back on any thread. + */ + public void setVideoProcessor(@Nullable VideoProcessor newVideoProcessor) { + synchronized (videoProcessorLock) { + if (videoProcessor != null) { + videoProcessor.setSink(/* sink= */ null); + if (isCapturerRunning) { + videoProcessor.onCapturerStopped(); + } + } + videoProcessor = newVideoProcessor; + if (newVideoProcessor != null) { + newVideoProcessor.setSink( + (frame) + -> runWithReference(() -> nativeAndroidVideoTrackSource.onFrameCaptured(frame))); + if (isCapturerRunning) { + newVideoProcessor.onCapturerStarted(/* success= */ true); + } + } + } + } + + public CapturerObserver getCapturerObserver() { + return capturerObserver; + } + + /** Returns a pointer to webrtc::VideoTrackSourceInterface. 
*/ + long getNativeVideoTrackSource() { + return getNativeMediaSource(); + } + + @Override + public void dispose() { + setVideoProcessor(/* newVideoProcessor= */ null); + super.dispose(); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoTrack.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoTrack.java new file mode 100644 index 00000000..512e46c2 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/VideoTrack.java @@ -0,0 +1,76 @@ +/* + * Copyright 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import java.util.IdentityHashMap; + +/** Java version of VideoTrackInterface. */ +public class VideoTrack extends MediaStreamTrack { + private final IdentityHashMap sinks = new IdentityHashMap(); + + public VideoTrack(long nativeTrack) { + super(nativeTrack); + } + + /** + * Adds a VideoSink to the track. + * + * A track can have any number of VideoSinks. VideoSinks will replace + * renderers. However, converting old style texture frames will involve costly + * conversion to I420 so it is not recommended to upgrade before all your + * sources produce VideoFrames. + */ + public void addSink(VideoSink sink) { + if (sink == null) { + throw new IllegalArgumentException("The VideoSink is not allowed to be null"); + } + // We allow calling addSink() with the same sink multiple times. This is similar to the C++ + // VideoTrack::AddOrUpdateSink(). 
+ if (!sinks.containsKey(sink)) { + final long nativeSink = nativeWrapSink(sink); + sinks.put(sink, nativeSink); + nativeAddSink(getNativeMediaStreamTrack(), nativeSink); + } + } + + /** + * Removes a VideoSink from the track. + * + * If the VideoSink was not attached to the track, this is a no-op. + */ + public void removeSink(VideoSink sink) { + final Long nativeSink = sinks.remove(sink); + if (nativeSink != null) { + nativeRemoveSink(getNativeMediaStreamTrack(), nativeSink); + nativeFreeSink(nativeSink); + } + } + + @Override + public void dispose() { + for (long nativeSink : sinks.values()) { + nativeRemoveSink(getNativeMediaStreamTrack(), nativeSink); + nativeFreeSink(nativeSink); + } + sinks.clear(); + super.dispose(); + } + + /** Returns a pointer to webrtc::VideoTrackInterface. */ + public long getNativeVideoTrack() { + return getNativeMediaStreamTrack(); + } + + private static native void nativeAddSink(long track, long nativeSink); + private static native void nativeRemoveSink(long track, long nativeSink); + private static native long nativeWrapSink(VideoSink sink); + private static native void nativeFreeSink(long sink); +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/WebRtcClassLoader.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/WebRtcClassLoader.java new file mode 100644 index 00000000..023e92cf --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/WebRtcClassLoader.java @@ -0,0 +1,27 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +/** + * This class provides a ClassLoader that is capable of loading WebRTC Java classes regardless of + * what thread it's called from. Such a ClassLoader is needed for the few cases where the JNI + * mechanism is unable to automatically determine the appropriate ClassLoader instance. + */ +class WebRtcClassLoader { + @CalledByNative + static Object getClassLoader() { + Object loader = WebRtcClassLoader.class.getClassLoader(); + if (loader == null) { + throw new RuntimeException("Failed to get WebRTC class loader."); + } + return loader; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/WrappedNativeI420Buffer.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/WrappedNativeI420Buffer.java new file mode 100644 index 00000000..0461660f --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/WrappedNativeI420Buffer.java @@ -0,0 +1,110 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import java.nio.ByteBuffer; + +/** + * This class wraps a webrtc::I420BufferInterface into a VideoFrame.I420Buffer. 
+ */ +class WrappedNativeI420Buffer implements VideoFrame.I420Buffer { + private final int width; + private final int height; + private final ByteBuffer dataY; + private final int strideY; + private final ByteBuffer dataU; + private final int strideU; + private final ByteBuffer dataV; + private final int strideV; + private final long nativeBuffer; + + @CalledByNative + WrappedNativeI420Buffer(int width, int height, ByteBuffer dataY, int strideY, ByteBuffer dataU, + int strideU, ByteBuffer dataV, int strideV, long nativeBuffer) { + this.width = width; + this.height = height; + this.dataY = dataY; + this.strideY = strideY; + this.dataU = dataU; + this.strideU = strideU; + this.dataV = dataV; + this.strideV = strideV; + this.nativeBuffer = nativeBuffer; + + retain(); + } + + @Override + public int getWidth() { + return width; + } + + @Override + public int getHeight() { + return height; + } + + @Override + public ByteBuffer getDataY() { + // Return a slice to prevent relative reads from changing the position. + return dataY.slice(); + } + + @Override + public ByteBuffer getDataU() { + // Return a slice to prevent relative reads from changing the position. + return dataU.slice(); + } + + @Override + public ByteBuffer getDataV() { + // Return a slice to prevent relative reads from changing the position. 
+ return dataV.slice(); + } + + @Override + public int getStrideY() { + return strideY; + } + + @Override + public int getStrideU() { + return strideU; + } + + @Override + public int getStrideV() { + return strideV; + } + + @Override + public VideoFrame.I420Buffer toI420() { + retain(); + return this; + } + + @Override + public void retain() { + JniCommon.nativeAddRef(nativeBuffer); + } + + @Override + public void release() { + JniCommon.nativeReleaseRef(nativeBuffer); + } + + @Override + public VideoFrame.Buffer cropAndScale( + int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) { + return JavaI420Buffer.cropAndScaleI420( + this, cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/WrappedNativeVideoDecoder.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/WrappedNativeVideoDecoder.java new file mode 100644 index 00000000..027120e4 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/WrappedNativeVideoDecoder.java @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * Wraps a native webrtc::VideoDecoder. 
+ */ +public abstract class WrappedNativeVideoDecoder implements VideoDecoder { + @Override public abstract long createNativeVideoDecoder(); + + @Override + public final VideoCodecStatus initDecode(Settings settings, Callback decodeCallback) { + throw new UnsupportedOperationException("Not implemented."); + } + + @Override + public final VideoCodecStatus release() { + throw new UnsupportedOperationException("Not implemented."); + } + + @Override + public final VideoCodecStatus decode(EncodedImage frame, DecodeInfo info) { + throw new UnsupportedOperationException("Not implemented."); + } + + @Override + public final String getImplementationName() { + throw new UnsupportedOperationException("Not implemented."); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/WrappedNativeVideoEncoder.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/WrappedNativeVideoEncoder.java new file mode 100644 index 00000000..7d0908a6 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/WrappedNativeVideoEncoder.java @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * Wraps a native webrtc::VideoEncoder. 
+ */ +public abstract class WrappedNativeVideoEncoder implements VideoEncoder { + @Override public abstract long createNativeVideoEncoder(); + @Override public abstract boolean isHardwareEncoder(); + + @Override + public final VideoCodecStatus initEncode(Settings settings, Callback encodeCallback) { + throw new UnsupportedOperationException("Not implemented."); + } + + @Override + public final VideoCodecStatus release() { + throw new UnsupportedOperationException("Not implemented."); + } + + @Override + public final VideoCodecStatus encode(VideoFrame frame, EncodeInfo info) { + throw new UnsupportedOperationException("Not implemented."); + } + + @Override + public final VideoCodecStatus setRateAllocation(BitrateAllocation allocation, int framerate) { + throw new UnsupportedOperationException("Not implemented."); + } + + @Override + public final ScalingSettings getScalingSettings() { + throw new UnsupportedOperationException("Not implemented."); + } + + @Override + public final String getImplementationName() { + throw new UnsupportedOperationException("Not implemented."); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/YuvConverter.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/YuvConverter.java new file mode 100644 index 00000000..c855d4be --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/YuvConverter.java @@ -0,0 +1,252 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+package org.webrtc;
+
+import android.graphics.Matrix;
+import android.opengl.GLES20;
+import android.opengl.GLException;
+import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
+import org.webrtc.VideoFrame.I420Buffer;
+import org.webrtc.VideoFrame.TextureBuffer;
+
+/**
+ * Class for converting OES textures to a YUV ByteBuffer. It can be constructed on any thread, but
+ * should only be operated from a single thread with an active EGL context.
+ */
+public final class YuvConverter {
+  // NOTE(review): vendored upstream WebRTC code — convert() swallows GLException
+  // and returns null, so callers must handle a null frame.
+  private static final String TAG = "YuvConverter";
+
+  private static final String FRAGMENT_SHADER =
+      // Difference in texture coordinate corresponding to one
+      // sub-pixel in the x direction.
+      "uniform vec2 xUnit;\n"
+      // Color conversion coefficients, including constant term
+      + "uniform vec4 coeffs;\n"
+      + "\n"
+      + "void main() {\n"
+      // Since the alpha read from the texture is always 1, this could
+      // be written as a mat4 x vec4 multiply. However, that seems to
+      // give a worse framerate, possibly because the additional
+      // multiplies by 1.0 consume resources.
+      + "  gl_FragColor.r = coeffs.a + dot(coeffs.rgb,\n"
+      + "      sample(tc - 1.5 * xUnit).rgb);\n"
+      + "  gl_FragColor.g = coeffs.a + dot(coeffs.rgb,\n"
+      + "      sample(tc - 0.5 * xUnit).rgb);\n"
+      + "  gl_FragColor.b = coeffs.a + dot(coeffs.rgb,\n"
+      + "      sample(tc + 0.5 * xUnit).rgb);\n"
+      + "  gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
+      + "      sample(tc + 1.5 * xUnit).rgb);\n"
+      + "}\n";
+
+  private static class ShaderCallbacks implements GlGenericDrawer.ShaderCallbacks {
+    // Y'UV444 to RGB888, see https://en.wikipedia.org/wiki/YUV#Y%E2%80%B2UV444_to_RGB888_conversion
+    // We use the ITU-R BT.601 coefficients for Y, U and V.
+    // The values in Wikipedia are inaccurate, the accurate values derived from the spec are:
+    // Y = 0.299 * R + 0.587 * G + 0.114 * B
+    // U = -0.168736 * R - 0.331264 * G + 0.5 * B + 0.5
+    // V = 0.5 * R - 0.418688 * G - 0.0813124 * B + 0.5
+    // To map the Y-values to range [16-235] and U- and V-values to range [16-240], the matrix has
+    // been multiplied with matrix:
+    // {{219 / 255, 0, 0, 16 / 255},
+    // {0, 224 / 255, 0, 16 / 255},
+    // {0, 0, 224 / 255, 16 / 255},
+    // {0, 0, 0, 1}}
+    private static final float[] yCoeffs =
+        new float[] {0.256788f, 0.504129f, 0.0979059f, 0.0627451f};
+    private static final float[] uCoeffs =
+        new float[] {-0.148223f, -0.290993f, 0.439216f, 0.501961f};
+    private static final float[] vCoeffs =
+        new float[] {0.439216f, -0.367788f, -0.0714274f, 0.501961f};
+
+    private int xUnitLoc;
+    private int coeffsLoc;
+
+    private float[] coeffs;
+    private float stepSize;
+
+    public void setPlaneY() {
+      coeffs = yCoeffs;
+      stepSize = 1.0f;
+    }
+
+    public void setPlaneU() {
+      coeffs = uCoeffs;
+      stepSize = 2.0f;
+    }
+
+    public void setPlaneV() {
+      coeffs = vCoeffs;
+      stepSize = 2.0f;
+    }
+
+    @Override
+    public void onNewShader(GlShader shader) {
+      xUnitLoc = shader.getUniformLocation("xUnit");
+      coeffsLoc = shader.getUniformLocation("coeffs");
+    }
+
+    @Override
+    public void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth, int frameHeight,
+        int viewportWidth, int viewportHeight) {
+      GLES20.glUniform4fv(coeffsLoc, /* count= */ 1, coeffs, /* offset= */ 0);
+      // Matrix * (1;0;0;0) / (width / stepSize). Note that OpenGL uses column major order.
+      GLES20.glUniform2f(
+          xUnitLoc, stepSize * texMatrix[0] / frameWidth, stepSize * texMatrix[1] / frameWidth);
+    }
+  }
+
+  private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
+  private final GlTextureFrameBuffer i420TextureFrameBuffer =
+      new GlTextureFrameBuffer(GLES20.GL_RGBA);
+  private final ShaderCallbacks shaderCallbacks = new ShaderCallbacks();
+  private final GlGenericDrawer drawer = new GlGenericDrawer(FRAGMENT_SHADER, shaderCallbacks);
+  private final VideoFrameDrawer videoFrameDrawer;
+
+  /**
+   * This class should be constructed on a thread that has an active EGL context.
+   */
+  public YuvConverter() {
+    this(new VideoFrameDrawer());
+  }
+
+  public YuvConverter(VideoFrameDrawer videoFrameDrawer) {
+    this.videoFrameDrawer = videoFrameDrawer;
+    threadChecker.detachThread();
+  }
+
+  /** Converts the texture buffer to I420. */
+  @Nullable
+  public I420Buffer convert(TextureBuffer inputTextureBuffer) {
+    try {
+      return convertInternal(inputTextureBuffer);
+    } catch (GLException e) {
+      Logging.w(TAG, "Failed to convert TextureBuffer", e);
+    }
+    return null;
+  }
+
+  private I420Buffer convertInternal(TextureBuffer inputTextureBuffer) {
+    TextureBuffer preparedBuffer = (TextureBuffer) videoFrameDrawer.prepareBufferForViewportSize(
+        inputTextureBuffer, inputTextureBuffer.getWidth(), inputTextureBuffer.getHeight());
+
+    // We draw into a buffer laid out like
+    //
+    //    +---------+
+    //    |         |
+    //    |  Y      |
+    //    |         |
+    //    |         |
+    //    +----+----+
+    //    | U  | V  |
+    //    |    |    |
+    //    +----+----+
+    //
+    // In memory, we use the same stride for all of Y, U and V. The
+    // U data starts at offset `height` * `stride` from the Y data,
+    // and the V data starts at at offset |stride/2| from the U
+    // data, with rows of U and V data alternating.
+    //
+    // Now, it would have made sense to allocate a pixel buffer with
+    // a single byte per pixel (EGL10.EGL_COLOR_BUFFER_TYPE,
+    // EGL10.EGL_LUMINANCE_BUFFER,), but that seems to be
+    // unsupported by devices. So do the following hack: Allocate an
+    // RGBA buffer, of width `stride`/4. To render each of these
+    // large pixels, sample the texture at 4 different x coordinates
+    // and store the results in the four components.
+    //
+    // Since the V data needs to start on a boundary of such a
+    // larger pixel, it is not sufficient that `stride` is even, it
+    // has to be a multiple of 8 pixels.
+    final int frameWidth = preparedBuffer.getWidth();
+    final int frameHeight = preparedBuffer.getHeight();
+    final int stride = ((frameWidth + 7) / 8) * 8;
+    final int uvHeight = (frameHeight + 1) / 2;
+    // Total height of the combined memory layout.
+    final int totalHeight = frameHeight + uvHeight;
+    final ByteBuffer i420ByteBuffer = JniCommon.nativeAllocateByteBuffer(stride * totalHeight);
+    // Viewport width is divided by four since we are squeezing in four color bytes in each RGBA
+    // pixel.
+    final int viewportWidth = stride / 4;
+
+    // Produce a frame buffer starting at top-left corner, not bottom-left.
+    final Matrix renderMatrix = new Matrix();
+    renderMatrix.preTranslate(0.5f, 0.5f);
+    renderMatrix.preScale(1f, -1f);
+    renderMatrix.preTranslate(-0.5f, -0.5f);
+
+    i420TextureFrameBuffer.setSize(viewportWidth, totalHeight);
+
+    // Bind our framebuffer.
+    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, i420TextureFrameBuffer.getFrameBufferId());
+    GlUtil.checkNoGLES2Error("glBindFramebuffer");
+
+    // Draw Y.
+    shaderCallbacks.setPlaneY();
+    VideoFrameDrawer.drawTexture(drawer, preparedBuffer, renderMatrix, frameWidth, frameHeight,
+        /* viewportX= */ 0, /* viewportY= */ 0, viewportWidth,
+        /* viewportHeight= */ frameHeight);
+
+    // Draw U.
+    shaderCallbacks.setPlaneU();
+    VideoFrameDrawer.drawTexture(drawer, preparedBuffer, renderMatrix, frameWidth, frameHeight,
+        /* viewportX= */ 0, /* viewportY= */ frameHeight, viewportWidth / 2,
+        /* viewportHeight= */ uvHeight);
+
+    // Draw V.
+    shaderCallbacks.setPlaneV();
+    VideoFrameDrawer.drawTexture(drawer, preparedBuffer, renderMatrix, frameWidth, frameHeight,
+        /* viewportX= */ viewportWidth / 2, /* viewportY= */ frameHeight, viewportWidth / 2,
+        /* viewportHeight= */ uvHeight);
+
+    GLES20.glReadPixels(0, 0, i420TextureFrameBuffer.getWidth(), i420TextureFrameBuffer.getHeight(),
+        GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, i420ByteBuffer);
+
+    GlUtil.checkNoGLES2Error("YuvConverter.convert");
+
+    // Restore normal framebuffer.
+    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+
+    // Prepare Y, U, and V ByteBuffer slices.
+    final int yPos = 0;
+    final int uPos = yPos + stride * frameHeight;
+    // Rows of U and V alternate in the buffer, so V data starts after the first row of U.
+    final int vPos = uPos + stride / 2;
+
+    i420ByteBuffer.position(yPos);
+    i420ByteBuffer.limit(yPos + stride * frameHeight);
+    final ByteBuffer dataY = i420ByteBuffer.slice();
+
+    i420ByteBuffer.position(uPos);
+    // The last row does not have padding.
+    final int uvSize = stride * (uvHeight - 1) + stride / 2;
+    i420ByteBuffer.limit(uPos + uvSize);
+    final ByteBuffer dataU = i420ByteBuffer.slice();
+
+    i420ByteBuffer.position(vPos);
+    i420ByteBuffer.limit(vPos + uvSize);
+    final ByteBuffer dataV = i420ByteBuffer.slice();
+
+    preparedBuffer.release();
+
+    return JavaI420Buffer.wrap(frameWidth, frameHeight, dataY, stride, dataU, stride, dataV, stride,
+        () -> { JniCommon.nativeFreeByteBuffer(i420ByteBuffer); });
+  }
+
+  public void release() {
+    threadChecker.checkIsOnValidThread();
+    drawer.release();
+    i420TextureFrameBuffer.release();
+    videoFrameDrawer.release();
+    // Allow this class to be reused.
+    threadChecker.detachThread();
+  }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/YuvHelper.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/YuvHelper.java
new file mode 100644
index 00000000..8a46a571
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/YuvHelper.java
@@ -0,0 +1,236 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+/** Wraps libyuv methods to Java. All passed byte buffers must be direct byte buffers. */
+public class YuvHelper {
+  /**
+   * Copy I420 Buffer to a contiguously allocated buffer.
+   * <p>
In Android, MediaCodec can request a buffer of a specific layout with the stride and
+   * slice-height (or plane height), and this function is used in this case.
+   * <p>
+   * For more information, see
+   * https://cs.android.com/android/platform/superproject/+/64fea7e5726daebc40f46890100837c01091100d:frameworks/base/media/java/android/media/MediaFormat.java;l=568
+   * @param dstStrideY the stride of output buffers' Y plane.
+   * @param dstSliceHeightY the slice-height of output buffer's Y plane.
+   * @param dstStrideU the stride of output buffers' U (and V) plane.
+   * @param dstSliceHeightU the slice-height of output buffer's U (and V) plane
+   */
+  public static void I420Copy(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+      ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int dstWidth, int dstHeight, int dstStrideY,
+      int dstSliceHeightY, int dstStrideU, int dstSliceHeightU) {
+    final int chromaWidth = (dstWidth + 1) / 2;
+    final int chromaHeight = (dstHeight + 1) / 2;
+
+    final int dstStartY = 0;
+    final int dstEndY = dstStartY + dstStrideY * dstHeight;
+    final int dstStartU = dstStartY + dstStrideY * dstSliceHeightY;
+    final int dstEndU = dstStartU + dstStrideU * chromaHeight;
+    final int dstStartV = dstStartU + dstStrideU * dstSliceHeightU;
+    // The last line doesn't need any padding, so use chromaWidth to calculate the exact end
+    // position.
+    final int dstEndV = dstStartV + dstStrideU * (chromaHeight - 1) + chromaWidth;
+    if (dst.capacity() < dstEndV) {
+      throw new IllegalArgumentException("Expected destination buffer capacity to be at least "
+          + dstEndV + " was " + dst.capacity());
+    }
+
+    dst.limit(dstEndY);
+    dst.position(dstStartY);
+    final ByteBuffer dstY = dst.slice();
+    dst.limit(dstEndU);
+    dst.position(dstStartU);
+    final ByteBuffer dstU = dst.slice();
+    dst.limit(dstEndV);
+    dst.position(dstStartV);
+    final ByteBuffer dstV = dst.slice();
+
+    I420Copy(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY, dstU,
+        dstStrideU, dstV, dstStrideU, dstWidth, dstHeight);
+  }
+
+  /** Helper method for copying I420 to tightly packed destination buffer. */
+  public static void I420Copy(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+      ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int dstWidth, int dstHeight) {
+    I420Copy(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dst, dstWidth, dstHeight,
+        dstWidth, dstHeight, (dstWidth + 1) / 2, (dstHeight + 1) / 2);
+  }
+
+  /** Helper method for copying I420 to buffer with the given stride and slice height. */
+  public static void I420Copy(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+      ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int dstWidth, int dstHeight, int dstStride,
+      int dstSliceHeight) {
+    I420Copy(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dst, dstWidth, dstHeight,
+        dstStride, dstSliceHeight, (dstStride + 1) / 2, (dstSliceHeight + 1) / 2);
+  }
+
+  /**
+   * Copy I420 Buffer to a contiguously allocated buffer.
+   * @param dstStrideY the stride of output buffers' Y plane.
+   * @param dstSliceHeightY the slice-height of output buffer's Y plane.
+   */
+  public static void I420ToNV12(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+      ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int dstWidth, int dstHeight, int dstStrideY,
+      int dstSliceHeightY) {
+    final int chromaHeight = (dstHeight + 1) / 2;
+    final int chromaWidth = (dstWidth + 1) / 2;
+
+    final int dstStartY = 0;
+    final int dstEndY = dstStartY + dstStrideY * dstHeight;
+    final int dstStartUV = dstStartY + dstStrideY * dstSliceHeightY;
+    final int dstEndUV = dstStartUV + chromaWidth * chromaHeight * 2;
+    if (dst.capacity() < dstEndUV) {
+      throw new IllegalArgumentException("Expected destination buffer capacity to be at least "
+          + dstEndUV + " was " + dst.capacity());
+    }
+
+    dst.limit(dstEndY);
+    dst.position(dstStartY);
+    final ByteBuffer dstY = dst.slice();
+    dst.limit(dstEndUV);
+    dst.position(dstStartUV);
+    final ByteBuffer dstUV = dst.slice();
+
+    I420ToNV12(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY, dstUV,
+        chromaWidth * 2, dstWidth, dstHeight);
+  }
+
+  /** Helper method for copying I420 to tightly packed NV12 destination buffer. */
+  public static void I420ToNV12(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+      ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int dstWidth, int dstHeight) {
+    I420ToNV12(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dst, dstWidth, dstHeight,
+        dstWidth, dstHeight);
+  }
+
+  /** Helper method for rotating I420 to tightly packed destination buffer. */
+  public static void I420Rotate(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+      ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int srcWidth, int srcHeight,
+      int rotationMode) {
+    checkNotNull(srcY, "srcY");
+    checkNotNull(srcU, "srcU");
+    checkNotNull(srcV, "srcV");
+    checkNotNull(dst, "dst");
+    final int dstWidth = rotationMode % 180 == 0 ? srcWidth : srcHeight;
+    final int dstHeight = rotationMode % 180 == 0 ? srcHeight : srcWidth;
+
+    final int dstChromaHeight = (dstHeight + 1) / 2;
+    final int dstChromaWidth = (dstWidth + 1) / 2;
+
+    final int minSize = dstWidth * dstHeight + dstChromaWidth * dstChromaHeight * 2;
+    if (dst.capacity() < minSize) {
+      throw new IllegalArgumentException("Expected destination buffer capacity to be at least "
+          + minSize + " was " + dst.capacity());
+    }
+
+    final int startY = 0;
+    final int startU = dstHeight * dstWidth;
+    final int startV = startU + dstChromaHeight * dstChromaWidth;
+
+    dst.position(startY);
+    final ByteBuffer dstY = dst.slice();
+    dst.position(startU);
+    final ByteBuffer dstU = dst.slice();
+    dst.position(startV);
+    final ByteBuffer dstV = dst.slice();
+
+    nativeI420Rotate(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstWidth, dstU,
+        dstChromaWidth, dstV, dstChromaWidth, srcWidth, srcHeight, rotationMode);
+  }
+
+  /** Helper method for copying a single colour plane. */
+  public static void copyPlane(
+      ByteBuffer src, int srcStride, ByteBuffer dst, int dstStride, int width, int height) {
+    nativeCopyPlane(
+        checkNotNull(src, "src"), srcStride, checkNotNull(dst, "dst"), dstStride, width, height);
+  }
+
+  /** Converts ABGR little endian (rgba in memory) to I420. */
+  public static void ABGRToI420(ByteBuffer src, int srcStride, ByteBuffer dstY, int dstStrideY,
+      ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV, int width, int height) {
+    nativeABGRToI420(checkNotNull(src, "src"), srcStride, checkNotNull(dstY, "dstY"), dstStrideY,
+        checkNotNull(dstU, "dstU"), dstStrideU, checkNotNull(dstV, "dstV"), dstStrideV, width,
+        height);
+  }
+
+  /**
+   * Copies I420 to the I420 dst buffer.
+   * <p>
+   * Unlike `libyuv::I420Copy`, this function checks if the height <= 0, so flipping is not
+   * supported.
+   */
+  public static void I420Copy(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+      ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU,
+      int dstStrideU, ByteBuffer dstV, int dstStrideV, int width, int height) {
+    checkNotNull(srcY, "srcY");
+    checkNotNull(srcU, "srcU");
+    checkNotNull(srcV, "srcV");
+    checkNotNull(dstY, "dstY");
+    checkNotNull(dstU, "dstU");
+    checkNotNull(dstV, "dstV");
+    // NOTE(review): the guard rejects zero as well as negative sizes, although the
+    // message only mentions "negative"; behavior matches upstream WebRTC.
+    if (width <= 0 || height <= 0) {
+      throw new IllegalArgumentException("I420Copy: width and height should not be negative");
+    }
+    nativeI420Copy(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY, dstU,
+        dstStrideU, dstV, dstStrideV, width, height);
+  }
+
+  public static void I420ToNV12(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+      ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY, ByteBuffer dstUV,
+      int dstStrideUV, int width, int height) {
+    checkNotNull(srcY, "srcY");
+    checkNotNull(srcU, "srcU");
+    checkNotNull(srcV, "srcV");
+    checkNotNull(dstY, "dstY");
+    checkNotNull(dstUV, "dstUV");
+    if (width <= 0 || height <= 0) {
+      throw new IllegalArgumentException("I420ToNV12: width and height should not be negative");
+    }
+    nativeI420ToNV12(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY, dstUV,
+        dstStrideUV, width, height);
+  }
+
+  public static void I420Rotate(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
+      ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU,
+      int dstStrideU, ByteBuffer dstV, int dstStrideV, int srcWidth, int srcHeight,
+      int rotationMode) {
+    checkNotNull(srcY, "srcY");
+    checkNotNull(srcU, "srcU");
+    checkNotNull(srcV, "srcV");
+    checkNotNull(dstY, "dstY");
+    checkNotNull(dstU, "dstU");
+    checkNotNull(dstV, "dstV");
+    nativeI420Rotate(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY, dstU,
+        dstStrideU, dstV, dstStrideV, srcWidth, srcHeight, rotationMode);
+  }
+
+  private static <T> T checkNotNull(T obj, String description) {
+    if (obj == null) {
+      throw new NullPointerException(description + " should not be null");
+    }
+    return obj;
+  }
+
+  private static native void nativeCopyPlane(
+      ByteBuffer src, int srcStride, ByteBuffer dst, int dstStride, int width, int height);
+  private static native void nativeI420Copy(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU,
+      int srcStrideU, ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY,
+      ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV, int width, int height);
+  private static native void nativeI420ToNV12(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU,
+      int srcStrideU, ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY,
+      ByteBuffer dstUV, int dstStrideUV, int width, int height);
+  private static native void nativeI420Rotate(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU,
+      int srcStrideU, ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY,
+      ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV, int srcWidth, int srcHeight,
+      int rotationMode);
+  private static native void nativeABGRToI420(ByteBuffer src, int srcStride, ByteBuffer dstY,
+      int dstStrideY, ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV, int width,
+      int height);
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/AudioDeviceModule.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/AudioDeviceModule.java
new file mode 100644
index 00000000..5a0bf5c7
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/AudioDeviceModule.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree.
An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.audio;
+
+/**
+ * This interface is a thin wrapper on top of a native C++ webrtc::AudioDeviceModule (ADM). The
+ * reason for basing it on a native ADM instead of a pure Java interface is that we have two native
+ * Android implementations (OpenSLES and AAudio) that does not make sense to wrap through JNI.
+ *
+ * <p>Note: This class is still under development and may change without notice.
+ */
+public interface AudioDeviceModule {
+  // NOTE(review): vendored upstream WebRTC code. The default methods below are
+  // no-ops returning false; implementations override them where supported.
+  /**
+   * Returns a C++ pointer to a webrtc::AudioDeviceModule. Caller does _not_ take ownership and
+   * lifetime is handled through the release() call.
+   */
+  long getNativeAudioDeviceModulePointer();
+
+  /**
+   * Release resources for this AudioDeviceModule, including native resources. The object should not
+   * be used after this call.
+   */
+  void release();
+
+  /** Control muting/unmuting the speaker. */
+  void setSpeakerMute(boolean mute);
+
+  /** Control muting/unmuting the microphone. */
+  void setMicrophoneMute(boolean mute);
+
+  /**
+   * Enable or disable built in noise suppressor. Returns true if the enabling was successful,
+   * otherwise false is returned.
+   */
+  default boolean setNoiseSuppressorEnabled(boolean enabled) {
+    return false;
+  }
+
+  /**
+   * Sets the preferred field dimension for the built-in microphone. Returns
+   * true if setting was successful, otherwise false is returned.
+   * This functionality can be implemented with
+   * {@code android.media.MicrophoneDirection.setPreferredMicrophoneFieldDimension}.
+   */
+  default boolean setPreferredMicrophoneFieldDimension(float dimension) {
+    return false;
+  }
+}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java
new file mode 100644
index 00000000..8dd22af5
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java
@@ -0,0 +1,444 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS.
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc.audio; + +import android.content.Context; +import android.media.AudioAttributes; +import android.media.AudioDeviceInfo; +import android.media.AudioManager; +import android.media.AudioRecord; +import android.os.Build; +import androidx.annotation.RequiresApi; +import java.util.concurrent.ScheduledExecutorService; +import org.webrtc.JniCommon; +import org.webrtc.Logging; + +/** + * AudioDeviceModule implemented using android.media.AudioRecord as input and + * android.media.AudioTrack as output. + */ +public class JavaAudioDeviceModule implements AudioDeviceModule { + private static final String TAG = "JavaAudioDeviceModule"; + + public static Builder builder(Context context) { + return new Builder(context); + } + + public static class Builder { + private final Context context; + private ScheduledExecutorService scheduler; + private final AudioManager audioManager; + private int inputSampleRate; + private int outputSampleRate; + private int audioSource = WebRtcAudioRecord.DEFAULT_AUDIO_SOURCE; + private int audioFormat = WebRtcAudioRecord.DEFAULT_AUDIO_FORMAT; + private AudioTrackErrorCallback audioTrackErrorCallback; + private AudioRecordErrorCallback audioRecordErrorCallback; + private SamplesReadyCallback samplesReadyCallback; + private AudioTrackStateCallback audioTrackStateCallback; + private AudioRecordStateCallback audioRecordStateCallback; + private boolean useHardwareAcousticEchoCanceler = isBuiltInAcousticEchoCancelerSupported(); + private boolean useHardwareNoiseSuppressor = isBuiltInNoiseSuppressorSupported(); + private boolean useStereoInput; + private boolean useStereoOutput; + private AudioAttributes audioAttributes; + private boolean useLowLatency; + private boolean enableVolumeLogger; + + private Builder(Context context) { + this.context = context; + this.audioManager = (AudioManager) 
context.getSystemService(Context.AUDIO_SERVICE); + this.inputSampleRate = WebRtcAudioManager.getSampleRate(audioManager); + this.outputSampleRate = WebRtcAudioManager.getSampleRate(audioManager); + this.useLowLatency = false; + this.enableVolumeLogger = true; + } + + public Builder setScheduler(ScheduledExecutorService scheduler) { + this.scheduler = scheduler; + return this; + } + + /** + * Call this method if the default handling of querying the native sample rate shall be + * overridden. Can be useful on some devices where the available Android APIs are known to + * return invalid results. + */ + public Builder setSampleRate(int sampleRate) { + Logging.d(TAG, "Input/Output sample rate overridden to: " + sampleRate); + this.inputSampleRate = sampleRate; + this.outputSampleRate = sampleRate; + return this; + } + + /** + * Call this method to specifically override input sample rate. + */ + public Builder setInputSampleRate(int inputSampleRate) { + Logging.d(TAG, "Input sample rate overridden to: " + inputSampleRate); + this.inputSampleRate = inputSampleRate; + return this; + } + + /** + * Call this method to specifically override output sample rate. + */ + public Builder setOutputSampleRate(int outputSampleRate) { + Logging.d(TAG, "Output sample rate overridden to: " + outputSampleRate); + this.outputSampleRate = outputSampleRate; + return this; + } + + /** + * Call this to change the audio source. The argument should be one of the values from + * android.media.MediaRecorder.AudioSource. The default is AudioSource.VOICE_COMMUNICATION. + */ + public Builder setAudioSource(int audioSource) { + this.audioSource = audioSource; + return this; + } + + /** + * Call this to change the audio format. The argument should be one of the values from + * android.media.AudioFormat ENCODING_PCM_8BIT, ENCODING_PCM_16BIT or ENCODING_PCM_FLOAT. + * Default audio data format is PCM 16 bit per sample. + * Guaranteed to be supported by all devices. 
+ */ + public Builder setAudioFormat(int audioFormat) { + this.audioFormat = audioFormat; + return this; + } + + /** + * Set a callback to retrieve errors from the AudioTrack. + */ + public Builder setAudioTrackErrorCallback(AudioTrackErrorCallback audioTrackErrorCallback) { + this.audioTrackErrorCallback = audioTrackErrorCallback; + return this; + } + + /** + * Set a callback to retrieve errors from the AudioRecord. + */ + public Builder setAudioRecordErrorCallback(AudioRecordErrorCallback audioRecordErrorCallback) { + this.audioRecordErrorCallback = audioRecordErrorCallback; + return this; + } + + /** + * Set a callback to listen to the raw audio input from the AudioRecord. + */ + public Builder setSamplesReadyCallback(SamplesReadyCallback samplesReadyCallback) { + this.samplesReadyCallback = samplesReadyCallback; + return this; + } + + /** + * Set a callback to retrieve information from the AudioTrack on when audio starts and stop. + */ + public Builder setAudioTrackStateCallback(AudioTrackStateCallback audioTrackStateCallback) { + this.audioTrackStateCallback = audioTrackStateCallback; + return this; + } + + /** + * Set a callback to retrieve information from the AudioRecord on when audio starts and stops. + */ + public Builder setAudioRecordStateCallback(AudioRecordStateCallback audioRecordStateCallback) { + this.audioRecordStateCallback = audioRecordStateCallback; + return this; + } + + /** + * Control if the built-in HW noise suppressor should be used or not. The default is on if it is + * supported. It is possible to query support by calling isBuiltInNoiseSuppressorSupported(). 
+ */ + public Builder setUseHardwareNoiseSuppressor(boolean useHardwareNoiseSuppressor) { + if (useHardwareNoiseSuppressor && !isBuiltInNoiseSuppressorSupported()) { + Logging.e(TAG, "HW NS not supported"); + useHardwareNoiseSuppressor = false; + } + this.useHardwareNoiseSuppressor = useHardwareNoiseSuppressor; + return this; + } + + /** + * Control if the built-in HW acoustic echo canceler should be used or not. The default is on if + * it is supported. It is possible to query support by calling + * isBuiltInAcousticEchoCancelerSupported(). + */ + public Builder setUseHardwareAcousticEchoCanceler(boolean useHardwareAcousticEchoCanceler) { + if (useHardwareAcousticEchoCanceler && !isBuiltInAcousticEchoCancelerSupported()) { + Logging.e(TAG, "HW AEC not supported"); + useHardwareAcousticEchoCanceler = false; + } + this.useHardwareAcousticEchoCanceler = useHardwareAcousticEchoCanceler; + return this; + } + + /** + * Control if stereo input should be used or not. The default is mono. + */ + public Builder setUseStereoInput(boolean useStereoInput) { + this.useStereoInput = useStereoInput; + return this; + } + + /** + * Control if stereo output should be used or not. The default is mono. + */ + public Builder setUseStereoOutput(boolean useStereoOutput) { + this.useStereoOutput = useStereoOutput; + return this; + } + + /** + * Control if the low-latency mode should be used. The default is disabled. + */ + public Builder setUseLowLatency(boolean useLowLatency) { + this.useLowLatency = useLowLatency; + return this; + } + + /** + * Set custom {@link AudioAttributes} to use. + */ + public Builder setAudioAttributes(AudioAttributes audioAttributes) { + this.audioAttributes = audioAttributes; + return this; + } + + /** Disables the volume logger on the audio output track. 
   */
  public Builder setEnableVolumeLogger(boolean enableVolumeLogger) {
    this.enableVolumeLogger = enableVolumeLogger;
    return this;
  }

  /**
   * Construct an AudioDeviceModule based on the supplied arguments. The caller takes ownership
   * and is responsible for calling release().
   */
  public JavaAudioDeviceModule createAudioDeviceModule() {
    Logging.d(TAG, "createAudioDeviceModule");
    if (useHardwareNoiseSuppressor) {
      Logging.d(TAG, "HW NS will be used.");
    } else {
      if (isBuiltInNoiseSuppressorSupported()) {
        Logging.d(TAG, "Overriding default behavior; now using WebRTC NS!");
      }
      Logging.d(TAG, "HW NS will not be used.");
    }
    if (useHardwareAcousticEchoCanceler) {
      Logging.d(TAG, "HW AEC will be used.");
    } else {
      if (isBuiltInAcousticEchoCancelerSupported()) {
        Logging.d(TAG, "Overriding default behavior; now using WebRTC AEC!");
      }
      Logging.d(TAG, "HW AEC will not be used.");
    }
    // Low-latency mode was introduced in API version 26, see
    // https://developer.android.com/reference/android/media/AudioTrack#PERFORMANCE_MODE_LOW_LATENCY
    final int MIN_LOW_LATENCY_SDK_VERSION = 26;
    if (useLowLatency && Build.VERSION.SDK_INT >= MIN_LOW_LATENCY_SDK_VERSION) {
      Logging.d(TAG, "Low latency mode will be used.");
    }
    // Fall back to a default shared scheduler when the caller did not inject one.
    ScheduledExecutorService executor = this.scheduler;
    if (executor == null) {
      executor = WebRtcAudioRecord.newDefaultScheduler();
    }
    final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, executor, audioManager,
        audioSource, audioFormat, audioRecordErrorCallback, audioRecordStateCallback,
        samplesReadyCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor);

    final WebRtcAudioTrack audioOutput =
        new WebRtcAudioTrack(context, audioManager, audioAttributes, audioTrackErrorCallback,
            audioTrackStateCallback, useLowLatency, enableVolumeLogger);
    return new JavaAudioDeviceModule(context, audioManager, audioInput, audioOutput,
        inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput);
  }
  }

  /* AudioRecord */
  // Audio recording error handler functions.
  public enum AudioRecordStartErrorCode {
    AUDIO_RECORD_START_EXCEPTION,
    AUDIO_RECORD_START_STATE_MISMATCH,
  }

  public static interface AudioRecordErrorCallback {
    void onWebRtcAudioRecordInitError(String errorMessage);
    void onWebRtcAudioRecordStartError(AudioRecordStartErrorCode errorCode, String errorMessage);
    void onWebRtcAudioRecordError(String errorMessage);
  }

  /** Called when audio recording starts and stops. */
  public static interface AudioRecordStateCallback {
    void onWebRtcAudioRecordStart();
    void onWebRtcAudioRecordStop();
  }

  /**
   * Contains audio sample information.
   */
  public static class AudioSamples {
    /** See {@link AudioRecord#getAudioFormat()} */
    private final int audioFormat;
    /** See {@link AudioRecord#getChannelCount()} */
    private final int channelCount;
    /** See {@link AudioRecord#getSampleRate()} */
    private final int sampleRate;

    private final byte[] data;

    public AudioSamples(int audioFormat, int channelCount, int sampleRate, byte[] data) {
      this.audioFormat = audioFormat;
      this.channelCount = channelCount;
      this.sampleRate = sampleRate;
      this.data = data;
    }

    public int getAudioFormat() {
      return audioFormat;
    }

    public int getChannelCount() {
      return channelCount;
    }

    public int getSampleRate() {
      return sampleRate;
    }

    public byte[] getData() {
      return data;
    }
  }

  /** Called when new audio samples are ready. This should only be set for debug purposes */
  public static interface SamplesReadyCallback {
    void onWebRtcAudioRecordSamplesReady(AudioSamples samples);
  }

  /* AudioTrack */
  // Audio playout/track error handler functions.
  public enum AudioTrackStartErrorCode {
    AUDIO_TRACK_START_EXCEPTION,
    AUDIO_TRACK_START_STATE_MISMATCH,
  }

  public static interface AudioTrackErrorCallback {
    void onWebRtcAudioTrackInitError(String errorMessage);
    void onWebRtcAudioTrackStartError(AudioTrackStartErrorCode errorCode, String errorMessage);
    void onWebRtcAudioTrackError(String errorMessage);
  }

  /** Called when audio playout starts and stops. */
  public static interface AudioTrackStateCallback {
    void onWebRtcAudioTrackStart();
    void onWebRtcAudioTrackStop();
  }

  /**
   * Returns true if the device supports built-in HW AEC, and the UUID is approved (some UUIDs can
   * be excluded).
   */
  public static boolean isBuiltInAcousticEchoCancelerSupported() {
    return WebRtcAudioEffects.isAcousticEchoCancelerSupported();
  }

  /**
   * Returns true if the device supports built-in HW NS, and the UUID is approved (some UUIDs can be
   * excluded).
   */
  public static boolean isBuiltInNoiseSuppressorSupported() {
    return WebRtcAudioEffects.isNoiseSuppressorSupported();
  }

  private final Context context;
  private final AudioManager audioManager;
  private final WebRtcAudioRecord audioInput;
  private final WebRtcAudioTrack audioOutput;
  private final int inputSampleRate;
  private final int outputSampleRate;
  private final boolean useStereoInput;
  private final boolean useStereoOutput;

  // Guards lazy creation and release of the native AudioDeviceModule pointer.
  private final Object nativeLock = new Object();
  private long nativeAudioDeviceModule;

  private JavaAudioDeviceModule(Context context, AudioManager audioManager,
      WebRtcAudioRecord audioInput, WebRtcAudioTrack audioOutput, int inputSampleRate,
      int outputSampleRate, boolean useStereoInput, boolean useStereoOutput) {
    this.context = context;
    this.audioManager = audioManager;
    this.audioInput = audioInput;
    this.audioOutput = audioOutput;
    this.inputSampleRate = inputSampleRate;
    this.outputSampleRate = outputSampleRate;
    this.useStereoInput = useStereoInput;
    this.useStereoOutput = useStereoOutput;
  }

  @Override
  public long getNativeAudioDeviceModulePointer() {
    synchronized (nativeLock) {
      // Create the native module lazily on first use; subsequent calls return the same pointer.
      if (nativeAudioDeviceModule == 0) {
        nativeAudioDeviceModule = nativeCreateAudioDeviceModule(context, audioManager, audioInput,
            audioOutput, inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput);
      }
      return nativeAudioDeviceModule;
    }
  }

  @Override
  public void release() {
    synchronized (nativeLock) {
      // Idempotent: only releases the native reference if one was actually created.
      if (nativeAudioDeviceModule != 0) {
        JniCommon.nativeReleaseRef(nativeAudioDeviceModule);
        nativeAudioDeviceModule = 0;
      }
    }
  }

  @Override
  public void setSpeakerMute(boolean mute) {
    Logging.d(TAG, "setSpeakerMute: " + mute);
    audioOutput.setSpeakerMute(mute);
  }

  @Override
  public void setMicrophoneMute(boolean mute) {
    Logging.d(TAG, "setMicrophoneMute: " + mute);
    audioInput.setMicrophoneMute(mute);
  }

  @Override
  public boolean setNoiseSuppressorEnabled(boolean enabled) {
    Logging.d(TAG, "setNoiseSuppressorEnabled: " + enabled);
    return audioInput.setNoiseSuppressorEnabled(enabled);
  }

  /**
   * Start to prefer a specific {@link AudioDeviceInfo} device for recording. Typically this should
   * only be used if a client gives an explicit option for choosing a physical device to record
   * from. Otherwise the best-matching device for other parameters will be used. Calling after
   * recording is started may cause a temporary interruption if the audio routing changes.
   */
  @RequiresApi(Build.VERSION_CODES.M)
  public void setPreferredInputDevice(AudioDeviceInfo preferredInputDevice) {
    Logging.d(TAG, "setPreferredInputDevice: " + preferredInputDevice);
    audioInput.setPreferredDevice(preferredInputDevice);
  }

  private static native long nativeCreateAudioDeviceModule(Context context,
      AudioManager audioManager, WebRtcAudioRecord audioInput, WebRtcAudioTrack audioOutput,
      int inputSampleRate, int outputSampleRate, boolean useStereoInput, boolean useStereoOutput);
}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/LegacyAudioDeviceModule.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/LegacyAudioDeviceModule.java
new file mode 100644
index 00000000..de0d0d61
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/LegacyAudioDeviceModule.java
@@ -0,0 +1,46 @@
/*
 * Copyright 2018 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.audio;

import org.webrtc.voiceengine.WebRtcAudioRecord;
import org.webrtc.voiceengine.WebRtcAudioTrack;

/**
 * This class represents the legacy AudioDeviceModule that is currently hardcoded into C++ WebRTC.
 * It will return a null native AudioDeviceModule pointer, leading to an internal object being
 * created inside WebRTC that is controlled by static calls to the classes under the voiceengine
 * package. Please use the new JavaAudioDeviceModule instead of this class.
+ */ +@Deprecated +public class LegacyAudioDeviceModule implements AudioDeviceModule { + @Override + public long getNativeAudioDeviceModulePointer() { + // Returning a null pointer will make WebRTC construct the built-in legacy AudioDeviceModule for + // Android internally. + return 0; + } + + @Override + public void release() { + // All control for this ADM goes through static global methods and the C++ object is owned + // internally by WebRTC. + } + + @Override + public void setSpeakerMute(boolean mute) { + WebRtcAudioTrack.setSpeakerMute(mute); + } + + @Override + public void setMicrophoneMute(boolean mute) { + WebRtcAudioRecord.setMicrophoneMute(mute); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/LowLatencyAudioBufferManager.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/LowLatencyAudioBufferManager.java new file mode 100644 index 00000000..70c625ab --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/LowLatencyAudioBufferManager.java @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc.audio; + +import android.media.AudioTrack; +import android.os.Build; +import org.webrtc.Logging; + +// Lowers the buffer size if no underruns are detected for 100 ms. Once an +// underrun is detected, the buffer size is increased by 10 ms and it will not +// be lowered further. The buffer size will never be increased more than +// 5 times, to avoid the possibility of the buffer size increasing without +// bounds. 
class LowLatencyAudioBufferManager {
  private static final String TAG = "LowLatencyAudioBufferManager";
  // The underrun count that was valid during the previous call to maybeAdjustBufferSize(). Used to
  // detect increases in the value.
  private int prevUnderrunCount;
  // The number of ticks to wait without an underrun before decreasing the buffer size.
  // One tick corresponds to one maybeAdjustBufferSize() call, i.e. ~10 ms of audio, so 10 ticks
  // matches the "100 ms" window described in the class comment above.
  private int ticksUntilNextDecrease;
  // Indicate if we should continue to decrease the buffer size.
  private boolean keepLoweringBufferSize;
  // How often the buffer size was increased.
  private int bufferIncreaseCounter;

  public LowLatencyAudioBufferManager() {
    this.prevUnderrunCount = 0;
    this.ticksUntilNextDecrease = 10;
    this.keepLoweringBufferSize = true;
    this.bufferIncreaseCounter = 0;
  }

  public void maybeAdjustBufferSize(AudioTrack audioTrack) {
    // AudioTrack.getUnderrunCount()/setBufferSizeInFrames() require API 26; below that this
    // method is a no-op.
    if (Build.VERSION.SDK_INT >= 26) {
      final int underrunCount = audioTrack.getUnderrunCount();
      if (underrunCount > prevUnderrunCount) {
        // Don't increase buffer more than 5 times. Continuing to increase the buffer size
        // could be harmful on low-power devices that regularly experience underruns under
        // normal conditions.
        if (bufferIncreaseCounter < 5) {
          // Underrun detected, increase buffer size by 10ms.
          // playbackRate is in frames per second, so playbackRate / 100 is 10 ms worth of frames.
          final int currentBufferSize = audioTrack.getBufferSizeInFrames();
          final int newBufferSize = currentBufferSize + audioTrack.getPlaybackRate() / 100;
          Logging.d(TAG,
              "Underrun detected! Increasing AudioTrack buffer size from " + currentBufferSize
                  + " to " + newBufferSize);
          audioTrack.setBufferSizeInFrames(newBufferSize);
          bufferIncreaseCounter++;
        }
        // Stop trying to lower the buffer size.
        keepLoweringBufferSize = false;
        prevUnderrunCount = underrunCount;
        ticksUntilNextDecrease = 10;
      } else if (keepLoweringBufferSize) {
        ticksUntilNextDecrease--;
        if (ticksUntilNextDecrease <= 0) {
          // No underrun seen for 100 ms, try to lower the buffer size by 10ms.
          final int bufferSize10ms = audioTrack.getPlaybackRate() / 100;
          // Never go below a buffer size of 10ms.
          final int currentBufferSize = audioTrack.getBufferSizeInFrames();
          final int newBufferSize = Math.max(bufferSize10ms, currentBufferSize - bufferSize10ms);
          if (newBufferSize != currentBufferSize) {
            Logging.d(TAG,
                "Lowering AudioTrack buffer size from " + currentBufferSize + " to "
                    + newBufferSize);
            audioTrack.setBufferSizeInFrames(newBufferSize);
          }
          ticksUntilNextDecrease = 10;
        }
      }
    }
  }
}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/VolumeLogger.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/VolumeLogger.java
new file mode 100644
index 00000000..06d5cd3a
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/VolumeLogger.java
@@ -0,0 +1,83 @@
/*
 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.audio;

import android.media.AudioManager;
import androidx.annotation.Nullable;
import java.util.Timer;
import java.util.TimerTask;
import org.webrtc.Logging;

// TODO(magjed): Do we really need to spawn a new thread just to log volume? Can we re-use the
// AudioTrackThread instead?
/**
 * Private utility class that periodically checks and logs the volume level of the audio stream that
 * is currently controlled by the volume control. A timer triggers logs once every 30 seconds and
 * the timer's associated thread is named "WebRtcVolumeLevelLoggerThread".
+ */ +class VolumeLogger { + private static final String TAG = "VolumeLogger"; + private static final String THREAD_NAME = "WebRtcVolumeLevelLoggerThread"; + private static final int TIMER_PERIOD_IN_SECONDS = 30; + + private final AudioManager audioManager; + private @Nullable Timer timer; + + public VolumeLogger(AudioManager audioManager) { + this.audioManager = audioManager; + } + + public void start() { + Logging.d(TAG, "start" + WebRtcAudioUtils.getThreadInfo()); + if (timer != null) { + return; + } + Logging.d(TAG, "audio mode is: " + WebRtcAudioUtils.modeToString(audioManager.getMode())); + + timer = new Timer(THREAD_NAME); + timer.schedule(new LogVolumeTask(audioManager.getStreamMaxVolume(AudioManager.STREAM_RING), + audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL)), + 0, TIMER_PERIOD_IN_SECONDS * 1000); + } + + private class LogVolumeTask extends TimerTask { + private final int maxRingVolume; + private final int maxVoiceCallVolume; + + LogVolumeTask(int maxRingVolume, int maxVoiceCallVolume) { + this.maxRingVolume = maxRingVolume; + this.maxVoiceCallVolume = maxVoiceCallVolume; + } + + @Override + public void run() { + final int mode = audioManager.getMode(); + if (mode == AudioManager.MODE_RINGTONE) { + Logging.d(TAG, + "STREAM_RING stream volume: " + audioManager.getStreamVolume(AudioManager.STREAM_RING) + + " (max=" + maxRingVolume + ")"); + } else if (mode == AudioManager.MODE_IN_COMMUNICATION) { + Logging.d(TAG, + "VOICE_CALL stream volume: " + + audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL) + + " (max=" + maxVoiceCallVolume + ")"); + } + } + } + + public void stop() { + Logging.d(TAG, "stop" + WebRtcAudioUtils.getThreadInfo()); + if (timer != null) { + timer.cancel(); + timer = null; + } + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/WebRtcAudioEffects.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/WebRtcAudioEffects.java new file mode 100644 index 00000000..1e80e485 --- 
/dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/WebRtcAudioEffects.java
@@ -0,0 +1,240 @@
/*
 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.audio;

import android.media.audiofx.AcousticEchoCanceler;
import android.media.audiofx.AudioEffect;
import android.media.audiofx.AudioEffect.Descriptor;
import android.media.audiofx.NoiseSuppressor;
import android.os.Build;
import androidx.annotation.Nullable;
import java.util.UUID;
import org.webrtc.Logging;

// This class wraps control of the platform audio effects. Supported
// effects are: AcousticEchoCanceler (AEC) and NoiseSuppressor (NS).
// Calling enable() will activate all effects that are
// supported by the device if the corresponding `shouldEnableXXX` member is set.
class WebRtcAudioEffects {
  private static final boolean DEBUG = false;

  private static final String TAG = "WebRtcAudioEffectsExternal";

  // UUIDs for Software Audio Effects that we want to avoid using.
  // The implementor field will be set to "The Android Open Source Project".
  private static final UUID AOSP_ACOUSTIC_ECHO_CANCELER =
      UUID.fromString("bb392ec0-8d4d-11e0-a896-0002a5d5c51b");
  private static final UUID AOSP_NOISE_SUPPRESSOR =
      UUID.fromString("c06c8400-8e06-11e0-9cb6-0002a5d5c51b");

  // Contains the available effect descriptors returned from the
  // AudioEffect.getEffects() call. This result is cached to avoid doing the
  // slow OS call multiple times.
  private static @Nullable Descriptor[] cachedEffects;

  // Contains the audio effect objects. Created in enable() and destroyed
  // in release().
  private @Nullable AcousticEchoCanceler aec;
  private @Nullable NoiseSuppressor ns;

  // Affects the final state given to the setEnabled() method on each effect.
  // The default state is set to "disabled" but each effect can also be enabled
  // by calling setAEC() and setNS().
  private boolean shouldEnableAec;
  private boolean shouldEnableNs;

  // Returns true if all conditions for supporting HW Acoustic Echo Cancellation (AEC) are
  // fulfilled.
  public static boolean isAcousticEchoCancelerSupported() {
    return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_AEC, AOSP_ACOUSTIC_ECHO_CANCELER);
  }

  // Returns true if all conditions for supporting HW Noise Suppression (NS) are fulfilled.
  public static boolean isNoiseSuppressorSupported() {
    return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_NS, AOSP_NOISE_SUPPRESSOR);
  }

  public WebRtcAudioEffects() {
    Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
  }

  // Call this method to enable or disable the platform AEC. It modifies
  // `shouldEnableAec` which is used in enable() where the actual state
  // of the AEC effect is modified. Returns true if HW AEC is supported and
  // false otherwise.
  public boolean setAEC(boolean enable) {
    Logging.d(TAG, "setAEC(" + enable + ")");
    if (!isAcousticEchoCancelerSupported()) {
      Logging.w(TAG, "Platform AEC is not supported");
      shouldEnableAec = false;
      return false;
    }
    // aec != null means enable() has already attached the effect to a session.
    if (aec != null && (enable != shouldEnableAec)) {
      Logging.e(TAG, "Platform AEC state can't be modified while recording");
      return false;
    }
    shouldEnableAec = enable;
    return true;
  }

  // Call this method to enable or disable the platform NS. It modifies
  // `shouldEnableNs` which is used in enable() where the actual state
  // of the NS effect is modified. Returns true if HW NS is supported and
  // false otherwise.
  public boolean setNS(boolean enable) {
    Logging.d(TAG, "setNS(" + enable + ")");
    if (!isNoiseSuppressorSupported()) {
      Logging.w(TAG, "Platform NS is not supported");
      shouldEnableNs = false;
      return false;
    }
    // ns != null means enable() has already attached the effect to a session.
    if (ns != null && (enable != shouldEnableNs)) {
      Logging.e(TAG, "Platform NS state can't be modified while recording");
      return false;
    }
    shouldEnableNs = enable;
    return true;
  }

  // Toggles an existing NoiseSuppressor to be enabled or disabled.
  // Returns true if the toggling was successful, otherwise false is returned (this is also the case
  // if no NoiseSuppressor was present).
  public boolean toggleNS(boolean enable) {
    if (ns == null) {
      Logging.e(TAG, "Attempting to enable or disable nonexistent NoiseSuppressor.");
      return false;
    }
    Logging.d(TAG, "toggleNS(" + enable + ")");
    boolean toggling_succeeded = ns.setEnabled(enable) == AudioEffect.SUCCESS;
    return toggling_succeeded;
  }

  public void enable(int audioSession) {
    Logging.d(TAG, "enable(audioSession=" + audioSession + ")");
    // enable() must not be called twice without an intervening release().
    assertTrue(aec == null);
    assertTrue(ns == null);

    if (DEBUG) {
      // Add logging of supported effects but filter out "VoIP effects", i.e.,
      // AEC, AEC and NS. Avoid calling AudioEffect.queryEffects() unless the
      // DEBUG flag is set since we have seen crashes in this API.
      for (Descriptor d : AudioEffect.queryEffects()) {
        if (effectTypeIsVoIP(d.type)) {
          Logging.d(TAG,
              "name: " + d.name + ", "
                  + "mode: " + d.connectMode + ", "
                  + "implementor: " + d.implementor + ", "
                  + "UUID: " + d.uuid);
        }
      }
    }

    if (isAcousticEchoCancelerSupported()) {
      // Create an AcousticEchoCanceler and attach it to the AudioRecord on
      // the specified audio session.
      aec = AcousticEchoCanceler.create(audioSession);
      if (aec != null) {
        boolean enabled = aec.getEnabled();
        boolean enable = shouldEnableAec && isAcousticEchoCancelerSupported();
        if (aec.setEnabled(enable) != AudioEffect.SUCCESS) {
          Logging.e(TAG, "Failed to set the AcousticEchoCanceler state");
        }
        Logging.d(TAG,
            "AcousticEchoCanceler: was " + (enabled ? "enabled" : "disabled") + ", enable: "
                + enable + ", is now: " + (aec.getEnabled() ? "enabled" : "disabled"));
      } else {
        Logging.e(TAG, "Failed to create the AcousticEchoCanceler instance");
      }
    }

    if (isNoiseSuppressorSupported()) {
      // Create an NoiseSuppressor and attach it to the AudioRecord on the
      // specified audio session.
      ns = NoiseSuppressor.create(audioSession);
      if (ns != null) {
        boolean enabled = ns.getEnabled();
        boolean enable = shouldEnableNs && isNoiseSuppressorSupported();
        if (ns.setEnabled(enable) != AudioEffect.SUCCESS) {
          Logging.e(TAG, "Failed to set the NoiseSuppressor state");
        }
        Logging.d(TAG,
            "NoiseSuppressor: was " + (enabled ? "enabled" : "disabled") + ", enable: " + enable
                + ", is now: " + (ns.getEnabled() ? "enabled" : "disabled"));
      } else {
        Logging.e(TAG, "Failed to create the NoiseSuppressor instance");
      }
    }
  }

  // Releases all native audio effect resources. It is a good practice to
  // release the effect engine when not in use as control can be returned
  // to other applications or the native resources released.
  public void release() {
    Logging.d(TAG, "release");
    if (aec != null) {
      aec.release();
      aec = null;
    }
    if (ns != null) {
      ns.release();
      ns = null;
    }
  }

  // Returns true for effect types in `type` that are of "VoIP" types:
  // Acoustic Echo Canceler (AEC) or Automatic Gain Control (AGC) or
  // Noise Suppressor (NS). Note that, an extra check for support is needed
  // in each comparison since some devices includes effects in the
  // AudioEffect.Descriptor array that are actually not available on the device.
  // As an example: Samsung Galaxy S6 includes an AGC in the descriptor but
  // AutomaticGainControl.isAvailable() returns false.
  private boolean effectTypeIsVoIP(UUID type) {
    return (AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported())
        || (AudioEffect.EFFECT_TYPE_NS.equals(type) && isNoiseSuppressorSupported());
  }

  // Helper method which throws an exception when an assertion has failed.
  private static void assertTrue(boolean condition) {
    if (!condition) {
      throw new AssertionError("Expected condition to be true");
    }
  }

  // Returns the cached copy of the audio effects array, if available, or
  // queries the operating system for the list of effects.
  private static @Nullable Descriptor[] getAvailableEffects() {
    if (cachedEffects != null) {
      return cachedEffects;
    }
    // The caching is best effort only - if this method is called from several
    // threads in parallel, they may end up doing the underlying OS call
    // multiple times. It's normally only called on one thread so there's no
    // real need to optimize for the multiple threads case.
    cachedEffects = AudioEffect.queryEffects();
    return cachedEffects;
  }

  // Returns true if an effect of the specified type is available. Functionally
  // equivalent to (NoiseSuppressor|AutomaticGainControl|...).isAvailable(), but
  // faster as it avoids the expensive OS call to enumerate effects.
  private static boolean isEffectTypeAvailable(UUID effectType, UUID blockListedUuid) {
    Descriptor[] effects = getAvailableEffects();
    if (effects == null) {
      return false;
    }
    for (Descriptor d : effects) {
      if (d.type.equals(effectType)) {
        // The effect is reported, but reject it if it is the block-listed AOSP software fallback.
        return !d.uuid.equals(blockListedUuid);
      }
    }
    return false;
  }
}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/WebRtcAudioManager.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/WebRtcAudioManager.java
new file mode 100644
index 00000000..506e33ff
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/WebRtcAudioManager.java
@@ -0,0 +1,124 @@
/*
 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.audio;

import android.content.Context;
import android.content.pm.PackageManager;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.os.Build;
import org.webrtc.Logging;
import org.webrtc.CalledByNative;

/**
 * This class contains static functions to query sample rate and input/output audio buffer sizes.
 */
class WebRtcAudioManager {
  private static final String TAG = "WebRtcAudioManagerExternal";

  private static final int DEFAULT_SAMPLE_RATE_HZ = 16000;

  // Default audio data format is PCM 16 bit per sample.
  // Guaranteed to be supported by all devices.
  private static final int BITS_PER_SAMPLE = 16;

  private static final int DEFAULT_FRAME_PER_BUFFER = 256;

  @CalledByNative
  static AudioManager getAudioManager(Context context) {
    return (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
  }

  @CalledByNative
  static int getOutputBufferSize(
      Context context, AudioManager audioManager, int sampleRate, int numberOfOutputChannels) {
    return isLowLatencyOutputSupported(context)
        ? getLowLatencyFramesPerBuffer(audioManager)
        : getMinOutputFrameSize(sampleRate, numberOfOutputChannels);
  }

  @CalledByNative
  static int getInputBufferSize(
      Context context, AudioManager audioManager, int sampleRate, int numberOfInputChannels) {
    return isLowLatencyInputSupported(context)
        ? getLowLatencyFramesPerBuffer(audioManager)
        : getMinInputFrameSize(sampleRate, numberOfInputChannels);
  }

  @CalledByNative
  static boolean isLowLatencyOutputSupported(Context context) {
    return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);
  }

  @CalledByNative
  static boolean isLowLatencyInputSupported(Context context) {
    // TODO(henrika): investigate if some sort of device list is needed here
    // as well. The NDK doc states that: "As of API level 21, lower latency
    // audio input is supported on select devices. To take advantage of this
    // feature, first confirm that lower latency output is available".
    return isLowLatencyOutputSupported(context);
  }

  /**
   * Returns the native input/output sample rate for this device's output stream.
   */
  @CalledByNative
  static int getSampleRate(AudioManager audioManager) {
    // Override this if we're running on an old emulator image which only
    // supports 8 kHz and doesn't support PROPERTY_OUTPUT_SAMPLE_RATE.
    if (WebRtcAudioUtils.runningOnEmulator()) {
      Logging.d(TAG, "Running emulator, overriding sample rate to 8 kHz.");
      return 8000;
    }
    // Deliver best possible estimate based on default Android AudioManager APIs.
    final int sampleRateHz = getSampleRateForApiLevel(audioManager);
    Logging.d(TAG, "Sample rate is set to " + sampleRateHz + " Hz");
    return sampleRateHz;
  }

  private static int getSampleRateForApiLevel(AudioManager audioManager) {
    String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
    return (sampleRateString == null) ? DEFAULT_SAMPLE_RATE_HZ : Integer.parseInt(sampleRateString);
  }

  // Returns the native output buffer size for low-latency output streams.
  private static int getLowLatencyFramesPerBuffer(AudioManager audioManager) {
    String framesPerBuffer =
        audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
    return framesPerBuffer == null ? DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer);
  }

  // Returns the minimum output buffer size for Java based audio (AudioTrack).
  // This size can also be used for OpenSL ES implementations on devices that
  // lack support of low-latency output.
  private static int getMinOutputFrameSize(int sampleRateInHz, int numChannels) {
    final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
    final int channelConfig =
        (numChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO);
    return AudioTrack.getMinBufferSize(
               sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
        / bytesPerFrame;
  }

  // Returns the minimum input buffer size for Java based audio (AudioRecord).
  // This size can also be used for OpenSL ES implementations on devices that
  // lack support of low-latency input.
  private static int getMinInputFrameSize(int sampleRateInHz, int numChannels) {
    final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
    final int channelConfig =
        (numChannels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO);
    return AudioRecord.getMinBufferSize(
               sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
        / bytesPerFrame;
  }
}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java
new file mode 100644
index 00000000..fe2db0c2
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java
@@ -0,0 +1,755 @@
/*
 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.audio;

import android.annotation.TargetApi;
import android.content.Context;
import android.media.AudioDeviceInfo;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioRecordingConfiguration;
import android.media.AudioTimestamp;
import android.media.MediaRecorder.AudioSource;
import android.os.Build;
import android.os.Process;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import java.lang.System;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.webrtc.CalledByNative;
+import org.webrtc.Logging; +import org.webrtc.ThreadUtils; +import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordErrorCallback; +import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStartErrorCode; +import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStateCallback; +import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback; + +class WebRtcAudioRecord { + private static final String TAG = "WebRtcAudioRecordExternal"; + + // Requested size of each recorded buffer provided to the client. + private static final int CALLBACK_BUFFER_SIZE_MS = 10; + + // Average number of callbacks per second. + private static final int BUFFERS_PER_SECOND = 1000 / CALLBACK_BUFFER_SIZE_MS; + + // We ask for a native buffer size of BUFFER_SIZE_FACTOR * (minimum required + // buffer size). The extra space is allocated to guard against glitches under + // high load. + private static final int BUFFER_SIZE_FACTOR = 2; + + // The AudioRecordJavaThread is allowed to wait for successful call to join() + // but the wait times out afther this amount of time. + private static final long AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS = 2000; + + public static final int DEFAULT_AUDIO_SOURCE = AudioSource.VOICE_COMMUNICATION; + + // Default audio data format is PCM 16 bit per sample. + // Guaranteed to be supported by all devices. + public static final int DEFAULT_AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT; + + // Indicates AudioRecord has started recording audio. + private static final int AUDIO_RECORD_START = 0; + + // Indicates AudioRecord has stopped recording audio. + private static final int AUDIO_RECORD_STOP = 1; + + // Time to wait before checking recording status after start has been called. Tests have + // shown that the result can sometimes be invalid (our own status might be missing) if we check + // directly after start. 
+ private static final int CHECK_REC_STATUS_DELAY_MS = 100; + + private final Context context; + private final AudioManager audioManager; + private final int audioSource; + private final int audioFormat; + + private long nativeAudioRecord; + + private final WebRtcAudioEffects effects = new WebRtcAudioEffects(); + + private @Nullable ByteBuffer byteBuffer; + + private @Nullable AudioRecord audioRecord; + private @Nullable AudioRecordThread audioThread; + private @Nullable AudioDeviceInfo preferredDevice; + + private final ScheduledExecutorService executor; + private @Nullable ScheduledFuture future; + + private volatile boolean microphoneMute; + private final AtomicReference audioSourceMatchesRecordingSessionRef = + new AtomicReference<>(); + private byte[] emptyBytes; + + private final @Nullable AudioRecordErrorCallback errorCallback; + private final @Nullable AudioRecordStateCallback stateCallback; + private final @Nullable SamplesReadyCallback audioSamplesReadyCallback; + private final boolean isAcousticEchoCancelerSupported; + private final boolean isNoiseSuppressorSupported; + + /** + * Audio thread which keeps calling ByteBuffer.read() waiting for audio + * to be recorded. Feeds recorded data to the native counterpart as a + * periodic sequence of callbacks using DataIsRecorded(). + * This thread uses a Process.THREAD_PRIORITY_URGENT_AUDIO priority. + */ + private class AudioRecordThread extends Thread { + private volatile boolean keepAlive = true; + + public AudioRecordThread(String name) { + super(name); + } + + @Override + public void run() { + Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO); + Logging.d(TAG, "AudioRecordThread" + WebRtcAudioUtils.getThreadInfo()); + assertTrue(audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING); + + // Audio recording has started and the client is informed about it. 
+ doAudioRecordStateCallback(AUDIO_RECORD_START); + + long lastTime = System.nanoTime(); + AudioTimestamp audioTimestamp = null; + if (Build.VERSION.SDK_INT >= 24) { + audioTimestamp = new AudioTimestamp(); + } + while (keepAlive) { + int bytesRead = audioRecord.read(byteBuffer, byteBuffer.capacity()); + if (bytesRead == byteBuffer.capacity()) { + if (microphoneMute) { + byteBuffer.clear(); + byteBuffer.put(emptyBytes); + } + // It's possible we've been shut down during the read, and stopRecording() tried and + // failed to join this thread. To be a bit safer, try to avoid calling any native methods + // in case they've been unregistered after stopRecording() returned. + if (keepAlive) { + long captureTimeNs = 0; + if (Build.VERSION.SDK_INT >= 24) { + if (audioRecord.getTimestamp(audioTimestamp, AudioTimestamp.TIMEBASE_MONOTONIC) + == AudioRecord.SUCCESS) { + captureTimeNs = audioTimestamp.nanoTime; + } + } + nativeDataIsRecorded(nativeAudioRecord, bytesRead, captureTimeNs); + } + if (audioSamplesReadyCallback != null) { + // Copy the entire byte buffer array. The start of the byteBuffer is not necessarily + // at index 0. 
+ byte[] data = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.arrayOffset(), + byteBuffer.capacity() + byteBuffer.arrayOffset()); + audioSamplesReadyCallback.onWebRtcAudioRecordSamplesReady( + new JavaAudioDeviceModule.AudioSamples(audioRecord.getAudioFormat(), + audioRecord.getChannelCount(), audioRecord.getSampleRate(), data)); + } + } else { + String errorMessage = "AudioRecord.read failed: " + bytesRead; + Logging.e(TAG, errorMessage); + if (bytesRead == AudioRecord.ERROR_INVALID_OPERATION) { + keepAlive = false; + reportWebRtcAudioRecordError(errorMessage); + } + } + } + + try { + if (audioRecord != null) { + audioRecord.stop(); + doAudioRecordStateCallback(AUDIO_RECORD_STOP); + } + } catch (IllegalStateException e) { + Logging.e(TAG, "AudioRecord.stop failed: " + e.getMessage()); + } + } + + // Stops the inner thread loop and also calls AudioRecord.stop(). + // Does not block the calling thread. + public void stopThread() { + Logging.d(TAG, "stopThread"); + keepAlive = false; + } + } + + @CalledByNative + WebRtcAudioRecord(Context context, AudioManager audioManager) { + this(context, newDefaultScheduler() /* scheduler */, audioManager, DEFAULT_AUDIO_SOURCE, + DEFAULT_AUDIO_FORMAT, null /* errorCallback */, null /* stateCallback */, + null /* audioSamplesReadyCallback */, WebRtcAudioEffects.isAcousticEchoCancelerSupported(), + WebRtcAudioEffects.isNoiseSuppressorSupported()); + } + + public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler, + AudioManager audioManager, int audioSource, int audioFormat, + @Nullable AudioRecordErrorCallback errorCallback, + @Nullable AudioRecordStateCallback stateCallback, + @Nullable SamplesReadyCallback audioSamplesReadyCallback, + boolean isAcousticEchoCancelerSupported, boolean isNoiseSuppressorSupported) { + if (isAcousticEchoCancelerSupported && !WebRtcAudioEffects.isAcousticEchoCancelerSupported()) { + throw new IllegalArgumentException("HW AEC not supported"); + } + if (isNoiseSuppressorSupported 
&& !WebRtcAudioEffects.isNoiseSuppressorSupported()) { + throw new IllegalArgumentException("HW NS not supported"); + } + this.context = context; + this.executor = scheduler; + this.audioManager = audioManager; + this.audioSource = audioSource; + this.audioFormat = audioFormat; + this.errorCallback = errorCallback; + this.stateCallback = stateCallback; + this.audioSamplesReadyCallback = audioSamplesReadyCallback; + this.isAcousticEchoCancelerSupported = isAcousticEchoCancelerSupported; + this.isNoiseSuppressorSupported = isNoiseSuppressorSupported; + Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo()); + } + + @CalledByNative + public void setNativeAudioRecord(long nativeAudioRecord) { + this.nativeAudioRecord = nativeAudioRecord; + } + + @CalledByNative + boolean isAcousticEchoCancelerSupported() { + return isAcousticEchoCancelerSupported; + } + + @CalledByNative + boolean isNoiseSuppressorSupported() { + return isNoiseSuppressorSupported; + } + + // Returns true if a valid call to verifyAudioConfig() has been done. Should always be + // checked before using the returned value of isAudioSourceMatchingRecordingSession(). + @CalledByNative + boolean isAudioConfigVerified() { + return audioSourceMatchesRecordingSessionRef.get() != null; + } + + // Returns true if verifyAudioConfig() succeeds. This value is set after a specific delay when + // startRecording() has been called. Hence, should preferably be called in combination with + // stopRecording() to ensure that it has been set properly. `isAudioConfigVerified` is + // enabled in WebRtcAudioRecord to ensure that the returned value is valid. 
+ @CalledByNative + boolean isAudioSourceMatchingRecordingSession() { + Boolean audioSourceMatchesRecordingSession = audioSourceMatchesRecordingSessionRef.get(); + if (audioSourceMatchesRecordingSession == null) { + Logging.w(TAG, "Audio configuration has not yet been verified"); + return false; + } + return audioSourceMatchesRecordingSession; + } + + @CalledByNative + private boolean enableBuiltInAEC(boolean enable) { + Logging.d(TAG, "enableBuiltInAEC(" + enable + ")"); + return effects.setAEC(enable); + } + + @CalledByNative + private boolean enableBuiltInNS(boolean enable) { + Logging.d(TAG, "enableBuiltInNS(" + enable + ")"); + return effects.setNS(enable); + } + + @CalledByNative + private int initRecording(int sampleRate, int channels) { + Logging.d(TAG, "initRecording(sampleRate=" + sampleRate + ", channels=" + channels + ")"); + if (audioRecord != null) { + reportWebRtcAudioRecordInitError("InitRecording called twice without StopRecording."); + return -1; + } + final int bytesPerFrame = channels * getBytesPerSample(audioFormat); + final int framesPerBuffer = sampleRate / BUFFERS_PER_SECOND; + byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * framesPerBuffer); + if (!(byteBuffer.hasArray())) { + reportWebRtcAudioRecordInitError("ByteBuffer does not have backing array."); + return -1; + } + Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity()); + emptyBytes = new byte[byteBuffer.capacity()]; + // Rather than passing the ByteBuffer with every callback (requiring + // the potentially expensive GetDirectBufferAddress) we simply have the + // the native class cache the address to the memory once. + nativeCacheDirectBufferAddress(nativeAudioRecord, byteBuffer); + + // Get the minimum buffer size required for the successful creation of + // an AudioRecord object, in byte units. + // Note that this size doesn't guarantee a smooth recording under load. 
+ final int channelConfig = channelCountToConfiguration(channels); + int minBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat); + if (minBufferSize == AudioRecord.ERROR || minBufferSize == AudioRecord.ERROR_BAD_VALUE) { + reportWebRtcAudioRecordInitError("AudioRecord.getMinBufferSize failed: " + minBufferSize); + return -1; + } + Logging.d(TAG, "AudioRecord.getMinBufferSize: " + minBufferSize); + + // Use a larger buffer size than the minimum required when creating the + // AudioRecord instance to ensure smooth recording under load. It has been + // verified that it does not increase the actual recording latency. + int bufferSizeInBytes = Math.max(BUFFER_SIZE_FACTOR * minBufferSize, byteBuffer.capacity()); + Logging.d(TAG, "bufferSizeInBytes: " + bufferSizeInBytes); + try { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + // Use the AudioRecord.Builder class on Android M (23) and above. + // Throws IllegalArgumentException. + audioRecord = createAudioRecordOnMOrHigher( + audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes); + audioSourceMatchesRecordingSessionRef.set(null); + if (preferredDevice != null) { + setPreferredDevice(preferredDevice); + } + } else { + // Use the old AudioRecord constructor for API levels below 23. + // Throws UnsupportedOperationException. + audioRecord = createAudioRecordOnLowerThanM( + audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes); + audioSourceMatchesRecordingSessionRef.set(null); + } + } catch (IllegalArgumentException | UnsupportedOperationException e) { + // Report of exception message is sufficient. Example: "Cannot create AudioRecord". 
+ reportWebRtcAudioRecordInitError(e.getMessage()); + releaseAudioResources(); + return -1; + } + if (audioRecord == null || audioRecord.getState() != AudioRecord.STATE_INITIALIZED) { + reportWebRtcAudioRecordInitError("Creation or initialization of audio recorder failed."); + releaseAudioResources(); + return -1; + } + effects.enable(audioRecord.getAudioSessionId()); + logMainParameters(); + logMainParametersExtended(); + // Check number of active recording sessions. Should be zero but we have seen conflict cases + // and adding a log for it can help us figure out details about conflicting sessions. + final int numActiveRecordingSessions = + logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */); + if (numActiveRecordingSessions != 0) { + // Log the conflict as a warning since initialization did in fact succeed. Most likely, the + // upcoming call to startRecording() will fail under these conditions. + Logging.w( + TAG, "Potential microphone conflict. Active sessions: " + numActiveRecordingSessions); + } + return framesPerBuffer; + } + + /** + * Prefer a specific {@link AudioDeviceInfo} device for recording. Calling after recording starts + * is valid but may cause a temporary interruption if the audio routing changes. + */ + @RequiresApi(Build.VERSION_CODES.M) + @TargetApi(Build.VERSION_CODES.M) + void setPreferredDevice(@Nullable AudioDeviceInfo preferredDevice) { + Logging.d( + TAG, "setPreferredDevice " + (preferredDevice != null ? 
preferredDevice.getId() : null)); + this.preferredDevice = preferredDevice; + if (audioRecord != null) { + if (!audioRecord.setPreferredDevice(preferredDevice)) { + Logging.e(TAG, "setPreferredDevice failed"); + } + } + } + + @CalledByNative + private boolean startRecording() { + Logging.d(TAG, "startRecording"); + assertTrue(audioRecord != null); + assertTrue(audioThread == null); + try { + audioRecord.startRecording(); + } catch (IllegalStateException e) { + reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION, + "AudioRecord.startRecording failed: " + e.getMessage()); + return false; + } + if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) { + reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH, + "AudioRecord.startRecording failed - incorrect state: " + + audioRecord.getRecordingState()); + return false; + } + audioThread = new AudioRecordThread("AudioRecordJavaThread"); + audioThread.start(); + scheduleLogRecordingConfigurationsTask(audioRecord); + return true; + } + + @CalledByNative + private boolean stopRecording() { + Logging.d(TAG, "stopRecording"); + assertTrue(audioThread != null); + if (future != null) { + if (!future.isDone()) { + // Might be needed if the client calls startRecording(), stopRecording() back-to-back. 
+ future.cancel(true /* mayInterruptIfRunning */); + } + future = null; + } + audioThread.stopThread(); + if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) { + Logging.e(TAG, "Join of AudioRecordJavaThread timed out"); + WebRtcAudioUtils.logAudioState(TAG, context, audioManager); + } + audioThread = null; + effects.release(); + releaseAudioResources(); + return true; + } + + @TargetApi(Build.VERSION_CODES.M) + private static AudioRecord createAudioRecordOnMOrHigher( + int audioSource, int sampleRate, int channelConfig, int audioFormat, int bufferSizeInBytes) { + Logging.d(TAG, "createAudioRecordOnMOrHigher"); + return new AudioRecord.Builder() + .setAudioSource(audioSource) + .setAudioFormat(new AudioFormat.Builder() + .setEncoding(audioFormat) + .setSampleRate(sampleRate) + .setChannelMask(channelConfig) + .build()) + .setBufferSizeInBytes(bufferSizeInBytes) + .build(); + } + + private static AudioRecord createAudioRecordOnLowerThanM( + int audioSource, int sampleRate, int channelConfig, int audioFormat, int bufferSizeInBytes) { + Logging.d(TAG, "createAudioRecordOnLowerThanM"); + return new AudioRecord(audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes); + } + + private void logMainParameters() { + Logging.d(TAG, + "AudioRecord: " + + "session ID: " + audioRecord.getAudioSessionId() + ", " + + "channels: " + audioRecord.getChannelCount() + ", " + + "sample rate: " + audioRecord.getSampleRate()); + } + + @TargetApi(Build.VERSION_CODES.M) + private void logMainParametersExtended() { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + Logging.d(TAG, + "AudioRecord: " + // The frame count of the native AudioRecord buffer. + + "buffer size in frames: " + audioRecord.getBufferSizeInFrames()); + } + } + + @TargetApi(Build.VERSION_CODES.N) + // Checks the number of active recording sessions and logs the states of all active sessions. + // Returns number of active sessions. 
Note that this could occur on arbituary thread. + private int logRecordingConfigurations(AudioRecord audioRecord, boolean verifyAudioConfig) { + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) { + Logging.w(TAG, "AudioManager#getActiveRecordingConfigurations() requires N or higher"); + return 0; + } + if (audioRecord == null) { + return 0; + } + + // Get a list of the currently active audio recording configurations of the device (can be more + // than one). An empty list indicates there is no recording active when queried. + List configs = audioManager.getActiveRecordingConfigurations(); + final int numActiveRecordingSessions = configs.size(); + Logging.d(TAG, "Number of active recording sessions: " + numActiveRecordingSessions); + if (numActiveRecordingSessions > 0) { + logActiveRecordingConfigs(audioRecord.getAudioSessionId(), configs); + if (verifyAudioConfig) { + // Run an extra check to verify that the existing audio source doing the recording (tied + // to the AudioRecord instance) is matching what the audio recording configuration lists + // as its client parameters. If these do not match, recording might work but under invalid + // conditions. + audioSourceMatchesRecordingSessionRef.set( + verifyAudioConfig(audioRecord.getAudioSource(), audioRecord.getAudioSessionId(), + audioRecord.getFormat(), audioRecord.getRoutedDevice(), configs)); + } + } + return numActiveRecordingSessions; + } + + // Helper method which throws an exception when an assertion has failed. + private static void assertTrue(boolean condition) { + if (!condition) { + throw new AssertionError("Expected condition to be true"); + } + } + + private int channelCountToConfiguration(int channels) { + return (channels == 1 ? 
AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO); + } + + private native void nativeCacheDirectBufferAddress( + long nativeAudioRecordJni, ByteBuffer byteBuffer); + private native void nativeDataIsRecorded( + long nativeAudioRecordJni, int bytes, long captureTimestampNs); + + // Sets all recorded samples to zero if `mute` is true, i.e., ensures that + // the microphone is muted. + public void setMicrophoneMute(boolean mute) { + Logging.w(TAG, "setMicrophoneMute(" + mute + ")"); + microphoneMute = mute; + } + + // Sets whether NoiseSuppressor should be enabled or disabled. + // Returns true if the enabling was successful, otherwise false is returned (this is also the case + // if the NoiseSuppressor effect is not supported). + public boolean setNoiseSuppressorEnabled(boolean enabled) { + if (!WebRtcAudioEffects.isNoiseSuppressorSupported()) { + Logging.e(TAG, "Noise suppressor is not supported."); + return false; + } + Logging.w(TAG, "SetNoiseSuppressorEnabled(" + enabled + ")"); + return effects.toggleNS(enabled); + } + + // Releases the native AudioRecord resources. + private void releaseAudioResources() { + Logging.d(TAG, "releaseAudioResources"); + if (audioRecord != null) { + audioRecord.release(); + audioRecord = null; + } + audioSourceMatchesRecordingSessionRef.set(null); + } + + private void reportWebRtcAudioRecordInitError(String errorMessage) { + Logging.e(TAG, "Init recording error: " + errorMessage); + WebRtcAudioUtils.logAudioState(TAG, context, audioManager); + logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */); + if (errorCallback != null) { + errorCallback.onWebRtcAudioRecordInitError(errorMessage); + } + } + + private void reportWebRtcAudioRecordStartError( + AudioRecordStartErrorCode errorCode, String errorMessage) { + Logging.e(TAG, "Start recording error: " + errorCode + ". 
" + errorMessage); + WebRtcAudioUtils.logAudioState(TAG, context, audioManager); + logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */); + if (errorCallback != null) { + errorCallback.onWebRtcAudioRecordStartError(errorCode, errorMessage); + } + } + + private void reportWebRtcAudioRecordError(String errorMessage) { + Logging.e(TAG, "Run-time recording error: " + errorMessage); + WebRtcAudioUtils.logAudioState(TAG, context, audioManager); + if (errorCallback != null) { + errorCallback.onWebRtcAudioRecordError(errorMessage); + } + } + + private void doAudioRecordStateCallback(int audioState) { + Logging.d(TAG, "doAudioRecordStateCallback: " + audioStateToString(audioState)); + if (stateCallback != null) { + if (audioState == WebRtcAudioRecord.AUDIO_RECORD_START) { + stateCallback.onWebRtcAudioRecordStart(); + } else if (audioState == WebRtcAudioRecord.AUDIO_RECORD_STOP) { + stateCallback.onWebRtcAudioRecordStop(); + } else { + Logging.e(TAG, "Invalid audio state"); + } + } + } + + // Reference from Android code, AudioFormat.getBytesPerSample. BitPerSample / 8 + // Default audio data format is PCM 16 bits per sample. + // Guaranteed to be supported by all devices + private static int getBytesPerSample(int audioFormat) { + switch (audioFormat) { + case AudioFormat.ENCODING_PCM_8BIT: + return 1; + case AudioFormat.ENCODING_PCM_16BIT: + case AudioFormat.ENCODING_IEC61937: + case AudioFormat.ENCODING_DEFAULT: + return 2; + case AudioFormat.ENCODING_PCM_FLOAT: + return 4; + case AudioFormat.ENCODING_INVALID: + default: + throw new IllegalArgumentException("Bad audio format " + audioFormat); + } + } + + // Use an ExecutorService to schedule a task after a given delay where the task consists of + // checking (by logging) the current status of active recording sessions. 
+ private void scheduleLogRecordingConfigurationsTask(AudioRecord audioRecord) { + Logging.d(TAG, "scheduleLogRecordingConfigurationsTask"); + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) { + return; + } + + Callable callable = () -> { + if (this.audioRecord == audioRecord) { + logRecordingConfigurations(audioRecord, true /* verifyAudioConfig */); + } else { + Logging.d(TAG, "audio record has changed"); + } + return "Scheduled task is done"; + }; + + if (future != null && !future.isDone()) { + future.cancel(true /* mayInterruptIfRunning */); + } + // Schedule call to logRecordingConfigurations() from executor thread after fixed delay. + future = executor.schedule(callable, CHECK_REC_STATUS_DELAY_MS, TimeUnit.MILLISECONDS); + }; + + @TargetApi(Build.VERSION_CODES.N) + private static boolean logActiveRecordingConfigs( + int session, List configs) { + assertTrue(!configs.isEmpty()); + final Iterator it = configs.iterator(); + Logging.d(TAG, "AudioRecordingConfigurations: "); + while (it.hasNext()) { + final AudioRecordingConfiguration config = it.next(); + StringBuilder conf = new StringBuilder(); + // The audio source selected by the client. + final int audioSource = config.getClientAudioSource(); + conf.append(" client audio source=") + .append(WebRtcAudioUtils.audioSourceToString(audioSource)) + .append(", client session id=") + .append(config.getClientAudioSessionId()) + // Compare with our own id (based on AudioRecord#getAudioSessionId()). + .append(" (") + .append(session) + .append(")") + .append("\n"); + // Audio format at which audio is recorded on this Android device. Note that it may differ + // from the client application recording format (see getClientFormat()). 
+ AudioFormat format = config.getFormat(); + conf.append(" Device AudioFormat: ") + .append("channel count=") + .append(format.getChannelCount()) + .append(", channel index mask=") + .append(format.getChannelIndexMask()) + // Only AudioFormat#CHANNEL_IN_MONO is guaranteed to work on all devices. + .append(", channel mask=") + .append(WebRtcAudioUtils.channelMaskToString(format.getChannelMask())) + .append(", encoding=") + .append(WebRtcAudioUtils.audioEncodingToString(format.getEncoding())) + .append(", sample rate=") + .append(format.getSampleRate()) + .append("\n"); + // Audio format at which the client application is recording audio. + format = config.getClientFormat(); + conf.append(" Client AudioFormat: ") + .append("channel count=") + .append(format.getChannelCount()) + .append(", channel index mask=") + .append(format.getChannelIndexMask()) + // Only AudioFormat#CHANNEL_IN_MONO is guaranteed to work on all devices. + .append(", channel mask=") + .append(WebRtcAudioUtils.channelMaskToString(format.getChannelMask())) + .append(", encoding=") + .append(WebRtcAudioUtils.audioEncodingToString(format.getEncoding())) + .append(", sample rate=") + .append(format.getSampleRate()) + .append("\n"); + // Audio input device used for this recording session. + final AudioDeviceInfo device = config.getAudioDevice(); + if (device != null) { + assertTrue(device.isSource()); + conf.append(" AudioDevice: ") + .append("type=") + .append(WebRtcAudioUtils.deviceTypeToString(device.getType())) + .append(", id=") + .append(device.getId()); + } + Logging.d(TAG, conf.toString()); + } + return true; + } + + // Verify that the client audio configuration (device and format) matches the requested + // configuration (same as AudioRecord's). 
+ @TargetApi(Build.VERSION_CODES.N) + private static boolean verifyAudioConfig(int source, int session, AudioFormat format, + AudioDeviceInfo device, List configs) { + assertTrue(!configs.isEmpty()); + final Iterator it = configs.iterator(); + while (it.hasNext()) { + final AudioRecordingConfiguration config = it.next(); + final AudioDeviceInfo configDevice = config.getAudioDevice(); + if (configDevice == null) { + continue; + } + if ((config.getClientAudioSource() == source) + && (config.getClientAudioSessionId() == session) + // Check the client format (should match the format of the AudioRecord instance). + && (config.getClientFormat().getEncoding() == format.getEncoding()) + && (config.getClientFormat().getSampleRate() == format.getSampleRate()) + && (config.getClientFormat().getChannelMask() == format.getChannelMask()) + && (config.getClientFormat().getChannelIndexMask() == format.getChannelIndexMask()) + // Ensure that the device format is properly configured. + && (config.getFormat().getEncoding() != AudioFormat.ENCODING_INVALID) + && (config.getFormat().getSampleRate() > 0) + // For the channel mask, either the position or index-based value must be valid. + && ((config.getFormat().getChannelMask() != AudioFormat.CHANNEL_INVALID) + || (config.getFormat().getChannelIndexMask() != AudioFormat.CHANNEL_INVALID)) + && checkDeviceMatch(configDevice, device)) { + Logging.d(TAG, "verifyAudioConfig: PASS"); + return true; + } + } + Logging.e(TAG, "verifyAudioConfig: FAILED"); + return false; + } + + @TargetApi(Build.VERSION_CODES.N) + // Returns true if device A parameters matches those of device B. + // TODO(henrika): can be improved by adding AudioDeviceInfo#getAddress() but it requires API 29. 
+ private static boolean checkDeviceMatch(AudioDeviceInfo devA, AudioDeviceInfo devB) { + return ((devA.getId() == devB.getId() && (devA.getType() == devB.getType()))); + } + + private static String audioStateToString(int state) { + switch (state) { + case WebRtcAudioRecord.AUDIO_RECORD_START: + return "START"; + case WebRtcAudioRecord.AUDIO_RECORD_STOP: + return "STOP"; + default: + return "INVALID"; + } + } + + private static final AtomicInteger nextSchedulerId = new AtomicInteger(0); + + static ScheduledExecutorService newDefaultScheduler() { + AtomicInteger nextThreadId = new AtomicInteger(0); + return Executors.newScheduledThreadPool(0, new ThreadFactory() { + /** + * Constructs a new {@code Thread} + */ + @Override + public Thread newThread(Runnable r) { + Thread thread = Executors.defaultThreadFactory().newThread(r); + thread.setName(String.format("WebRtcAudioRecordScheduler-%s-%s", + nextSchedulerId.getAndIncrement(), nextThreadId.getAndIncrement())); + return thread; + } + }); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/WebRtcAudioTrack.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/WebRtcAudioTrack.java new file mode 100644 index 00000000..2b34e340 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/WebRtcAudioTrack.java @@ -0,0 +1,585 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc.audio; + +import android.annotation.TargetApi; +import android.content.Context; +import android.media.AudioAttributes; +import android.media.AudioFormat; +import android.media.AudioManager; +import android.media.AudioTrack; +import android.os.Build; +import android.os.Process; +import androidx.annotation.Nullable; +import java.nio.ByteBuffer; +import org.webrtc.CalledByNative; +import org.webrtc.Logging; +import org.webrtc.ThreadUtils; +import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback; +import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStartErrorCode; +import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStateCallback; +import org.webrtc.audio.LowLatencyAudioBufferManager; + +class WebRtcAudioTrack { + private static final String TAG = "WebRtcAudioTrackExternal"; + + // Default audio data format is PCM 16 bit per sample. + // Guaranteed to be supported by all devices. + private static final int BITS_PER_SAMPLE = 16; + + // Requested size of each recorded buffer provided to the client. + private static final int CALLBACK_BUFFER_SIZE_MS = 10; + + // Average number of callbacks per second. + private static final int BUFFERS_PER_SECOND = 1000 / CALLBACK_BUFFER_SIZE_MS; + + // The AudioTrackThread is allowed to wait for successful call to join() + // but the wait times out afther this amount of time. + private static final long AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS = 2000; + + // By default, WebRTC creates audio tracks with a usage attribute + // corresponding to voice communications, such as telephony or VoIP. + private static final int DEFAULT_USAGE = AudioAttributes.USAGE_VOICE_COMMUNICATION; + + // Indicates the AudioTrack has started playing audio. + private static final int AUDIO_TRACK_START = 0; + + // Indicates the AudioTrack has stopped playing audio. 
+ private static final int AUDIO_TRACK_STOP = 1; + + private long nativeAudioTrack; + private final Context context; + private final AudioManager audioManager; + private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker(); + + private ByteBuffer byteBuffer; + + private @Nullable final AudioAttributes audioAttributes; + private @Nullable AudioTrack audioTrack; + private @Nullable AudioTrackThread audioThread; + private final VolumeLogger volumeLogger; + + // Samples to be played are replaced by zeros if `speakerMute` is set to true. + // Can be used to ensure that the speaker is fully muted. + private volatile boolean speakerMute; + private byte[] emptyBytes; + private boolean useLowLatency; + private int initialBufferSizeInFrames; + + private final @Nullable AudioTrackErrorCallback errorCallback; + private final @Nullable AudioTrackStateCallback stateCallback; + + /** + * Audio thread which keeps calling AudioTrack.write() to stream audio. + * Data is periodically acquired from the native WebRTC layer using the + * nativeGetPlayoutData callback function. + * This thread uses a Process.THREAD_PRIORITY_URGENT_AUDIO priority. + */ + private class AudioTrackThread extends Thread { + private volatile boolean keepAlive = true; + private LowLatencyAudioBufferManager bufferManager; + + public AudioTrackThread(String name) { + super(name); + bufferManager = new LowLatencyAudioBufferManager(); + } + + @Override + public void run() { + Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO); + Logging.d(TAG, "AudioTrackThread" + WebRtcAudioUtils.getThreadInfo()); + assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING); + + // Audio playout has started and the client is informed about it. + doAudioTrackStateCallback(AUDIO_TRACK_START); + + // Fixed size in bytes of each 10ms block of audio data that we ask for + // using callbacks to the native WebRTC client. 
+ final int sizeInBytes = byteBuffer.capacity(); + + while (keepAlive) { + // Get 10ms of PCM data from the native WebRTC client. Audio data is + // written into the common ByteBuffer using the address that was + // cached at construction. + nativeGetPlayoutData(nativeAudioTrack, sizeInBytes); + // Write data until all data has been written to the audio sink. + // Upon return, the buffer position will have been advanced to reflect + // the amount of data that was successfully written to the AudioTrack. + assertTrue(sizeInBytes <= byteBuffer.remaining()); + if (speakerMute) { + byteBuffer.clear(); + byteBuffer.put(emptyBytes); + byteBuffer.position(0); + } + int bytesWritten = audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING); + if (bytesWritten != sizeInBytes) { + Logging.e(TAG, "AudioTrack.write played invalid number of bytes: " + bytesWritten); + // If a write() returns a negative value, an error has occurred. + // Stop playing and report an error in this case. + if (bytesWritten < 0) { + keepAlive = false; + reportWebRtcAudioTrackError("AudioTrack.write failed: " + bytesWritten); + } + } + if (useLowLatency) { + bufferManager.maybeAdjustBufferSize(audioTrack); + } + // The byte buffer must be rewinded since byteBuffer.position() is + // increased at each call to AudioTrack.write(). If we don't do this, + // next call to AudioTrack.write() will fail. + byteBuffer.rewind(); + + // TODO(henrika): it is possible to create a delay estimate here by + // counting number of written frames and subtracting the result from + // audioTrack.getPlaybackHeadPosition(). + } + } + + // Stops the inner thread loop which results in calling AudioTrack.stop(). + // Does not block the calling thread. 
+ public void stopThread() { + Logging.d(TAG, "stopThread"); + keepAlive = false; + } + } + + @CalledByNative + WebRtcAudioTrack(Context context, AudioManager audioManager) { + this(context, audioManager, null /* audioAttributes */, null /* errorCallback */, + null /* stateCallback */, false /* useLowLatency */, true /* enableVolumeLogger */); + } + + WebRtcAudioTrack(Context context, AudioManager audioManager, + @Nullable AudioAttributes audioAttributes, @Nullable AudioTrackErrorCallback errorCallback, + @Nullable AudioTrackStateCallback stateCallback, boolean useLowLatency, + boolean enableVolumeLogger) { + threadChecker.detachThread(); + this.context = context; + this.audioManager = audioManager; + this.audioAttributes = audioAttributes; + this.errorCallback = errorCallback; + this.stateCallback = stateCallback; + this.volumeLogger = enableVolumeLogger ? new VolumeLogger(audioManager) : null; + this.useLowLatency = useLowLatency; + Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo()); + } + + @CalledByNative + public void setNativeAudioTrack(long nativeAudioTrack) { + this.nativeAudioTrack = nativeAudioTrack; + } + + @CalledByNative + private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) { + threadChecker.checkIsOnValidThread(); + Logging.d(TAG, + "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels + + ", bufferSizeFactor=" + bufferSizeFactor + ")"); + final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8); + byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND)); + Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity()); + emptyBytes = new byte[byteBuffer.capacity()]; + // Rather than passing the ByteBuffer with every callback (requiring + // the potentially expensive GetDirectBufferAddress) we simply have the + // the native class cache the address to the memory once. 
+ nativeCacheDirectBufferAddress(nativeAudioTrack, byteBuffer); + + // Get the minimum buffer size required for the successful creation of an + // AudioTrack object to be created in the MODE_STREAM mode. + // Note that this size doesn't guarantee a smooth playback under load. + final int channelConfig = channelCountToConfiguration(channels); + final int minBufferSizeInBytes = (int) (AudioTrack.getMinBufferSize(sampleRate, channelConfig, + AudioFormat.ENCODING_PCM_16BIT) + * bufferSizeFactor); + Logging.d(TAG, "minBufferSizeInBytes: " + minBufferSizeInBytes); + // For the streaming mode, data must be written to the audio sink in + // chunks of size (given by byteBuffer.capacity()) less than or equal + // to the total buffer size `minBufferSizeInBytes`. But, we have seen + // reports of "getMinBufferSize(): error querying hardware". Hence, it + // can happen that `minBufferSizeInBytes` contains an invalid value. + if (minBufferSizeInBytes < byteBuffer.capacity()) { + reportWebRtcAudioTrackInitError("AudioTrack.getMinBufferSize returns an invalid value."); + return -1; + } + + // Don't use low-latency mode when a bufferSizeFactor > 1 is used. When bufferSizeFactor > 1 + // we want to use a larger buffer to prevent underruns. However, low-latency mode would + // decrease the buffer size, which makes the bufferSizeFactor have no effect. + if (bufferSizeFactor > 1.0) { + useLowLatency = false; + } + + // Ensure that prevision audio session was stopped correctly before trying + // to create a new AudioTrack. + if (audioTrack != null) { + reportWebRtcAudioTrackInitError("Conflict with existing AudioTrack."); + return -1; + } + try { + // Create an AudioTrack object and initialize its associated audio buffer. + // The size of this buffer determines how long an AudioTrack can play + // before running out of data. + if (useLowLatency && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { + // On API level 26 or higher, we can use a low latency mode. 
+ audioTrack = createAudioTrackOnOreoOrHigher( + sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes); + } else { + // As we are on API level 21 or higher, it is possible to use a special AudioTrack + // constructor that uses AudioAttributes and AudioFormat as input. It allows us to + // supersede the notion of stream types for defining the behavior of audio playback, + // and to allow certain platforms or routing policies to use this information for more + // refined volume or routing decisions. + audioTrack = createAudioTrackBeforeOreo( + sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes); + } + } catch (IllegalArgumentException e) { + reportWebRtcAudioTrackInitError(e.getMessage()); + releaseAudioResources(); + return -1; + } + + // It can happen that an AudioTrack is created but it was not successfully + // initialized upon creation. Seems to be the case e.g. when the maximum + // number of globally available audio tracks is exceeded. + if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) { + reportWebRtcAudioTrackInitError("Initialization of audio track failed."); + releaseAudioResources(); + return -1; + } + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + initialBufferSizeInFrames = audioTrack.getBufferSizeInFrames(); + } else { + initialBufferSizeInFrames = -1; + } + logMainParameters(); + logMainParametersExtended(); + return minBufferSizeInBytes; + } + + @CalledByNative + private boolean startPlayout() { + threadChecker.checkIsOnValidThread(); + if (volumeLogger != null) { + volumeLogger.start(); + } + Logging.d(TAG, "startPlayout"); + assertTrue(audioTrack != null); + assertTrue(audioThread == null); + + // Starts playing an audio track. 
+ try { + audioTrack.play(); + } catch (IllegalStateException e) { + reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_EXCEPTION, + "AudioTrack.play failed: " + e.getMessage()); + releaseAudioResources(); + return false; + } + if (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) { + reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_STATE_MISMATCH, + "AudioTrack.play failed - incorrect state :" + audioTrack.getPlayState()); + releaseAudioResources(); + return false; + } + + // Create and start new high-priority thread which calls AudioTrack.write() + // and where we also call the native nativeGetPlayoutData() callback to + // request decoded audio from WebRTC. + audioThread = new AudioTrackThread("AudioTrackJavaThread"); + audioThread.start(); + return true; + } + + @CalledByNative + private boolean stopPlayout() { + threadChecker.checkIsOnValidThread(); + if (volumeLogger != null) { + volumeLogger.stop(); + } + Logging.d(TAG, "stopPlayout"); + assertTrue(audioThread != null); + logUnderrunCount(); + audioThread.stopThread(); + + Logging.d(TAG, "Stopping the AudioTrackThread..."); + audioThread.interrupt(); + if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS)) { + Logging.e(TAG, "Join of AudioTrackThread timed out."); + WebRtcAudioUtils.logAudioState(TAG, context, audioManager); + } + Logging.d(TAG, "AudioTrackThread has now been stopped."); + audioThread = null; + if (audioTrack != null) { + Logging.d(TAG, "Calling AudioTrack.stop..."); + try { + audioTrack.stop(); + Logging.d(TAG, "AudioTrack.stop is done."); + doAudioTrackStateCallback(AUDIO_TRACK_STOP); + } catch (IllegalStateException e) { + Logging.e(TAG, "AudioTrack.stop failed: " + e.getMessage()); + } + } + releaseAudioResources(); + return true; + } + + // Get max possible volume index for a phone call audio stream. 
+ @CalledByNative + private int getStreamMaxVolume() { + threadChecker.checkIsOnValidThread(); + Logging.d(TAG, "getStreamMaxVolume"); + return audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL); + } + + // Set current volume level for a phone call audio stream. + @CalledByNative + private boolean setStreamVolume(int volume) { + threadChecker.checkIsOnValidThread(); + Logging.d(TAG, "setStreamVolume(" + volume + ")"); + if (audioManager.isVolumeFixed()) { + Logging.e(TAG, "The device implements a fixed volume policy."); + return false; + } + audioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL, volume, 0); + return true; + } + + /** Get current volume level for a phone call audio stream. */ + @CalledByNative + private int getStreamVolume() { + threadChecker.checkIsOnValidThread(); + Logging.d(TAG, "getStreamVolume"); + return audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL); + } + + @CalledByNative + private int GetPlayoutUnderrunCount() { + if (Build.VERSION.SDK_INT >= 24) { + if (audioTrack != null) { + return audioTrack.getUnderrunCount(); + } else { + return -1; + } + } else { + return -2; + } + } + + private void logMainParameters() { + Logging.d(TAG, + "AudioTrack: " + + "session ID: " + audioTrack.getAudioSessionId() + ", " + + "channels: " + audioTrack.getChannelCount() + ", " + + "sample rate: " + audioTrack.getSampleRate() + + ", " + // Gain (>=1.0) expressed as linear multiplier on sample values. 
+ + "max gain: " + AudioTrack.getMaxVolume()); + } + + private static void logNativeOutputSampleRate(int requestedSampleRateInHz) { + final int nativeOutputSampleRate = + AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_VOICE_CALL); + Logging.d(TAG, "nativeOutputSampleRate: " + nativeOutputSampleRate); + if (requestedSampleRateInHz != nativeOutputSampleRate) { + Logging.w(TAG, "Unable to use fast mode since requested sample rate is not native"); + } + } + + private static AudioAttributes getAudioAttributes(@Nullable AudioAttributes overrideAttributes) { + AudioAttributes.Builder attributesBuilder = + new AudioAttributes.Builder() + .setUsage(DEFAULT_USAGE) + .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH); + + if (overrideAttributes != null) { + if (overrideAttributes.getUsage() != AudioAttributes.USAGE_UNKNOWN) { + attributesBuilder.setUsage(overrideAttributes.getUsage()); + } + if (overrideAttributes.getContentType() != AudioAttributes.CONTENT_TYPE_UNKNOWN) { + attributesBuilder.setContentType(overrideAttributes.getContentType()); + } + + attributesBuilder.setFlags(overrideAttributes.getFlags()); + + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { + attributesBuilder = applyAttributesOnQOrHigher(attributesBuilder, overrideAttributes); + } + } + return attributesBuilder.build(); + } + + // Creates and AudioTrack instance using AudioAttributes and AudioFormat as input. + // It allows certain platforms or routing policies to use this information for more + // refined volume or routing decisions. + private static AudioTrack createAudioTrackBeforeOreo(int sampleRateInHz, int channelConfig, + int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) { + Logging.d(TAG, "createAudioTrackBeforeOreo"); + logNativeOutputSampleRate(sampleRateInHz); + + // Create an audio track where the audio usage is for VoIP and the content type is speech. 
+ return new AudioTrack(getAudioAttributes(overrideAttributes), + new AudioFormat.Builder() + .setEncoding(AudioFormat.ENCODING_PCM_16BIT) + .setSampleRate(sampleRateInHz) + .setChannelMask(channelConfig) + .build(), + bufferSizeInBytes, AudioTrack.MODE_STREAM, AudioManager.AUDIO_SESSION_ID_GENERATE); + } + + // Creates and AudioTrack instance using AudioAttributes and AudioFormat as input. + // Use the low-latency mode to improve audio latency. Note that the low-latency mode may + // prevent effects (such as AEC) from working. Assuming AEC is working, the delay changes + // that happen in low-latency mode during the call will cause the AEC to perform worse. + // The behavior of the low-latency mode may be device dependent, use at your own risk. + @TargetApi(Build.VERSION_CODES.O) + private static AudioTrack createAudioTrackOnOreoOrHigher(int sampleRateInHz, int channelConfig, + int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) { + Logging.d(TAG, "createAudioTrackOnOreoOrHigher"); + logNativeOutputSampleRate(sampleRateInHz); + + // Create an audio track where the audio usage is for VoIP and the content type is speech. 
+ return new AudioTrack.Builder() + .setAudioAttributes(getAudioAttributes(overrideAttributes)) + .setAudioFormat(new AudioFormat.Builder() + .setEncoding(AudioFormat.ENCODING_PCM_16BIT) + .setSampleRate(sampleRateInHz) + .setChannelMask(channelConfig) + .build()) + .setBufferSizeInBytes(bufferSizeInBytes) + .setPerformanceMode(AudioTrack.PERFORMANCE_MODE_LOW_LATENCY) + .setTransferMode(AudioTrack.MODE_STREAM) + .setSessionId(AudioManager.AUDIO_SESSION_ID_GENERATE) + .build(); + } + + @TargetApi(Build.VERSION_CODES.Q) + private static AudioAttributes.Builder applyAttributesOnQOrHigher( + AudioAttributes.Builder builder, AudioAttributes overrideAttributes) { + return builder.setAllowedCapturePolicy(overrideAttributes.getAllowedCapturePolicy()); + } + + private void logBufferSizeInFrames() { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + Logging.d(TAG, + "AudioTrack: " + // The effective size of the AudioTrack buffer that the app writes to. + + "buffer size in frames: " + audioTrack.getBufferSizeInFrames()); + } + } + + @CalledByNative + private int getBufferSizeInFrames() { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + return audioTrack.getBufferSizeInFrames(); + } + return -1; + } + + @CalledByNative + private int getInitialBufferSizeInFrames() { + return initialBufferSizeInFrames; + } + + private void logBufferCapacityInFrames() { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { + Logging.d(TAG, + "AudioTrack: " + // Maximum size of the AudioTrack buffer in frames. + + "buffer capacity in frames: " + audioTrack.getBufferCapacityInFrames()); + } + } + + private void logMainParametersExtended() { + logBufferSizeInFrames(); + logBufferCapacityInFrames(); + } + + // Prints the number of underrun occurrences in the application-level write + // buffer since the AudioTrack was created. An underrun occurs if the app does + // not write audio data quickly enough, causing the buffer to underflow and a + // potential audio glitch. 
+ // TODO(henrika): keep track of this value in the field and possibly add new + // UMA stat if needed. + private void logUnderrunCount() { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { + Logging.d(TAG, "underrun count: " + audioTrack.getUnderrunCount()); + } + } + + // Helper method which throws an exception when an assertion has failed. + private static void assertTrue(boolean condition) { + if (!condition) { + throw new AssertionError("Expected condition to be true"); + } + } + + private int channelCountToConfiguration(int channels) { + return (channels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO); + } + + private static native void nativeCacheDirectBufferAddress( + long nativeAudioTrackJni, ByteBuffer byteBuffer); + private static native void nativeGetPlayoutData(long nativeAudioTrackJni, int bytes); + + // Sets all samples to be played out to zero if `mute` is true, i.e., + // ensures that the speaker is muted. + public void setSpeakerMute(boolean mute) { + Logging.w(TAG, "setSpeakerMute(" + mute + ")"); + speakerMute = mute; + } + + // Releases the native AudioTrack resources. + private void releaseAudioResources() { + Logging.d(TAG, "releaseAudioResources"); + if (audioTrack != null) { + audioTrack.release(); + audioTrack = null; + } + } + + private void reportWebRtcAudioTrackInitError(String errorMessage) { + Logging.e(TAG, "Init playout error: " + errorMessage); + WebRtcAudioUtils.logAudioState(TAG, context, audioManager); + if (errorCallback != null) { + errorCallback.onWebRtcAudioTrackInitError(errorMessage); + } + } + + private void reportWebRtcAudioTrackStartError( + AudioTrackStartErrorCode errorCode, String errorMessage) { + Logging.e(TAG, "Start playout error: " + errorCode + ". 
" + errorMessage); + WebRtcAudioUtils.logAudioState(TAG, context, audioManager); + if (errorCallback != null) { + errorCallback.onWebRtcAudioTrackStartError(errorCode, errorMessage); + } + } + + private void reportWebRtcAudioTrackError(String errorMessage) { + Logging.e(TAG, "Run-time playback error: " + errorMessage); + WebRtcAudioUtils.logAudioState(TAG, context, audioManager); + if (errorCallback != null) { + errorCallback.onWebRtcAudioTrackError(errorMessage); + } + } + + private void doAudioTrackStateCallback(int audioState) { + Logging.d(TAG, "doAudioTrackStateCallback: " + audioState); + if (stateCallback != null) { + if (audioState == WebRtcAudioTrack.AUDIO_TRACK_START) { + stateCallback.onWebRtcAudioTrackStart(); + } else if (audioState == WebRtcAudioTrack.AUDIO_TRACK_STOP) { + stateCallback.onWebRtcAudioTrackStop(); + } else { + Logging.e(TAG, "Invalid audio state"); + } + } + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/WebRtcAudioUtils.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/WebRtcAudioUtils.java new file mode 100644 index 00000000..462f0bcd --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/audio/WebRtcAudioUtils.java @@ -0,0 +1,303 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc.audio; + +import static android.media.AudioManager.MODE_IN_CALL; +import static android.media.AudioManager.MODE_IN_COMMUNICATION; +import static android.media.AudioManager.MODE_NORMAL; +import static android.media.AudioManager.MODE_RINGTONE; + +import android.annotation.TargetApi; +import android.content.Context; +import android.content.pm.PackageManager; +import android.media.AudioDeviceInfo; +import android.media.AudioFormat; +import android.media.AudioManager; +import android.media.MediaRecorder.AudioSource; +import android.os.Build; +import java.lang.Thread; +import java.util.Arrays; +import org.webrtc.Logging; + +public class WebRtcAudioUtils { + private static final String TAG = "WebRtcAudioUtilsExternal"; + + // Helper method for building a string of thread information. + public static String getThreadInfo() { + return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId() + + "]"; + } + + // Returns true if we're running on emulator. + public static boolean runningOnEmulator() { + return Build.HARDWARE.equals("goldfish") && Build.BRAND.startsWith("generic_"); + } + + // Information about the current build, taken from system properties. + static void logDeviceInfo(String tag) { + Logging.d(tag, + "Android SDK: " + Build.VERSION.SDK_INT + ", " + + "Release: " + Build.VERSION.RELEASE + ", " + + "Brand: " + Build.BRAND + ", " + + "Device: " + Build.DEVICE + ", " + + "Id: " + Build.ID + ", " + + "Hardware: " + Build.HARDWARE + ", " + + "Manufacturer: " + Build.MANUFACTURER + ", " + + "Model: " + Build.MODEL + ", " + + "Product: " + Build.PRODUCT); + } + + // Logs information about the current audio state. The idea is to call this + // method when errors are detected to log under what conditions the error + // occurred. Hopefully it will provide clues to what might be the root cause. 
+ static void logAudioState(String tag, Context context, AudioManager audioManager) { + logDeviceInfo(tag); + logAudioStateBasic(tag, context, audioManager); + logAudioStateVolume(tag, audioManager); + logAudioDeviceInfo(tag, audioManager); + } + + // Converts AudioDeviceInfo types to local string representation. + static String deviceTypeToString(int type) { + switch (type) { + case AudioDeviceInfo.TYPE_UNKNOWN: + return "TYPE_UNKNOWN"; + case AudioDeviceInfo.TYPE_BUILTIN_EARPIECE: + return "TYPE_BUILTIN_EARPIECE"; + case AudioDeviceInfo.TYPE_BUILTIN_SPEAKER: + return "TYPE_BUILTIN_SPEAKER"; + case AudioDeviceInfo.TYPE_WIRED_HEADSET: + return "TYPE_WIRED_HEADSET"; + case AudioDeviceInfo.TYPE_WIRED_HEADPHONES: + return "TYPE_WIRED_HEADPHONES"; + case AudioDeviceInfo.TYPE_LINE_ANALOG: + return "TYPE_LINE_ANALOG"; + case AudioDeviceInfo.TYPE_LINE_DIGITAL: + return "TYPE_LINE_DIGITAL"; + case AudioDeviceInfo.TYPE_BLUETOOTH_SCO: + return "TYPE_BLUETOOTH_SCO"; + case AudioDeviceInfo.TYPE_BLUETOOTH_A2DP: + return "TYPE_BLUETOOTH_A2DP"; + case AudioDeviceInfo.TYPE_HDMI: + return "TYPE_HDMI"; + case AudioDeviceInfo.TYPE_HDMI_ARC: + return "TYPE_HDMI_ARC"; + case AudioDeviceInfo.TYPE_USB_DEVICE: + return "TYPE_USB_DEVICE"; + case AudioDeviceInfo.TYPE_USB_ACCESSORY: + return "TYPE_USB_ACCESSORY"; + case AudioDeviceInfo.TYPE_DOCK: + return "TYPE_DOCK"; + case AudioDeviceInfo.TYPE_FM: + return "TYPE_FM"; + case AudioDeviceInfo.TYPE_BUILTIN_MIC: + return "TYPE_BUILTIN_MIC"; + case AudioDeviceInfo.TYPE_FM_TUNER: + return "TYPE_FM_TUNER"; + case AudioDeviceInfo.TYPE_TV_TUNER: + return "TYPE_TV_TUNER"; + case AudioDeviceInfo.TYPE_TELEPHONY: + return "TYPE_TELEPHONY"; + case AudioDeviceInfo.TYPE_AUX_LINE: + return "TYPE_AUX_LINE"; + case AudioDeviceInfo.TYPE_IP: + return "TYPE_IP"; + case AudioDeviceInfo.TYPE_BUS: + return "TYPE_BUS"; + case AudioDeviceInfo.TYPE_USB_HEADSET: + return "TYPE_USB_HEADSET"; + default: + return "TYPE_UNKNOWN"; + } + } + + 
@TargetApi(Build.VERSION_CODES.N) + public static String audioSourceToString(int source) { + // AudioSource.UNPROCESSED requires API level 29. Use local define instead. + final int VOICE_PERFORMANCE = 10; + switch (source) { + case AudioSource.DEFAULT: + return "DEFAULT"; + case AudioSource.MIC: + return "MIC"; + case AudioSource.VOICE_UPLINK: + return "VOICE_UPLINK"; + case AudioSource.VOICE_DOWNLINK: + return "VOICE_DOWNLINK"; + case AudioSource.VOICE_CALL: + return "VOICE_CALL"; + case AudioSource.CAMCORDER: + return "CAMCORDER"; + case AudioSource.VOICE_RECOGNITION: + return "VOICE_RECOGNITION"; + case AudioSource.VOICE_COMMUNICATION: + return "VOICE_COMMUNICATION"; + case AudioSource.UNPROCESSED: + return "UNPROCESSED"; + case VOICE_PERFORMANCE: + return "VOICE_PERFORMANCE"; + default: + return "INVALID"; + } + } + + public static String channelMaskToString(int mask) { + // For input or AudioRecord, the mask should be AudioFormat#CHANNEL_IN_MONO or + // AudioFormat#CHANNEL_IN_STEREO. AudioFormat#CHANNEL_IN_MONO is guaranteed to work on all + // devices. + switch (mask) { + case AudioFormat.CHANNEL_IN_STEREO: + return "IN_STEREO"; + case AudioFormat.CHANNEL_IN_MONO: + return "IN_MONO"; + default: + return "INVALID"; + } + } + + @TargetApi(Build.VERSION_CODES.N) + public static String audioEncodingToString(int enc) { + switch (enc) { + case AudioFormat.ENCODING_INVALID: + return "INVALID"; + case AudioFormat.ENCODING_PCM_16BIT: + return "PCM_16BIT"; + case AudioFormat.ENCODING_PCM_8BIT: + return "PCM_8BIT"; + case AudioFormat.ENCODING_PCM_FLOAT: + return "PCM_FLOAT"; + case AudioFormat.ENCODING_AC3: + return "AC3"; + case AudioFormat.ENCODING_E_AC3: + return "AC3"; + case AudioFormat.ENCODING_DTS: + return "DTS"; + case AudioFormat.ENCODING_DTS_HD: + return "DTS_HD"; + case AudioFormat.ENCODING_MP3: + return "MP3"; + default: + return "Invalid encoding: " + enc; + } + } + + // Reports basic audio statistics. 
+ private static void logAudioStateBasic(String tag, Context context, AudioManager audioManager) { + Logging.d(tag, + "Audio State: " + + "audio mode: " + modeToString(audioManager.getMode()) + ", " + + "has mic: " + hasMicrophone(context) + ", " + + "mic muted: " + audioManager.isMicrophoneMute() + ", " + + "music active: " + audioManager.isMusicActive() + ", " + + "speakerphone: " + audioManager.isSpeakerphoneOn() + ", " + + "BT SCO: " + audioManager.isBluetoothScoOn()); + } + + // Adds volume information for all possible stream types. + private static void logAudioStateVolume(String tag, AudioManager audioManager) { + final int[] streams = {AudioManager.STREAM_VOICE_CALL, AudioManager.STREAM_MUSIC, + AudioManager.STREAM_RING, AudioManager.STREAM_ALARM, AudioManager.STREAM_NOTIFICATION, + AudioManager.STREAM_SYSTEM}; + Logging.d(tag, "Audio State: "); + // Some devices may not have volume controls and might use a fixed volume. + boolean fixedVolume = audioManager.isVolumeFixed(); + Logging.d(tag, " fixed volume=" + fixedVolume); + if (!fixedVolume) { + for (int stream : streams) { + StringBuilder info = new StringBuilder(); + info.append(" " + streamTypeToString(stream) + ": "); + info.append("volume=").append(audioManager.getStreamVolume(stream)); + info.append(", max=").append(audioManager.getStreamMaxVolume(stream)); + logIsStreamMute(tag, audioManager, stream, info); + Logging.d(tag, info.toString()); + } + } + } + + private static void logIsStreamMute( + String tag, AudioManager audioManager, int stream, StringBuilder info) { + if (Build.VERSION.SDK_INT >= 23) { + info.append(", muted=").append(audioManager.isStreamMute(stream)); + } + } + + private static void logAudioDeviceInfo(String tag, AudioManager audioManager) { + if (Build.VERSION.SDK_INT < 23) { + return; + } + final AudioDeviceInfo[] devices = audioManager.getDevices(AudioManager.GET_DEVICES_ALL); + if (devices.length == 0) { + return; + } + Logging.d(tag, "Audio Devices: "); + for 
(AudioDeviceInfo device : devices) { + StringBuilder info = new StringBuilder(); + info.append(" ").append(deviceTypeToString(device.getType())); + info.append(device.isSource() ? "(in): " : "(out): "); + // An empty array indicates that the device supports arbitrary channel counts. + if (device.getChannelCounts().length > 0) { + info.append("channels=").append(Arrays.toString(device.getChannelCounts())); + info.append(", "); + } + if (device.getEncodings().length > 0) { + // Examples: ENCODING_PCM_16BIT = 2, ENCODING_PCM_FLOAT = 4. + info.append("encodings=").append(Arrays.toString(device.getEncodings())); + info.append(", "); + } + if (device.getSampleRates().length > 0) { + info.append("sample rates=").append(Arrays.toString(device.getSampleRates())); + info.append(", "); + } + info.append("id=").append(device.getId()); + Logging.d(tag, info.toString()); + } + } + + // Converts media.AudioManager modes into local string representation. + static String modeToString(int mode) { + switch (mode) { + case MODE_IN_CALL: + return "MODE_IN_CALL"; + case MODE_IN_COMMUNICATION: + return "MODE_IN_COMMUNICATION"; + case MODE_NORMAL: + return "MODE_NORMAL"; + case MODE_RINGTONE: + return "MODE_RINGTONE"; + default: + return "MODE_INVALID"; + } + } + + private static String streamTypeToString(int stream) { + switch (stream) { + case AudioManager.STREAM_VOICE_CALL: + return "STREAM_VOICE_CALL"; + case AudioManager.STREAM_MUSIC: + return "STREAM_MUSIC"; + case AudioManager.STREAM_RING: + return "STREAM_RING"; + case AudioManager.STREAM_ALARM: + return "STREAM_ALARM"; + case AudioManager.STREAM_NOTIFICATION: + return "STREAM_NOTIFICATION"; + case AudioManager.STREAM_SYSTEM: + return "STREAM_SYSTEM"; + default: + return "STREAM_INVALID"; + } + } + + // Returns true if the device can record audio via a microphone. 
+ private static boolean hasMicrophone(Context context) { + return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_MICROPHONE); + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/voiceengine/BuildInfo.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/voiceengine/BuildInfo.java new file mode 100644 index 00000000..cda9127b --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/voiceengine/BuildInfo.java @@ -0,0 +1,50 @@ +// +// Source code recreated from a .class file by IntelliJ IDEA +// (powered by FernFlower decompiler) +// + +package org.webrtc.voiceengine; + +import android.os.Build; +import android.os.Build.VERSION; + +public final class BuildInfo { + public BuildInfo() { + } + + public static String getDevice() { + return Build.DEVICE; + } + + public static String getDeviceModel() { + return Build.MODEL; + } + + public static String getProduct() { + return Build.PRODUCT; + } + + public static String getBrand() { + return Build.BRAND; + } + + public static String getDeviceManufacturer() { + return Build.MANUFACTURER; + } + + public static String getAndroidBuildId() { + return Build.ID; + } + + public static String getBuildType() { + return Build.TYPE; + } + + public static String getBuildRelease() { + return VERSION.RELEASE; + } + + public static int getSdkVersion() { + return VERSION.SDK_INT; + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/voiceengine/WebRtcAudioEffects.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/voiceengine/WebRtcAudioEffects.java new file mode 100644 index 00000000..58617f77 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/voiceengine/WebRtcAudioEffects.java @@ -0,0 +1,251 @@ +// +// Source code recreated from a .class file by IntelliJ IDEA +// (powered by FernFlower decompiler) +// + +package org.webrtc.voiceengine; + +import android.media.audiofx.AcousticEchoCanceler; +import android.media.audiofx.AudioEffect; +import 
android.media.audiofx.NoiseSuppressor; +import android.os.Build; +import android.os.Build.VERSION; + +import androidx.annotation.Nullable; + +import org.webrtc.Logging; + +import java.util.List; +import java.util.UUID; + +public class WebRtcAudioEffects { + private static final boolean DEBUG = false; + private static final String TAG = "WebRtcAudioEffects"; + private static final UUID AOSP_ACOUSTIC_ECHO_CANCELER = UUID.fromString("bb392ec0-8d4d-11e0-a896-0002a5d5c51b"); + private static final UUID AOSP_NOISE_SUPPRESSOR = UUID.fromString("c06c8400-8e06-11e0-9cb6-0002a5d5c51b"); + @Nullable + private static AudioEffect.Descriptor[] cachedEffects; + @Nullable + private AcousticEchoCanceler aec; + @Nullable + private NoiseSuppressor ns; + private boolean shouldEnableAec; + private boolean shouldEnableNs; + + public static boolean isAcousticEchoCancelerSupported() { + return isAcousticEchoCancelerEffectAvailable(); + } + + public static boolean isNoiseSuppressorSupported() { + return isNoiseSuppressorEffectAvailable(); + } + + public static boolean isAcousticEchoCancelerBlacklisted() { + List blackListedModels = WebRtcAudioUtils.getBlackListedModelsForAecUsage(); + boolean isBlacklisted = blackListedModels.contains(Build.MODEL); + if (isBlacklisted) { + Logging.w("WebRtcAudioEffects", Build.MODEL + " is blacklisted for HW AEC usage!"); + } + + return isBlacklisted; + } + + public static boolean isNoiseSuppressorBlacklisted() { + List blackListedModels = WebRtcAudioUtils.getBlackListedModelsForNsUsage(); + boolean isBlacklisted = blackListedModels.contains(Build.MODEL); + if (isBlacklisted) { + Logging.w("WebRtcAudioEffects", Build.MODEL + " is blacklisted for HW NS usage!"); + } + + return isBlacklisted; + } + + private static boolean isAcousticEchoCancelerExcludedByUUID() { + if (VERSION.SDK_INT < 18) { + return false; + } else { + AudioEffect.Descriptor[] var0 = getAvailableEffects(); + int var1 = var0.length; + + for(int var2 = 0; var2 < var1; ++var2) { + 
AudioEffect.Descriptor d = var0[var2]; + if (d.type.equals(AudioEffect.EFFECT_TYPE_AEC) && d.uuid.equals(AOSP_ACOUSTIC_ECHO_CANCELER)) { + return true; + } + } + + return false; + } + } + + private static boolean isNoiseSuppressorExcludedByUUID() { + if (VERSION.SDK_INT < 18) { + return false; + } else { + AudioEffect.Descriptor[] var0 = getAvailableEffects(); + int var1 = var0.length; + + for(int var2 = 0; var2 < var1; ++var2) { + AudioEffect.Descriptor d = var0[var2]; + if (d.type.equals(AudioEffect.EFFECT_TYPE_NS) && d.uuid.equals(AOSP_NOISE_SUPPRESSOR)) { + return true; + } + } + + return false; + } + } + + private static boolean isAcousticEchoCancelerEffectAvailable() { + return VERSION.SDK_INT < 18 ? false : isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_AEC); + } + + private static boolean isNoiseSuppressorEffectAvailable() { + return VERSION.SDK_INT < 18 ? false : isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_NS); + } + + public static boolean canUseAcousticEchoCanceler() { + boolean canUseAcousticEchoCanceler = isAcousticEchoCancelerSupported() && !WebRtcAudioUtils.useWebRtcBasedAcousticEchoCanceler() && !isAcousticEchoCancelerBlacklisted() && !isAcousticEchoCancelerExcludedByUUID(); + Logging.d("WebRtcAudioEffects", "canUseAcousticEchoCanceler: " + canUseAcousticEchoCanceler); + return canUseAcousticEchoCanceler; + } + + public static boolean canUseNoiseSuppressor() { + boolean canUseNoiseSuppressor = isNoiseSuppressorSupported() && !WebRtcAudioUtils.useWebRtcBasedNoiseSuppressor() && !isNoiseSuppressorBlacklisted() && !isNoiseSuppressorExcludedByUUID(); + Logging.d("WebRtcAudioEffects", "canUseNoiseSuppressor: " + canUseNoiseSuppressor); + return canUseNoiseSuppressor; + } + + public static WebRtcAudioEffects create() { + return new WebRtcAudioEffects(); + } + + private WebRtcAudioEffects() { + Logging.d("WebRtcAudioEffects", "ctor" + WebRtcAudioUtils.getThreadInfo()); + } + + public boolean setAEC(boolean enable) { + Logging.d("WebRtcAudioEffects", 
"setAEC(" + enable + ")"); + if (!canUseAcousticEchoCanceler()) { + Logging.w("WebRtcAudioEffects", "Platform AEC is not supported"); + this.shouldEnableAec = false; + return false; + } else if (this.aec != null && enable != this.shouldEnableAec) { + Logging.e("WebRtcAudioEffects", "Platform AEC state can't be modified while recording"); + return false; + } else { + this.shouldEnableAec = enable; + return true; + } + } + + public boolean setNS(boolean enable) { + Logging.d("WebRtcAudioEffects", "setNS(" + enable + ")"); + if (!canUseNoiseSuppressor()) { + Logging.w("WebRtcAudioEffects", "Platform NS is not supported"); + this.shouldEnableNs = false; + return false; + } else if (this.ns != null && enable != this.shouldEnableNs) { + Logging.e("WebRtcAudioEffects", "Platform NS state can't be modified while recording"); + return false; + } else { + this.shouldEnableNs = enable; + return true; + } + } + + public void enable(int audioSession) { + Logging.d("WebRtcAudioEffects", "enable(audioSession=" + audioSession + ")"); + assertTrue(this.aec == null); + assertTrue(this.ns == null); + boolean enabled; + boolean enable; + if (isAcousticEchoCancelerSupported()) { + this.aec = AcousticEchoCanceler.create(audioSession); + if (this.aec != null) { + enabled = this.aec.getEnabled(); + enable = this.shouldEnableAec && canUseAcousticEchoCanceler(); + if (this.aec.setEnabled(enable) != 0) { + Logging.e("WebRtcAudioEffects", "Failed to set the AcousticEchoCanceler state"); + } + + Logging.d("WebRtcAudioEffects", "AcousticEchoCanceler: was " + (enabled ? "enabled" : "disabled") + ", enable: " + enable + ", is now: " + (this.aec.getEnabled() ? 
"enabled" : "disabled")); + } else { + Logging.e("WebRtcAudioEffects", "Failed to create the AcousticEchoCanceler instance"); + } + } + + if (isNoiseSuppressorSupported()) { + this.ns = NoiseSuppressor.create(audioSession); + if (this.ns != null) { + enabled = this.ns.getEnabled(); + enable = this.shouldEnableNs && canUseNoiseSuppressor(); + if (this.ns.setEnabled(enable) != 0) { + Logging.e("WebRtcAudioEffects", "Failed to set the NoiseSuppressor state"); + } + + Logging.d("WebRtcAudioEffects", "NoiseSuppressor: was " + (enabled ? "enabled" : "disabled") + ", enable: " + enable + ", is now: " + (this.ns.getEnabled() ? "enabled" : "disabled")); + } else { + Logging.e("WebRtcAudioEffects", "Failed to create the NoiseSuppressor instance"); + } + } + + } + + public void release() { + Logging.d("WebRtcAudioEffects", "release"); + if (this.aec != null) { + this.aec.release(); + this.aec = null; + } + + if (this.ns != null) { + this.ns.release(); + this.ns = null; + } + + } + + private boolean effectTypeIsVoIP(UUID type) { + if (VERSION.SDK_INT < 18) { + return false; + } else { + return AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported() || AudioEffect.EFFECT_TYPE_NS.equals(type) && isNoiseSuppressorSupported(); + } + } + + private static void assertTrue(boolean condition) { + if (!condition) { + throw new AssertionError("Expected condition to be true"); + } + } + + @Nullable + private static AudioEffect.Descriptor[] getAvailableEffects() { + if (cachedEffects != null) { + return cachedEffects; + } else { + cachedEffects = AudioEffect.queryEffects(); + return cachedEffects; + } + } + + private static boolean isEffectTypeAvailable(UUID effectType) { + AudioEffect.Descriptor[] effects = getAvailableEffects(); + if (effects == null) { + return false; + } else { + AudioEffect.Descriptor[] var2 = effects; + int var3 = effects.length; + + for(int var4 = 0; var4 < var3; ++var4) { + AudioEffect.Descriptor d = var2[var4]; + if 
(d.type.equals(effectType)) { + return true; + } + } + + return false; + } + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/voiceengine/WebRtcAudioManager.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/voiceengine/WebRtcAudioManager.java new file mode 100644 index 00000000..89b33547 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/voiceengine/WebRtcAudioManager.java @@ -0,0 +1,262 @@ +// +// Source code recreated from a .class file by IntelliJ IDEA +// (powered by FernFlower decompiler) +// + +package org.webrtc.voiceengine; + +import android.media.AudioManager; +import android.media.AudioRecord; +import android.media.AudioTrack; +import android.os.Build; +import android.os.Build.VERSION; + +import androidx.annotation.Nullable; + +import org.webrtc.ContextUtils; +import org.webrtc.Logging; + +import java.util.Timer; +import java.util.TimerTask; + +public class WebRtcAudioManager { + private static final boolean DEBUG = false; + private static final String TAG = "WebRtcAudioManager"; + private static final boolean blacklistDeviceForAAudioUsage = true; + private static boolean useStereoOutput; + private static boolean useStereoInput; + private static boolean blacklistDeviceForOpenSLESUsage; + private static boolean blacklistDeviceForOpenSLESUsageIsOverridden; + private static final int BITS_PER_SAMPLE = 16; + private static final int DEFAULT_FRAME_PER_BUFFER = 256; + private final long nativeAudioManager; + private final AudioManager audioManager; + private boolean initialized; + private int nativeSampleRate; + private int nativeChannels; + private boolean hardwareAEC; + private boolean hardwareAGC; + private boolean hardwareNS; + private boolean lowLatencyOutput; + private boolean lowLatencyInput; + private boolean proAudio; + private boolean aAudio; + private int sampleRate; + private int outputChannels; + private int inputChannels; + private int outputBufferSize; + private int inputBufferSize; + private final 
VolumeLogger volumeLogger; + + public static synchronized void setBlacklistDeviceForOpenSLESUsage(boolean enable) { + blacklistDeviceForOpenSLESUsageIsOverridden = true; + blacklistDeviceForOpenSLESUsage = enable; + } + + public static synchronized void setStereoOutput(boolean enable) { + Logging.w("WebRtcAudioManager", "Overriding default output behavior: setStereoOutput(" + enable + ')'); + useStereoOutput = enable; + } + + public static synchronized void setStereoInput(boolean enable) { + Logging.w("WebRtcAudioManager", "Overriding default input behavior: setStereoInput(" + enable + ')'); + useStereoInput = enable; + } + + public static synchronized boolean getStereoOutput() { + return useStereoOutput; + } + + public static synchronized boolean getStereoInput() { + return useStereoInput; + } + + WebRtcAudioManager(long nativeAudioManager) { + Logging.d("WebRtcAudioManager", "ctor" + WebRtcAudioUtils.getThreadInfo()); + this.nativeAudioManager = nativeAudioManager; + this.audioManager = (AudioManager)ContextUtils.getApplicationContext().getSystemService("audio"); + this.volumeLogger = new VolumeLogger(this.audioManager); + this.storeAudioParameters(); + this.nativeCacheAudioParameters(this.sampleRate, this.outputChannels, this.inputChannels, this.hardwareAEC, this.hardwareAGC, this.hardwareNS, this.lowLatencyOutput, this.lowLatencyInput, this.proAudio, this.aAudio, this.outputBufferSize, this.inputBufferSize, nativeAudioManager); + WebRtcAudioUtils.logAudioState("WebRtcAudioManager"); + } + + private boolean init() { + Logging.d("WebRtcAudioManager", "init" + WebRtcAudioUtils.getThreadInfo()); + if (this.initialized) { + return true; + } else { + Logging.d("WebRtcAudioManager", "audio mode is: " + WebRtcAudioUtils.modeToString(this.audioManager.getMode())); + this.initialized = true; + this.volumeLogger.start(); + return true; + } + } + + private void dispose() { + Logging.d("WebRtcAudioManager", "dispose" + WebRtcAudioUtils.getThreadInfo()); + if 
(this.initialized) { + this.volumeLogger.stop(); + } + } + + private boolean isCommunicationModeEnabled() { + return this.audioManager.getMode() == 3; + } + + private boolean isDeviceBlacklistedForOpenSLESUsage() { + boolean blacklisted = blacklistDeviceForOpenSLESUsageIsOverridden ? blacklistDeviceForOpenSLESUsage : WebRtcAudioUtils.deviceIsBlacklistedForOpenSLESUsage(); + if (blacklisted) { + Logging.d("WebRtcAudioManager", Build.MODEL + " is blacklisted for OpenSL ES usage!"); + } + + return blacklisted; + } + + private void storeAudioParameters() { + this.outputChannels = getStereoOutput() ? 2 : 1; + this.inputChannels = getStereoInput() ? 2 : 1; + this.sampleRate = this.getNativeOutputSampleRate(); + this.hardwareAEC = isAcousticEchoCancelerSupported(); + this.hardwareAGC = false; + this.hardwareNS = isNoiseSuppressorSupported(); + this.lowLatencyOutput = this.isLowLatencyOutputSupported(); + this.lowLatencyInput = this.isLowLatencyInputSupported(); + this.proAudio = this.isProAudioSupported(); + this.aAudio = this.isAAudioSupported(); + this.outputBufferSize = this.lowLatencyOutput ? this.getLowLatencyOutputFramesPerBuffer() : getMinOutputFrameSize(this.sampleRate, this.outputChannels); + this.inputBufferSize = this.lowLatencyInput ? 
this.getLowLatencyInputFramesPerBuffer() : getMinInputFrameSize(this.sampleRate, this.inputChannels); + } + + private boolean hasEarpiece() { + return ContextUtils.getApplicationContext().getPackageManager().hasSystemFeature("android.hardware.telephony"); + } + + private boolean isLowLatencyOutputSupported() { + return ContextUtils.getApplicationContext().getPackageManager().hasSystemFeature("android.hardware.audio.low_latency"); + } + + public boolean isLowLatencyInputSupported() { + return VERSION.SDK_INT >= 21 && this.isLowLatencyOutputSupported(); + } + + private boolean isProAudioSupported() { + return VERSION.SDK_INT >= 23 && ContextUtils.getApplicationContext().getPackageManager().hasSystemFeature("android.hardware.audio.pro"); + } + + private boolean isAAudioSupported() { + Logging.w("WebRtcAudioManager", "AAudio support is currently disabled on all devices!"); + return false; + } + + private int getNativeOutputSampleRate() { + if (WebRtcAudioUtils.runningOnEmulator()) { + Logging.d("WebRtcAudioManager", "Running emulator, overriding sample rate to 8 kHz."); + return 8000; + } else if (WebRtcAudioUtils.isDefaultSampleRateOverridden()) { + Logging.d("WebRtcAudioManager", "Default sample rate is overriden to " + WebRtcAudioUtils.getDefaultSampleRateHz() + " Hz"); + return WebRtcAudioUtils.getDefaultSampleRateHz(); + } else { + int sampleRateHz = this.getSampleRateForApiLevel(); + Logging.d("WebRtcAudioManager", "Sample rate is set to " + sampleRateHz + " Hz"); + return sampleRateHz; + } + } + + private int getSampleRateForApiLevel() { + if (VERSION.SDK_INT < 17) { + return WebRtcAudioUtils.getDefaultSampleRateHz(); + } else { + String sampleRateString = this.audioManager.getProperty("android.media.property.OUTPUT_SAMPLE_RATE"); + return sampleRateString == null ? 
WebRtcAudioUtils.getDefaultSampleRateHz() : Integer.parseInt(sampleRateString); + } + } + + private int getLowLatencyOutputFramesPerBuffer() { + assertTrue(this.isLowLatencyOutputSupported()); + if (VERSION.SDK_INT < 17) { + return 256; + } else { + String framesPerBuffer = this.audioManager.getProperty("android.media.property.OUTPUT_FRAMES_PER_BUFFER"); + return framesPerBuffer == null ? 256 : Integer.parseInt(framesPerBuffer); + } + } + + private static boolean isAcousticEchoCancelerSupported() { + return WebRtcAudioEffects.canUseAcousticEchoCanceler(); + } + + private static boolean isNoiseSuppressorSupported() { + return WebRtcAudioEffects.canUseNoiseSuppressor(); + } + + private static int getMinOutputFrameSize(int sampleRateInHz, int numChannels) { + int bytesPerFrame = numChannels * 2; + int channelConfig = numChannels == 1 ? 4 : 12; + return AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, 2) / bytesPerFrame; + } + + private int getLowLatencyInputFramesPerBuffer() { + assertTrue(this.isLowLatencyInputSupported()); + return this.getLowLatencyOutputFramesPerBuffer(); + } + + private static int getMinInputFrameSize(int sampleRateInHz, int numChannels) { + int bytesPerFrame = numChannels * 2; + int channelConfig = numChannels == 1 ? 
16 : 12; + return AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, 2) / bytesPerFrame; + } + + private static void assertTrue(boolean condition) { + if (!condition) { + throw new AssertionError("Expected condition to be true"); + } + } + + private native void nativeCacheAudioParameters(int var1, int var2, int var3, boolean var4, boolean var5, boolean var6, boolean var7, boolean var8, boolean var9, boolean var10, int var11, int var12, long var13); + + private static class VolumeLogger { + private static final String THREAD_NAME = "WebRtcVolumeLevelLoggerThread"; + private static final int TIMER_PERIOD_IN_SECONDS = 30; + private final AudioManager audioManager; + @Nullable + private Timer timer; + + public VolumeLogger(AudioManager audioManager) { + this.audioManager = audioManager; + } + + public void start() { + this.timer = new Timer("WebRtcVolumeLevelLoggerThread"); + this.timer.schedule(new LogVolumeTask(this.audioManager.getStreamMaxVolume(2), this.audioManager.getStreamMaxVolume(0)), 0L, 30000L); + } + + private void stop() { + if (this.timer != null) { + this.timer.cancel(); + this.timer = null; + } + + } + + private class LogVolumeTask extends TimerTask { + private final int maxRingVolume; + private final int maxVoiceCallVolume; + + LogVolumeTask(int maxRingVolume, int maxVoiceCallVolume) { + this.maxRingVolume = maxRingVolume; + this.maxVoiceCallVolume = maxVoiceCallVolume; + } + + public void run() { + int mode = VolumeLogger.this.audioManager.getMode(); + if (mode == 1) { + Logging.d("WebRtcAudioManager", "STREAM_RING stream volume: " + VolumeLogger.this.audioManager.getStreamVolume(2) + " (max=" + this.maxRingVolume + ")"); + } else if (mode == 3) { + Logging.d("WebRtcAudioManager", "VOICE_CALL stream volume: " + VolumeLogger.this.audioManager.getStreamVolume(0) + " (max=" + this.maxVoiceCallVolume + ")"); + } + + } + } + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/voiceengine/WebRtcAudioRecord.java 
b/webrtc_player/android/zlm/src/main/java/org/webrtc/voiceengine/WebRtcAudioRecord.java new file mode 100644 index 00000000..108ee93f --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/voiceengine/WebRtcAudioRecord.java @@ -0,0 +1,350 @@ +// +// Source code recreated from a .class file by IntelliJ IDEA +// (powered by FernFlower decompiler) +// + +package org.webrtc.voiceengine; + +import android.media.AudioRecord; +import android.os.Build.VERSION; +import android.os.Process; + +import androidx.annotation.Nullable; + +import org.webrtc.Logging; +import org.webrtc.ThreadUtils; + +import java.nio.ByteBuffer; +import java.util.Arrays; + +public class WebRtcAudioRecord { + private static final boolean DEBUG = false; + private static final String TAG = "WebRtcAudioRecord"; + private static final int BITS_PER_SAMPLE = 16; + private static final int CALLBACK_BUFFER_SIZE_MS = 10; + private static final int BUFFERS_PER_SECOND = 100; + private static final int BUFFER_SIZE_FACTOR = 2; + private static final long AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS = 2000L; + private static final int DEFAULT_AUDIO_SOURCE = getDefaultAudioSource(); + private static int audioSource; + private final long nativeAudioRecord; + @Nullable + private WebRtcAudioEffects effects; + private ByteBuffer byteBuffer; + @Nullable + private AudioRecord audioRecord; + @Nullable + private AudioRecordThread audioThread; + private static volatile boolean microphoneMute; + private byte[] emptyBytes; + @Nullable + private static WebRtcAudioRecordErrorCallback errorCallback; + @Nullable + private static WebRtcAudioRecordSamplesReadyCallback audioSamplesReadyCallback; + + public static void setErrorCallback(WebRtcAudioRecordErrorCallback errorCallback) { + Logging.d("WebRtcAudioRecord", "Set error callback"); + WebRtcAudioRecord.errorCallback = errorCallback; + } + + public static void setOnAudioSamplesReady(WebRtcAudioRecordSamplesReadyCallback callback) { + audioSamplesReadyCallback = callback; + 
} + + public WebRtcAudioRecord(long nativeAudioRecord) { + Logging.d("WebRtcAudioRecord", "ctor" + WebRtcAudioUtils.getThreadInfo()); + this.nativeAudioRecord = nativeAudioRecord; + this.effects = WebRtcAudioEffects.create(); + } + + private boolean enableBuiltInAEC(boolean enable) { + Logging.d("WebRtcAudioRecord", "enableBuiltInAEC(" + enable + ')'); + if (this.effects == null) { + Logging.e("WebRtcAudioRecord", "Built-in AEC is not supported on this platform"); + return false; + } else { + return this.effects.setAEC(enable); + } + } + + private boolean enableBuiltInNS(boolean enable) { + Logging.d("WebRtcAudioRecord", "enableBuiltInNS(" + enable + ')'); + if (this.effects == null) { + Logging.e("WebRtcAudioRecord", "Built-in NS is not supported on this platform"); + return false; + } else { + return this.effects.setNS(enable); + } + } + + private int initRecording(int sampleRate, int channels) { + Logging.d("WebRtcAudioRecord", "initRecording(sampleRate=" + sampleRate + ", channels=" + channels + ")"); + if (this.audioRecord != null) { + this.reportWebRtcAudioRecordInitError("InitRecording called twice without StopRecording."); + return -1; + } else { + int bytesPerFrame = channels * 2; + int framesPerBuffer = sampleRate / 100; + this.byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * framesPerBuffer); + Logging.d("WebRtcAudioRecord", "byteBuffer.capacity: " + this.byteBuffer.capacity()); + this.emptyBytes = new byte[this.byteBuffer.capacity()]; + this.nativeCacheDirectBufferAddress(this.byteBuffer, this.nativeAudioRecord); + int channelConfig = this.channelCountToConfiguration(channels); + int minBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, 2); + if (minBufferSize != -1 && minBufferSize != -2) { + Logging.d("WebRtcAudioRecord", "AudioRecord.getMinBufferSize: " + minBufferSize); + int bufferSizeInBytes = Math.max(2 * minBufferSize, this.byteBuffer.capacity()); + Logging.d("WebRtcAudioRecord", "bufferSizeInBytes: " + 
bufferSizeInBytes); + + try { + this.audioRecord = new AudioRecord(audioSource, sampleRate, channelConfig, 2, bufferSizeInBytes); + } catch (IllegalArgumentException var9) { + this.reportWebRtcAudioRecordInitError("AudioRecord ctor error: " + var9.getMessage()); + this.releaseAudioResources(); + return -1; + } + + if (this.audioRecord != null && this.audioRecord.getState() == 1) { + if (this.effects != null) { + this.effects.enable(this.audioRecord.getAudioSessionId()); + } + + this.logMainParameters(); + this.logMainParametersExtended(); + return framesPerBuffer; + } else { + this.reportWebRtcAudioRecordInitError("Failed to create a new AudioRecord instance"); + this.releaseAudioResources(); + return -1; + } + } else { + this.reportWebRtcAudioRecordInitError("AudioRecord.getMinBufferSize failed: " + minBufferSize); + return -1; + } + } + } + + private boolean startRecording() { + Logging.d("WebRtcAudioRecord", "startRecording"); + assertTrue(this.audioRecord != null); + assertTrue(this.audioThread == null); + + try { + this.audioRecord.startRecording(); + } catch (IllegalStateException var2) { + this.reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION, "AudioRecord.startRecording failed: " + var2.getMessage()); + return false; + } + + if (this.audioRecord.getRecordingState() != 3) { + this.reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH, "AudioRecord.startRecording failed - incorrect state :" + this.audioRecord.getRecordingState()); + return false; + } else { + this.audioThread = new AudioRecordThread("AudioRecordJavaThread"); + this.audioThread.start(); + return true; + } + } + + private boolean stopRecording() { + Logging.d("WebRtcAudioRecord", "stopRecording"); + assertTrue(this.audioThread != null); + this.audioThread.stopThread(); + if (!ThreadUtils.joinUninterruptibly(this.audioThread, 2000L)) { + Logging.e("WebRtcAudioRecord", "Join of AudioRecordJavaThread timed out"); + 
WebRtcAudioUtils.logAudioState("WebRtcAudioRecord"); + } + + this.audioThread = null; + if (this.effects != null) { + this.effects.release(); + } + + this.releaseAudioResources(); + return true; + } + + private void logMainParameters() { + Logging.d("WebRtcAudioRecord", "AudioRecord: session ID: " + this.audioRecord.getAudioSessionId() + ", channels: " + this.audioRecord.getChannelCount() + ", sample rate: " + this.audioRecord.getSampleRate()); + } + + private void logMainParametersExtended() { + if (VERSION.SDK_INT >= 23) { + Logging.d("WebRtcAudioRecord", "AudioRecord: buffer size in frames: " + this.audioRecord.getBufferSizeInFrames()); + } + + } + + private static void assertTrue(boolean condition) { + if (!condition) { + throw new AssertionError("Expected condition to be true"); + } + } + + private int channelCountToConfiguration(int channels) { + return channels == 1 ? 16 : 12; + } + + private native void nativeCacheDirectBufferAddress(ByteBuffer var1, long var2); + + private native void nativeDataIsRecorded(int var1, long var2); + + public static synchronized void setAudioSource(int source) { + Logging.w("WebRtcAudioRecord", "Audio source is changed from: " + audioSource + " to " + source); + audioSource = source; + } + + private static int getDefaultAudioSource() { + return 7; + } + + public static void setMicrophoneMute(boolean mute) { + Logging.w("WebRtcAudioRecord", "setMicrophoneMute(" + mute + ")"); + microphoneMute = mute; + } + + private void releaseAudioResources() { + Logging.d("WebRtcAudioRecord", "releaseAudioResources"); + if (this.audioRecord != null) { + this.audioRecord.release(); + this.audioRecord = null; + } + + } + + private void reportWebRtcAudioRecordInitError(String errorMessage) { + Logging.e("WebRtcAudioRecord", "Init recording error: " + errorMessage); + WebRtcAudioUtils.logAudioState("WebRtcAudioRecord"); + if (errorCallback != null) { + errorCallback.onWebRtcAudioRecordInitError(errorMessage); + } + + } + + private void 
reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode errorCode, String errorMessage) { + Logging.e("WebRtcAudioRecord", "Start recording error: " + errorCode + ". " + errorMessage); + WebRtcAudioUtils.logAudioState("WebRtcAudioRecord"); + if (errorCallback != null) { + errorCallback.onWebRtcAudioRecordStartError(errorCode, errorMessage); + } + + } + + private void reportWebRtcAudioRecordError(String errorMessage) { + Logging.e("WebRtcAudioRecord", "Run-time recording error: " + errorMessage); + WebRtcAudioUtils.logAudioState("WebRtcAudioRecord"); + if (errorCallback != null) { + errorCallback.onWebRtcAudioRecordError(errorMessage); + } + + } + + static { + audioSource = DEFAULT_AUDIO_SOURCE; + } + + private class AudioRecordThread extends Thread { + private volatile boolean keepAlive = true; + + public AudioRecordThread(String name) { + super(name); + } + + public void run() { + Process.setThreadPriority(-19); + Logging.d("WebRtcAudioRecord", "AudioRecordThread" + WebRtcAudioUtils.getThreadInfo()); + WebRtcAudioRecord.assertTrue(WebRtcAudioRecord.this.audioRecord.getRecordingState() == 3); + long lastTime = System.nanoTime(); + + while(this.keepAlive) { + int bytesRead = WebRtcAudioRecord.this.audioRecord.read(WebRtcAudioRecord.this.byteBuffer, WebRtcAudioRecord.this.byteBuffer.capacity()); + if (bytesRead == WebRtcAudioRecord.this.byteBuffer.capacity()) { + if (WebRtcAudioRecord.microphoneMute) { + WebRtcAudioRecord.this.byteBuffer.clear(); + WebRtcAudioRecord.this.byteBuffer.put(WebRtcAudioRecord.this.emptyBytes); + } + + if (this.keepAlive) { + WebRtcAudioRecord.this.nativeDataIsRecorded(bytesRead, WebRtcAudioRecord.this.nativeAudioRecord); + } + + if (WebRtcAudioRecord.audioSamplesReadyCallback != null) { + byte[] data = Arrays.copyOf(WebRtcAudioRecord.this.byteBuffer.array(), WebRtcAudioRecord.this.byteBuffer.capacity()); + WebRtcAudioRecord.audioSamplesReadyCallback.onWebRtcAudioRecordSamplesReady(new AudioSamples(WebRtcAudioRecord.this.audioRecord, 
data)); + } + } else { + String errorMessage = "AudioRecord.read failed: " + bytesRead; + Logging.e("WebRtcAudioRecord", errorMessage); + if (bytesRead == -3) { + this.keepAlive = false; + WebRtcAudioRecord.this.reportWebRtcAudioRecordError(errorMessage); + } + } + } + + try { + if (WebRtcAudioRecord.this.audioRecord != null) { + WebRtcAudioRecord.this.audioRecord.stop(); + } + } catch (IllegalStateException var5) { + Logging.e("WebRtcAudioRecord", "AudioRecord.stop failed: " + var5.getMessage()); + } + + } + + public void stopThread() { + Logging.d("WebRtcAudioRecord", "stopThread"); + this.keepAlive = false; + } + } + + public interface WebRtcAudioRecordSamplesReadyCallback { + void onWebRtcAudioRecordSamplesReady(AudioSamples var1); + } + + public static class AudioSamples { + private final int audioFormat; + private final int channelCount; + private final int sampleRate; + private final byte[] data; + + private AudioSamples(AudioRecord audioRecord, byte[] data) { + this.audioFormat = audioRecord.getAudioFormat(); + this.channelCount = audioRecord.getChannelCount(); + this.sampleRate = audioRecord.getSampleRate(); + this.data = data; + } + + public int getAudioFormat() { + return this.audioFormat; + } + + public int getChannelCount() { + return this.channelCount; + } + + public int getSampleRate() { + return this.sampleRate; + } + + public byte[] getData() { + return this.data; + } + } + + public interface WebRtcAudioRecordErrorCallback { + void onWebRtcAudioRecordInitError(String var1); + + void onWebRtcAudioRecordStartError(AudioRecordStartErrorCode var1, String var2); + + void onWebRtcAudioRecordError(String var1); + } + + public static enum AudioRecordStartErrorCode { + AUDIO_RECORD_START_EXCEPTION, + AUDIO_RECORD_START_STATE_MISMATCH; + + private AudioRecordStartErrorCode() { + } + } +} diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/voiceengine/WebRtcAudioTrack.java 
b/webrtc_player/android/zlm/src/main/java/org/webrtc/voiceengine/WebRtcAudioTrack.java new file mode 100644 index 00000000..220b2412 --- /dev/null +++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/voiceengine/WebRtcAudioTrack.java @@ -0,0 +1,397 @@ +// +// Source code recreated from a .class file by IntelliJ IDEA +// (powered by FernFlower decompiler) +// + +package org.webrtc.voiceengine; + +import android.annotation.TargetApi; +import android.media.AudioAttributes; +import android.media.AudioFormat; +import android.media.AudioManager; +import android.media.AudioTrack; +import android.os.Build.VERSION; +import android.os.Process; + +import androidx.annotation.Nullable; + +import org.webrtc.ContextUtils; +import org.webrtc.Logging; +import org.webrtc.ThreadUtils; + +import java.nio.ByteBuffer; + +public class WebRtcAudioTrack { + private static final boolean DEBUG = false; + private static final String TAG = "WebRtcAudioTrack"; + private static final int BITS_PER_SAMPLE = 16; + private static final int CALLBACK_BUFFER_SIZE_MS = 10; + private static final int BUFFERS_PER_SECOND = 100; + private static final long AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS = 2000L; + private static final int DEFAULT_USAGE = getDefaultUsageAttribute(); + private static int usageAttribute; + private final long nativeAudioTrack; + private final AudioManager audioManager; + private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker(); + private ByteBuffer byteBuffer; + @Nullable + private AudioTrack audioTrack; + @Nullable + private AudioTrackThread audioThread; + private static volatile boolean speakerMute; + private byte[] emptyBytes; + @Nullable + private static WebRtcAudioTrackErrorCallback errorCallbackOld; + @Nullable + private static ErrorCallback errorCallback; + + public static synchronized void setAudioTrackUsageAttribute(int usage) { + Logging.w("WebRtcAudioTrack", "Default usage attribute is changed from: " + DEFAULT_USAGE + " to " + usage); + 
usageAttribute = usage; + } + + private static int getDefaultUsageAttribute() { + return VERSION.SDK_INT >= 21 ? 2 : 0; + } + + /** @deprecated */ + @Deprecated + public static void setErrorCallback(WebRtcAudioTrackErrorCallback errorCallback) { + Logging.d("WebRtcAudioTrack", "Set error callback (deprecated"); + errorCallbackOld = errorCallback; + } + + public static void setErrorCallback(ErrorCallback errorCallback) { + Logging.d("WebRtcAudioTrack", "Set extended error callback"); + WebRtcAudioTrack.errorCallback = errorCallback; + } + + WebRtcAudioTrack(long nativeAudioTrack) { + this.threadChecker.checkIsOnValidThread(); + Logging.d("WebRtcAudioTrack", "ctor" + WebRtcAudioUtils.getThreadInfo()); + this.nativeAudioTrack = nativeAudioTrack; + this.audioManager = (AudioManager)ContextUtils.getApplicationContext().getSystemService("audio"); + } + + private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) { + this.threadChecker.checkIsOnValidThread(); + Logging.d("WebRtcAudioTrack", "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels + ", bufferSizeFactor=" + bufferSizeFactor + ")"); + int bytesPerFrame = channels * 2; + this.byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * (sampleRate / 100)); + Logging.d("WebRtcAudioTrack", "byteBuffer.capacity: " + this.byteBuffer.capacity()); + this.emptyBytes = new byte[this.byteBuffer.capacity()]; + this.nativeCacheDirectBufferAddress(this.byteBuffer, this.nativeAudioTrack); + int channelConfig = this.channelCountToConfiguration(channels); + int minBufferSizeInBytes = (int)((double)AudioTrack.getMinBufferSize(sampleRate, channelConfig, 2) * bufferSizeFactor); + Logging.d("WebRtcAudioTrack", "minBufferSizeInBytes: " + minBufferSizeInBytes); + if (minBufferSizeInBytes < this.byteBuffer.capacity()) { + this.reportWebRtcAudioTrackInitError("AudioTrack.getMinBufferSize returns an invalid value."); + return -1; + } else if (this.audioTrack != null) { + 
this.reportWebRtcAudioTrackInitError("Conflict with existing AudioTrack."); + return -1; + } else { + try { + if (VERSION.SDK_INT >= 21) { + this.audioTrack = createAudioTrackOnLollipopOrHigher(sampleRate, channelConfig, minBufferSizeInBytes); + } else { + this.audioTrack = createAudioTrackOnLowerThanLollipop(sampleRate, channelConfig, minBufferSizeInBytes); + } + } catch (IllegalArgumentException var9) { + this.reportWebRtcAudioTrackInitError(var9.getMessage()); + this.releaseAudioResources(); + return -1; + } + + if (this.audioTrack != null && this.audioTrack.getState() == 1) { + this.logMainParameters(); + this.logMainParametersExtended(); + return minBufferSizeInBytes; + } else { + this.reportWebRtcAudioTrackInitError("Initialization of audio track failed."); + this.releaseAudioResources(); + return -1; + } + } + } + + private boolean startPlayout() { + this.threadChecker.checkIsOnValidThread(); + Logging.d("WebRtcAudioTrack", "startPlayout"); + assertTrue(this.audioTrack != null); + assertTrue(this.audioThread == null); + + try { + this.audioTrack.play(); + } catch (IllegalStateException var2) { + this.reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_EXCEPTION, "AudioTrack.play failed: " + var2.getMessage()); + this.releaseAudioResources(); + return false; + } + + if (this.audioTrack.getPlayState() != 3) { + this.reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_STATE_MISMATCH, "AudioTrack.play failed - incorrect state :" + this.audioTrack.getPlayState()); + this.releaseAudioResources(); + return false; + } else { + this.audioThread = new AudioTrackThread("AudioTrackJavaThread"); + this.audioThread.start(); + return true; + } + } + + private boolean stopPlayout() { + this.threadChecker.checkIsOnValidThread(); + Logging.d("WebRtcAudioTrack", "stopPlayout"); + assertTrue(this.audioThread != null); + this.logUnderrunCount(); + this.audioThread.stopThread(); + Logging.d("WebRtcAudioTrack", "Stopping the 
AudioTrackThread...");
    this.audioThread.interrupt();
    // Give the render thread up to two seconds to exit on its own after being
    // signalled; it is never force-killed.
    if (!ThreadUtils.joinUninterruptibly(this.audioThread, 2000L)) {
      Logging.e("WebRtcAudioTrack", "Join of AudioTrackThread timed out.");
      WebRtcAudioUtils.logAudioState("WebRtcAudioTrack");
    }

    Logging.d("WebRtcAudioTrack", "AudioTrackThread has now been stopped.");
    this.audioThread = null;
    this.releaseAudioResources();
    return true;
  }

  // Returns the maximum volume index of the voice-call stream
  // (0 == AudioManager.STREAM_VOICE_CALL). Must run on the construction thread.
  private int getStreamMaxVolume() {
    this.threadChecker.checkIsOnValidThread();
    Logging.d("WebRtcAudioTrack", "getStreamMaxVolume");
    assertTrue(this.audioManager != null);
    return this.audioManager.getStreamMaxVolume(0);
  }

  // Sets the voice-call stream volume index. Returns false on devices with a
  // fixed volume policy, where the volume cannot be changed.
  private boolean setStreamVolume(int volume) {
    this.threadChecker.checkIsOnValidThread();
    Logging.d("WebRtcAudioTrack", "setStreamVolume(" + volume + ")");
    assertTrue(this.audioManager != null);
    if (this.isVolumeFixed()) {
      Logging.e("WebRtcAudioTrack", "The device implements a fixed volume policy.");
      return false;
    } else {
      this.audioManager.setStreamVolume(0, volume, 0);
      return true;
    }
  }

  // AudioManager.isVolumeFixed() requires API 21; older releases are assumed
  // to have an adjustable volume.
  private boolean isVolumeFixed() {
    return VERSION.SDK_INT < 21 ? false : this.audioManager.isVolumeFixed();
  }

  // Current volume index of the voice-call stream (0 == STREAM_VOICE_CALL).
  private int getStreamVolume() {
    this.threadChecker.checkIsOnValidThread();
    Logging.d("WebRtcAudioTrack", "getStreamVolume");
    assertTrue(this.audioManager != null);
    return this.audioManager.getStreamVolume(0);
  }

  private void logMainParameters() {
    Logging.d("WebRtcAudioTrack", "AudioTrack: session ID: " + this.audioTrack.getAudioSessionId()
        + ", channels: " + this.audioTrack.getChannelCount()
        + ", sample rate: " + this.audioTrack.getSampleRate()
        + ", max gain: " + AudioTrack.getMaxVolume());
  }

  // Builds an AudioTrack via the API 21+ AudioAttributes/AudioFormat builders.
  // Decompiler constants: getNativeOutputSampleRate(0) queries STREAM_VOICE_CALL,
  // setContentType(1) == CONTENT_TYPE_SPEECH, setEncoding(2) == ENCODING_PCM_16BIT,
  // trailing args are mode 1 == MODE_STREAM, session 0 == AUDIO_SESSION_ID_GENERATE.
  @TargetApi(21)
  private static AudioTrack createAudioTrackOnLollipopOrHigher(int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
    Logging.d("WebRtcAudioTrack", "createAudioTrackOnLollipopOrHigher");
    int nativeOutputSampleRate = AudioTrack.getNativeOutputSampleRate(0);
    Logging.d("WebRtcAudioTrack", "nativeOutputSampleRate: " + nativeOutputSampleRate);
    // Low-latency ("fast track") output is only granted at the native rate.
    if (sampleRateInHz != nativeOutputSampleRate) {
      Logging.w("WebRtcAudioTrack", "Unable to use fast mode since requested sample rate is not native");
    }

    if (usageAttribute != DEFAULT_USAGE) {
      Logging.w("WebRtcAudioTrack", "A non default usage attribute is used: " + usageAttribute);
    }

    return new AudioTrack(
        (new AudioAttributes.Builder()).setUsage(usageAttribute).setContentType(1).build(),
        (new AudioFormat.Builder()).setEncoding(2).setSampleRate(sampleRateInHz).setChannelMask(channelConfig).build(),
        bufferSizeInBytes, 1, 0);
  }

  // Legacy pre-API-21 constructor path. Decompiler constants: stream type
  // 0 == STREAM_VOICE_CALL, format 2 == ENCODING_PCM_16BIT, mode 1 == MODE_STREAM.
  private static AudioTrack createAudioTrackOnLowerThanLollipop(int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
    return new AudioTrack(0, sampleRateInHz, channelConfig, 2, bufferSizeInBytes, 1);
  }

  private void logBufferSizeInFrames() {
    // AudioTrack.getBufferSizeInFrames() requires API 23.
    if (VERSION.SDK_INT >= 23) {
      Logging.d("WebRtcAudioTrack", "AudioTrack: buffer size in frames: " + this.audioTrack.getBufferSizeInFrames());
    }

  }

  // Returns -1 when the API level is too old (< 23) to query the value.
  private int getBufferSizeInFrames() {
    return VERSION.SDK_INT >= 23 ? this.audioTrack.getBufferSizeInFrames() : -1;
  }

  private void logBufferCapacityInFrames() {
    // AudioTrack.getBufferCapacityInFrames() requires API 24.
    if (VERSION.SDK_INT >= 24) {
      Logging.d("WebRtcAudioTrack", "AudioTrack: buffer capacity in frames: " + this.audioTrack.getBufferCapacityInFrames());
    }

  }

  private void logMainParametersExtended() {
    this.logBufferSizeInFrames();
    this.logBufferCapacityInFrames();
  }

  private void logUnderrunCount() {
    // AudioTrack.getUnderrunCount() requires API 24.
    if (VERSION.SDK_INT >= 24) {
      Logging.d("WebRtcAudioTrack", "underrun count: " + this.audioTrack.getUnderrunCount());
    }

  }

  // Unconditional assert helper: Java "assert" is not reliably enabled on
  // Android, so failures are raised explicitly.
  private static void assertTrue(boolean condition) {
    if (!condition) {
      throw new AssertionError("Expected condition to be true");
    }
  }

  // 4 == AudioFormat.CHANNEL_OUT_MONO, 12 == AudioFormat.CHANNEL_OUT_STEREO.
  private int channelCountToConfiguration(int channels) {
    return channels == 1 ? 4 : 12;
  }

  // Hands the direct ByteBuffer's address to native WebRTC so playout data can
  // be delivered without per-callback copies.
  private native void nativeCacheDirectBufferAddress(ByteBuffer var1, long var2);

  // Asks native WebRTC to fill the cached buffer with the next playout block.
  private native void nativeGetPlayoutData(int var1, long var2);

  // Globally mutes playout; the flag is read each iteration by
  // AudioTrackThread, which zero-fills the buffer while set.
  public static void setSpeakerMute(boolean mute) {
    Logging.w("WebRtcAudioTrack", "setSpeakerMute(" + mute + ")");
    speakerMute = mute;
  }

  private void releaseAudioResources() {
    Logging.d("WebRtcAudioTrack", "releaseAudioResources");
    if (this.audioTrack != null) {
      this.audioTrack.release();
      this.audioTrack = null;
    }

  }

  // Error reporters fan each failure out to both the deprecated callback
  // (errorCallbackOld) and the current one (errorCallback), and dump the full
  // audio state to the log for post-mortem debugging.
  private void reportWebRtcAudioTrackInitError(String errorMessage) {
    Logging.e("WebRtcAudioTrack", "Init playout error: " + errorMessage);
    WebRtcAudioUtils.logAudioState("WebRtcAudioTrack");
    if (errorCallbackOld != null) {
      errorCallbackOld.onWebRtcAudioTrackInitError(errorMessage);
    }

    if (errorCallback != null) {
      errorCallback.onWebRtcAudioTrackInitError(errorMessage);
    }

  }

  private void reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode errorCode, String errorMessage) {
    Logging.e("WebRtcAudioTrack", "Start playout error: " + errorCode + ". " + errorMessage);
    WebRtcAudioUtils.logAudioState("WebRtcAudioTrack");
    if (errorCallbackOld != null) {
      // The deprecated interface has no typed error code, only the message.
      errorCallbackOld.onWebRtcAudioTrackStartError(errorMessage);
    }

    if (errorCallback != null) {
      errorCallback.onWebRtcAudioTrackStartError(errorCode, errorMessage);
    }

  }

  private void reportWebRtcAudioTrackError(String errorMessage) {
    Logging.e("WebRtcAudioTrack", "Run-time playback error: " + errorMessage);
    WebRtcAudioUtils.logAudioState("WebRtcAudioTrack");
    if (errorCallbackOld != null) {
      errorCallbackOld.onWebRtcAudioTrackError(errorMessage);
    }

    if (errorCallback != null) {
      errorCallback.onWebRtcAudioTrackError(errorMessage);
    }

  }

  static {
    usageAttribute = DEFAULT_USAGE;
  }

  // Render thread: repeatedly pulls fixed-size PCM blocks from native WebRTC
  // and writes them to the AudioTrack. Runs until stopThread() clears
  // keepAlive or AudioTrack.write() returns an error code.
  private class AudioTrackThread extends Thread {
    private volatile boolean keepAlive = true;

    public AudioTrackThread(String name) {
      super(name);
    }

    public void run() {
      // -19 == Process.THREAD_PRIORITY_URGENT_AUDIO.
      Process.setThreadPriority(-19);
      Logging.d("WebRtcAudioTrack", "AudioTrackThread" + WebRtcAudioUtils.getThreadInfo());
      // 3 == AudioTrack.PLAYSTATE_PLAYING: play() must already have been called.
      WebRtcAudioTrack.assertTrue(WebRtcAudioTrack.this.audioTrack.getPlayState() == 3);

      // Decompiler-flattened loop: the buffer is rewound at the end of every
      // iteration so the next native fill starts at position 0.
      for(int sizeInBytes = WebRtcAudioTrack.this.byteBuffer.capacity(); this.keepAlive; WebRtcAudioTrack.this.byteBuffer.rewind()) {
        WebRtcAudioTrack.this.nativeGetPlayoutData(sizeInBytes, WebRtcAudioTrack.this.nativeAudioTrack);
        WebRtcAudioTrack.assertTrue(sizeInBytes <= WebRtcAudioTrack.this.byteBuffer.remaining());
        if (WebRtcAudioTrack.speakerMute) {
          // Overwrite the native payload with silence while muted.
          WebRtcAudioTrack.this.byteBuffer.clear();
          WebRtcAudioTrack.this.byteBuffer.put(WebRtcAudioTrack.this.emptyBytes);
          WebRtcAudioTrack.this.byteBuffer.position(0);
        }

        int bytesWritten = this.writeBytes(WebRtcAudioTrack.this.audioTrack, WebRtcAudioTrack.this.byteBuffer, sizeInBytes);
        if (bytesWritten != sizeInBytes) {
          Logging.e("WebRtcAudioTrack", "AudioTrack.write played invalid number of bytes: " + bytesWritten);
          // A negative value is an unrecoverable error code: stop the loop and
          // report it. A short (non-negative) write is only logged.
          if (bytesWritten < 0) {
            this.keepAlive = false;
            WebRtcAudioTrack.this.reportWebRtcAudioTrackError("AudioTrack.write failed: " + bytesWritten);
          }
        }
      }

      if (WebRtcAudioTrack.this.audioTrack != null) {
        Logging.d("WebRtcAudioTrack", "Calling AudioTrack.stop...");

        try {
          WebRtcAudioTrack.this.audioTrack.stop();
          Logging.d("WebRtcAudioTrack", "AudioTrack.stop is done.");
        } catch (IllegalStateException var3) {
          Logging.e("WebRtcAudioTrack", "AudioTrack.stop failed: " + var3.getMessage());
        }
      }

    }

    // API 21+ writes straight from the direct ByteBuffer (last arg 0 ==
    // WRITE_BLOCKING); older releases use the byte[] overload.
    private int writeBytes(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
      return VERSION.SDK_INT >= 21 ? audioTrack.write(byteBuffer, sizeInBytes, 0) : audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
    }

    public void stopThread() {
      Logging.d("WebRtcAudioTrack", "stopThread");
      this.keepAlive = false;
    }
  }

  // Current error callback interface; start errors carry a typed code.
  public interface ErrorCallback {
    void onWebRtcAudioTrackInitError(String var1);

    void onWebRtcAudioTrackStartError(AudioTrackStartErrorCode var1, String var2);

    void onWebRtcAudioTrackError(String var1);
  }

  /** @deprecated Use {@link ErrorCallback} instead; kept for old clients. */
  @Deprecated
  public interface WebRtcAudioTrackErrorCallback {
    void onWebRtcAudioTrackInitError(String var1);

    void onWebRtcAudioTrackStartError(String var1);

    void onWebRtcAudioTrackError(String var1);
  }

  public static enum AudioTrackStartErrorCode {
    AUDIO_TRACK_START_EXCEPTION,
    AUDIO_TRACK_START_STATE_MISMATCH;

    private AudioTrackStartErrorCode() {
    }
  }
}
diff --git a/webrtc_player/android/zlm/src/main/java/org/webrtc/voiceengine/WebRtcAudioUtils.java b/webrtc_player/android/zlm/src/main/java/org/webrtc/voiceengine/WebRtcAudioUtils.java
new file mode 100644
index 00000000..07456aba
--- /dev/null
+++ b/webrtc_player/android/zlm/src/main/java/org/webrtc/voiceengine/WebRtcAudioUtils.java
@@ -0,0 +1,288 @@
//
// Source code recreated from a .class file by IntelliJ IDEA
// (powered by FernFlower decompiler)
//

package org.webrtc.voiceengine;

import android.content.Context;
import android.media.AudioDeviceInfo;
import android.media.AudioManager;
import android.os.Build;
import android.os.Build.VERSION;

import org.webrtc.ContextUtils;
import org.webrtc.Logging;

import java.util.Arrays;
import java.util.List;

// Static audio helpers shared by the voice-engine audio device classes:
// switches selecting WebRTC's software AEC/NS over the platform effects, the
// default sample rate, device-model blacklists, and verbose audio-state
// logging used from the error paths of WebRtcAudioTrack/WebRtcAudioRecord.
// Decompiled upstream WebRTC code; kept structurally faithful to the .class
// output (raw List types, expanded for-each loops).
public final class WebRtcAudioUtils {
  private static final String TAG = "WebRtcAudioUtils";
  // Device models excluded from OpenSL ES / platform AEC / platform NS usage.
  // All lists are empty in this build.
  private static final String[] BLACKLISTED_OPEN_SL_ES_MODELS = new String[0];
  private static final String[] BLACKLISTED_AEC_MODELS = new String[0];
  private static final String[] BLACKLISTED_NS_MODELS = new String[0];
  private static final int DEFAULT_SAMPLE_RATE_HZ = 16000;
  private static int defaultSampleRateHz = 16000;
  private static boolean isDefaultSampleRateOverridden;
  // When set, WebRTC's software effect is used instead of the platform one.
  private static boolean useWebRtcBasedAcousticEchoCanceler;
  private static boolean useWebRtcBasedNoiseSuppressor;

  public WebRtcAudioUtils() {
  }

  public static synchronized void setWebRtcBasedAcousticEchoCanceler(boolean enable) {
    useWebRtcBasedAcousticEchoCanceler = enable;
  }

  public static synchronized void setWebRtcBasedNoiseSuppressor(boolean enable) {
    useWebRtcBasedNoiseSuppressor = enable;
  }

  // Deprecated no-op: AGC selection is no longer configurable from Java.
  public static synchronized void setWebRtcBasedAutomaticGainControl(boolean enable) {
    Logging.w("WebRtcAudioUtils", "setWebRtcBasedAutomaticGainControl() is deprecated");
  }

  public static synchronized boolean useWebRtcBasedAcousticEchoCanceler() {
    if (useWebRtcBasedAcousticEchoCanceler) {
      Logging.w("WebRtcAudioUtils", "Overriding default behavior; now using WebRTC AEC!");
    }

    return useWebRtcBasedAcousticEchoCanceler;
  }

  public static synchronized boolean useWebRtcBasedNoiseSuppressor() {
    if (useWebRtcBasedNoiseSuppressor) {
      Logging.w("WebRtcAudioUtils", "Overriding default behavior; now using WebRTC NS!");
    }

    return useWebRtcBasedNoiseSuppressor;
  }

  // The WebRTC-based AGC is always used; there is no platform alternative
  // (see isAutomaticGainControlSupported()).
  public static synchronized boolean useWebRtcBasedAutomaticGainControl() {
    return true;
  }

  public static boolean isAcousticEchoCancelerSupported() {
    return WebRtcAudioEffects.canUseAcousticEchoCanceler();
  }

  public static boolean isNoiseSuppressorSupported() {
    return WebRtcAudioEffects.canUseNoiseSuppressor();
  }

  // Platform AGC is never offered; callers fall back to the WebRTC AGC.
  public static boolean isAutomaticGainControlSupported() {
    return false;
  }

  public static synchronized void setDefaultSampleRateHz(int sampleRateHz) {
    isDefaultSampleRateOverridden = true;
    defaultSampleRateHz = sampleRateHz;
  }

  public static synchronized boolean isDefaultSampleRateOverridden() {
    return isDefaultSampleRateOverridden;
  }

  public static synchronized int getDefaultSampleRateHz() {
    return defaultSampleRateHz;
  }

  public static List getBlackListedModelsForAecUsage() {
    return Arrays.asList(BLACKLISTED_AEC_MODELS);
  }

  public static List getBlackListedModelsForNsUsage() {
    return Arrays.asList(BLACKLISTED_NS_MODELS);
  }

  public static String getThreadInfo() {
    return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId() + "]";
  }

  public static boolean runningOnEmulator() {
    return Build.HARDWARE.equals("goldfish") && Build.BRAND.startsWith("generic_");
  }

  public static boolean deviceIsBlacklistedForOpenSLESUsage() {
    List blackListedModels = Arrays.asList(BLACKLISTED_OPEN_SL_ES_MODELS);
    return blackListedModels.contains(Build.MODEL);
  }

  static void logDeviceInfo(String tag) {
    Logging.d(tag, "Android SDK: " + VERSION.SDK_INT + ", Release: " + VERSION.RELEASE + ", Brand: " + Build.BRAND + ", Device: " + Build.DEVICE + ", Id: " + Build.ID + ", Hardware: " + Build.HARDWARE + ", Manufacturer: " + Build.MANUFACTURER + ", Model: " + Build.MODEL + ", Product: " + Build.PRODUCT);
  }

  // Dumps device info, basic audio-manager state, per-stream volumes and the
  // attached audio device list. Called from the audio classes' error paths.
  static void logAudioState(String tag) {
    logDeviceInfo(tag);
    Context context = ContextUtils.getApplicationContext();
    AudioManager audioManager = (AudioManager)context.getSystemService("audio");
    logAudioStateBasic(tag, audioManager);
    logAudioStateVolume(tag, audioManager);
    logAudioDeviceInfo(tag, audioManager);
  }

  private static void logAudioStateBasic(String tag, AudioManager audioManager) {
    Logging.d(tag, "Audio State: audio mode: " + modeToString(audioManager.getMode()) + ", has mic: " + hasMicrophone() + ", mic muted: " + audioManager.isMicrophoneMute() + ", music active: " + audioManager.isMusicActive() + ", speakerphone: " + audioManager.isSpeakerphoneOn() + ", BT SCO: " + audioManager.isBluetoothScoOn());
  }

  // AudioManager.isVolumeFixed() requires API 21; older releases are assumed
  // to have an adjustable volume.
  private static boolean isVolumeFixed(AudioManager audioManager) {
    return VERSION.SDK_INT < 21 ? false : audioManager.isVolumeFixed();
  }

  private static void logAudioStateVolume(String tag, AudioManager audioManager) {
    // Stream ids 0,3,2,4,5,1: VOICE_CALL, MUSIC, RING, ALARM, NOTIFICATION,
    // SYSTEM (AudioManager.STREAM_* constants).
    int[] streams = new int[]{0, 3, 2, 4, 5, 1};
    Logging.d(tag, "Audio State: ");
    boolean fixedVolume = isVolumeFixed(audioManager);
    Logging.d(tag, " fixed volume=" + fixedVolume);
    if (!fixedVolume) {
      int[] var4 = streams;
      int var5 = streams.length;

      // Decompiler-expanded for-each over the stream ids.
      for(int var6 = 0; var6 < var5; ++var6) {
        int stream = var4[var6];
        StringBuilder info = new StringBuilder();
        info.append(" " + streamTypeToString(stream) + ": ");
        info.append("volume=").append(audioManager.getStreamVolume(stream));
        info.append(", max=").append(audioManager.getStreamMaxVolume(stream));
        logIsStreamMute(tag, audioManager, stream, info);
        Logging.d(tag, info.toString());
      }
    }

  }

  private static void logIsStreamMute(String tag, AudioManager audioManager, int stream, StringBuilder info) {
    // AudioManager.isStreamMute() requires API 23.
    if (VERSION.SDK_INT >= 23) {
      info.append(", muted=").append(audioManager.isStreamMute(stream));
    }

  }

  private static void logAudioDeviceInfo(String tag, AudioManager audioManager) {
    // AudioManager.getDevices() requires API 23; 3 == GET_DEVICES_ALL
    // (inputs and outputs).
    if (VERSION.SDK_INT >= 23) {
      AudioDeviceInfo[] devices = audioManager.getDevices(3);
      if (devices.length != 0) {
        Logging.d(tag, "Audio Devices: ");
        AudioDeviceInfo[] var3 = devices;
        int var4 = devices.length;

        // Decompiler-expanded for-each over the device list.
        for(int var5 = 0; var5 < var4; ++var5) {
          AudioDeviceInfo device = var3[var5];
          StringBuilder info = new StringBuilder();
          info.append(" ").append(deviceTypeToString(device.getType()));
          info.append(device.isSource() ? "(in): " : "(out): ");
          if (device.getChannelCounts().length > 0) {
            info.append("channels=").append(Arrays.toString(device.getChannelCounts()));
            info.append(", ");
          }

          if (device.getEncodings().length > 0) {
            info.append("encodings=").append(Arrays.toString(device.getEncodings()));
            info.append(", ");
          }

          if (device.getSampleRates().length > 0) {
            info.append("sample rates=").append(Arrays.toString(device.getSampleRates()));
            info.append(", ");
          }

          info.append("id=").append(device.getId());
          Logging.d(tag, info.toString());
        }

      }
    }
  }

  // Maps AudioManager.MODE_* values to their constant names.
  static String modeToString(int mode) {
    switch (mode) {
      case 0:
        return "MODE_NORMAL";
      case 1:
        return "MODE_RINGTONE";
      case 2:
        return "MODE_IN_CALL";
      case 3:
        return "MODE_IN_COMMUNICATION";
      default:
        return "MODE_INVALID";
    }
  }

  // Maps AudioManager.STREAM_* values to their constant names.
  private static String streamTypeToString(int stream) {
    switch (stream) {
      case 0:
        return "STREAM_VOICE_CALL";
      case 1:
        return "STREAM_SYSTEM";
      case 2:
        return "STREAM_RING";
      case 3:
        return "STREAM_MUSIC";
      case 4:
        return "STREAM_ALARM";
      case 5:
        return "STREAM_NOTIFICATION";
      default:
        return "STREAM_INVALID";
    }
  }

  // Maps AudioDeviceInfo.TYPE_* values to their constant names.
  private static String deviceTypeToString(int type) {
    switch (type) {
      case 0:
        return "TYPE_UNKNOWN";
      case 1:
        return "TYPE_BUILTIN_EARPIECE";
      case 2:
        return "TYPE_BUILTIN_SPEAKER";
      case 3:
        return "TYPE_WIRED_HEADSET";
      case 4:
        return "TYPE_WIRED_HEADPHONES";
      case 5:
        return "TYPE_LINE_ANALOG";
      case 6:
        return "TYPE_LINE_DIGITAL";
      case 7:
        return "TYPE_BLUETOOTH_SCO";
      case 8:
        return "TYPE_BLUETOOTH_A2DP";
      case 9:
        return "TYPE_HDMI";
      case 10:
        return "TYPE_HDMI_ARC";
      case 11:
        return "TYPE_USB_DEVICE";
      case 12:
        return "TYPE_USB_ACCESSORY";
      case 13:
        return "TYPE_DOCK";
      case 14:
        return "TYPE_FM";
      case 15:
        return "TYPE_BUILTIN_MIC";
      case 16:
        return "TYPE_FM_TUNER";
      case 17:
        return "TYPE_TV_TUNER";
      case 18:
        return "TYPE_TELEPHONY";
      case 19:
        return "TYPE_AUX_LINE";
      case 20:
        return "TYPE_IP";
      case 21:
        return "TYPE_BUS";
      case 22:
        return "TYPE_USB_HEADSET";
      default:
        return "TYPE_UNKNOWN";
    }
  }

  private static boolean hasMicrophone() {
    return ContextUtils.getApplicationContext().getPackageManager().hasSystemFeature("android.hardware.microphone");
  }
}