This commit is contained in:
li 2024-05-11 16:36:20 +08:00
parent 43dba5f870
commit 6632cc308c
197 changed files with 27444 additions and 46 deletions

View File

@@ -12,7 +12,7 @@ android {
defaultConfig {
applicationId "com.zlmediakit.webrtc"
minSdk 21
minSdk 24
targetSdk 32
versionCode 1
versionName "1.0"
@@ -41,7 +41,6 @@ dependencies {
implementation 'androidx.appcompat:appcompat:1.5.1'
implementation 'com.google.android.material:material:1.6.1'
implementation 'androidx.constraintlayout:constraintlayout:2.1.4'
implementation project(':zlm')
testImplementation 'junit:junit:4.13.2'
androidTestImplementation 'androidx.test.ext:junit:1.1.3'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0'
@@ -50,6 +49,6 @@ dependencies {
implementation("com.squareup.okhttp3:okhttp:4.10.0")
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
implementation 'org.webrtc:google-webrtc:1.0.32006'
implementation project(':zlm')
}

View File

@@ -28,12 +28,13 @@
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/Theme.AndroidWebRTC"
tools:overrideLibrary="com.zlm.rtc"
android:usesCleartextTraffic="true"
android:name=".App"
tools:targetApi="31">
<activity
android:name=".MainActivity"
android:screenOrientation="portrait"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
@@ -43,7 +44,8 @@
</activity>
<activity android:name=".PlayDemoActivity"
android:screenOrientation="portrait"/>
<activity android:name=".PlayerDemoActivity" />
<activity android:name=".PlayerDemoActivity"
android:screenOrientation="portrait"/>
</application>
</manifest>

View File

@@ -0,0 +1,11 @@
package com.zlmediakit.webrtc
import android.app.Application
class App: Application() {
override fun onCreate() {
super.onCreate()
}
}

View File

@@ -10,10 +10,12 @@ class MainActivity : AppCompatActivity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
}
fun toPlayActivity(view: View) {
startActivity(Intent(this, PlayDemoActivity::class.java))
startActivity(Intent(this, PlayerDemoActivity::class.java))
}
fun toPushActivity(view: View) {

View File

@@ -1,6 +1,7 @@
package com.zlmediakit.webrtc
import android.os.Bundle
import android.os.Handler
import androidx.appcompat.app.AppCompatActivity
import com.zlm.rtc.ZLMRTCPlayer
import kotlinx.android.synthetic.main.activity_player.surface_view_renderer
@@ -10,10 +11,14 @@ class PlayerDemoActivity:AppCompatActivity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_play)
setContentView(R.layout.activity_player)
ZLMRTCPlayer.shareInstance().bind(this,surface_view_renderer,true)
ZLMRTCPlayer.shareInstance().bind(applicationContext,surface_view_renderer,true)
Handler().postDelayed({
ZLMRTCPlayer.shareInstance().play("live","test")
},1000)
}
}

View File

@@ -38,17 +38,24 @@ android {
kotlinOptions {
jvmTarget = '1.8'
}
sourceSets {
main{
jniLibs.srcDirs = ['libs']
}
}
}
dependencies {
implementation 'androidx.core:core-ktx:1.13.1'
implementation platform('org.jetbrains.kotlin:kotlin-bom:1.8.0')
implementation 'androidx.appcompat:appcompat:1.6.1'
implementation 'com.google.android.material:material:1.12.0'
implementation 'androidx.core:core-ktx:1.7.0'
implementation 'androidx.appcompat:appcompat:1.5.1'
implementation 'com.google.android.material:material:1.6.1'
implementation 'androidx.constraintlayout:constraintlayout:2.1.4'
testImplementation 'junit:junit:4.13.2'
androidTestImplementation 'androidx.test.ext:junit:1.1.5'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.5.1'
androidTestImplementation 'androidx.test.ext:junit:1.1.3'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0'
implementation 'com.google.code.gson:gson:2.8.9'
implementation 'org.webrtc:google-webrtc:1.0.32006'
implementation("com.squareup.okhttp3:okhttp:4.10.0")
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
}

View File

@@ -1,4 +1,16 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<uses-permission android:name="android.permission.CAMERA"/>
<uses-permission android:name="android.permission.CHANGE_NETWORK_STATE"/>
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS"/>
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>
<uses-permission android:name="android.permission.CAPTURE_VIDEO_OUTPUT"/>
<uses-permission android:name="android.permission.READ_PHONE_STATE"/>
</manifest>
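Note: CAMERA, RECORD_AUDIO and the storage permissions declared above are dangerous permissions on API 23+ (and minSdk was just raised to 24), so declaring them in the manifest is not enough; the host app must also request them at runtime. A minimal sketch of such a request, with a hypothetical helper name and request code:

import android.Manifest;
import android.app.Activity;
import android.content.pm.PackageManager;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;

public final class RtcPermissions {
    private static final int REQUEST_CODE = 0x52; // arbitrary request code
    private static final String[] REQUIRED = {
            Manifest.permission.CAMERA,
            Manifest.permission.RECORD_AUDIO,
    };

    // Requests any still-missing runtime permissions; returns true if all are already granted.
    public static boolean ensure(Activity activity) {
        for (String permission : REQUIRED) {
            if (ContextCompat.checkSelfPermission(activity, permission)
                    != PackageManager.PERMISSION_GRANTED) {
                ActivityCompat.requestPermissions(activity, REQUIRED, REQUEST_CODE);
                return false;
            }
        }
        return true;
    }
}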

View File

@@ -14,7 +14,7 @@ project("rtc")
add_library(ZLToolKit STATIC IMPORTED)
set_target_properties(tools PROPERTIES
set_target_properties(ZLToolKit PROPERTIES
IMPORTED_LOCATION "${CMAKE_CURRENT_SOURCE_DIR}/../../../libs/${CMAKE_ANDROID_ARCH_ABI}/libZLToolKit.a"
INTERFACE_INCLUDE_DIRECTORIES "${CMAKE_CURRENT_SOURCE_DIR}/include"
)

View File

@@ -1,5 +1,12 @@
#include <jni.h>
#include <string>
#include <cstdio> //for snprintf in makeUrl below
#include <Http/HttpRequester.h>
#include "Http/HttpClient.h"
using namespace toolkit;
using namespace mediakit;
using namespace std;
extern "C" JNIEXPORT jstring JNICALL
Java_com_zlm_rtc_NativeLib_stringFromJNI(
@@ -7,4 +14,60 @@ Java_com_zlm_rtc_NativeLib_stringFromJNI(
jobject /* this */) {
std::string hello = "Hello from C++";
return env->NewStringUTF(hello.c_str());
}
extern "C"
JNIEXPORT jstring JNICALL
Java_com_zlm_rtc_NativeLib_exchangeSessionDescription(JNIEnv *env, jobject thiz,
jstring description) {
static semaphore sem;
//Load the certificate; the file contains both the public and the private key
SSL_Initor::Instance().loadCertificate((exeDir() + "ssl.p12").data());
//Trust a specific self-signed certificate
SSL_Initor::Instance().trustCertificate((exeDir() + "ssl.p12").data());
//Do not ignore invalid certificates (e.g. self-signed or expired ones)
SSL_Initor::Instance().ignoreInvalidCertificate(false);
//Create an HTTP requester
HttpRequester::Ptr requesterUploader(new HttpRequester());
//Request with the POST method
requesterUploader->setMethod("POST");
//Build the multipart form body
HttpArgs argsUploader;
argsUploader["query"] = "test";
static string boundary = "0xKhTmLbOuNdArY";
HttpMultiFormBody::Ptr body(new HttpMultiFormBody(argsUploader, exePath(), boundary));
requesterUploader->setBody(body);
requesterUploader->addHeader("Content-Type", HttpMultiFormBody::multiFormContentType(boundary));
//Start the request
requesterUploader->startRequester("https://zlmediakit.com/index/api/webrtc?app=live&stream=test&type=play",//request URL
[](const SockException &ex, //network-level error info; empty means success
const Parser &parser) { //HTTP response
DebugL << "=====================HttpRequester Uploader==========================";
if (ex) {
//network-related error
WarnL << "network err:" << ex.getErrCode() << " " << ex.what();
} else {
//print the HTTP response
_StrPrinter printer;
for (auto &pr: parser.getHeader()) {
printer << pr.first << ":" << pr.second << "\r\n";
}
InfoL << "status:" << parser.status() << "\r\n"
<< "header:\r\n" << (printer << endl)
<< "\r\nbody:" << parser.content();
}
});
sem.wait();
//NOTE: nothing posts this semaphore yet; the callback above should signal it.
//Return an empty string so the function has a defined return value.
return env->NewStringUTF("");
}
extern "C"
JNIEXPORT jstring JNICALL
Java_com_zlm_rtc_NativeLib_makeUrl(JNIEnv *env, jobject thiz, jstring app, jstring stream_id) {
const char *appString = env->GetStringUTFChars(app, nullptr);
const char *streamIdString = env->GetStringUTFChars(stream_id, nullptr);
//Size the buffer for the base URL plus both parameters and guard with snprintf.
char url[256];
snprintf(url, sizeof(url), "https://zlmediakit.com/index/api/webrtc?app=%s&stream=%s&type=play", appString, streamIdString);
//Release the JVM-owned UTF buffers to avoid a leak.
env->ReleaseStringUTFChars(app, appString);
env->ReleaseStringUTFChars(stream_id, streamIdString);
return env->NewStringUTF(url);
}

View File

@@ -8,6 +8,11 @@ class NativeLib {
*/
external fun stringFromJNI(): String
external fun exchangeSessionDescription(description:String): String
external fun makeUrl(app:String,streamId:String): String
companion object {
// Used to load the 'rtc' library on application startup.
init {

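A minimal sketch of driving these bindings from plain Java, assuming the truncated companion init above calls System.loadLibrary("rtc") before first use:

import com.zlm.rtc.NativeLib;

final class NativeLibDemo {
    // Hypothetical call site; the native 'rtc' library must already be loaded.
    static void demo() {
        NativeLib lib = new NativeLib();
        System.out.println(lib.stringFromJNI());        // "Hello from C++"
        // Builds the ZLMediaKit signaling URL for app "live", stream "test":
        // https://zlmediakit.com/index/api/webrtc?app=live&stream=test&type=play
        System.out.println(lib.makeUrl("live", "test"));
    }
}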
View File

@@ -14,6 +14,9 @@ abstract class ZLMRTCPlayer {
}
constructor()
public abstract fun bind(context: Context,surface: SurfaceViewRenderer, localPreview:Boolean)

View File

@@ -0,0 +1,395 @@
package com.zlm.rtc.client;
import java.io.*;
import java.net.*;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class HttpClient {
//Partial MIME type lookup table for file extensions
private static final Map<String, String> FILE_TYPE = new HashMap<>();
static {
FILE_TYPE.put(".jpeg", "image/jpeg");
FILE_TYPE.put(".jpg", "image/jpg");
FILE_TYPE.put(".png", "image/png");
FILE_TYPE.put(".bmp", "image/bmp");
FILE_TYPE.put(".gif", "image/gif");
FILE_TYPE.put(".mp4", "video/mp4");
FILE_TYPE.put(".txt", "text/plain");
FILE_TYPE.put(".xlsx", "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet");
FILE_TYPE.put(".docx", "application/vnd.openxmlformats-officedocument.wordprocessingml.document");
FILE_TYPE.put(".pptx", "application/vnd.openxmlformats-officedocument.presentationml.presentation");
FILE_TYPE.put(".pdf", "application/pdf");
}
/**
* Sends an HTTP GET request.
*
* @param url     request URL
* @param params  query parameters to append to the URL
* @param headers extra request headers, may be null
* @return the response body, or null on failure
*/
public static String doGet(String url, Map<String, String> params, Map<String, String> headers) {
BufferedReader reader = null;
try {
//1. Build the URL with query parameters
StringBuffer stringBuffer = new StringBuffer(url);
if (params != null && !params.isEmpty()) {
stringBuffer.append("?");
for (Map.Entry<String, String> entry : params.entrySet()) {
stringBuffer.append(entry.getKey()).append("=").append(entry.getValue()).append("&");
}
stringBuffer.deleteCharAt(stringBuffer.length() - 1);
}
URL testUrl = new URL(stringBuffer.toString());
//2. Open the connection
HttpURLConnection connection = (HttpURLConnection) testUrl.openConnection();
connection.setConnectTimeout(3000); //connect timeout
connection.setReadTimeout(3000); //read timeout
if (headers != null && !headers.isEmpty()) {
for (Map.Entry<String, String> entry : headers.entrySet()) {
connection.setRequestProperty(entry.getKey(), entry.getValue());
}
}
//3. Send the request and read the response
InputStream inputStream = connection.getInputStream();
reader = new BufferedReader(new InputStreamReader(inputStream));
String line = "";
StringBuffer response = new StringBuffer();
while ((line = reader.readLine()) != null) {
response.append(line);
}
reader.close();
return response.toString();
} catch (Exception e) {
e.printStackTrace();
} finally {
if (reader != null) {
try {
reader.close();
} catch (IOException e) {
System.out.println("Failed to close input stream");
}
}
}
return null;
}
/**
* Sends an HTTP POST request with a form-encoded body.
*
* @param url     request URL
* @param params  form fields written to the request body
* @param headers extra request headers, may be null
* @return the response body, or null on failure
*/
public static String doPost(String url, Map<String, String> params, Map<String, String> headers) {
OutputStream outputStream = null;
BufferedReader reader = null;
try {
//Open the connection
URL testUrl = new URL(url);
HttpURLConnection connection = (HttpURLConnection) testUrl.openConnection();
connection.setRequestMethod("POST");
connection.setDoOutput(true); //allow writing the request body
connection.setUseCaches(false); //disable caching
connection.setRequestProperty("Content-Type", "application/x-www-form-urlencoded; charset=utf-8"); //the body below is form-encoded key=value pairs, not JSON
if (headers != null && !headers.isEmpty()) {
for (Map.Entry<String, String> entry : headers.entrySet()) {
connection.setRequestProperty(entry.getKey(), entry.getValue());
}
}
//Write the request body
outputStream = connection.getOutputStream();
StringBuffer payload = new StringBuffer();
if (params != null && !params.isEmpty()) {
for (Map.Entry<String, String> entry : params.entrySet()) {
payload.append(entry.getKey()).append("=").append(entry.getValue()).append("&");
}
payload.deleteCharAt(payload.length() - 1);
}
outputStream.write(payload.toString().getBytes());
outputStream.flush();
outputStream.close();
//Send the request and read the response
InputStream inputStream = connection.getInputStream();
reader = new BufferedReader(new InputStreamReader(inputStream));
String line = "";
StringBuffer response = new StringBuffer();
while ((line = reader.readLine()) != null) {
response.append(line);
}
reader.close();
return response.toString();
} catch (Exception e) {
e.printStackTrace();
} finally {
if (outputStream != null) {
try {
outputStream.close();
} catch (IOException e) {
System.out.println("Failed to close output stream");
}
}
if (reader != null) {
try {
reader.close();
} catch (IOException e) {
System.out.println("Failed to close input stream");
}
}
}
return null;
}
/**
* Downloads a file via HTTP GET, saving it under the name given by the
* Content-Disposition response header.
*
* @param url      request URL
* @param params   query parameters to append to the URL
* @param headers  extra request headers, may be null
* @param filePath directory to save the downloaded file into
*/
public static void doGetDownload(String url, Map<String, String> params, Map<String, String> headers, String filePath) {
BufferedInputStream inputStream = null;
FileOutputStream outputStream = null;
try {
//1. Build the URL with query parameters
StringBuffer stringBuffer = new StringBuffer(url);
if (params != null && !params.isEmpty()) {
stringBuffer.append("?");
for (Map.Entry<String, String> entry : params.entrySet()) {
stringBuffer.append(entry.getKey()).append("=").append(entry.getValue()).append("&");
}
stringBuffer.deleteCharAt(stringBuffer.length() - 1);
}
URL testUrl = new URL(stringBuffer.toString());
//2. Open the connection
HttpURLConnection connection = (HttpURLConnection) testUrl.openConnection();
connection.setConnectTimeout(3000); //connect timeout
connection.setReadTimeout(3000); //read timeout
if (headers != null && !headers.isEmpty()) {
for (Map.Entry<String, String> entry : headers.entrySet()) {
connection.setRequestProperty(entry.getKey(), entry.getValue());
}
}
//3. Send the request and stream the response to a file
inputStream = new BufferedInputStream(connection.getInputStream());
String contentDisposition = connection.getHeaderField("Content-Disposition");
String regex = "attachment; filename=(.+\\.\\w+)";
Pattern pattern = Pattern.compile(regex);
Matcher matcher = pattern.matcher(contentDisposition);
if (matcher.find()) {
String fileName = matcher.group(1);
File file = new File(filePath + File.separator + fileName);
outputStream = new FileOutputStream(file);
int n;
while ((n = inputStream.read()) != -1) {
outputStream.write(n);
}
outputStream.flush();
outputStream.close();
}
inputStream.close();
} catch (Exception e) {
e.printStackTrace();
} finally {
if (outputStream != null) {
try {
outputStream.close();
} catch (IOException e) {
System.out.println("Failed to close output stream");
}
}
if (inputStream != null) {
try {
inputStream.close();
} catch (IOException e) {
System.out.println("Failed to close input stream");
}
}
}
}
/**
* Uploads a file via HTTP POST using a multipart/form-data body.
*
* @param url     request URL
* @param fileUrl path of the local file to upload
* @param params  additional form fields
* @param headers extra request headers, may be null
* @return the response body, or null on failure
*/
public static String doPostUpload(String url, String fileUrl, Map<String, String> params, Map<String, String> headers) {
FileInputStream fileInputStream = null;
OutputStream outputStream = null;
BufferedReader reader = null;
try {
//Read the file into memory
File file = new File(fileUrl);
fileInputStream = new FileInputStream(file);
byte[] bytes = new byte[(int) file.length()];
fileInputStream.read(bytes);
fileInputStream.close();
URL testUrl = new URL(url);
HttpURLConnection connection = (HttpURLConnection) testUrl.openConnection();
connection.setRequestMethod("POST");
connection.setDoOutput(true); //allow writing the request body
connection.setUseCaches(false); //disable caching
String boundary = UUID.randomUUID().toString();
connection.setRequestProperty("Content-Type", "multipart/form-data; boundary=" + boundary);
if (headers != null && !headers.isEmpty()) {
for (Map.Entry<String, String> entry : headers.entrySet()) {
connection.setRequestProperty(entry.getKey(), entry.getValue());
}
}
//Write the multipart request body
outputStream = connection.getOutputStream();
StringBuffer start = new StringBuffer();
start.append("--").append(boundary).append("\r\n");
String fileName = file.getName();
String fileExtension = fileName.substring(fileName.lastIndexOf('.'));
start.append("Content-Disposition: form-data; name=\"file\"; filename=\"").append(fileName).append("\"\r\n");
start.append("Content-Type: ").append(FILE_TYPE.get(fileExtension)).append("\r\n\r\n");
outputStream.write(start.toString().getBytes());
outputStream.write(bytes);
outputStream.write("\r\n".getBytes());
StringBuffer mid = new StringBuffer();
if (params != null && !params.isEmpty()) {
for (Map.Entry<String, String> entry : params.entrySet()) {
mid.append("--").append(boundary).append("\r\n");
mid.append("Content-Disposition: form-data; name=\"").append(entry.getKey()).append("\"\r\n\r\n");
mid.append(entry.getValue()).append("\r\n");
}
outputStream.write(mid.toString().getBytes());
}
String end = "--" + boundary + "--";
outputStream.write(end.getBytes());
outputStream.flush();
outputStream.close();
//Send the request and read the response
InputStream inputStream = connection.getInputStream();
reader = new BufferedReader(new InputStreamReader(inputStream));
String line = "";
StringBuffer response = new StringBuffer();
while ((line = reader.readLine()) != null) {
response.append(line);
}
reader.close();
return response.toString();
} catch (Exception e) {
e.printStackTrace();
} finally {
if (fileInputStream != null) {
try {
fileInputStream.close();
} catch (IOException e) {
System.out.println("Failed to close file input stream");
}
}
if (outputStream != null) {
try {
outputStream.close();
} catch (IOException e) {
System.out.println("Failed to close output stream");
}
}
if (reader != null) {
try {
reader.close();
} catch (IOException e) {
System.out.println("Failed to close input stream");
}
}
}
return null;
}
/**
* Fetches a login token from the response headers: POSTs the credentials,
* follows the Location header manually, and scans the Set-Cookie headers.
*
* @param url     request URL
* @param params  form fields written to the request body
* @param headers extra request headers (currently unused)
* @return the matching cookie string, or null if not found
*/
public static String getToken(String url, Map<String, String> params, Map<String, String> headers) {
OutputStream outputStream = null;
try {
//Open the connection
URL testUrl = new URL(url);
HttpURLConnection connection = (HttpURLConnection) testUrl.openConnection();
connection.setRequestMethod("POST");
connection.setDoOutput(true); //allow writing the request body
connection.setUseCaches(false); //disable caching
connection.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
connection.setInstanceFollowRedirects(false); //do not follow redirects automatically
//Write the request body
outputStream = connection.getOutputStream();
StringBuffer payload = new StringBuffer();
if (params != null && !params.isEmpty()) {
for (Map.Entry<String, String> entry : params.entrySet()) {
payload.append(entry.getKey()).append("=").append(entry.getValue()).append("&");
}
payload.deleteCharAt(payload.length() - 1);
}
outputStream.write(payload.toString().getBytes());
outputStream.flush();
outputStream.close();
//Send the request; the server redirects via the Location response header
connection.connect();
URL location = new URL(connection.getHeaderField("Location"));
HttpURLConnection connection2 = (HttpURLConnection) location.openConnection();
//Request the Location URL and collect all Set-Cookie response headers
connection2.setRequestMethod("GET");
connection2.setInstanceFollowRedirects(false);
connection2.connect();
List<String> cookies = connection2.getHeaderFields().get("Set-Cookie");
for (String cookie : cookies) {
if (cookie.contains("token-test=")) {
return cookie;
}
}
} catch (Exception e) {
e.printStackTrace();
} finally {
if (outputStream != null) {
try {
outputStream.close();
} catch (IOException e) {
System.out.println("Failed to close output stream");
}
}
}
return null;
}
}
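A usage sketch for the helper above; the endpoint is hypothetical, and since HttpURLConnection blocks, callers on Android should run this off the main thread:

import java.util.HashMap;
import java.util.Map;
import com.zlm.rtc.client.HttpClient;

final class HttpClientDemo {
    static void demo() {
        Map<String, String> params = new HashMap<>();
        params.put("app", "live");
        params.put("stream", "test");
        // GET: params are appended as ?app=live&stream=test
        String info = HttpClient.doGet("https://example.com/index/api/info", params, null);
        System.out.println(info);
        // POST: params are written as the form-encoded body app=live&stream=test
        String reply = HttpClient.doPost("https://example.com/index/api/webrtc", params, null);
        System.out.println(reply);
    }
}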

View File

@@ -0,0 +1,158 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package com.zlm.rtc.client;
import android.media.AudioFormat;
import android.os.Environment;
import android.util.Log;
import androidx.annotation.Nullable;
import org.webrtc.audio.JavaAudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback;
import org.webrtc.voiceengine.WebRtcAudioRecord;
import org.webrtc.voiceengine.WebRtcAudioRecord.WebRtcAudioRecordSamplesReadyCallback;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.concurrent.ExecutorService;
/**
* Implements the AudioRecordSamplesReadyCallback interface and writes
* recorded raw audio samples to an output file.
*/
public class RecordedAudioToFileController
implements SamplesReadyCallback, WebRtcAudioRecordSamplesReadyCallback {
private static final String TAG = "RecordedAudioToFile";
private static final long MAX_FILE_SIZE_IN_BYTES = 58348800L;
private final Object lock = new Object();
private final ExecutorService executor;
@Nullable
private OutputStream rawAudioFileOutputStream = null;
private boolean isRunning;
private long fileSizeInBytes = 0;
public RecordedAudioToFileController(ExecutorService executor) {
Log.d(TAG, "ctor");
this.executor = executor;
}
/**
* Should be called on the same executor thread as the one provided at
* construction.
*/
public boolean start() {
Log.d(TAG, "start");
if (!isExternalStorageWritable()) {
Log.e(TAG, "Writing to external media is not possible");
return false;
}
synchronized (lock) {
isRunning = true;
}
return true;
}
/**
* Should be called on the same executor thread as the one provided at
* construction.
*/
public void stop() {
Log.d(TAG, "stop");
synchronized (lock) {
isRunning = false;
if (rawAudioFileOutputStream != null) {
try {
rawAudioFileOutputStream.close();
} catch (IOException e) {
Log.e(TAG, "Failed to close file with saved input audio: " + e);
}
rawAudioFileOutputStream = null;
}
fileSizeInBytes = 0;
}
}
// Checks if external storage is available for read and write.
private boolean isExternalStorageWritable() {
String state = Environment.getExternalStorageState();
if (Environment.MEDIA_MOUNTED.equals(state)) {
return true;
}
return false;
}
// Utilizes audio parameters to create a file name which contains sufficient
// information so that the file can be played using an external file player.
// Example: /sdcard/recorded_audio_16bits_48000Hz_mono.pcm.
private void openRawAudioOutputFile(int sampleRate, int channelCount) {
final String fileName = Environment.getExternalStorageDirectory().getPath() + File.separator
+ "recorded_audio_16bits_" + String.valueOf(sampleRate) + "Hz"
+ ((channelCount == 1) ? "_mono" : "_stereo") + ".pcm";
final File outputFile = new File(fileName);
try {
rawAudioFileOutputStream = new FileOutputStream(outputFile);
} catch (FileNotFoundException e) {
Log.e(TAG, "Failed to open audio output file: " + e.getMessage());
}
Log.d(TAG, "Opened file for recording: " + fileName);
}
// Called when new audio samples are ready.
@Override
public void onWebRtcAudioRecordSamplesReady(WebRtcAudioRecord.AudioSamples samples) {
onWebRtcAudioRecordSamplesReady(new JavaAudioDeviceModule.AudioSamples(samples.getAudioFormat(),
samples.getChannelCount(), samples.getSampleRate(), samples.getData()));
}
// Called when new audio samples are ready.
@Override
public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples samples) {
// The native audio layer on Android should use 16-bit PCM format.
if (samples.getAudioFormat() != AudioFormat.ENCODING_PCM_16BIT) {
Log.e(TAG, "Invalid audio format");
return;
}
synchronized (lock) {
// Abort early if stop() has been called.
if (!isRunning) {
return;
}
// Open a new file for the first callback only since it allows us to add audio parameters to
// the file name.
if (rawAudioFileOutputStream == null) {
openRawAudioOutputFile(samples.getSampleRate(), samples.getChannelCount());
fileSizeInBytes = 0;
}
}
// Append the recorded 16-bit audio samples to the open output file.
executor.execute(() -> {
if (rawAudioFileOutputStream != null) {
try {
// Set a limit on max file size. 58348800 bytes corresponds to
// approximately 10 minutes of recording in mono at 48kHz.
if (fileSizeInBytes < MAX_FILE_SIZE_IN_BYTES) {
// Writes samples.getData().length bytes to output stream.
rawAudioFileOutputStream.write(samples.getData());
fileSizeInBytes += samples.getData().length;
}
} catch (IOException e) {
Log.e(TAG, "Failed to write audio to file: " + e.getMessage());
}
}
});
}
}
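A sketch of wiring the controller into a JavaAudioDeviceModule; the setup helper is hypothetical, and start() is dispatched on the executor because the contract noted above requires it:

import java.util.concurrent.ExecutorService;
import android.content.Context;
import org.webrtc.audio.JavaAudioDeviceModule;
import com.zlm.rtc.client.RecordedAudioToFileController;

final class AudioRecordingSetup {
    // The caller owns the single-threaded executor passed in here.
    static JavaAudioDeviceModule createAdm(Context appContext, ExecutorService executor) {
        RecordedAudioToFileController saver = new RecordedAudioToFileController(executor);
        // start() must run on the executor thread, per the controller's contract.
        executor.execute(saver::start);
        return JavaAudioDeviceModule.builder(appContext)
                .setSamplesReadyCallback(saver) // the controller implements SamplesReadyCallback
                .createAudioDeviceModule();
    }
}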

View File

@@ -0,0 +1,75 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package com.zlm.rtc.client;
import android.os.ParcelFileDescriptor;
import android.util.Log;
import org.webrtc.PeerConnection;
import java.io.File;
import java.io.IOException;
public class RtcEventLog {
private static final String TAG = "RtcEventLog";
private static final int OUTPUT_FILE_MAX_BYTES = 10_000_000;
private final PeerConnection peerConnection;
private RtcEventLogState state = RtcEventLogState.INACTIVE;
enum RtcEventLogState {
INACTIVE,
STARTED,
STOPPED,
}
public RtcEventLog(PeerConnection peerConnection) {
if (peerConnection == null) {
throw new NullPointerException("The peer connection is null.");
}
this.peerConnection = peerConnection;
}
public void start(final File outputFile) {
if (state == RtcEventLogState.STARTED) {
Log.e(TAG, "RtcEventLog has already started.");
return;
}
final ParcelFileDescriptor fileDescriptor;
try {
fileDescriptor = ParcelFileDescriptor.open(outputFile,
ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE
| ParcelFileDescriptor.MODE_TRUNCATE);
} catch (IOException e) {
Log.e(TAG, "Failed to create a new file", e);
return;
}
// Passes ownership of the file to WebRTC.
boolean success =
peerConnection.startRtcEventLog(fileDescriptor.detachFd(), OUTPUT_FILE_MAX_BYTES);
if (!success) {
Log.e(TAG, "Failed to start RTC event log.");
return;
}
state = RtcEventLogState.STARTED;
Log.d(TAG, "RtcEventLog started.");
}
public void stop() {
if (state != RtcEventLogState.STARTED) {
Log.e(TAG, "RtcEventLog was not started.");
return;
}
peerConnection.stopRtcEventLog();
state = RtcEventLogState.STOPPED;
Log.d(TAG, "RtcEventLog stopped.");
}
}
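Usage is two calls bracketing the PeerConnection's lifetime; a sketch with a hypothetical output path:

import java.io.File;
import org.webrtc.PeerConnection;
import com.zlm.rtc.client.RtcEventLog;

final class EventLogDemo {
    static RtcEventLog startEventLog(PeerConnection pc, File outputDir) {
        RtcEventLog eventLog = new RtcEventLog(pc);
        // Output is capped at OUTPUT_FILE_MAX_BYTES (10 MB) by the wrapper above.
        eventLog.start(new File(outputDir, "rtc_event_log.bin")); // hypothetical file name
        return eventLog;
    }

    static void stopEventLog(RtcEventLog eventLog) {
        eventLog.stop(); // logs an error if start() was never called
    }
}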

View File

@@ -0,0 +1,13 @@
package com.zlm.rtc.client
class SdpBean {
var code = 0
var id:String?=null
var msg:String?=null
var sdp: String? = null
var type:String?=null
}

View File

@@ -0,0 +1,14 @@
package com.zlm.rtc.client;
import org.webrtc.PeerConnection;
import org.webrtc.VideoTrack;
import java.math.BigInteger;
public class ZLMConnection {
public BigInteger handleId;
public PeerConnection peerConnection;
public PeerConnectionClient.SDPObserver sdpObserver;
public VideoTrack videoTrack;
public boolean type;
}

View File

@@ -2,54 +2,113 @@ package com.zlm.rtc.play
import android.content.Context
import android.graphics.Bitmap
import android.util.Log
import com.zlm.rtc.NativeLib
import com.zlm.rtc.ZLMRTCPlayer
import org.webrtc.AudioSource
import org.webrtc.AudioTrack
import com.zlm.rtc.client.HttpClient
import com.zlm.rtc.client.PeerConnectionClient
import org.json.JSONObject
import org.webrtc.Camera1Enumerator
import org.webrtc.Camera2Enumerator
import org.webrtc.CameraEnumerator
import org.webrtc.EglBase
import org.webrtc.PeerConnection
import org.webrtc.IceCandidate
import org.webrtc.PeerConnectionFactory
import org.webrtc.SurfaceTextureHelper
import org.webrtc.SessionDescription
import org.webrtc.StatsReport
import org.webrtc.SurfaceViewRenderer
import org.webrtc.VideoCapturer
import org.webrtc.VideoSource
import org.webrtc.VideoTrack
import java.math.BigInteger
class ZLMRTCPlayerImpl: ZLMRTCPlayer() {
class ZLMRTCPlayerImpl : ZLMRTCPlayer(), PeerConnectionClient.PeerConnectionEvents {
private val context: Context? = null
private var context: Context? = null
private val eglBase: EglBase? by lazy {
EglBase.create()
private val peerConnectionClient: PeerConnectionClient? by lazy {
PeerConnectionClient(
context, EglBase.create(),
PeerConnectionClient.PeerConnectionParameters(
true,
false,
false,
1080,
960,
0,
0,
"VP8",
true,
false,
0,
"OPUS",
false,
false,
false,
false,
false,
false,
false,
false, false, false, null
), this
)
}
private var playUrl: String? = null
private var peerConnection: PeerConnection? = null
private var surfaceViewRenderer: SurfaceViewRenderer? = null
private var peerConnectionFactory: PeerConnectionFactory? = null
private var audioSource: AudioSource? = null
private var videoSource: VideoSource? = null
private var localAudioTrack: AudioTrack? = null
private var localVideoTrack: VideoTrack? = null
private var captureAndroid: VideoCapturer? = null
private var surfaceTextureHelper: SurfaceTextureHelper? = null
private var isShowCamera = true
private var isPublishMode = false //true = publish (push) stream, false = play (pull) stream
private var defaultFps = 24
private var isPreviewing = false
private var isFirst = true
init {
}
private fun logger(msg: String) {
Log.i("ZLMRTCPlayerImpl", msg)
}
fun createVideoCapture(context: Context?): VideoCapturer? {
val videoCapturer: VideoCapturer? = if (Camera2Enumerator.isSupported(context)) {
createCameraCapture(Camera2Enumerator(context))
} else {
createCameraCapture(Camera1Enumerator(true))
}
return videoCapturer
}
/**
* Create the camera video capturer
*/
private fun createCameraCapture(enumerator: CameraEnumerator): VideoCapturer? {
val deviceNames = enumerator.deviceNames
// Try a back-facing (non-front) camera first
for (deviceName in deviceNames) {
if (!enumerator.isFrontFacing(deviceName)) {
val videoCapturer: VideoCapturer? = enumerator.createCapturer(deviceName, null)
if (videoCapturer != null) {
return videoCapturer
}
}
}
// Back camera not found, fall back to a front-facing camera
for (deviceName in deviceNames) {
if (enumerator.isFrontFacing(deviceName)) {
val videoCapturer: VideoCapturer? = enumerator.createCapturer(deviceName, null)
if (videoCapturer != null) {
return videoCapturer
}
}
}
return null
}
override fun bind(context: Context, surface: SurfaceViewRenderer, localPreview: Boolean) {
this.context = context
peerConnectionClient?.setAudioEnabled(true)
peerConnectionClient?.createPeerConnectionFactory(PeerConnectionFactory.Options())
peerConnectionClient?.createPeerConnection(createVideoCapture(context), BigInteger.ZERO)
peerConnectionClient?.createOffer((BigInteger.ZERO))
override fun bind(surface: SurfaceViewRenderer, localPreview: Boolean) {
this.surfaceViewRenderer = surface
surfaceViewRenderer?.init(eglBase?.eglBaseContext,null)
}
override fun play(app: String, streamId: String) {
}
override fun setSpeakerphoneOn(on: Boolean) {
@@ -81,4 +140,71 @@ class ZLMRTCPlayerImpl: ZLMRTCPlayer() {
}
override fun onLocalDescription(handleId: BigInteger?, sdp: SessionDescription?) {
val url = NativeLib().makeUrl("live", "li")
logger("url: " + url)
logger("offer sdp: " + sdp?.description)
val doPost = HttpClient.doPost(
url,
mutableMapOf(Pair("sdp", sdp?.description)),
mutableMapOf()
)
val result = JSONObject(doPost)
val code = result.getInt("code")
if (code == 0) {
logger("response: " + doPost)
val sdp = result.getString("sdp")
peerConnectionClient?.setRemoteDescription(handleId,SessionDescription(SessionDescription.Type.ANSWER,sdp))
} else {
val msg = result.getString("msg")
logger("error: " + msg)
}
}
override fun onIceCandidate(handleId: BigInteger?, candidate: IceCandidate?) {
}
override fun onIceCandidatesRemoved(
handleId: BigInteger?,
candidates: Array<out IceCandidate>?
) {
}
override fun onIceConnected(handleId: BigInteger?) {
}
override fun onIceDisconnected(handleId: BigInteger?) {
}
override fun onPeerConnectionClosed(handleId: BigInteger?) {
}
override fun onPeerConnectionStatsReady(
handleId: BigInteger?,
reports: Array<out StatsReport>?
) {
}
override fun onPeerConnectionError(handleId: BigInteger?, description: String?) {
}
override fun onLocalRender(handleId: BigInteger?) {
}
override fun onRemoteRender(handleId: BigInteger?) {
}
}
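The signaling in onLocalDescription above is a single HTTP round trip: POST the local offer SDP to ZLMediaKit's webrtc API, then apply the returned answer as the remote description. A condensed sketch of that exchange (hypothetical helper; the JSON fields code/sdp/msg match what the code parses):

import java.util.HashMap;
import java.util.Map;
import org.json.JSONObject;
import com.zlm.rtc.client.HttpClient;

final class SignalingSketch {
    // Exchanges an offer SDP for an answer SDP, or returns null on failure.
    static String exchangeOffer(String app, String stream, String offerSdp) throws Exception {
        String url = "https://zlmediakit.com/index/api/webrtc?app=" + app
                + "&stream=" + stream + "&type=play";
        Map<String, String> params = new HashMap<>();
        params.put("sdp", offerSdp);
        JSONObject result = new JSONObject(HttpClient.doPost(url, params, new HashMap<>()));
        if (result.getInt("code") == 0) {
            // The answer SDP to apply as the remote description.
            return result.getString("sdp");
        }
        System.out.println("signaling failed: " + result.getString("msg"));
        return null;
    }
}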

View File

@@ -0,0 +1,20 @@
/*
* Copyright 2021 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Interface to handle completion of addIceCandidate */
public interface AddIceObserver {
/** Called when ICE candidate added successfully.*/
@CalledByNative public void onAddSuccess();
/** Called when ICE candidate addition failed.*/
@CalledByNative public void onAddFailure(String error);
}
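A sketch of the intended call site, assuming the PeerConnection.addIceCandidate(IceCandidate, AddIceObserver) overload that accepts this observer:

import org.webrtc.AddIceObserver;
import org.webrtc.IceCandidate;
import org.webrtc.PeerConnection;

final class AddIceExample {
    static void addCandidate(PeerConnection pc, IceCandidate candidate) {
        pc.addIceCandidate(candidate, new AddIceObserver() {
            @Override
            public void onAddSuccess() {
                System.out.println("candidate added");
            }
            @Override
            public void onAddFailure(String error) {
                System.out.println("candidate rejected: " + error);
            }
        });
    }
}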

View File

@@ -0,0 +1,673 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.media.MediaCodec;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaFormat;
import android.os.SystemClock;
import android.view.Surface;
import androidx.annotation.Nullable;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.BlockingDeque;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;
import org.webrtc.ThreadUtils.ThreadChecker;
/**
* Android hardware video decoder.
*/
class AndroidVideoDecoder implements VideoDecoder, VideoSink {
private static final String TAG = "AndroidVideoDecoder";
// MediaCodec.release() occasionally hangs. Release stops waiting and reports failure after
// this timeout.
private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
// WebRTC queues input frames quickly at the beginning of the call. Wait for input buffers with a
// long timeout (500 ms) to prevent this from causing the codec to return an error.
private static final int DEQUEUE_INPUT_TIMEOUT_US = 500000;
// Dequeuing an output buffer will block until a buffer is available (up to 100 milliseconds).
// If this timeout is exceeded, the output thread will unblock and check if the decoder is still
// running. If it is, it will block on dequeue again. Otherwise, it will stop and release the
// MediaCodec.
private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
private final MediaCodecWrapperFactory mediaCodecWrapperFactory;
private final String codecName;
private final VideoCodecMimeType codecType;
private static class FrameInfo {
final long decodeStartTimeMs;
final int rotation;
FrameInfo(long decodeStartTimeMs, int rotation) {
this.decodeStartTimeMs = decodeStartTimeMs;
this.rotation = rotation;
}
}
private final BlockingDeque<FrameInfo> frameInfos;
private int colorFormat;
// Output thread runs a loop which polls MediaCodec for decoded output buffers. It reformats
// those buffers into VideoFrames and delivers them to the callback. Variable is set on decoder
// thread and is immutable while the codec is running.
@Nullable private Thread outputThread;
// Checker that ensures work is run on the output thread.
private ThreadChecker outputThreadChecker;
// Checker that ensures work is run on the decoder thread. The decoder thread is owned by the
// caller and must be used to call initDecode, decode, and release.
private ThreadChecker decoderThreadChecker;
private volatile boolean running;
@Nullable private volatile Exception shutdownException;
// Dimensions (width, height, stride, and sliceHeight) may be accessed by either the decode thread
// or the output thread. Accesses should be protected with this lock.
private final Object dimensionLock = new Object();
private int width;
private int height;
private int stride;
private int sliceHeight;
// Whether the decoder has finished the first frame. The codec may not change output dimensions
// after delivering the first frame. Only accessed on the output thread while the decoder is
// running.
private boolean hasDecodedFirstFrame;
// Whether the decoder has seen a key frame. The first frame must be a key frame. Only accessed
// on the decoder thread.
private boolean keyFrameRequired;
private final @Nullable EglBase.Context sharedContext;
// Valid and immutable while the decoder is running.
@Nullable private SurfaceTextureHelper surfaceTextureHelper;
@Nullable private Surface surface;
private static class DecodedTextureMetadata {
final long presentationTimestampUs;
final Integer decodeTimeMs;
DecodedTextureMetadata(long presentationTimestampUs, Integer decodeTimeMs) {
this.presentationTimestampUs = presentationTimestampUs;
this.decodeTimeMs = decodeTimeMs;
}
}
// Metadata for the last frame rendered to the texture.
private final Object renderedTextureMetadataLock = new Object();
@Nullable private DecodedTextureMetadata renderedTextureMetadata;
// Decoding proceeds asynchronously. This callback returns decoded frames to the caller. Valid
// and immutable while the decoder is running.
@Nullable private Callback callback;
// Valid and immutable while the decoder is running.
@Nullable private MediaCodecWrapper codec;
AndroidVideoDecoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName,
VideoCodecMimeType codecType, int colorFormat, @Nullable EglBase.Context sharedContext) {
if (!isSupportedColorFormat(colorFormat)) {
throw new IllegalArgumentException("Unsupported color format: " + colorFormat);
}
Logging.d(TAG,
"ctor name: " + codecName + " type: " + codecType + " color format: " + colorFormat
+ " context: " + sharedContext);
this.mediaCodecWrapperFactory = mediaCodecWrapperFactory;
this.codecName = codecName;
this.codecType = codecType;
this.colorFormat = colorFormat;
this.sharedContext = sharedContext;
this.frameInfos = new LinkedBlockingDeque<>();
}
@Override
public VideoCodecStatus initDecode(Settings settings, Callback callback) {
this.decoderThreadChecker = new ThreadChecker();
this.callback = callback;
if (sharedContext != null) {
surfaceTextureHelper = createSurfaceTextureHelper();
surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
surfaceTextureHelper.startListening(this);
}
return initDecodeInternal(settings.width, settings.height);
}
// Internal variant is used when restarting the codec due to reconfiguration.
private VideoCodecStatus initDecodeInternal(int width, int height) {
decoderThreadChecker.checkIsOnValidThread();
Logging.d(TAG,
"initDecodeInternal name: " + codecName + " type: " + codecType + " width: " + width
+ " height: " + height + " color format: " + colorFormat);
if (outputThread != null) {
Logging.e(TAG, "initDecodeInternal called while the codec is already running");
return VideoCodecStatus.FALLBACK_SOFTWARE;
}
// Note: it is not necessary to initialize dimensions under the lock, since the output thread
// is not running.
this.width = width;
this.height = height;
stride = width;
sliceHeight = height;
hasDecodedFirstFrame = false;
keyFrameRequired = true;
try {
codec = mediaCodecWrapperFactory.createByCodecName(codecName);
} catch (IOException | IllegalArgumentException | IllegalStateException e) {
Logging.e(TAG, "Cannot create media decoder " + codecName);
return VideoCodecStatus.FALLBACK_SOFTWARE;
}
try {
MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height);
if (sharedContext == null) {
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
}
codec.configure(format, surface, null, 0);
codec.start();
} catch (IllegalStateException | IllegalArgumentException e) {
Logging.e(TAG, "initDecode failed", e);
release();
return VideoCodecStatus.FALLBACK_SOFTWARE;
}
running = true;
outputThread = createOutputThread();
outputThread.start();
Logging.d(TAG, "initDecodeInternal done");
return VideoCodecStatus.OK;
}
@Override
public VideoCodecStatus decode(EncodedImage frame, DecodeInfo info) {
decoderThreadChecker.checkIsOnValidThread();
if (codec == null || callback == null) {
Logging.d(TAG, "decode uninitialized, codec: " + (codec != null) + ", callback: " + callback);
return VideoCodecStatus.UNINITIALIZED;
}
if (frame.buffer == null) {
Logging.e(TAG, "decode() - no input data");
return VideoCodecStatus.ERR_PARAMETER;
}
int size = frame.buffer.remaining();
if (size == 0) {
Logging.e(TAG, "decode() - input buffer empty");
return VideoCodecStatus.ERR_PARAMETER;
}
// Load dimensions from shared memory under the dimension lock.
final int width;
final int height;
synchronized (dimensionLock) {
width = this.width;
height = this.height;
}
// Check if the resolution changed and reset the codec if necessary.
if (frame.encodedWidth * frame.encodedHeight > 0
&& (frame.encodedWidth != width || frame.encodedHeight != height)) {
VideoCodecStatus status = reinitDecode(frame.encodedWidth, frame.encodedHeight);
if (status != VideoCodecStatus.OK) {
return status;
}
}
if (keyFrameRequired) {
// Need to process a key frame first.
if (frame.frameType != EncodedImage.FrameType.VideoFrameKey) {
Logging.e(TAG, "decode() - key frame required first");
return VideoCodecStatus.NO_OUTPUT;
}
}
int index;
try {
index = codec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT_US);
} catch (IllegalStateException e) {
Logging.e(TAG, "dequeueInputBuffer failed", e);
return VideoCodecStatus.ERROR;
}
if (index < 0) {
// Decoder is falling behind. No input buffers available.
// The decoder can't simply drop frames; it might lose a key frame.
Logging.e(TAG, "decode() - no HW buffers available; decoder falling behind");
return VideoCodecStatus.ERROR;
}
ByteBuffer buffer;
try {
buffer = codec.getInputBuffer(index);
} catch (IllegalStateException e) {
Logging.e(TAG, "getInputBuffer with index=" + index + " failed", e);
return VideoCodecStatus.ERROR;
}
if (buffer.capacity() < size) {
Logging.e(TAG, "decode() - HW buffer too small");
return VideoCodecStatus.ERROR;
}
buffer.put(frame.buffer);
frameInfos.offer(new FrameInfo(SystemClock.elapsedRealtime(), frame.rotation));
try {
codec.queueInputBuffer(index, 0 /* offset */, size,
TimeUnit.NANOSECONDS.toMicros(frame.captureTimeNs), 0 /* flags */);
} catch (IllegalStateException e) {
Logging.e(TAG, "queueInputBuffer failed", e);
frameInfos.pollLast();
return VideoCodecStatus.ERROR;
}
if (keyFrameRequired) {
keyFrameRequired = false;
}
return VideoCodecStatus.OK;
}
@Override
public String getImplementationName() {
return codecName;
}
@Override
public VideoCodecStatus release() {
// TODO(sakal): This is not called on the correct thread but is still called synchronously.
// Re-enable the check once this is called on the correct thread.
// decoderThreadChecker.checkIsOnValidThread();
Logging.d(TAG, "release");
VideoCodecStatus status = releaseInternal();
if (surface != null) {
releaseSurface();
surface = null;
surfaceTextureHelper.stopListening();
surfaceTextureHelper.dispose();
surfaceTextureHelper = null;
}
synchronized (renderedTextureMetadataLock) {
renderedTextureMetadata = null;
}
callback = null;
frameInfos.clear();
return status;
}
// Internal variant is used when restarting the codec due to reconfiguration.
private VideoCodecStatus releaseInternal() {
if (!running) {
Logging.d(TAG, "release: Decoder is not running.");
return VideoCodecStatus.OK;
}
try {
// The outputThread actually stops and releases the codec once running is false.
running = false;
if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
// Log an exception to capture the stack trace and turn it into a TIMEOUT error.
Logging.e(TAG, "Media decoder release timeout", new RuntimeException());
return VideoCodecStatus.TIMEOUT;
}
if (shutdownException != null) {
// Log the exception and turn it into an error. Wrap the exception in a new exception to
// capture both the output thread's stack trace and this thread's stack trace.
Logging.e(TAG, "Media decoder release error", new RuntimeException(shutdownException));
shutdownException = null;
return VideoCodecStatus.ERROR;
}
} finally {
codec = null;
outputThread = null;
}
return VideoCodecStatus.OK;
}
private VideoCodecStatus reinitDecode(int newWidth, int newHeight) {
decoderThreadChecker.checkIsOnValidThread();
VideoCodecStatus status = releaseInternal();
if (status != VideoCodecStatus.OK) {
return status;
}
return initDecodeInternal(newWidth, newHeight);
}
private Thread createOutputThread() {
return new Thread("AndroidVideoDecoder.outputThread") {
@Override
public void run() {
outputThreadChecker = new ThreadChecker();
while (running) {
deliverDecodedFrame();
}
releaseCodecOnOutputThread();
}
};
}
// Visible for testing.
protected void deliverDecodedFrame() {
outputThreadChecker.checkIsOnValidThread();
try {
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
// Block until an output buffer is available (up to 100 milliseconds). If the timeout is
// exceeded, deliverDecodedFrame() will be called again on the next iteration of the output
// thread's loop. Blocking here prevents the output thread from busy-waiting while the codec
// is idle.
int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
reformat(codec.getOutputFormat());
return;
}
if (index < 0) {
Logging.v(TAG, "dequeueOutputBuffer returned " + index);
return;
}
FrameInfo frameInfo = frameInfos.poll();
Integer decodeTimeMs = null;
int rotation = 0;
if (frameInfo != null) {
decodeTimeMs = (int) (SystemClock.elapsedRealtime() - frameInfo.decodeStartTimeMs);
rotation = frameInfo.rotation;
}
hasDecodedFirstFrame = true;
if (surfaceTextureHelper != null) {
deliverTextureFrame(index, info, rotation, decodeTimeMs);
} else {
deliverByteFrame(index, info, rotation, decodeTimeMs);
}
} catch (IllegalStateException e) {
Logging.e(TAG, "deliverDecodedFrame failed", e);
}
}
private void deliverTextureFrame(final int index, final MediaCodec.BufferInfo info,
final int rotation, final Integer decodeTimeMs) {
// Load dimensions from shared memory under the dimension lock.
final int width;
final int height;
synchronized (dimensionLock) {
width = this.width;
height = this.height;
}
synchronized (renderedTextureMetadataLock) {
if (renderedTextureMetadata != null) {
codec.releaseOutputBuffer(index, false);
return; // We are still waiting for texture for the previous frame, drop this one.
}
surfaceTextureHelper.setTextureSize(width, height);
surfaceTextureHelper.setFrameRotation(rotation);
renderedTextureMetadata = new DecodedTextureMetadata(info.presentationTimeUs, decodeTimeMs);
codec.releaseOutputBuffer(index, /* render= */ true);
}
}
@Override
public void onFrame(VideoFrame frame) {
final VideoFrame newFrame;
final Integer decodeTimeMs;
final long timestampNs;
synchronized (renderedTextureMetadataLock) {
if (renderedTextureMetadata == null) {
throw new IllegalStateException(
"Rendered texture metadata was null in onTextureFrameAvailable.");
}
timestampNs = renderedTextureMetadata.presentationTimestampUs * 1000;
decodeTimeMs = renderedTextureMetadata.decodeTimeMs;
renderedTextureMetadata = null;
}
// Change timestamp of frame.
final VideoFrame frameWithModifiedTimeStamp =
new VideoFrame(frame.getBuffer(), frame.getRotation(), timestampNs);
callback.onDecodedFrame(frameWithModifiedTimeStamp, decodeTimeMs, null /* qp */);
}
private void deliverByteFrame(
int index, MediaCodec.BufferInfo info, int rotation, Integer decodeTimeMs) {
// Load dimensions from shared memory under the dimension lock.
int width;
int height;
int stride;
int sliceHeight;
synchronized (dimensionLock) {
width = this.width;
height = this.height;
stride = this.stride;
sliceHeight = this.sliceHeight;
}
// Output must be at least width * height bytes for Y channel, plus (width / 2) * (height / 2)
// bytes for each of the U and V channels.
if (info.size < width * height * 3 / 2) {
Logging.e(TAG, "Insufficient output buffer size: " + info.size);
return;
}
if (info.size < stride * height * 3 / 2 && sliceHeight == height && stride > width) {
// Some codecs (Exynos) report an incorrect stride. Correct it here.
// Expected size == stride * height * 3 / 2. A bit of algebra gives the correct stride as
// 2 * size / (3 * height).
stride = info.size * 2 / (height * 3);
}
ByteBuffer buffer = codec.getOutputBuffer(index);
buffer.position(info.offset);
buffer.limit(info.offset + info.size);
buffer = buffer.slice();
final VideoFrame.Buffer frameBuffer;
if (colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) {
frameBuffer = copyI420Buffer(buffer, stride, sliceHeight, width, height);
} else {
// All other supported color formats are NV12.
frameBuffer = copyNV12ToI420Buffer(buffer, stride, sliceHeight, width, height);
}
codec.releaseOutputBuffer(index, /* render= */ false);
long presentationTimeNs = info.presentationTimeUs * 1000;
VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs);
// Note that qp is parsed on the C++ side.
callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */);
frame.release();
}
private VideoFrame.Buffer copyNV12ToI420Buffer(
ByteBuffer buffer, int stride, int sliceHeight, int width, int height) {
// toI420 copies the buffer.
return new NV12Buffer(width, height, stride, sliceHeight, buffer, null /* releaseCallback */)
.toI420();
}
private VideoFrame.Buffer copyI420Buffer(
ByteBuffer buffer, int stride, int sliceHeight, int width, int height) {
if (stride % 2 != 0) {
throw new AssertionError("Stride is not divisible by two: " + stride);
}
// Note that the case with odd `sliceHeight` is handled in a special way.
// The chroma height contained in the payload is rounded down instead of
// up, making it one row less than what we expect in WebRTC. Therefore, we
// have to duplicate the last chroma rows for this case. Also, the offset
// between the Y plane and the U plane is unintuitive for this case. See
// http://bugs.webrtc.org/6651 for more info.
final int chromaWidth = (width + 1) / 2;
final int chromaHeight = (sliceHeight % 2 == 0) ? (height + 1) / 2 : height / 2;
final int uvStride = stride / 2;
final int yPos = 0;
final int yEnd = yPos + stride * height;
final int uPos = yPos + stride * sliceHeight;
final int uEnd = uPos + uvStride * chromaHeight;
final int vPos = uPos + uvStride * sliceHeight / 2;
final int vEnd = vPos + uvStride * chromaHeight;
VideoFrame.I420Buffer frameBuffer = allocateI420Buffer(width, height);
buffer.limit(yEnd);
buffer.position(yPos);
copyPlane(
buffer.slice(), stride, frameBuffer.getDataY(), frameBuffer.getStrideY(), width, height);
buffer.limit(uEnd);
buffer.position(uPos);
copyPlane(buffer.slice(), uvStride, frameBuffer.getDataU(), frameBuffer.getStrideU(),
chromaWidth, chromaHeight);
if (sliceHeight % 2 == 1) {
buffer.position(uPos + uvStride * (chromaHeight - 1)); // Seek to beginning of last full row.
ByteBuffer dataU = frameBuffer.getDataU();
dataU.position(frameBuffer.getStrideU() * chromaHeight); // Seek to beginning of last row.
dataU.put(buffer); // Copy the last row.
}
buffer.limit(vEnd);
buffer.position(vPos);
copyPlane(buffer.slice(), uvStride, frameBuffer.getDataV(), frameBuffer.getStrideV(),
chromaWidth, chromaHeight);
if (sliceHeight % 2 == 1) {
buffer.position(vPos + uvStride * (chromaHeight - 1)); // Seek to beginning of last full row.
ByteBuffer dataV = frameBuffer.getDataV();
dataV.position(frameBuffer.getStrideV() * chromaHeight); // Seek to beginning of last row.
dataV.put(buffer); // Copy the last row.
}
return frameBuffer;
}
private void reformat(MediaFormat format) {
outputThreadChecker.checkIsOnValidThread();
Logging.d(TAG, "Decoder format changed: " + format);
final int newWidth;
final int newHeight;
if (format.containsKey(MediaFormat.KEY_CROP_LEFT)
&& format.containsKey(MediaFormat.KEY_CROP_RIGHT)
&& format.containsKey(MediaFormat.KEY_CROP_BOTTOM)
&& format.containsKey(MediaFormat.KEY_CROP_TOP)) {
newWidth = 1 + format.getInteger(MediaFormat.KEY_CROP_RIGHT)
- format.getInteger(MediaFormat.KEY_CROP_LEFT);
newHeight = 1 + format.getInteger(MediaFormat.KEY_CROP_BOTTOM)
- format.getInteger(MediaFormat.KEY_CROP_TOP);
} else {
newWidth = format.getInteger(MediaFormat.KEY_WIDTH);
newHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
}
// Compare to existing width, height, and save values under the dimension lock.
synchronized (dimensionLock) {
if (newWidth != width || newHeight != height) {
if (hasDecodedFirstFrame) {
stopOnOutputThread(new RuntimeException("Unexpected size change. "
+ "Configured " + width + "*" + height + ". "
+ "New " + newWidth + "*" + newHeight));
return;
} else if (newWidth <= 0 || newHeight <= 0) {
Logging.w(TAG,
"Unexpected format dimensions. Configured " + width + "*" + height + ". "
+ "New " + newWidth + "*" + newHeight + ". Skip it");
return;
}
width = newWidth;
height = newHeight;
}
}
// Note: texture mode ignores colorFormat. Hence, if the texture helper is non-null, skip
// color format updates.
if (surfaceTextureHelper == null && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
if (!isSupportedColorFormat(colorFormat)) {
stopOnOutputThread(new IllegalStateException("Unsupported color format: " + colorFormat));
return;
}
}
// Save stride and sliceHeight under the dimension lock.
synchronized (dimensionLock) {
if (format.containsKey(MediaFormat.KEY_STRIDE)) {
stride = format.getInteger(MediaFormat.KEY_STRIDE);
}
if (format.containsKey(MediaFormat.KEY_SLICE_HEIGHT)) {
sliceHeight = format.getInteger(MediaFormat.KEY_SLICE_HEIGHT);
}
Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
stride = Math.max(width, stride);
sliceHeight = Math.max(height, sliceHeight);
}
}
private void releaseCodecOnOutputThread() {
outputThreadChecker.checkIsOnValidThread();
Logging.d(TAG, "Releasing MediaCodec on output thread");
try {
codec.stop();
} catch (Exception e) {
Logging.e(TAG, "Media decoder stop failed", e);
}
try {
codec.release();
} catch (Exception e) {
Logging.e(TAG, "Media decoder release failed", e);
// Propagate exceptions caught during release back to the main thread.
shutdownException = e;
}
Logging.d(TAG, "Release on output thread done");
}
private void stopOnOutputThread(Exception e) {
outputThreadChecker.checkIsOnValidThread();
running = false;
shutdownException = e;
}
private boolean isSupportedColorFormat(int colorFormat) {
for (int supported : MediaCodecUtils.DECODER_COLOR_FORMATS) {
if (supported == colorFormat) {
return true;
}
}
return false;
}
// Visible for testing.
protected SurfaceTextureHelper createSurfaceTextureHelper() {
return SurfaceTextureHelper.create("decoder-texture-thread", sharedContext);
}
// Visible for testing.
// TODO(sakal): Remove once Robolectric commit fa991a0 has been rolled to WebRTC.
protected void releaseSurface() {
surface.release();
}
// Visible for testing.
protected VideoFrame.I420Buffer allocateI420Buffer(int width, int height) {
return JavaI420Buffer.allocate(width, height);
}
// Visible for testing.
protected void copyPlane(
ByteBuffer src, int srcStride, ByteBuffer dst, int dstStride, int width, int height) {
YuvHelper.copyPlane(src, srcStride, dst, dstStride, width, height);
}
}
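A worked example of the stride correction used in deliverByteFrame above: from size == stride * height * 3 / 2 it follows that stride == 2 * size / (3 * height). Illustrative numbers:

final class StrideCheck {
    static void demo() {
        int height = 720;
        int reportedStride = 1280;       // stride reported by the codec
        int size = 1555200;              // bytes actually delivered (real stride is 1440)
        int expected = reportedStride * height * 3 / 2;  // 1382400, not equal to size
        int correctedStride = size * 2 / (height * 3);   // 3110400 / 2160 = 1440
        System.out.println(expected + " expected vs " + size + " actual -> stride " + correctedStride);
    }
}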

View File

@@ -0,0 +1,20 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
public class ApplicationContextProvider {
@CalledByNative
public static Context getApplicationContext() {
return ContextUtils.getApplicationContext();
}
}

View File

@@ -0,0 +1,21 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Implementations of this interface can create a native {@code webrtc::AudioDecoderFactory}.
*/
public interface AudioDecoderFactoryFactory {
/**
* Returns a pointer to a {@code webrtc::AudioDecoderFactory}. The caller takes ownership.
*/
long createNativeAudioDecoderFactory();
}

View File

@@ -0,0 +1,21 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Implementations of this interface can create a native {@code webrtc::AudioEncoderFactory}.
*/
public interface AudioEncoderFactoryFactory {
/**
* Returns a pointer to a {@code webrtc::AudioEncoderFactory}. The caller takes ownership.
*/
long createNativeAudioEncoderFactory();
}

View File

@@ -0,0 +1,20 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Factory for creating webrtc::AudioProcessing instances. */
public interface AudioProcessingFactory {
/**
* Dynamically allocates a webrtc::AudioProcessing instance and returns a pointer to it.
* The caller takes ownership of the object.
*/
public long createNative();
}

@@ -0,0 +1,26 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Java wrapper for a C++ AudioSourceInterface. Used as the source for one or
* more {@code AudioTrack} objects.
*/
public class AudioSource extends MediaSource {
public AudioSource(long nativeSource) {
super(nativeSource);
}
/** Returns a pointer to webrtc::AudioSourceInterface. */
long getNativeAudioSource() {
return getNativeMediaSource();
}
}

@@ -0,0 +1,32 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Java wrapper for a C++ AudioTrackInterface */
public class AudioTrack extends MediaStreamTrack {
public AudioTrack(long nativeTrack) {
super(nativeTrack);
}
/** Sets the volume for the underlying MediaSource. Volume is a gain value in the range
* 0 to 10.
*/
public void setVolume(double volume) {
nativeSetVolume(getNativeAudioTrack(), volume);
}
/** Returns a pointer to webrtc::AudioTrackInterface. */
long getNativeAudioTrack() {
return getNativeMediaStreamTrack();
}
private static native void nativeSetVolume(long track, double volume);
}
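// Semantics sketch (assuming the conventional linear-gain scale of the native
// AudioSourceInterface): 0 mutes the track, 1 is nominally unity, and values up to
// the documented maximum of 10 amplify it, e.g. track.setVolume(0.5) roughly halves
// the signal.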

@@ -0,0 +1,38 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** BitrateAdjuster that tracks bitrate and framerate but does not adjust them. */
class BaseBitrateAdjuster implements BitrateAdjuster {
protected int targetBitrateBps;
protected double targetFramerateFps;
@Override
public void setTargets(int targetBitrateBps, double targetFramerateFps) {
this.targetBitrateBps = targetBitrateBps;
this.targetFramerateFps = targetFramerateFps;
}
@Override
public void reportEncodedFrame(int size) {
// No op.
}
@Override
public int getAdjustedBitrateBps() {
return targetBitrateBps;
}
@Override
public double getAdjustedFramerateFps() {
return targetFramerateFps;
}
}

@@ -0,0 +1,31 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Object that adjusts the bitrate of a hardware codec. */
interface BitrateAdjuster {
/**
* Sets the target bitrate in bits per second and framerate in frames per second.
*/
void setTargets(int targetBitrateBps, double targetFramerateFps);
/**
* Should be used to report the size of an encoded frame to the bitrate adjuster. Use
* getAdjustedBitrateBps to get the updated bitrate after calling this method.
*/
void reportEncodedFrame(int size);
/** Gets the current bitrate. */
int getAdjustedBitrateBps();
/** Gets the current framerate. */
double getAdjustedFramerateFps();
}
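// Typical call pattern from a hardware encoder (a sketch, not part of this interface):
// call setTargets(...) whenever the rate controller picks new targets, call
// reportEncodedFrame(size) after each encoded frame, and read
// getAdjustedBitrateBps() / getAdjustedFramerateFps() when reconfiguring the codec.
// This lets adjusters compensate for codecs that overshoot or undershoot the
// requested bitrate, while BaseBitrateAdjuster above simply passes targets through.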

@@ -0,0 +1,23 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Creates a native {@code webrtc::AudioDecoderFactory} with the builtin audio decoders.
*/
public class BuiltinAudioDecoderFactoryFactory implements AudioDecoderFactoryFactory {
@Override
public long createNativeAudioDecoderFactory() {
return nativeCreateBuiltinAudioDecoderFactory();
}
private static native long nativeCreateBuiltinAudioDecoderFactory();
}

@@ -0,0 +1,23 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* This class creates a native {@code webrtc::AudioEncoderFactory} with the builtin audio encoders.
*/
public class BuiltinAudioEncoderFactoryFactory implements AudioEncoderFactoryFactory {
@Override
public long createNativeAudioEncoderFactory() {
return nativeCreateBuiltinAudioEncoderFactory();
}
private static native long nativeCreateBuiltinAudioEncoderFactory();
}

@@ -0,0 +1,41 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
public class CallSessionFileRotatingLogSink {
private long nativeSink;
public static byte[] getLogData(String dirPath) {
if (dirPath == null) {
throw new IllegalArgumentException("dirPath may not be null.");
}
return nativeGetLogData(dirPath);
}
public CallSessionFileRotatingLogSink(
String dirPath, int maxFileSize, Logging.Severity severity) {
if (dirPath == null) {
throw new IllegalArgumentException("dirPath may not be null.");
}
nativeSink = nativeAddSink(dirPath, maxFileSize, severity.ordinal());
}
public void dispose() {
if (nativeSink != 0) {
nativeDeleteSink(nativeSink);
nativeSink = 0;
}
}
private static native long nativeAddSink(String dirPath, int maxFileSize, int severity);
private static native void nativeDeleteSink(long sink);
private static native byte[] nativeGetLogData(String dirPath);
}
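// A minimal usage sketch (the directory path is illustrative):
//   String dir = context.getFilesDir() + "/rtc_log";
//   CallSessionFileRotatingLogSink sink =
//       new CallSessionFileRotatingLogSink(dir, 10 * 1024 * 1024, Logging.Severity.LS_INFO);
//   // ... run the call ...
//   sink.dispose();
//   byte[] logData = CallSessionFileRotatingLogSink.getLogData(dir);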

@@ -0,0 +1,29 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* @CalledByNative is used by the JNI generator to create the necessary JNI
* bindings and expose this method to native code.
*/
@Target({ElementType.CONSTRUCTOR, ElementType.METHOD})
@Retention(RetentionPolicy.CLASS)
public @interface CalledByNative {
/*
* If present, tells which inner class the method belongs to.
*/
public String value() default "";
}

@@ -0,0 +1,33 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* @CalledByNativeUnchecked is used to generate JNI bindings that do not check for exceptions.
* It only makes sense to use this annotation on methods that declare a throws... spec.
 * However, note that the exception received on the native side may be an 'unchecked' exception
 * (a RuntimeException such as NullPointerException), so the native code should differentiate
 * these cases. Usage of this should be very rare; where possible, handle exceptions on the
 * Java side and use a return value to indicate success / failure.
*/
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.CLASS)
public @interface CalledByNativeUnchecked {
/*
* If present, tells which inner class the method belongs to.
*/
public String value() default "";
}

@@ -0,0 +1,33 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
public class Camera1Capturer extends CameraCapturer {
private final boolean captureToTexture;
public Camera1Capturer(
String cameraName, CameraEventsHandler eventsHandler, boolean captureToTexture) {
super(cameraName, eventsHandler, new Camera1Enumerator(captureToTexture));
this.captureToTexture = captureToTexture;
}
@Override
protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
CameraSession.Events events, Context applicationContext,
SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
int framerate) {
Camera1Session.create(createSessionCallback, events, captureToTexture, applicationContext,
surfaceTextureHelper, cameraName, width, height, framerate);
}
}

@@ -0,0 +1,185 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.os.SystemClock;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
@SuppressWarnings("deprecation")
public class Camera1Enumerator implements CameraEnumerator {
private static final String TAG = "Camera1Enumerator";
// Each entry contains the supported formats for corresponding camera index. The formats for all
// cameras are enumerated on the first call to getSupportedFormats(), and cached for future
// reference.
private static List<List<CaptureFormat>> cachedSupportedFormats;
private final boolean captureToTexture;
public Camera1Enumerator() {
this(true /* captureToTexture */);
}
public Camera1Enumerator(boolean captureToTexture) {
this.captureToTexture = captureToTexture;
}
// Returns device names that can be used to create a new CameraVideoCapturer via createCapturer().
@Override
public String[] getDeviceNames() {
ArrayList<String> namesList = new ArrayList<>();
for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
String name = getDeviceName(i);
if (name != null) {
namesList.add(name);
Logging.d(TAG, "Index: " + i + ". " + name);
} else {
Logging.e(TAG, "Index: " + i + ". Failed to query camera name.");
}
}
String[] namesArray = new String[namesList.size()];
return namesList.toArray(namesArray);
}
@Override
public boolean isFrontFacing(String deviceName) {
android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT;
}
@Override
public boolean isBackFacing(String deviceName) {
android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK;
}
@Override
public List<CaptureFormat> getSupportedFormats(String deviceName) {
return getSupportedFormats(getCameraIndex(deviceName));
}
@Override
public CameraVideoCapturer createCapturer(
String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
return new Camera1Capturer(deviceName, eventsHandler, captureToTexture);
}
private static @Nullable android.hardware.Camera.CameraInfo getCameraInfo(int index) {
android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
try {
android.hardware.Camera.getCameraInfo(index, info);
} catch (Exception e) {
Logging.e(TAG, "getCameraInfo failed on index " + index, e);
return null;
}
return info;
}
static synchronized List<CaptureFormat> getSupportedFormats(int cameraId) {
if (cachedSupportedFormats == null) {
cachedSupportedFormats = new ArrayList<List<CaptureFormat>>();
for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
cachedSupportedFormats.add(enumerateFormats(i));
}
}
return cachedSupportedFormats.get(cameraId);
}
private static List<CaptureFormat> enumerateFormats(int cameraId) {
Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
final long startTimeMs = SystemClock.elapsedRealtime();
final android.hardware.Camera.Parameters parameters;
android.hardware.Camera camera = null;
try {
Logging.d(TAG, "Opening camera with index " + cameraId);
camera = android.hardware.Camera.open(cameraId);
parameters = camera.getParameters();
} catch (RuntimeException e) {
Logging.e(TAG, "Open camera failed on camera index " + cameraId, e);
return new ArrayList<CaptureFormat>();
} finally {
if (camera != null) {
camera.release();
}
}
final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
try {
int minFps = 0;
int maxFps = 0;
final List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
if (listFpsRange != null) {
// getSupportedPreviewFpsRange() returns a sorted list. Take the fps range
// corresponding to the highest fps.
final int[] range = listFpsRange.get(listFpsRange.size() - 1);
minFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
maxFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
}
for (android.hardware.Camera.Size size : parameters.getSupportedPreviewSizes()) {
formatList.add(new CaptureFormat(size.width, size.height, minFps, maxFps));
}
} catch (Exception e) {
Logging.e(TAG, "getSupportedFormats() failed on camera index " + cameraId, e);
}
final long endTimeMs = SystemClock.elapsedRealtime();
Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+ " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
return formatList;
}
// Convert from android.hardware.Camera.Size to Size.
static List<Size> convertSizes(List<android.hardware.Camera.Size> cameraSizes) {
final List<Size> sizes = new ArrayList<Size>();
for (android.hardware.Camera.Size size : cameraSizes) {
sizes.add(new Size(size.width, size.height));
}
return sizes;
}
// Convert from int[2] to CaptureFormat.FramerateRange.
static List<CaptureFormat.FramerateRange> convertFramerates(List<int[]> arrayRanges) {
final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
for (int[] range : arrayRanges) {
ranges.add(new CaptureFormat.FramerateRange(
range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]));
}
return ranges;
}
// Returns the camera index for camera with name `deviceName`, or throws IllegalArgumentException
// if no such camera can be found.
static int getCameraIndex(String deviceName) {
Logging.d(TAG, "getCameraIndex: " + deviceName);
for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
if (deviceName.equals(getDeviceName(i))) {
return i;
}
}
throw new IllegalArgumentException("No such camera: " + deviceName);
}
// Returns the name of the camera with camera index. Returns null if the
// camera can not be used.
static @Nullable String getDeviceName(int index) {
android.hardware.Camera.CameraInfo info = getCameraInfo(index);
if (info == null) {
return null;
}
String facing =
(info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
return "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation;
}
}
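// A minimal selection sketch using this enumerator (eventsHandler may be null, in
// which case CameraCapturer substitutes a no-op handler):
//   CameraEnumerator enumerator = new Camera1Enumerator(/* captureToTexture= */ true);
//   for (String name : enumerator.getDeviceNames()) {
//     if (enumerator.isFrontFacing(name)) {
//       CameraVideoCapturer capturer = enumerator.createCapturer(name, /* eventsHandler= */ null);
//       break;
//     }
//   }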

@@ -0,0 +1,340 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
import android.hardware.Camera;
import android.os.Handler;
import android.os.SystemClock;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
@SuppressWarnings("deprecation")
class Camera1Session implements CameraSession {
private static final String TAG = "Camera1Session";
private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
private static final Histogram camera1StartTimeMsHistogram =
Histogram.createCounts("WebRTC.Android.Camera1.StartTimeMs", 1, 10000, 50);
private static final Histogram camera1StopTimeMsHistogram =
Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50);
private static final Histogram camera1ResolutionHistogram = Histogram.createEnumeration(
"WebRTC.Android.Camera1.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());
private enum SessionState { RUNNING, STOPPED }
private final Handler cameraThreadHandler;
private final Events events;
private final boolean captureToTexture;
private final Context applicationContext;
private final SurfaceTextureHelper surfaceTextureHelper;
private final int cameraId;
private final Camera camera;
private final Camera.CameraInfo info;
private final CaptureFormat captureFormat;
// Used only for stats. Only used on the camera thread.
private final long constructionTimeNs; // Construction time of this class.
private SessionState state;
private boolean firstFrameReported;
// TODO(titovartem) make correct fix during webrtc:9175
@SuppressWarnings("ByteBufferBackingArray")
public static void create(final CreateSessionCallback callback, final Events events,
final boolean captureToTexture, final Context applicationContext,
final SurfaceTextureHelper surfaceTextureHelper, final String cameraName,
final int width, final int height, final int framerate) {
final long constructionTimeNs = System.nanoTime();
Logging.d(TAG, "Open camera " + cameraName);
events.onCameraOpening();
final int cameraId;
try {
cameraId = Camera1Enumerator.getCameraIndex(cameraName);
} catch (IllegalArgumentException e) {
callback.onFailure(FailureType.ERROR, e.getMessage());
return;
}
final Camera camera;
try {
camera = Camera.open(cameraId);
} catch (RuntimeException e) {
callback.onFailure(FailureType.ERROR, e.getMessage());
return;
}
if (camera == null) {
callback.onFailure(
FailureType.ERROR, "Camera.open returned null for camera id = " + cameraId);
return;
}
try {
camera.setPreviewTexture(surfaceTextureHelper.getSurfaceTexture());
} catch (IOException | RuntimeException e) {
camera.release();
callback.onFailure(FailureType.ERROR, e.getMessage());
return;
}
final Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(cameraId, info);
final CaptureFormat captureFormat;
try {
final Camera.Parameters parameters = camera.getParameters();
captureFormat = findClosestCaptureFormat(parameters, width, height, framerate);
final Size pictureSize = findClosestPictureSize(parameters, width, height);
updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture);
} catch (RuntimeException e) {
camera.release();
callback.onFailure(FailureType.ERROR, e.getMessage());
return;
}
if (!captureToTexture) {
final int frameSize = captureFormat.frameSize();
for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
camera.addCallbackBuffer(buffer.array());
}
}
// Calculate orientation manually and send it as CVO instead.
try {
camera.setDisplayOrientation(0 /* degrees */);
} catch (RuntimeException e) {
camera.release();
callback.onFailure(FailureType.ERROR, e.getMessage());
return;
}
callback.onDone(new Camera1Session(events, captureToTexture, applicationContext,
surfaceTextureHelper, cameraId, camera, info, captureFormat, constructionTimeNs));
}
private static void updateCameraParameters(Camera camera, Camera.Parameters parameters,
CaptureFormat captureFormat, Size pictureSize, boolean captureToTexture) {
final List<String> focusModes = parameters.getSupportedFocusModes();
parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
parameters.setPreviewSize(captureFormat.width, captureFormat.height);
parameters.setPictureSize(pictureSize.width, pictureSize.height);
if (!captureToTexture) {
parameters.setPreviewFormat(captureFormat.imageFormat);
}
if (parameters.isVideoStabilizationSupported()) {
parameters.setVideoStabilization(true);
}
if (focusModes != null && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
camera.setParameters(parameters);
}
private static CaptureFormat findClosestCaptureFormat(
Camera.Parameters parameters, int width, int height, int framerate) {
// Find closest supported format for `width` x `height` @ `framerate`.
final List<CaptureFormat.FramerateRange> supportedFramerates =
Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
Logging.d(TAG, "Available fps ranges: " + supportedFramerates);
final CaptureFormat.FramerateRange fpsRange =
CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate);
final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height);
CameraEnumerationAndroid.reportCameraResolution(camera1ResolutionHistogram, previewSize);
return new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
}
private static Size findClosestPictureSize(Camera.Parameters parameters, int width, int height) {
return CameraEnumerationAndroid.getClosestSupportedSize(
Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height);
}
private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
SurfaceTextureHelper surfaceTextureHelper, int cameraId, Camera camera,
Camera.CameraInfo info, CaptureFormat captureFormat, long constructionTimeNs) {
Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
this.cameraThreadHandler = new Handler();
this.events = events;
this.captureToTexture = captureToTexture;
this.applicationContext = applicationContext;
this.surfaceTextureHelper = surfaceTextureHelper;
this.cameraId = cameraId;
this.camera = camera;
this.info = info;
this.captureFormat = captureFormat;
this.constructionTimeNs = constructionTimeNs;
surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
startCapturing();
}
@Override
public void stop() {
Logging.d(TAG, "Stop camera1 session on camera " + cameraId);
checkIsOnCameraThread();
if (state != SessionState.STOPPED) {
final long stopStartTime = System.nanoTime();
stopInternal();
final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
camera1StopTimeMsHistogram.addSample(stopTimeMs);
}
}
private void startCapturing() {
Logging.d(TAG, "Start capturing");
checkIsOnCameraThread();
state = SessionState.RUNNING;
camera.setErrorCallback(new Camera.ErrorCallback() {
@Override
public void onError(int error, Camera camera) {
String errorMessage;
if (error == Camera.CAMERA_ERROR_SERVER_DIED) {
errorMessage = "Camera server died!";
} else {
errorMessage = "Camera error: " + error;
}
Logging.e(TAG, errorMessage);
stopInternal();
if (error == Camera.CAMERA_ERROR_EVICTED) {
events.onCameraDisconnected(Camera1Session.this);
} else {
events.onCameraError(Camera1Session.this, errorMessage);
}
}
});
if (captureToTexture) {
listenForTextureFrames();
} else {
listenForBytebufferFrames();
}
try {
camera.startPreview();
} catch (RuntimeException e) {
stopInternal();
events.onCameraError(this, e.getMessage());
}
}
private void stopInternal() {
Logging.d(TAG, "Stop internal");
checkIsOnCameraThread();
if (state == SessionState.STOPPED) {
Logging.d(TAG, "Camera is already stopped");
return;
}
state = SessionState.STOPPED;
surfaceTextureHelper.stopListening();
// Note: stopPreview or other driver code might deadlock. Deadlock in
// Camera._stopPreview(Native Method) has been observed on
// Nexus 5 (hammerhead), OS version LMY48I.
camera.stopPreview();
camera.release();
events.onCameraClosed(this);
Logging.d(TAG, "Stop done");
}
private void listenForTextureFrames() {
surfaceTextureHelper.startListening((VideoFrame frame) -> {
checkIsOnCameraThread();
if (state != SessionState.RUNNING) {
Logging.d(TAG, "Texture frame captured but camera is no longer running.");
return;
}
if (!firstFrameReported) {
final int startTimeMs =
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
camera1StartTimeMsHistogram.addSample(startTimeMs);
firstFrameReported = true;
}
// Undo the mirror that the OS "helps" us with.
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
final VideoFrame modifiedFrame =
new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix(
(TextureBufferImpl) frame.getBuffer(),
/* mirror= */ info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT,
/* rotation= */ 0),
/* rotation= */ getFrameOrientation(), frame.getTimestampNs());
events.onFrameCaptured(Camera1Session.this, modifiedFrame);
modifiedFrame.release();
});
}
private void listenForBytebufferFrames() {
camera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(final byte[] data, Camera callbackCamera) {
checkIsOnCameraThread();
if (callbackCamera != camera) {
Logging.e(TAG, "Callback from a different camera. This should never happen.");
return;
}
if (state != SessionState.RUNNING) {
Logging.d(TAG, "Bytebuffer frame captured but camera is no longer running.");
return;
}
final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
if (!firstFrameReported) {
final int startTimeMs =
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
camera1StartTimeMsHistogram.addSample(startTimeMs);
firstFrameReported = true;
}
VideoFrame.Buffer frameBuffer = new NV21Buffer(
data, captureFormat.width, captureFormat.height, () -> cameraThreadHandler.post(() -> {
if (state == SessionState.RUNNING) {
camera.addCallbackBuffer(data);
}
}));
final VideoFrame frame = new VideoFrame(frameBuffer, getFrameOrientation(), captureTimeNs);
events.onFrameCaptured(Camera1Session.this, frame);
frame.release();
}
});
}
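// Combines the fixed sensor orientation with the current device orientation so that
// frames carry their rotation as metadata instead of being rotated in software. For a
// back-facing camera the device rotation is inverted because sensor and display turn
// in opposite directions; e.g. sensor orientation 90 with the device rotated 90
// degrees yields (90 + (360 - 90)) % 360 = 0.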
private int getFrameOrientation() {
int rotation = CameraSession.getDeviceOrientation(applicationContext);
if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
rotation = 360 - rotation;
}
return (info.orientation + rotation) % 360;
}
private void checkIsOnCameraThread() {
if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
throw new IllegalStateException("Wrong thread");
}
}
}

@@ -0,0 +1,36 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
import android.hardware.camera2.CameraManager;
import androidx.annotation.Nullable;
public class Camera2Capturer extends CameraCapturer {
private final Context context;
@Nullable private final CameraManager cameraManager;
public Camera2Capturer(Context context, String cameraName, CameraEventsHandler eventsHandler) {
super(cameraName, eventsHandler, new Camera2Enumerator(context));
this.context = context;
cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
}
@Override
protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
CameraSession.Events events, Context applicationContext,
SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
int framerate) {
Camera2Session.create(createSessionCallback, events, applicationContext, cameraManager,
surfaceTextureHelper, cameraName, width, height, framerate);
}
}

@@ -0,0 +1,239 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.os.Build;
import android.os.SystemClock;
import android.util.Range;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
public class Camera2Enumerator implements CameraEnumerator {
private static final String TAG = "Camera2Enumerator";
private static final double NANO_SECONDS_PER_SECOND = 1.0e9;
// Each entry contains the supported formats for a given camera index. The formats are enumerated
// lazily in getSupportedFormats(), and cached for future reference.
private static final Map<String, List<CaptureFormat>> cachedSupportedFormats =
new HashMap<String, List<CaptureFormat>>();
final Context context;
@Nullable final CameraManager cameraManager;
public Camera2Enumerator(Context context) {
this.context = context;
this.cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
}
@Override
public String[] getDeviceNames() {
try {
return cameraManager.getCameraIdList();
} catch (CameraAccessException e) {
Logging.e(TAG, "Camera access exception", e);
return new String[] {};
}
}
@Override
public boolean isFrontFacing(String deviceName) {
CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
return characteristics != null
&& characteristics.get(CameraCharacteristics.LENS_FACING)
== CameraMetadata.LENS_FACING_FRONT;
}
@Override
public boolean isBackFacing(String deviceName) {
CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
return characteristics != null
&& characteristics.get(CameraCharacteristics.LENS_FACING)
== CameraMetadata.LENS_FACING_BACK;
}
@Nullable
@Override
public List<CaptureFormat> getSupportedFormats(String deviceName) {
return getSupportedFormats(context, deviceName);
}
@Override
public CameraVideoCapturer createCapturer(
String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
return new Camera2Capturer(context, deviceName, eventsHandler);
}
private @Nullable CameraCharacteristics getCameraCharacteristics(String deviceName) {
try {
return cameraManager.getCameraCharacteristics(deviceName);
} catch (CameraAccessException | RuntimeException e) {
Logging.e(TAG, "Camera access exception", e);
return null;
}
}
/**
* Checks if API is supported and all cameras have better than legacy support.
*/
public static boolean isSupported(Context context) {
CameraManager cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
try {
String[] cameraIds = cameraManager.getCameraIdList();
for (String id : cameraIds) {
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(id);
if (characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)
== CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
return false;
}
}
} catch (CameraAccessException | RuntimeException e) {
Logging.e(TAG, "Failed to check if camera2 is supported", e);
return false;
}
return true;
}
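// WebRTC framerate ranges are kept in thousandths of a frame per second (30 fps ->
// 30000). Camera2 usually reports plain fps values, so a factor of 1000 is applied;
// a few devices already report scaled values, detected by an upper bound >= 1000,
// in which case the factor is 1.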
static int getFpsUnitFactor(Range<Integer>[] fpsRanges) {
if (fpsRanges.length == 0) {
return 1000;
}
return fpsRanges[0].getUpper() < 1000 ? 1000 : 1;
}
static List<Size> getSupportedSizes(CameraCharacteristics cameraCharacteristics) {
final StreamConfigurationMap streamMap =
cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
final int supportLevel =
cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
final android.util.Size[] nativeSizes = streamMap.getOutputSizes(SurfaceTexture.class);
final List<Size> sizes = convertSizes(nativeSizes);
// Video may be stretched pre LMR1 on legacy implementations.
// Filter out formats that have different aspect ratio than the sensor array.
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1
&& supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
final Rect activeArraySize =
cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
final ArrayList<Size> filteredSizes = new ArrayList<Size>();
for (Size size : sizes) {
if (activeArraySize.width() * size.height == activeArraySize.height() * size.width) {
filteredSizes.add(size);
}
}
return filteredSizes;
} else {
return sizes;
}
}
@Nullable
static List<CaptureFormat> getSupportedFormats(Context context, String cameraId) {
return getSupportedFormats(
(CameraManager) context.getSystemService(Context.CAMERA_SERVICE), cameraId);
}
@Nullable
static List<CaptureFormat> getSupportedFormats(CameraManager cameraManager, String cameraId) {
synchronized (cachedSupportedFormats) {
if (cachedSupportedFormats.containsKey(cameraId)) {
return cachedSupportedFormats.get(cameraId);
}
Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
final long startTimeMs = SystemClock.elapsedRealtime();
final CameraCharacteristics cameraCharacteristics;
try {
cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
} catch (Exception ex) {
Logging.e(TAG, "getCameraCharacteristics()", ex);
return new ArrayList<CaptureFormat>();
}
final StreamConfigurationMap streamMap =
cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Range<Integer>[] fpsRanges =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
List<CaptureFormat.FramerateRange> framerateRanges =
convertFramerates(fpsRanges, getFpsUnitFactor(fpsRanges));
List<Size> sizes = getSupportedSizes(cameraCharacteristics);
int defaultMaxFps = 0;
for (CaptureFormat.FramerateRange framerateRange : framerateRanges) {
defaultMaxFps = Math.max(defaultMaxFps, framerateRange.max);
}
final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
for (Size size : sizes) {
long minFrameDurationNs = 0;
try {
minFrameDurationNs = streamMap.getOutputMinFrameDuration(
SurfaceTexture.class, new android.util.Size(size.width, size.height));
} catch (Exception e) {
// getOutputMinFrameDuration() is not supported on all devices. Ignore silently.
}
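// Convert the minimum frame duration (ns per frame) into a maximum fps, then scale
// by 1000 to match the thousandths-of-fps unit used by CaptureFormat.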
final int maxFps = (minFrameDurationNs == 0)
? defaultMaxFps
: (int) Math.round(NANO_SECONDS_PER_SECOND / minFrameDurationNs) * 1000;
formatList.add(new CaptureFormat(size.width, size.height, 0, maxFps));
Logging.d(TAG, "Format: " + size.width + "x" + size.height + "@" + maxFps);
}
cachedSupportedFormats.put(cameraId, formatList);
final long endTimeMs = SystemClock.elapsedRealtime();
Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+ " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
return formatList;
}
}
// Convert from android.util.Size to Size.
private static List<Size> convertSizes(android.util.Size[] cameraSizes) {
if (cameraSizes == null || cameraSizes.length == 0) {
return Collections.emptyList();
}
final List<Size> sizes = new ArrayList<>(cameraSizes.length);
for (android.util.Size size : cameraSizes) {
sizes.add(new Size(size.getWidth(), size.getHeight()));
}
return sizes;
}
// Convert from android.util.Range<Integer> to CaptureFormat.FramerateRange.
static List<CaptureFormat.FramerateRange> convertFramerates(
Range<Integer>[] arrayRanges, int unitFactor) {
final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
for (Range<Integer> range : arrayRanges) {
ranges.add(new CaptureFormat.FramerateRange(
range.getLower() * unitFactor, range.getUpper() * unitFactor));
}
return ranges;
}
}
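// Common selection pattern (a sketch): prefer the Camera2 API only when every camera
// offers better-than-legacy support, otherwise fall back to Camera1.
//   CameraEnumerator enumerator = Camera2Enumerator.isSupported(context)
//       ? new Camera2Enumerator(context)
//       : new Camera1Enumerator(/* captureToTexture= */ true);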

@@ -0,0 +1,426 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.os.Handler;
import android.util.Range;
import android.view.Surface;
import androidx.annotation.Nullable;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
class Camera2Session implements CameraSession {
private static final String TAG = "Camera2Session";
private static final Histogram camera2StartTimeMsHistogram =
Histogram.createCounts("WebRTC.Android.Camera2.StartTimeMs", 1, 10000, 50);
private static final Histogram camera2StopTimeMsHistogram =
Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50);
private static final Histogram camera2ResolutionHistogram = Histogram.createEnumeration(
"WebRTC.Android.Camera2.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());
private enum SessionState { RUNNING, STOPPED }
private final Handler cameraThreadHandler;
private final CreateSessionCallback callback;
private final Events events;
private final Context applicationContext;
private final CameraManager cameraManager;
private final SurfaceTextureHelper surfaceTextureHelper;
private final String cameraId;
private final int width;
private final int height;
private final int framerate;
// Initialized at start
private CameraCharacteristics cameraCharacteristics;
private int cameraOrientation;
private boolean isCameraFrontFacing;
private int fpsUnitFactor;
private CaptureFormat captureFormat;
// Initialized when camera opens
@Nullable private CameraDevice cameraDevice;
@Nullable private Surface surface;
// Initialized when capture session is created
@Nullable private CameraCaptureSession captureSession;
// State
private SessionState state = SessionState.RUNNING;
private boolean firstFrameReported;
// Used only for stats. Only used on the camera thread.
private final long constructionTimeNs; // Construction time of this class.
private class CameraStateCallback extends CameraDevice.StateCallback {
private String getErrorDescription(int errorCode) {
switch (errorCode) {
case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
return "Camera device has encountered a fatal error.";
case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
return "Camera device could not be opened due to a device policy.";
case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
return "Camera device is in use already.";
case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
return "Camera service has encountered a fatal error.";
case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
return "Camera device could not be opened because"
+ " there are too many other open camera devices.";
default:
return "Unknown camera error: " + errorCode;
}
}
@Override
public void onDisconnected(CameraDevice camera) {
checkIsOnCameraThread();
final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
state = SessionState.STOPPED;
stopInternal();
if (startFailure) {
callback.onFailure(FailureType.DISCONNECTED, "Camera disconnected / evicted.");
} else {
events.onCameraDisconnected(Camera2Session.this);
}
}
@Override
public void onError(CameraDevice camera, int errorCode) {
checkIsOnCameraThread();
reportError(getErrorDescription(errorCode));
}
@Override
public void onOpened(CameraDevice camera) {
checkIsOnCameraThread();
Logging.d(TAG, "Camera opened.");
cameraDevice = camera;
surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
try {
camera.createCaptureSession(
Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler);
} catch (CameraAccessException e) {
reportError("Failed to create capture session. " + e);
return;
}
}
@Override
public void onClosed(CameraDevice camera) {
checkIsOnCameraThread();
Logging.d(TAG, "Camera device closed.");
events.onCameraClosed(Camera2Session.this);
}
}
private class CaptureSessionCallback extends CameraCaptureSession.StateCallback {
@Override
public void onConfigureFailed(CameraCaptureSession session) {
checkIsOnCameraThread();
session.close();
reportError("Failed to configure capture session.");
}
@Override
public void onConfigured(CameraCaptureSession session) {
checkIsOnCameraThread();
Logging.d(TAG, "Camera capture session configured.");
captureSession = session;
try {
/*
* The viable options for video capture requests are:
* TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality
* post-processing.
* TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording
* quality.
*/
final CaptureRequest.Builder captureRequestBuilder =
cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
// Set auto exposure fps range.
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
new Range<Integer>(captureFormat.framerate.min / fpsUnitFactor,
captureFormat.framerate.max / fpsUnitFactor));
captureRequestBuilder.set(
CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
chooseStabilizationMode(captureRequestBuilder);
chooseFocusMode(captureRequestBuilder);
captureRequestBuilder.addTarget(surface);
session.setRepeatingRequest(
captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
} catch (CameraAccessException e) {
reportError("Failed to start capture request. " + e);
return;
}
surfaceTextureHelper.startListening((VideoFrame frame) -> {
checkIsOnCameraThread();
if (state != SessionState.RUNNING) {
Logging.d(TAG, "Texture frame captured but camera is no longer running.");
return;
}
if (!firstFrameReported) {
firstFrameReported = true;
final int startTimeMs =
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
camera2StartTimeMsHistogram.addSample(startTimeMs);
}
// Undo the mirror that the OS "helps" us with.
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
// Also, undo camera orientation, we report it as rotation instead.
final VideoFrame modifiedFrame =
new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix(
(TextureBufferImpl) frame.getBuffer(),
/* mirror= */ isCameraFrontFacing,
/* rotation= */ -cameraOrientation),
/* rotation= */ getFrameOrientation(), frame.getTimestampNs());
events.onFrameCaptured(Camera2Session.this, modifiedFrame);
modifiedFrame.release();
});
Logging.d(TAG, "Camera device successfully started.");
callback.onDone(Camera2Session.this);
}
// Prefers optical stabilization over software stabilization if available. Only enables one of
// the stabilization modes at a time because having both enabled can cause strange results.
private void chooseStabilizationMode(CaptureRequest.Builder captureRequestBuilder) {
final int[] availableOpticalStabilization = cameraCharacteristics.get(
CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION);
if (availableOpticalStabilization != null) {
for (int mode : availableOpticalStabilization) {
if (mode == CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) {
captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON);
captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
Logging.d(TAG, "Using optical stabilization.");
return;
}
}
}
// If no optical mode is available, try software.
final int[] availableVideoStabilization = cameraCharacteristics.get(
CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
if (availableVideoStabilization != null) {
for (int mode : availableVideoStabilization) {
if (mode == CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON) {
captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON);
captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF);
Logging.d(TAG, "Using video stabilization.");
return;
}
}
}
Logging.d(TAG, "Stabilization not available.");
}
private void chooseFocusMode(CaptureRequest.Builder captureRequestBuilder) {
final int[] availableFocusModes =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
for (int mode : availableFocusModes) {
if (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) {
captureRequestBuilder.set(
CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
Logging.d(TAG, "Using continuous video auto-focus.");
return;
}
}
Logging.d(TAG, "Auto-focus is not available.");
}
}
private static class CameraCaptureCallback extends CameraCaptureSession.CaptureCallback {
@Override
public void onCaptureFailed(
CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
Logging.d(TAG, "Capture failed: " + failure);
}
}
public static void create(CreateSessionCallback callback, Events events,
Context applicationContext, CameraManager cameraManager,
SurfaceTextureHelper surfaceTextureHelper, String cameraId, int width, int height,
int framerate) {
new Camera2Session(callback, events, applicationContext, cameraManager, surfaceTextureHelper,
cameraId, width, height, framerate);
}
private Camera2Session(CreateSessionCallback callback, Events events, Context applicationContext,
CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper, String cameraId,
int width, int height, int framerate) {
Logging.d(TAG, "Create new camera2 session on camera " + cameraId);
constructionTimeNs = System.nanoTime();
this.cameraThreadHandler = new Handler();
this.callback = callback;
this.events = events;
this.applicationContext = applicationContext;
this.cameraManager = cameraManager;
this.surfaceTextureHelper = surfaceTextureHelper;
this.cameraId = cameraId;
this.width = width;
this.height = height;
this.framerate = framerate;
start();
}
private void start() {
checkIsOnCameraThread();
Logging.d(TAG, "start");
try {
cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
} catch (CameraAccessException | IllegalArgumentException e) {
reportError("getCameraCharacteristics(): " + e.getMessage());
return;
}
cameraOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
isCameraFrontFacing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)
== CameraMetadata.LENS_FACING_FRONT;
findCaptureFormat();
if (captureFormat == null) {
// findCaptureFormat reports an error already.
return;
}
openCamera();
}
private void findCaptureFormat() {
checkIsOnCameraThread();
Range<Integer>[] fpsRanges =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
List<CaptureFormat.FramerateRange> framerateRanges =
Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
Logging.d(TAG, "Available preview sizes: " + sizes);
Logging.d(TAG, "Available fps ranges: " + framerateRanges);
if (framerateRanges.isEmpty() || sizes.isEmpty()) {
reportError("No supported capture formats.");
return;
}
final CaptureFormat.FramerateRange bestFpsRange =
CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);
final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize);
captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
Logging.d(TAG, "Using capture format: " + captureFormat);
}
private void openCamera() {
checkIsOnCameraThread();
Logging.d(TAG, "Opening camera " + cameraId);
events.onCameraOpening();
try {
cameraManager.openCamera(cameraId, new CameraStateCallback(), cameraThreadHandler);
} catch (CameraAccessException | IllegalArgumentException | SecurityException e) {
reportError("Failed to open camera: " + e);
return;
}
}
@Override
public void stop() {
Logging.d(TAG, "Stop camera2 session on camera " + cameraId);
checkIsOnCameraThread();
if (state != SessionState.STOPPED) {
final long stopStartTime = System.nanoTime();
state = SessionState.STOPPED;
stopInternal();
final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
camera2StopTimeMsHistogram.addSample(stopTimeMs);
}
}
private void stopInternal() {
Logging.d(TAG, "Stop internal");
checkIsOnCameraThread();
surfaceTextureHelper.stopListening();
if (captureSession != null) {
captureSession.close();
captureSession = null;
}
if (surface != null) {
surface.release();
surface = null;
}
if (cameraDevice != null) {
cameraDevice.close();
cameraDevice = null;
}
Logging.d(TAG, "Stop done");
}
private void reportError(String error) {
checkIsOnCameraThread();
Logging.e(TAG, "Error: " + error);
final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
state = SessionState.STOPPED;
stopInternal();
if (startFailure) {
callback.onFailure(FailureType.ERROR, error);
} else {
events.onCameraError(this, error);
}
}
private int getFrameOrientation() {
int rotation = CameraSession.getDeviceOrientation(applicationContext);
if (!isCameraFrontFacing) {
rotation = 360 - rotation;
}
return (cameraOrientation + rotation) % 360;
}
private void checkIsOnCameraThread() {
if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
throw new IllegalStateException("Wrong thread");
}
}
}

@@ -0,0 +1,458 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
import android.os.Handler;
import android.os.Looper;
import androidx.annotation.Nullable;
import java.util.Arrays;
import java.util.List;
@SuppressWarnings("deprecation")
abstract class CameraCapturer implements CameraVideoCapturer {
enum SwitchState {
IDLE, // No switch requested.
PENDING, // Waiting for previous capture session to open.
IN_PROGRESS, // Waiting for new switched capture session to start.
}
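// PENDING exists because switchCamera() can arrive while a session is still opening;
// the request is parked until onDone/onFailure fires and is then re-issued, whereas
// IN_PROGRESS covers the window where the old session has been stopped and the newly
// requested one has not started yet.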
private static final String TAG = "CameraCapturer";
private static final int MAX_OPEN_CAMERA_ATTEMPTS = 3;
private static final int OPEN_CAMERA_DELAY_MS = 500;
private static final int OPEN_CAMERA_TIMEOUT = 10000;
private final CameraEnumerator cameraEnumerator;
private final CameraEventsHandler eventsHandler;
private final Handler uiThreadHandler;
@Nullable
private final CameraSession.CreateSessionCallback createSessionCallback =
new CameraSession.CreateSessionCallback() {
@Override
public void onDone(CameraSession session) {
checkIsOnCameraThread();
Logging.d(TAG, "Create session done. Switch state: " + switchState);
uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
synchronized (stateLock) {
capturerObserver.onCapturerStarted(true /* success */);
sessionOpening = false;
currentSession = session;
cameraStatistics = new CameraStatistics(surfaceHelper, eventsHandler);
firstFrameObserved = false;
stateLock.notifyAll();
if (switchState == SwitchState.IN_PROGRESS) {
switchState = SwitchState.IDLE;
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchDone(cameraEnumerator.isFrontFacing(cameraName));
switchEventsHandler = null;
}
} else if (switchState == SwitchState.PENDING) {
String selectedCameraName = pendingCameraName;
pendingCameraName = null;
switchState = SwitchState.IDLE;
switchCameraInternal(switchEventsHandler, selectedCameraName);
}
}
}
@Override
public void onFailure(CameraSession.FailureType failureType, String error) {
checkIsOnCameraThread();
uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
synchronized (stateLock) {
capturerObserver.onCapturerStarted(false /* success */);
openAttemptsRemaining--;
if (openAttemptsRemaining <= 0) {
Logging.w(TAG, "Opening camera failed, passing: " + error);
sessionOpening = false;
stateLock.notifyAll();
if (switchState != SwitchState.IDLE) {
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchError(error);
switchEventsHandler = null;
}
switchState = SwitchState.IDLE;
}
if (failureType == CameraSession.FailureType.DISCONNECTED) {
eventsHandler.onCameraDisconnected();
} else {
eventsHandler.onCameraError(error);
}
} else {
Logging.w(TAG, "Opening camera failed, retry: " + error);
createSessionInternal(OPEN_CAMERA_DELAY_MS);
}
}
}
};
@Nullable
private final CameraSession.Events cameraSessionEventsHandler = new CameraSession.Events() {
@Override
public void onCameraOpening() {
checkIsOnCameraThread();
synchronized (stateLock) {
if (currentSession != null) {
Logging.w(TAG, "onCameraOpening while session was open.");
return;
}
eventsHandler.onCameraOpening(cameraName);
}
}
@Override
public void onCameraError(CameraSession session, String error) {
checkIsOnCameraThread();
synchronized (stateLock) {
if (session != currentSession) {
Logging.w(TAG, "onCameraError from another session: " + error);
return;
}
eventsHandler.onCameraError(error);
stopCapture();
}
}
@Override
public void onCameraDisconnected(CameraSession session) {
checkIsOnCameraThread();
synchronized (stateLock) {
if (session != currentSession) {
Logging.w(TAG, "onCameraDisconnected from another session.");
return;
}
eventsHandler.onCameraDisconnected();
stopCapture();
}
}
@Override
public void onCameraClosed(CameraSession session) {
checkIsOnCameraThread();
synchronized (stateLock) {
if (session != currentSession && currentSession != null) {
Logging.d(TAG, "onCameraClosed from another session.");
return;
}
eventsHandler.onCameraClosed();
}
}
@Override
public void onFrameCaptured(CameraSession session, VideoFrame frame) {
checkIsOnCameraThread();
synchronized (stateLock) {
if (session != currentSession) {
Logging.w(TAG, "onFrameCaptured from another session.");
return;
}
if (!firstFrameObserved) {
eventsHandler.onFirstFrameAvailable();
firstFrameObserved = true;
}
cameraStatistics.addFrame();
capturerObserver.onFrameCaptured(frame);
}
}
};
private final Runnable openCameraTimeoutRunnable = new Runnable() {
@Override
public void run() {
eventsHandler.onCameraError("Camera failed to start within timeout.");
}
};
// Initialized on initialize
// -------------------------
private Handler cameraThreadHandler;
private Context applicationContext;
private org.webrtc.CapturerObserver capturerObserver;
private SurfaceTextureHelper surfaceHelper;
private final Object stateLock = new Object();
private boolean sessionOpening; /* guarded by stateLock */
@Nullable private CameraSession currentSession; /* guarded by stateLock */
private String cameraName; /* guarded by stateLock */
private String pendingCameraName; /* guarded by stateLock */
private int width; /* guarded by stateLock */
private int height; /* guarded by stateLock */
private int framerate; /* guarded by stateLock */
private int openAttemptsRemaining; /* guarded by stateLock */
private SwitchState switchState = SwitchState.IDLE; /* guarded by stateLock */
@Nullable private CameraSwitchHandler switchEventsHandler; /* guarded by stateLock */
// Valid from onDone call until stopCapture, otherwise null.
@Nullable private CameraStatistics cameraStatistics; /* guarded by stateLock */
private boolean firstFrameObserved; /* guarded by stateLock */
public CameraCapturer(String cameraName, @Nullable CameraEventsHandler eventsHandler,
CameraEnumerator cameraEnumerator) {
if (eventsHandler == null) {
eventsHandler = new CameraEventsHandler() {
@Override
public void onCameraError(String errorDescription) {}
@Override
public void onCameraDisconnected() {}
@Override
public void onCameraFreezed(String errorDescription) {}
@Override
public void onCameraOpening(String cameraName) {}
@Override
public void onFirstFrameAvailable() {}
@Override
public void onCameraClosed() {}
};
}
this.eventsHandler = eventsHandler;
this.cameraEnumerator = cameraEnumerator;
this.cameraName = cameraName;
List<String> deviceNames = Arrays.asList(cameraEnumerator.getDeviceNames());
uiThreadHandler = new Handler(Looper.getMainLooper());
if (deviceNames.isEmpty()) {
throw new RuntimeException("No cameras attached.");
}
if (!deviceNames.contains(this.cameraName)) {
throw new IllegalArgumentException(
"Camera name " + this.cameraName + " does not match any known camera device.");
}
}
@Override
public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
org.webrtc.CapturerObserver capturerObserver) {
this.applicationContext = applicationContext;
this.capturerObserver = capturerObserver;
this.surfaceHelper = surfaceTextureHelper;
this.cameraThreadHandler = surfaceTextureHelper.getHandler();
}
@Override
public void startCapture(int width, int height, int framerate) {
Logging.d(TAG, "startCapture: " + width + "x" + height + "@" + framerate);
if (applicationContext == null) {
throw new RuntimeException("CameraCapturer must be initialized before calling startCapture.");
}
synchronized (stateLock) {
if (sessionOpening || currentSession != null) {
Logging.w(TAG, "Session already open");
return;
}
this.width = width;
this.height = height;
this.framerate = framerate;
sessionOpening = true;
openAttemptsRemaining = MAX_OPEN_CAMERA_ATTEMPTS;
createSessionInternal(0);
}
}
private void createSessionInternal(int delayMs) {
uiThreadHandler.postDelayed(openCameraTimeoutRunnable, delayMs + OPEN_CAMERA_TIMEOUT);
cameraThreadHandler.postDelayed(new Runnable() {
@Override
public void run() {
createCameraSession(createSessionCallback, cameraSessionEventsHandler, applicationContext,
surfaceHelper, cameraName, width, height, framerate);
}
}, delayMs);
}
@Override
public void stopCapture() {
Logging.d(TAG, "Stop capture");
synchronized (stateLock) {
while (sessionOpening) {
Logging.d(TAG, "Stop capture: Waiting for session to open");
try {
stateLock.wait();
} catch (InterruptedException e) {
Logging.w(TAG, "Stop capture interrupted while waiting for the session to open.");
Thread.currentThread().interrupt();
return;
}
}
if (currentSession != null) {
Logging.d(TAG, "Stop capture: Nulling session");
cameraStatistics.release();
cameraStatistics = null;
final CameraSession oldSession = currentSession;
cameraThreadHandler.post(new Runnable() {
@Override
public void run() {
oldSession.stop();
}
});
currentSession = null;
capturerObserver.onCapturerStopped();
} else {
Logging.d(TAG, "Stop capture: No session open");
}
}
Logging.d(TAG, "Stop capture done");
}
@Override
public void changeCaptureFormat(int width, int height, int framerate) {
Logging.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
synchronized (stateLock) {
stopCapture();
startCapture(width, height, framerate);
}
}
@Override
public void dispose() {
Logging.d(TAG, "dispose");
stopCapture();
}
@Override
public void switchCamera(final CameraSwitchHandler switchEventsHandler) {
Logging.d(TAG, "switchCamera");
cameraThreadHandler.post(new Runnable() {
@Override
public void run() {
List<String> deviceNames = Arrays.asList(cameraEnumerator.getDeviceNames());
if (deviceNames.size() < 2) {
reportCameraSwitchError("No camera to switch to.", switchEventsHandler);
return;
}
int cameraNameIndex = deviceNames.indexOf(cameraName);
String cameraName = deviceNames.get((cameraNameIndex + 1) % deviceNames.size());
switchCameraInternal(switchEventsHandler, cameraName);
}
});
}
@Override
public void switchCamera(final CameraSwitchHandler switchEventsHandler, final String cameraName) {
Logging.d(TAG, "switchCamera");
cameraThreadHandler.post(new Runnable() {
@Override
public void run() {
switchCameraInternal(switchEventsHandler, cameraName);
}
});
}
@Override
public boolean isScreencast() {
return false;
}
public void printStackTrace() {
Thread cameraThread = null;
if (cameraThreadHandler != null) {
cameraThread = cameraThreadHandler.getLooper().getThread();
}
if (cameraThread != null) {
StackTraceElement[] cameraStackTrace = cameraThread.getStackTrace();
if (cameraStackTrace.length > 0) {
Logging.d(TAG, "CameraCapturer stack trace:");
for (StackTraceElement traceElem : cameraStackTrace) {
Logging.d(TAG, traceElem.toString());
}
}
}
}
private void reportCameraSwitchError(
String error, @Nullable CameraSwitchHandler switchEventsHandler) {
Logging.e(TAG, error);
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchError(error);
}
}
private void switchCameraInternal(
@Nullable final CameraSwitchHandler switchEventsHandler, final String selectedCameraName) {
Logging.d(TAG, "switchCamera internal");
List<String> deviceNames = Arrays.asList(cameraEnumerator.getDeviceNames());
if (!deviceNames.contains(selectedCameraName)) {
reportCameraSwitchError("Attempted to switch to unknown camera device " + selectedCameraName,
switchEventsHandler);
return;
}
synchronized (stateLock) {
if (switchState != SwitchState.IDLE) {
reportCameraSwitchError("Camera switch already in progress.", switchEventsHandler);
return;
}
if (!sessionOpening && currentSession == null) {
reportCameraSwitchError("switchCamera: camera is not running.", switchEventsHandler);
return;
}
this.switchEventsHandler = switchEventsHandler;
if (sessionOpening) {
switchState = SwitchState.PENDING;
pendingCameraName = selectedCameraName;
return;
} else {
switchState = SwitchState.IN_PROGRESS;
}
Logging.d(TAG, "switchCamera: Stopping session");
cameraStatistics.release();
cameraStatistics = null;
final CameraSession oldSession = currentSession;
cameraThreadHandler.post(new Runnable() {
@Override
public void run() {
oldSession.stop();
}
});
currentSession = null;
cameraName = selectedCameraName;
sessionOpening = true;
openAttemptsRemaining = 1;
createSessionInternal(0);
}
Logging.d(TAG, "switchCamera done");
}
private void checkIsOnCameraThread() {
if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
Logging.e(TAG, "Check is on camera thread failed.");
throw new RuntimeException("Not on camera thread.");
}
}
protected String getCameraName() {
synchronized (stateLock) {
return cameraName;
}
}
abstract protected void createCameraSession(
CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
Context applicationContext, SurfaceTextureHelper surfaceTextureHelper, String cameraName,
int width, int height, int framerate);
}
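// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the upstream file): driving the
// switch state machine above through the public CameraVideoCapturer API.
// It assumes a concrete CameraEnumerator (e.g. Camera2Enumerator from the
// stock SDK) has already been chosen; all names below are hypothetical.
class CameraCapturerSwitchSketch {
  static void switchToFirstFrontCamera(
      final CameraVideoCapturer capturer, CameraEnumerator enumerator) {
    for (String name : enumerator.getDeviceNames()) {
      if (enumerator.isFrontFacing(name)) {
        // Lands in switchCameraInternal(); if a session is still opening,
        // the request is parked in SwitchState.PENDING until onDone fires.
        capturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
          @Override
          public void onCameraSwitchDone(boolean isFrontCamera) {
            Logging.d("SwitchSketch", "Switch done, front=" + isFrontCamera);
          }
          @Override
          public void onCameraSwitchError(String error) {
            Logging.e("SwitchSketch", "Switch failed: " + error);
          }
        }, name);
        return;
      }
    }
  }
}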

View File

@ -0,0 +1,206 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static java.lang.Math.abs;
import android.graphics.ImageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
@SuppressWarnings("deprecation")
public class CameraEnumerationAndroid {
private final static String TAG = "CameraEnumerationAndroid";
static final ArrayList<Size> COMMON_RESOLUTIONS = new ArrayList<Size>(Arrays.asList(
// 0, Unknown resolution
new Size(160, 120), // 1, QQVGA
new Size(240, 160), // 2, HQVGA
new Size(320, 240), // 3, QVGA
new Size(400, 240), // 4, WQVGA
new Size(480, 320), // 5, HVGA
new Size(640, 360), // 6, nHD
new Size(640, 480), // 7, VGA
new Size(768, 480), // 8, WVGA
new Size(854, 480), // 9, FWVGA
new Size(800, 600), // 10, SVGA
new Size(960, 540), // 11, qHD
new Size(960, 640), // 12, DVGA
new Size(1024, 576), // 13, WSVGA
new Size(1024, 600), // 14, WSVGA
new Size(1280, 720), // 15, HD
new Size(1280, 1024), // 16, SXGA
new Size(1920, 1080), // 17, Full HD
new Size(1920, 1440), // 18, Full HD 4:3
new Size(2560, 1440), // 19, QHD
new Size(3840, 2160) // 20, UHD
));
public static class CaptureFormat {
// Class to represent a framerate range. The framerate varies because of lighting conditions.
// The values are multiplied by 1000, so 1000 represents one frame per second.
public static class FramerateRange {
public int min;
public int max;
public FramerateRange(int min, int max) {
this.min = min;
this.max = max;
}
@Override
public String toString() {
return "[" + (min / 1000.0f) + ":" + (max / 1000.0f) + "]";
}
@Override
public boolean equals(Object other) {
if (!(other instanceof FramerateRange)) {
return false;
}
final FramerateRange otherFramerate = (FramerateRange) other;
return min == otherFramerate.min && max == otherFramerate.max;
}
@Override
public int hashCode() {
// Use prime close to 2^16 to avoid collisions for normal values less than 2^16.
return 1 + 65537 * min + max;
}
}
public final int width;
public final int height;
public final FramerateRange framerate;
// TODO(hbos): If VideoCapturer.startCapture is updated to support other image formats then this
// needs to be updated and VideoCapturer.getSupportedFormats need to return CaptureFormats of
// all imageFormats.
public final int imageFormat = ImageFormat.NV21;
public CaptureFormat(int width, int height, int minFramerate, int maxFramerate) {
this.width = width;
this.height = height;
this.framerate = new FramerateRange(minFramerate, maxFramerate);
}
public CaptureFormat(int width, int height, FramerateRange framerate) {
this.width = width;
this.height = height;
this.framerate = framerate;
}
// Calculates the frame size of this capture format.
public int frameSize() {
return frameSize(width, height, imageFormat);
}
// Calculates the frame size of the specified image format. Currently only
// supporting ImageFormat.NV21.
// The size is width * height * number of bytes per pixel.
// http://developer.android.com/reference/android/hardware/Camera.html#addCallbackBuffer(byte[])
public static int frameSize(int width, int height, int imageFormat) {
if (imageFormat != ImageFormat.NV21) {
throw new UnsupportedOperationException("Don't know how to calculate "
+ "the frame size of non-NV21 image formats.");
}
return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
}
@Override
public String toString() {
return width + "x" + height + "@" + framerate;
}
@Override
public boolean equals(Object other) {
if (!(other instanceof CaptureFormat)) {
return false;
}
final CaptureFormat otherFormat = (CaptureFormat) other;
return width == otherFormat.width && height == otherFormat.height
&& framerate.equals(otherFormat.framerate);
}
@Override
public int hashCode() {
return 1 + (width * 65497 + height) * 251 + framerate.hashCode();
}
}
// Helper class for finding the closest supported format for the two functions below. It creates a
// comparator based on the difference to some requested parameters, where the element with the
// minimum difference is the element that is closest to the requested parameters.
private static abstract class ClosestComparator<T> implements Comparator<T> {
// Difference between supported and requested parameter.
abstract int diff(T supportedParameter);
@Override
public int compare(T t1, T t2) {
return diff(t1) - diff(t2);
}
}
// Prefer an fps range with an upper bound close to `framerate`. Also prefer an fps range with a low
// lower bound, to allow the framerate to fluctuate based on lighting conditions.
public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
return Collections.min(
supportedFramerates, new ClosestComparator<CaptureFormat.FramerateRange>() {
// Progressive penalty if the upper bound is further away than `MAX_FPS_DIFF_THRESHOLD`
// from requested.
private static final int MAX_FPS_DIFF_THRESHOLD = 5000;
private static final int MAX_FPS_LOW_DIFF_WEIGHT = 1;
private static final int MAX_FPS_HIGH_DIFF_WEIGHT = 3;
// Progressive penalty if the lower bound is bigger than `MIN_FPS_THRESHOLD`.
private static final int MIN_FPS_THRESHOLD = 8000;
private static final int MIN_FPS_LOW_VALUE_WEIGHT = 1;
private static final int MIN_FPS_HIGH_VALUE_WEIGHT = 4;
// Use one weight for small `value` less than `threshold`, and another weight above.
private int progressivePenalty(int value, int threshold, int lowWeight, int highWeight) {
return (value < threshold) ? value * lowWeight
: threshold * lowWeight + (value - threshold) * highWeight;
}
@Override
int diff(CaptureFormat.FramerateRange range) {
final int minFpsError = progressivePenalty(
range.min, MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
final int maxFpsError = progressivePenalty(Math.abs(requestedFps * 1000 - range.max),
MAX_FPS_DIFF_THRESHOLD, MAX_FPS_LOW_DIFF_WEIGHT, MAX_FPS_HIGH_DIFF_WEIGHT);
return minFpsError + maxFpsError;
}
});
}
public static Size getClosestSupportedSize(
List<Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
return Collections.min(supportedSizes, new ClosestComparator<Size>() {
@Override
int diff(Size size) {
return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
}
});
}
// Helper method for camera classes.
static void reportCameraResolution(Histogram histogram, Size resolution) {
int index = COMMON_RESOLUTIONS.indexOf(resolution);
// 0 is reserved for unknown resolution, so add 1.
// indexOf returns -1 for unknown resolutions so it becomes 0 automatically.
histogram.addSample(index + 1);
}
}
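// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the upstream file): the closest-match
// helper above picks the size minimizing the sum of absolute differences.
// For supported sizes {640x480, 1280x720} and a 1000x700 request,
// 1280x720 wins: |1000-1280| + |700-720| = 300 versus 360 + 220 = 580.
class ClosestSizeSketch {
  static Size pick() {
    List<Size> supported = Arrays.asList(new Size(640, 480), new Size(1280, 720));
    return CameraEnumerationAndroid.getClosestSupportedSize(supported, 1000, 700);
  }
}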

View File

@ -0,0 +1,25 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import java.util.List;
public interface CameraEnumerator {
public String[] getDeviceNames();
public boolean isFrontFacing(String deviceName);
public boolean isBackFacing(String deviceName);
public List<CaptureFormat> getSupportedFormats(String deviceName);
public CameraVideoCapturer createCapturer(
String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler);
}

View File

@ -0,0 +1,72 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
import android.graphics.Matrix;
import android.view.WindowManager;
import android.view.Surface;
interface CameraSession {
enum FailureType { ERROR, DISCONNECTED }
// Callbacks are fired on the camera thread.
interface CreateSessionCallback {
void onDone(CameraSession session);
void onFailure(FailureType failureType, String error);
}
// Events are fired on the camera thread.
interface Events {
void onCameraOpening();
void onCameraError(CameraSession session, String error);
void onCameraDisconnected(CameraSession session);
void onCameraClosed(CameraSession session);
void onFrameCaptured(CameraSession session, VideoFrame frame);
}
/**
* Stops the capture. Waits until no more calls to the capture observer will be made.
*/
void stop();
static int getDeviceOrientation(Context context) {
final WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
switch (wm.getDefaultDisplay().getRotation()) {
case Surface.ROTATION_90:
return 90;
case Surface.ROTATION_180:
return 180;
case Surface.ROTATION_270:
return 270;
case Surface.ROTATION_0:
default:
return 0;
}
}
static VideoFrame.TextureBuffer createTextureBufferWithModifiedTransformMatrix(
TextureBufferImpl buffer, boolean mirror, int rotation) {
final Matrix transformMatrix = new Matrix();
// Perform mirror and rotation around (0.5, 0.5) since that is the center of the texture.
transformMatrix.preTranslate(/* dx= */ 0.5f, /* dy= */ 0.5f);
if (mirror) {
transformMatrix.preScale(/* sx= */ -1f, /* sy= */ 1f);
}
transformMatrix.preRotate(rotation);
transformMatrix.preTranslate(/* dx= */ -0.5f, /* dy= */ -0.5f);
// The width and height are not affected by rotation since Camera2Session has set them to the
// value they should be after undoing the rotation.
return buffer.applyTransformMatrix(transformMatrix, buffer.getWidth(), buffer.getHeight());
}
}
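// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the upstream file): combining the static
// helpers above the way a concrete session might before delivering a frame.
// `buffer` and `context` are hypothetical inputs supplied by the caller.
class CameraSessionTransformSketch {
  static VideoFrame.TextureBuffer mirrorAndRotate(
      TextureBufferImpl buffer, Context context, boolean isFrontFacing) {
    // Undo the current display rotation, and mirror output from front-facing
    // cameras so the rendered image matches what users expect to see.
    final int rotation = CameraSession.getDeviceOrientation(context);
    return CameraSession.createTextureBufferWithModifiedTransformMatrix(
        buffer, /* mirror= */ isFrontFacing, rotation);
  }
}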

View File

@ -0,0 +1,172 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.media.MediaRecorder;
/**
* Base interface for camera1 and camera2 implementations. Extends VideoCapturer with a
* switchCamera() function. Also provides subinterfaces for handling camera events, and a helper
* class for detecting camera freezes.
*/
public interface CameraVideoCapturer extends VideoCapturer {
/**
* Camera events handler - can be used to be notified about camera events. The callbacks are
* executed from an arbitrary thread.
*/
public interface CameraEventsHandler {
// Camera error handler - invoked when camera cannot be opened
// or any camera exception happens on camera thread.
void onCameraError(String errorDescription);
// Called when camera is disconnected.
void onCameraDisconnected();
// Invoked when camera stops receiving frames.
void onCameraFreezed(String errorDescription);
// Callback invoked when camera is opening.
void onCameraOpening(String cameraName);
// Callback invoked when first camera frame is available after camera is started.
void onFirstFrameAvailable();
// Callback invoked when camera is closed.
void onCameraClosed();
}
/**
* Camera switch handler - one of these functions is invoked with the result of switchCamera().
* The callback may be called on an arbitrary thread.
*/
public interface CameraSwitchHandler {
// Invoked on success. `isFrontCamera` is true if the new camera is front facing.
void onCameraSwitchDone(boolean isFrontCamera);
// Invoked on failure, e.g. camera is stopped or only one camera available.
void onCameraSwitchError(String errorDescription);
}
/**
* Switch camera to the next valid camera id. This can only be called while the camera is running.
* This function can be called from any thread.
*/
void switchCamera(CameraSwitchHandler switchEventsHandler);
/**
* Switch camera to the specified camera id. This can only be called while the camera is running.
* This function can be called from any thread.
*/
void switchCamera(CameraSwitchHandler switchEventsHandler, String cameraName);
/**
* MediaRecorder add/remove handler - one of these functions is invoked with the result of
* an addMediaRecorderToCamera() or removeMediaRecorderFromCamera() call.
* The callback may be called on an arbitrary thread.
*/
@Deprecated
public interface MediaRecorderHandler {
// Invoked on success.
void onMediaRecorderSuccess();
// Invoked on failure, e.g. camera is stopped or any exception happens.
void onMediaRecorderError(String errorDescription);
}
/**
* Add MediaRecorder to camera pipeline. This can only be called while the camera is running.
* Once a MediaRecorder is added to the camera pipeline, camera switching is not allowed.
* This function can be called from any thread.
*/
@Deprecated
default void addMediaRecorderToCamera(
MediaRecorder mediaRecorder, MediaRecorderHandler resultHandler) {
throw new UnsupportedOperationException("Deprecated and not implemented.");
}
/**
* Remove MediaRecorder from camera pipeline. This can only be called while the camera is running.
* This function can be called from any thread.
*/
@Deprecated
default void removeMediaRecorderFromCamera(MediaRecorderHandler resultHandler) {
throw new UnsupportedOperationException("Deprecated and not implemented.");
}
/**
* Helper class to log framerate and detect if the camera freezes. It will run periodic callbacks
* on the SurfaceTextureHelper thread passed in the ctor, and should only be operated from that
* thread.
*/
public static class CameraStatistics {
private final static String TAG = "CameraStatistics";
private final static int CAMERA_OBSERVER_PERIOD_MS = 2000;
private final static int CAMERA_FREEZE_REPORT_TIMEOUT_MS = 4000;
private final SurfaceTextureHelper surfaceTextureHelper;
private final CameraEventsHandler eventsHandler;
private int frameCount;
private int freezePeriodCount;
// Camera observer - monitors camera framerate. Observer is executed on camera thread.
private final Runnable cameraObserver = new Runnable() {
@Override
public void run() {
final int cameraFps = Math.round(frameCount * 1000.0f / CAMERA_OBSERVER_PERIOD_MS);
Logging.d(TAG, "Camera fps: " + cameraFps + ".");
if (frameCount == 0) {
++freezePeriodCount;
if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount >= CAMERA_FREEZE_REPORT_TIMEOUT_MS
&& eventsHandler != null) {
Logging.e(TAG, "Camera freezed.");
if (surfaceTextureHelper.isTextureInUse()) {
// This can only happen if we are capturing to textures.
eventsHandler.onCameraFreezed("Camera failure. Client must return video buffers.");
} else {
eventsHandler.onCameraFreezed("Camera failure.");
}
return;
}
} else {
freezePeriodCount = 0;
}
frameCount = 0;
surfaceTextureHelper.getHandler().postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
}
};
public CameraStatistics(
SurfaceTextureHelper surfaceTextureHelper, CameraEventsHandler eventsHandler) {
if (surfaceTextureHelper == null) {
throw new IllegalArgumentException("SurfaceTextureHelper is null");
}
this.surfaceTextureHelper = surfaceTextureHelper;
this.eventsHandler = eventsHandler;
this.frameCount = 0;
this.freezePeriodCount = 0;
surfaceTextureHelper.getHandler().postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS);
}
private void checkThread() {
if (Thread.currentThread() != surfaceTextureHelper.getHandler().getLooper().getThread()) {
throw new IllegalStateException("Wrong thread");
}
}
public void addFrame() {
checkThread();
++frameCount;
}
public void release() {
surfaceTextureHelper.getHandler().removeCallbacks(cameraObserver);
}
}
}
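// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the upstream file): a minimal events
// handler that surfaces the freeze reports produced by CameraStatistics
// above (a report fires after roughly 4 s without a frame).
class LoggingCameraEventsHandler implements CameraVideoCapturer.CameraEventsHandler {
  private static final String TAG = "LoggingCameraEvents";
  @Override
  public void onCameraError(String errorDescription) {
    Logging.e(TAG, "error: " + errorDescription);
  }
  @Override
  public void onCameraDisconnected() {
    Logging.w(TAG, "disconnected");
  }
  @Override
  public void onCameraFreezed(String errorDescription) {
    Logging.w(TAG, "freeze: " + errorDescription);
  }
  @Override
  public void onCameraOpening(String cameraName) {
    Logging.d(TAG, "opening " + cameraName);
  }
  @Override
  public void onFirstFrameAvailable() {
    Logging.d(TAG, "first frame");
  }
  @Override
  public void onCameraClosed() {
    Logging.d(TAG, "closed");
  }
}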

View File

@ -0,0 +1,39 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Representation of a change in selected ICE candidate pair.
* {@code CandidatePairChangeEvent} in the C++ API.
*/
public final class CandidatePairChangeEvent {
public final IceCandidate local;
public final IceCandidate remote;
public final int lastDataReceivedMs;
public final String reason;
/**
* An estimate from the ICE stack on how long it was disconnected before
* changing to the new candidate pair in this event.
* The first time a candidate pair is signaled, the value will be 0.
*/
public final int estimatedDisconnectedTimeMs;
@CalledByNative
CandidatePairChangeEvent(IceCandidate local, IceCandidate remote, int lastDataReceivedMs,
String reason, int estimatedDisconnectedTimeMs) {
this.local = local;
this.remote = remote;
this.lastDataReceivedMs = lastDataReceivedMs;
this.reason = reason;
this.estimatedDisconnectedTimeMs = estimatedDisconnectedTimeMs;
}
}

View File

@ -0,0 +1,27 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Interface for observing a capturer. Passed to {@link VideoCapturer#initialize}. Provided by
* {@link VideoSource#getCapturerObserver}.
*
* All callbacks must be executed on a single thread.
*/
public interface CapturerObserver {
/** Notifies whether the capturer has been started successfully. */
void onCapturerStarted(boolean success);
/** Notify that the capturer has been stopped. */
void onCapturerStopped();
/** Delivers a captured frame. */
void onFrameCaptured(VideoFrame frame);
}
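// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the upstream file): a trivial observer
// that counts delivered frames. In normal use the observer is obtained from
// VideoSource.getCapturerObserver() rather than implemented by hand.
class FrameCountingObserver implements CapturerObserver {
  // No locking is needed only because the interface requires all callbacks
  // to arrive on a single thread.
  private int frameCount;
  @Override
  public void onCapturerStarted(boolean success) {
    Logging.d("FrameCountingObserver", "started: " + success);
  }
  @Override
  public void onCapturerStopped() {
    Logging.d("FrameCountingObserver", "stopped after " + frameCount + " frames");
  }
  @Override
  public void onFrameCaptured(VideoFrame frame) {
    frameCount++;
  }
}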

View File

@ -0,0 +1,45 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
/**
* Class for storing the application context and retrieving it in a static context. Similar to
* org.chromium.base.ContextUtils.
*/
public class ContextUtils {
private static final String TAG = "ContextUtils";
private static Context applicationContext;
/**
* Stores the application context that will be returned by getApplicationContext. This is called
* by PeerConnectionFactory.initialize. The application context must be set before creating
* a PeerConnectionFactory and must not be modified while it is alive.
*/
public static void initialize(Context applicationContext) {
if (applicationContext == null) {
throw new IllegalArgumentException(
"Application context cannot be null for ContextUtils.initialize.");
}
ContextUtils.applicationContext = applicationContext;
}
/**
* Returns the stored application context.
*
* @deprecated crbug.com/webrtc/8937
*/
@Deprecated
public static Context getApplicationContext() {
return applicationContext;
}
}
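// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the upstream file): in practice the
// context is installed indirectly through PeerConnectionFactory.initialize();
// the direct call is shown only to make the ordering contract explicit.
class ContextUtilsSketch {
  static void install(Context applicationContext) {
    // Must run before any PeerConnectionFactory is created, and the stored
    // context must not change while the factory is alive.
    ContextUtils.initialize(applicationContext);
  }
}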

View File

@ -0,0 +1,145 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* CryptoOptions defines advanced cryptographic settings for native WebRTC.
* These settings must be passed into RTCConfiguration. WebRTC is secure by
* default and you should not need to set any of these options unless you are
* specifically looking for an additional crypto feature such as AES_GCM
* support. This class is the Java binding of native api/crypto/cryptooptions.h
*/
public final class CryptoOptions {
/**
* SRTP Related Peer Connection Options.
*/
public final class Srtp {
/**
* Enable GCM crypto suites from RFC 7714 for SRTP. GCM will only be used
* if both sides enable it.
*/
private final boolean enableGcmCryptoSuites;
/**
* If set to true, the (potentially insecure) crypto cipher
* kSrtpAes128CmSha1_32 will be included in the list of supported ciphers
* during negotiation. It will only be used if both peers support it and no
* other ciphers get preferred.
*/
private final boolean enableAes128Sha1_32CryptoCipher;
/**
* If set to true, encrypted RTP header extensions as defined in RFC 6904
* will be negotiated. They will only be used if both peers support them.
*/
private final boolean enableEncryptedRtpHeaderExtensions;
private Srtp(boolean enableGcmCryptoSuites, boolean enableAes128Sha1_32CryptoCipher,
boolean enableEncryptedRtpHeaderExtensions) {
this.enableGcmCryptoSuites = enableGcmCryptoSuites;
this.enableAes128Sha1_32CryptoCipher = enableAes128Sha1_32CryptoCipher;
this.enableEncryptedRtpHeaderExtensions = enableEncryptedRtpHeaderExtensions;
}
@CalledByNative("Srtp")
public boolean getEnableGcmCryptoSuites() {
return enableGcmCryptoSuites;
}
@CalledByNative("Srtp")
public boolean getEnableAes128Sha1_32CryptoCipher() {
return enableAes128Sha1_32CryptoCipher;
}
@CalledByNative("Srtp")
public boolean getEnableEncryptedRtpHeaderExtensions() {
return enableEncryptedRtpHeaderExtensions;
}
}
/**
* Options to be used when the FrameEncryptor / FrameDecryptor APIs are used.
*/
public final class SFrame {
/**
* If set, all RtpSenders must have a FrameEncryptor attached to them before
* they are allowed to send packets, and all RtpReceivers must have a
* FrameDecryptor attached to them before they are able to receive packets.
*/
private final boolean requireFrameEncryption;
private SFrame(boolean requireFrameEncryption) {
this.requireFrameEncryption = requireFrameEncryption;
}
@CalledByNative("SFrame")
public boolean getRequireFrameEncryption() {
return requireFrameEncryption;
}
}
private final Srtp srtp;
private final SFrame sframe;
private CryptoOptions(boolean enableGcmCryptoSuites, boolean enableAes128Sha1_32CryptoCipher,
boolean enableEncryptedRtpHeaderExtensions, boolean requireFrameEncryption) {
this.srtp = new Srtp(
enableGcmCryptoSuites, enableAes128Sha1_32CryptoCipher, enableEncryptedRtpHeaderExtensions);
this.sframe = new SFrame(requireFrameEncryption);
}
public static Builder builder() {
return new Builder();
}
@CalledByNative
public Srtp getSrtp() {
return srtp;
}
@CalledByNative
public SFrame getSFrame() {
return sframe;
}
public static class Builder {
private boolean enableGcmCryptoSuites;
private boolean enableAes128Sha1_32CryptoCipher;
private boolean enableEncryptedRtpHeaderExtensions;
private boolean requireFrameEncryption;
private Builder() {}
public Builder setEnableGcmCryptoSuites(boolean enableGcmCryptoSuites) {
this.enableGcmCryptoSuites = enableGcmCryptoSuites;
return this;
}
public Builder setEnableAes128Sha1_32CryptoCipher(boolean enableAes128Sha1_32CryptoCipher) {
this.enableAes128Sha1_32CryptoCipher = enableAes128Sha1_32CryptoCipher;
return this;
}
public Builder setEnableEncryptedRtpHeaderExtensions(
boolean enableEncryptedRtpHeaderExtensions) {
this.enableEncryptedRtpHeaderExtensions = enableEncryptedRtpHeaderExtensions;
return this;
}
public Builder setRequireFrameEncryption(boolean requireFrameEncryption) {
this.requireFrameEncryption = requireFrameEncryption;
return this;
}
public CryptoOptions createCryptoOptions() {
return new CryptoOptions(enableGcmCryptoSuites, enableAes128Sha1_32CryptoCipher,
enableEncryptedRtpHeaderExtensions, requireFrameEncryption);
}
}
}
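// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the upstream file): enabling the RFC 7714
// AES-GCM suites via the builder above. The resulting object is intended for
// the cryptoOptions field of PeerConnection.RTCConfiguration (field name
// assumed; verify against the PeerConnection bindings in use).
class CryptoOptionsSketch {
  static CryptoOptions gcmEnabled() {
    return CryptoOptions.builder()
        .setEnableGcmCryptoSuites(true)   // used only if both peers enable it
        .setRequireFrameEncryption(false) // no SFrame requirement
        .createCryptoOptions();
  }
}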

View File

@ -0,0 +1,196 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.nio.ByteBuffer;
/** Java wrapper for a C++ DataChannelInterface. */
public class DataChannel {
/** Java wrapper for WebIDL RTCDataChannel. */
public static class Init {
public boolean ordered = true;
// Optional unsigned short in WebIDL, -1 means unspecified.
public int maxRetransmitTimeMs = -1;
// Optional unsigned short in WebIDL, -1 means unspecified.
public int maxRetransmits = -1;
public String protocol = "";
public boolean negotiated;
// Optional unsigned short in WebIDL, -1 means unspecified.
public int id = -1;
@CalledByNative("Init")
boolean getOrdered() {
return ordered;
}
@CalledByNative("Init")
int getMaxRetransmitTimeMs() {
return maxRetransmitTimeMs;
}
@CalledByNative("Init")
int getMaxRetransmits() {
return maxRetransmits;
}
@CalledByNative("Init")
String getProtocol() {
return protocol;
}
@CalledByNative("Init")
boolean getNegotiated() {
return negotiated;
}
@CalledByNative("Init")
int getId() {
return id;
}
}
/** Java version of C++ DataBuffer. The atom of data in a DataChannel. */
public static class Buffer {
/** The underlying data. */
public final ByteBuffer data;
/**
* Indicates whether `data` contains UTF-8 text or "binary data"
* (i.e. anything else).
*/
public final boolean binary;
@CalledByNative("Buffer")
public Buffer(ByteBuffer data, boolean binary) {
this.data = data;
this.binary = binary;
}
}
/** Java version of C++ DataChannelObserver. */
public interface Observer {
/** The data channel's bufferedAmount has changed. */
@CalledByNative("Observer") public void onBufferedAmountChange(long previousAmount);
/** The data channel state has changed. */
@CalledByNative("Observer") public void onStateChange();
/**
* A data buffer was successfully received. NOTE: `buffer.data` will be
* freed once this function returns so callers who want to use the data
* asynchronously must make sure to copy it first.
*/
@CalledByNative("Observer") public void onMessage(Buffer buffer);
}
/** Keep in sync with DataChannelInterface::DataState. */
public enum State {
CONNECTING,
OPEN,
CLOSING,
CLOSED;
@CalledByNative("State")
static State fromNativeIndex(int nativeIndex) {
return values()[nativeIndex];
}
}
private long nativeDataChannel;
private long nativeObserver;
@CalledByNative
public DataChannel(long nativeDataChannel) {
this.nativeDataChannel = nativeDataChannel;
}
/** Register `observer`, replacing any previously-registered observer. */
public void registerObserver(Observer observer) {
checkDataChannelExists();
if (nativeObserver != 0) {
nativeUnregisterObserver(nativeObserver);
}
nativeObserver = nativeRegisterObserver(observer);
}
/** Unregister the (only) observer. */
public void unregisterObserver() {
checkDataChannelExists();
nativeUnregisterObserver(nativeObserver);
nativeObserver = 0;
}
public String label() {
checkDataChannelExists();
return nativeLabel();
}
public int id() {
checkDataChannelExists();
return nativeId();
}
public State state() {
checkDataChannelExists();
return nativeState();
}
/**
* Return the number of bytes of application data (UTF-8 text and binary data)
* that have been queued using SendBuffer but have not yet been transmitted
* to the network.
*/
public long bufferedAmount() {
checkDataChannelExists();
return nativeBufferedAmount();
}
/** Close the channel. */
public void close() {
checkDataChannelExists();
nativeClose();
}
/** Send `buffer` to the remote peer; return success. */
public boolean send(Buffer buffer) {
checkDataChannelExists();
// TODO(fischman): this could be cleverer about avoiding copies if the
// ByteBuffer is direct and/or is backed by an array.
byte[] data = new byte[buffer.data.remaining()];
buffer.data.get(data);
return nativeSend(data, buffer.binary);
}
/** Dispose of native resources attached to this channel. */
public void dispose() {
checkDataChannelExists();
JniCommon.nativeReleaseRef(nativeDataChannel);
nativeDataChannel = 0;
}
@CalledByNative
long getNativeDataChannel() {
return nativeDataChannel;
}
private void checkDataChannelExists() {
if (nativeDataChannel == 0) {
throw new IllegalStateException("DataChannel has been disposed.");
}
}
private native long nativeRegisterObserver(Observer observer);
private native void nativeUnregisterObserver(long observer);
private native String nativeLabel();
private native int nativeId();
private native State nativeState();
private native long nativeBufferedAmount();
private native void nativeClose();
private native boolean nativeSend(byte[] data, boolean binary);
};
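// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the upstream file): trading reliability
// for latency with Init, then sending a UTF-8 text message. `channel` is
// assumed to come from PeerConnection.createDataChannel(label, init).
class DataChannelSketch {
  static DataChannel.Init lossyUnorderedInit() {
    DataChannel.Init init = new DataChannel.Init();
    init.ordered = false;    // allow out-of-order delivery
    init.maxRetransmits = 0; // fire-and-forget, UDP-like semantics
    return init;
  }
  static boolean sendText(DataChannel channel, String text) {
    ByteBuffer data =
        ByteBuffer.wrap(text.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    return channel.send(new DataChannel.Buffer(data, /* binary= */ false));
  }
}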

View File

@ -0,0 +1,20 @@
/*
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
public class Dav1dDecoder extends WrappedNativeVideoDecoder {
@Override
public long createNativeVideoDecoder() {
return nativeCreateDecoder();
}
static native long nativeCreateDecoder();
}

View File

@ -0,0 +1,69 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import androidx.annotation.Nullable;
import java.util.Arrays;
import java.util.LinkedHashSet;
/**
* Helper class that combines HW and SW decoders.
*/
public class DefaultVideoDecoderFactory implements VideoDecoderFactory {
private final VideoDecoderFactory hardwareVideoDecoderFactory;
private final VideoDecoderFactory softwareVideoDecoderFactory = new SoftwareVideoDecoderFactory();
private final @Nullable VideoDecoderFactory platformSoftwareVideoDecoderFactory;
/**
* Create decoder factory using default hardware decoder factory.
*/
public DefaultVideoDecoderFactory(@Nullable EglBase.Context eglContext) {
this.hardwareVideoDecoderFactory = new HardwareVideoDecoderFactory(eglContext);
this.platformSoftwareVideoDecoderFactory = new PlatformSoftwareVideoDecoderFactory(eglContext);
}
/**
* Create decoder factory using explicit hardware decoder factory.
*/
DefaultVideoDecoderFactory(VideoDecoderFactory hardwareVideoDecoderFactory) {
this.hardwareVideoDecoderFactory = hardwareVideoDecoderFactory;
this.platformSoftwareVideoDecoderFactory = null;
}
@Override
public @Nullable VideoDecoder createDecoder(VideoCodecInfo codecType) {
VideoDecoder softwareDecoder = softwareVideoDecoderFactory.createDecoder(codecType);
final VideoDecoder hardwareDecoder = hardwareVideoDecoderFactory.createDecoder(codecType);
if (softwareDecoder == null && platformSoftwareVideoDecoderFactory != null) {
softwareDecoder = platformSoftwareVideoDecoderFactory.createDecoder(codecType);
}
if (hardwareDecoder != null && softwareDecoder != null) {
// Both hardware and software supported, wrap it in a software fallback
return new VideoDecoderFallback(
/* fallback= */ softwareDecoder, /* primary= */ hardwareDecoder);
}
return hardwareDecoder != null ? hardwareDecoder : softwareDecoder;
}
@Override
public VideoCodecInfo[] getSupportedCodecs() {
LinkedHashSet<VideoCodecInfo> supportedCodecInfos = new LinkedHashSet<VideoCodecInfo>();
supportedCodecInfos.addAll(Arrays.asList(softwareVideoDecoderFactory.getSupportedCodecs()));
supportedCodecInfos.addAll(Arrays.asList(hardwareVideoDecoderFactory.getSupportedCodecs()));
if (platformSoftwareVideoDecoderFactory != null) {
supportedCodecInfos.addAll(
Arrays.asList(platformSoftwareVideoDecoderFactory.getSupportedCodecs()));
}
return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]);
}
}
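// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the upstream file): constructing the
// combined factory for use when building a PeerConnectionFactory. `eglBase`
// is a hypothetical shared EglBase created elsewhere; getEglBaseContext() is
// the accessor the stock SDK exposes for sharing its context.
class DecoderFactorySketch {
  static VideoDecoderFactory hardwareWithFallback(EglBase eglBase) {
    // Hardware decoding where available, wrapped with software fallback.
    return new DefaultVideoDecoderFactory(eglBase.getEglBaseContext());
  }
}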

View File

@ -0,0 +1,56 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import androidx.annotation.Nullable;
import java.util.Arrays;
import java.util.LinkedHashSet;
/** Helper class that combines HW and SW encoders. */
public class DefaultVideoEncoderFactory implements VideoEncoderFactory {
private final VideoEncoderFactory hardwareVideoEncoderFactory;
private final VideoEncoderFactory softwareVideoEncoderFactory = new SoftwareVideoEncoderFactory();
/** Create encoder factory using default hardware encoder factory. */
public DefaultVideoEncoderFactory(
EglBase.Context eglContext, boolean enableIntelVp8Encoder, boolean enableH264HighProfile) {
this.hardwareVideoEncoderFactory =
new HardwareVideoEncoderFactory(eglContext, enableIntelVp8Encoder, enableH264HighProfile);
}
/** Create encoder factory using explicit hardware encoder factory. */
DefaultVideoEncoderFactory(VideoEncoderFactory hardwareVideoEncoderFactory) {
this.hardwareVideoEncoderFactory = hardwareVideoEncoderFactory;
}
@Nullable
@Override
public VideoEncoder createEncoder(VideoCodecInfo info) {
final VideoEncoder softwareEncoder = softwareVideoEncoderFactory.createEncoder(info);
final VideoEncoder hardwareEncoder = hardwareVideoEncoderFactory.createEncoder(info);
if (hardwareEncoder != null && softwareEncoder != null) {
// Both hardware and software supported, wrap it in a software fallback
return new VideoEncoderFallback(
/* fallback= */ softwareEncoder, /* primary= */ hardwareEncoder);
}
return hardwareEncoder != null ? hardwareEncoder : softwareEncoder;
}
@Override
public VideoCodecInfo[] getSupportedCodecs() {
LinkedHashSet<VideoCodecInfo> supportedCodecInfos = new LinkedHashSet<VideoCodecInfo>();
supportedCodecInfos.addAll(Arrays.asList(softwareVideoEncoderFactory.getSupportedCodecs()));
supportedCodecInfos.addAll(Arrays.asList(hardwareVideoEncoderFactory.getSupportedCodecs()));
return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]);
}
}

View File

@ -0,0 +1,96 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Java wrapper for a C++ DtmfSenderInterface. */
public class DtmfSender {
private long nativeDtmfSender;
public DtmfSender(long nativeDtmfSender) {
this.nativeDtmfSender = nativeDtmfSender;
}
/**
* @return true if this DtmfSender is capable of sending DTMF. Otherwise false.
*/
public boolean canInsertDtmf() {
checkDtmfSenderExists();
return nativeCanInsertDtmf(nativeDtmfSender);
}
/**
* Queues a task that sends the provided DTMF tones.
* <p>
* If insertDtmf is called on the same object while an existing task for this
* object to generate DTMF is still running, the previous task is canceled.
*
* @param tones This parameter is treated as a series of characters. The characters 0
* through 9, A through D, #, and * generate the associated DTMF tones. The
* characters a to d are equivalent to A to D. The character ',' indicates a
* delay of 2 seconds before processing the next character in the tones
* parameter. Unrecognized characters are ignored.
* @param duration Indicates the duration in ms to use for each character passed in the tones
* parameter. The duration cannot be more than 6000 or less than 70.
* @param interToneGap Indicates the gap between tones in ms. Must be at least 50 ms but should be
* as short as possible.
* @return true on success and false on failure.
*/
public boolean insertDtmf(String tones, int duration, int interToneGap) {
checkDtmfSenderExists();
return nativeInsertDtmf(nativeDtmfSender, tones, duration, interToneGap);
}
/**
* @return The tones remaining to be played out
*/
public String tones() {
checkDtmfSenderExists();
return nativeTones(nativeDtmfSender);
}
/**
* @return The current tone duration value in ms. This value will be the value last set via the
* insertDtmf() method, or the default value of 100 ms if insertDtmf() was never called.
*/
public int duration() {
checkDtmfSenderExists();
return nativeDuration(nativeDtmfSender);
}
/**
* @return The current value of the between-tone gap in ms. This value will be the value last set
* via the insertDtmf() method, or the default value of 50 ms if insertDtmf() was never
* called.
*/
public int interToneGap() {
checkDtmfSenderExists();
return nativeInterToneGap(nativeDtmfSender);
}
public void dispose() {
checkDtmfSenderExists();
JniCommon.nativeReleaseRef(nativeDtmfSender);
nativeDtmfSender = 0;
}
private void checkDtmfSenderExists() {
if (nativeDtmfSender == 0) {
throw new IllegalStateException("DtmfSender has been disposed.");
}
}
private static native boolean nativeCanInsertDtmf(long dtmfSender);
private static native boolean nativeInsertDtmf(
long dtmfSender, String tones, int duration, int interToneGap);
private static native String nativeTones(long dtmfSender);
private static native int nativeDuration(long dtmfSender);
private static native int nativeInterToneGap(long dtmfSender);
};
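// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the upstream file): queueing the tones
// "1,2#" with a 200 ms tone duration and a 70 ms inter-tone gap. Per the
// insertDtmf() contract above, the ',' inserts a fixed 2 s pause and the
// duration must stay within [70, 6000] ms.
class DtmfSenderSketch {
  static boolean dialExtension(DtmfSender sender) {
    if (!sender.canInsertDtmf()) {
      return false; // e.g. no audio track negotiated for this sender
    }
    return sender.insertDtmf("1,2#", /* duration= */ 200, /* interToneGap= */ 70);
  }
}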

View File

@ -0,0 +1,98 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* BitrateAdjuster that tracks the bandwidth produced by an encoder and dynamically adjusts the
* bitrate. Used for hardware codecs that pay attention to framerate but still deviate from the
* target bitrate by unacceptable margins.
*/
class DynamicBitrateAdjuster extends BaseBitrateAdjuster {
// Change the bitrate at most once every three seconds.
private static final double BITRATE_ADJUSTMENT_SEC = 3.0;
// Maximum bitrate adjustment scale - no more than 4 times.
private static final double BITRATE_ADJUSTMENT_MAX_SCALE = 4;
// Amount of adjustment steps to reach maximum scale.
private static final int BITRATE_ADJUSTMENT_STEPS = 20;
private static final double BITS_PER_BYTE = 8.0;
// How far the codec has deviated above (or below) the target bitrate (tracked in bytes).
private double deviationBytes;
private double timeSinceLastAdjustmentMs;
private int bitrateAdjustmentScaleExp;
@Override
public void setTargets(int targetBitrateBps, double targetFramerateFps) {
if (this.targetBitrateBps > 0 && targetBitrateBps < this.targetBitrateBps) {
// Rescale the accumulator level if the accumulator max decreases
deviationBytes = deviationBytes * targetBitrateBps / this.targetBitrateBps;
}
super.setTargets(targetBitrateBps, targetFramerateFps);
}
@Override
public void reportEncodedFrame(int size) {
if (targetFramerateFps == 0) {
return;
}
// Accumulate the difference between actual and expected frame sizes.
double expectedBytesPerFrame = (targetBitrateBps / BITS_PER_BYTE) / targetFramerateFps;
deviationBytes += (size - expectedBytesPerFrame);
timeSinceLastAdjustmentMs += 1000.0 / targetFramerateFps;
// Adjust the bitrate when the encoder accumulates one second's worth of data in excess or
// shortfall of the target.
double deviationThresholdBytes = targetBitrateBps / BITS_PER_BYTE;
// Cap the deviation, i.e., don't let it grow beyond some level to avoid using too old data for
// bitrate adjustment. This also prevents taking more than 3 "steps" in a given 3-second cycle.
double deviationCap = BITRATE_ADJUSTMENT_SEC * deviationThresholdBytes;
deviationBytes = Math.min(deviationBytes, deviationCap);
deviationBytes = Math.max(deviationBytes, -deviationCap);
// Do bitrate adjustment every 3 seconds if actual encoder bitrate deviates too much
// from the target value.
if (timeSinceLastAdjustmentMs <= 1000 * BITRATE_ADJUSTMENT_SEC) {
return;
}
if (deviationBytes > deviationThresholdBytes) {
// Encoder generates too high bitrate - need to reduce the scale.
int bitrateAdjustmentInc = (int) (deviationBytes / deviationThresholdBytes + 0.5);
bitrateAdjustmentScaleExp -= bitrateAdjustmentInc;
// Don't let the adjustment scale drop below -BITRATE_ADJUSTMENT_STEPS.
// This sets a minimum exponent of -1 (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS).
bitrateAdjustmentScaleExp = Math.max(bitrateAdjustmentScaleExp, -BITRATE_ADJUSTMENT_STEPS);
deviationBytes = deviationThresholdBytes;
} else if (deviationBytes < -deviationThresholdBytes) {
// Encoder generates too low bitrate - need to increase the scale.
int bitrateAdjustmentInc = (int) (-deviationBytes / deviationThresholdBytes + 0.5);
bitrateAdjustmentScaleExp += bitrateAdjustmentInc;
// Don't let the adjustment scale exceed BITRATE_ADJUSTMENT_STEPS.
// This sets a maximum exponent of 1 (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS).
bitrateAdjustmentScaleExp = Math.min(bitrateAdjustmentScaleExp, BITRATE_ADJUSTMENT_STEPS);
deviationBytes = -deviationThresholdBytes;
}
timeSinceLastAdjustmentMs = 0;
}
private double getBitrateAdjustmentScale() {
return Math.pow(BITRATE_ADJUSTMENT_MAX_SCALE,
(double) bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS);
}
@Override
public int getAdjustedBitrateBps() {
return (int) (targetBitrateBps * getBitrateAdjustmentScale());
}
}
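// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the upstream file): the scale applied by
// getAdjustedBitrateBps() is 4^(exp / 20). For a 1 Mbps target, exp = 10
// requests 4^0.5 = 2x (2 Mbps) from the codec, while the clamp at exp = -20
// bottoms out at 4^-1 = 0.25x (250 kbps).
class BitrateScaleSketch {
  static int scaledBitrateBps(int targetBitrateBps, int scaleExp) {
    return (int) (targetBitrateBps * Math.pow(4.0, scaleExp / 20.0));
  }
}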

View File

@ -0,0 +1,305 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.graphics.SurfaceTexture;
import android.view.Surface;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import javax.microedition.khronos.egl.EGL10;
/**
* Holds EGL state and utility methods for handling an EGL 1.0 EGLContext, an EGLDisplay,
* and an EGLSurface.
*/
public interface EglBase {
// EGL wrapper for an actual EGLContext.
public interface Context {
public final static long NO_CONTEXT = 0;
/**
* Returns an EGL context that can be used by native code. Returns NO_CONTEXT if the method is
* unsupported.
*
* @note This is currently only supported for EGL 1.4 and not for EGL 1.0.
*/
long getNativeEglContext();
}
/**
* Wraps the objects needed to interact with EGL that are independent of a particular EGLSurface.
* In practice this means EGLContext, EGLDisplay and EGLConfig objects. Separating them out in a
* standalone object allows for multiple EglBase instances to use the same underlying EGLContext,
* while still operating on their own EGLSurface.
*/
public interface EglConnection extends RefCounted {
/** Analogous to corresponding EglBase#create below. */
public static EglConnection create(@Nullable Context sharedContext, int[] configAttributes) {
if (sharedContext == null) {
return EglConnection.createEgl14(configAttributes);
} else if (sharedContext instanceof EglBase14.Context) {
return new EglBase14Impl.EglConnection(
((EglBase14.Context) sharedContext).getRawContext(), configAttributes);
} else if (sharedContext instanceof EglBase10.Context) {
return new EglBase10Impl.EglConnection(
((EglBase10.Context) sharedContext).getRawContext(), configAttributes);
}
throw new IllegalArgumentException("Unrecognized Context");
}
/** Analogous to corresponding EglBase#createEgl10 below. */
public static EglConnection createEgl10(int[] configAttributes) {
return new EglBase10Impl.EglConnection(/* sharedContext= */ null, configAttributes);
}
/** Analogous to corresponding EglBase#createEgl14 below. */
public static EglConnection createEgl14(int[] configAttributes) {
return new EglBase14Impl.EglConnection(/* sharedContext= */ null, configAttributes);
}
}
// According to the documentation, EGL can be used from multiple threads at the same time if each
// thread has its own EGLContext, but in practice it deadlocks on some devices when doing this.
// Therefore, synchronize on this global lock before calling dangerous EGL functions that might
// deadlock. See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
public static final Object lock = new Object();
// These constants are taken from EGL14.EGL_OPENGL_ES2_BIT and EGL14.EGL_CONTEXT_CLIENT_VERSION.
// https://android.googlesource.com/platform/frameworks/base/+/master/opengl/java/android/opengl/EGL14.java
// This is similar to what GLSurfaceView does:
// http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/opengl/GLSurfaceView.java#760
public static final int EGL_OPENGL_ES2_BIT = 4;
public static final int EGL_OPENGL_ES3_BIT = 0x40;
// Android-specific extension.
public static final int EGL_RECORDABLE_ANDROID = 0x3142;
public static ConfigBuilder configBuilder() {
return new ConfigBuilder();
}
public static class ConfigBuilder {
private int openGlesVersion = 2;
private boolean hasAlphaChannel;
private boolean supportsPixelBuffer;
private boolean isRecordable;
public ConfigBuilder setOpenGlesVersion(int version) {
if (version < 1 || version > 3) {
throw new IllegalArgumentException("OpenGL ES version " + version + " not supported");
}
this.openGlesVersion = version;
return this;
}
public ConfigBuilder setHasAlphaChannel(boolean hasAlphaChannel) {
this.hasAlphaChannel = hasAlphaChannel;
return this;
}
public ConfigBuilder setSupportsPixelBuffer(boolean supportsPixelBuffer) {
this.supportsPixelBuffer = supportsPixelBuffer;
return this;
}
public ConfigBuilder setIsRecordable(boolean isRecordable) {
this.isRecordable = isRecordable;
return this;
}
public int[] createConfigAttributes() {
ArrayList<Integer> list = new ArrayList<>();
list.add(EGL10.EGL_RED_SIZE);
list.add(8);
list.add(EGL10.EGL_GREEN_SIZE);
list.add(8);
list.add(EGL10.EGL_BLUE_SIZE);
list.add(8);
if (hasAlphaChannel) {
list.add(EGL10.EGL_ALPHA_SIZE);
list.add(8);
}
if (openGlesVersion == 2 || openGlesVersion == 3) {
list.add(EGL10.EGL_RENDERABLE_TYPE);
list.add(openGlesVersion == 3 ? EGL_OPENGL_ES3_BIT : EGL_OPENGL_ES2_BIT);
}
if (supportsPixelBuffer) {
list.add(EGL10.EGL_SURFACE_TYPE);
list.add(EGL10.EGL_PBUFFER_BIT);
}
if (isRecordable) {
list.add(EGL_RECORDABLE_ANDROID);
list.add(1);
}
list.add(EGL10.EGL_NONE);
final int[] res = new int[list.size()];
for (int i = 0; i < list.size(); ++i) {
res[i] = list.get(i);
}
return res;
}
}
public static final int[] CONFIG_PLAIN = configBuilder().createConfigAttributes();
public static final int[] CONFIG_RGBA =
configBuilder().setHasAlphaChannel(true).createConfigAttributes();
public static final int[] CONFIG_PIXEL_BUFFER =
configBuilder().setSupportsPixelBuffer(true).createConfigAttributes();
public static final int[] CONFIG_PIXEL_RGBA_BUFFER = configBuilder()
.setHasAlphaChannel(true)
.setSupportsPixelBuffer(true)
.createConfigAttributes();
public static final int[] CONFIG_RECORDABLE =
configBuilder().setIsRecordable(true).createConfigAttributes();
static int getOpenGlesVersionFromConfig(int[] configAttributes) {
for (int i = 0; i < configAttributes.length - 1; ++i) {
if (configAttributes[i] == EGL10.EGL_RENDERABLE_TYPE) {
switch (configAttributes[i + 1]) {
case EGL_OPENGL_ES2_BIT:
return 2;
case EGL_OPENGL_ES3_BIT:
return 3;
default:
return 1;
}
}
}
// Default to V1 if no renderable type is specified.
return 1;
}
/**
* Creates a new EglBase with a shared EglConnection. EglBase instances sharing the same
* EglConnection should be used on the same thread to avoid the underlying EGLContext being made
* current on multiple threads. It is up to the client of EglBase to ensure that instances with a
* shared EglConnection are current on that thread before each use since other EglBase instances
* may have used the same EGLContext since the last interaction.
*/
public static EglBase create(EglConnection eglConnection) {
if (eglConnection == null) {
return create();
} else if (eglConnection instanceof EglBase14Impl.EglConnection) {
return new EglBase14Impl((EglBase14Impl.EglConnection) eglConnection);
} else if (eglConnection instanceof EglBase10Impl.EglConnection) {
return new EglBase10Impl((EglBase10Impl.EglConnection) eglConnection);
}
throw new IllegalArgumentException("Unrecognized EglConnection");
}
/**
* Create a new context with the specified config attributes, sharing data with `sharedContext`.
* If `sharedContext` is null, a root EGL 1.4 context is created.
*/
public static EglBase create(@Nullable Context sharedContext, int[] configAttributes) {
if (sharedContext == null) {
return createEgl14(configAttributes);
} else if (sharedContext instanceof EglBase14.Context) {
return createEgl14((EglBase14.Context) sharedContext, configAttributes);
} else if (sharedContext instanceof EglBase10.Context) {
return createEgl10((EglBase10.Context) sharedContext, configAttributes);
}
throw new IllegalArgumentException("Unrecognized Context");
}
/**
* Helper function for creating a plain root context. This function will try to create an EGL 1.4
* context if possible, and an EGL 1.0 context otherwise.
*/
public static EglBase create() {
return create(null /* sharedContext */, CONFIG_PLAIN);
}
/**
* Helper function for creating a plain context, sharing data with `sharedContext`. This function
* will try to create an EGL 1.4 context if possible, and an EGL 1.0 context otherwise.
*/
public static EglBase create(Context sharedContext) {
return create(sharedContext, CONFIG_PLAIN);
}
/** Explicitly create a root EGL 1.0 context with the specified config attributes. */
public static EglBase10 createEgl10(int[] configAttributes) {
return new EglBase10Impl(/* sharedContext= */ null, configAttributes);
}
/**
* Explicitly create a root EGL 1.0 context with the specified config attributes and shared
* context.
*/
public static EglBase10 createEgl10(EglBase10.Context sharedContext, int[] configAttributes) {
return new EglBase10Impl(
sharedContext == null ? null : sharedContext.getRawContext(), configAttributes);
}
/**
* Explicitly create a root EGL 1.0 context with the specified config attributes
* and shared context.
*/
public static EglBase10 createEgl10(
javax.microedition.khronos.egl.EGLContext sharedContext, int[] configAttributes) {
return new EglBase10Impl(sharedContext, configAttributes);
}
/** Explicitly create a root EGL 1.4 context with the specified config attributes. */
public static EglBase14 createEgl14(int[] configAttributes) {
return new EglBase14Impl(/* sharedContext= */ null, configAttributes);
}
/**
* Explicitly create a root EGL 1.4 context with the specified config attributes and shared
* context.
*/
public static EglBase14 createEgl14(EglBase14.Context sharedContext, int[] configAttributes) {
return new EglBase14Impl(
sharedContext == null ? null : sharedContext.getRawContext(), configAttributes);
}
/**
* Explicitly create a root EGL 1.4 context with the specified config attributes
* and shared context.
*/
public static EglBase14 createEgl14(
android.opengl.EGLContext sharedContext, int[] configAttributes) {
return new EglBase14Impl(sharedContext, configAttributes);
}
void createSurface(Surface surface);
// Create EGLSurface from the Android SurfaceTexture.
void createSurface(SurfaceTexture surfaceTexture);
// Create dummy 1x1 pixel buffer surface so the context can be made current.
void createDummyPbufferSurface();
void createPbufferSurface(int width, int height);
Context getEglBaseContext();
boolean hasSurface();
int surfaceWidth();
int surfaceHeight();
void releaseSurface();
void release();
void makeCurrent();
// Detach the current EGL context, so that it can be made current on another thread.
void detachCurrent();
void swapBuffers();
void swapBuffers(long presentationTimeStampNs);
}
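Taken together, the factory methods and surface calls above describe a create → createSurface → makeCurrent → release lifecycle. A minimal offscreen sketch, assuming a single dedicated render thread (the helper name and Runnable plumbing are illustrative, not part of this file):

static void runOffscreen(Runnable glWork) {
  final EglBase eglBase = EglBase.create(); // root EGL 1.4 context with CONFIG_PLAIN
  eglBase.createDummyPbufferSurface();      // a surface is required before makeCurrent()
  eglBase.makeCurrent();
  try {
    glWork.run(); // GLES calls are valid here
  } finally {
    eglBase.detachCurrent(); // allow another thread to make the context current
    eglBase.release();       // destroys the surface and releases the connection
  }
}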

View File

@@ -0,0 +1,33 @@
/*
* Copyright 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
/** EGL 1.0 implementation of EglBase. */
public interface EglBase10 extends EglBase {
interface Context extends EglBase.Context {
EGLContext getRawContext();
}
interface EglConnection extends EglBase.EglConnection {
EGL10 getEgl();
EGLContext getContext();
EGLDisplay getDisplay();
EGLConfig getConfig();
}
}
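A hedged sketch of what the raw accessors enable, e.g. reading a config attribute directly through the managed EGL10 objects (the helper name is illustrative; assumes the javax.microedition.khronos.egl imports above):

static int redSize(EglBase10.EglConnection conn) {
  final int[] value = new int[1];
  conn.getEgl().eglGetConfigAttrib(
      conn.getDisplay(), conn.getConfig(), EGL10.EGL_RED_SIZE, value);
  return value[0];
}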

View File

@@ -0,0 +1,448 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.opengl.GLException;
import android.view.Surface;
import android.view.SurfaceHolder;
import androidx.annotation.Nullable;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
/**
* Holds EGL state and utility methods for handling an EGL 1.0 EGLContext, an EGLDisplay,
* and an EGLSurface.
*/
class EglBase10Impl implements EglBase10 {
private static final String TAG = "EglBase10Impl";
// This constant is taken from EGL14.EGL_CONTEXT_CLIENT_VERSION.
private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
private static final EglConnection EGL_NO_CONNECTION = new EglConnection();
private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;
private EglConnection eglConnection;
// EGL wrapper for an actual EGLContext.
private static class Context implements EglBase10.Context {
private final EGL10 egl;
private final EGLContext eglContext;
private final EGLConfig eglContextConfig;
@Override
public EGLContext getRawContext() {
return eglContext;
}
@Override
public long getNativeEglContext() {
EGLContext previousContext = egl.eglGetCurrentContext();
EGLDisplay currentDisplay = egl.eglGetCurrentDisplay();
EGLSurface previousDrawSurface = egl.eglGetCurrentSurface(EGL10.EGL_DRAW);
EGLSurface previousReadSurface = egl.eglGetCurrentSurface(EGL10.EGL_READ);
EGLSurface tempEglSurface = null;
if (currentDisplay == EGL10.EGL_NO_DISPLAY) {
currentDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
}
try {
if (previousContext != eglContext) {
int[] surfaceAttribs = {EGL10.EGL_WIDTH, 1, EGL10.EGL_HEIGHT, 1, EGL10.EGL_NONE};
tempEglSurface =
egl.eglCreatePbufferSurface(currentDisplay, eglContextConfig, surfaceAttribs);
if (!egl.eglMakeCurrent(currentDisplay, tempEglSurface, tempEglSurface, eglContext)) {
throw new GLException(egl.eglGetError(),
"Failed to make temporary EGL surface active: " + egl.eglGetError());
}
}
return nativeGetCurrentNativeEGLContext();
} finally {
if (tempEglSurface != null) {
egl.eglMakeCurrent(
currentDisplay, previousDrawSurface, previousReadSurface, previousContext);
egl.eglDestroySurface(currentDisplay, tempEglSurface);
}
}
}
public Context(EGL10 egl, EGLContext eglContext, EGLConfig eglContextConfig) {
this.egl = egl;
this.eglContext = eglContext;
this.eglContextConfig = eglContextConfig;
}
}
public static class EglConnection implements EglBase10.EglConnection {
private final EGL10 egl;
private final EGLContext eglContext;
private final EGLDisplay eglDisplay;
private final EGLConfig eglConfig;
private final RefCountDelegate refCountDelegate;
private EGLSurface currentSurface = EGL10.EGL_NO_SURFACE;
public EglConnection(EGLContext sharedContext, int[] configAttributes) {
egl = (EGL10) EGLContext.getEGL();
eglDisplay = getEglDisplay(egl);
eglConfig = getEglConfig(egl, eglDisplay, configAttributes);
final int openGlesVersion = EglBase.getOpenGlesVersionFromConfig(configAttributes);
Logging.d(TAG, "Using OpenGL ES version " + openGlesVersion);
eglContext = createEglContext(egl, sharedContext, eglDisplay, eglConfig, openGlesVersion);
// Ref count delegate with release callback.
refCountDelegate = new RefCountDelegate(() -> {
synchronized (EglBase.lock) {
egl.eglMakeCurrent(
eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
}
egl.eglDestroyContext(eglDisplay, eglContext);
egl.eglTerminate(eglDisplay);
currentSurface = EGL10.EGL_NO_SURFACE;
});
}
// Returns a "null" EglConnection. Useful to represent a released instance with default values.
private EglConnection() {
egl = (EGL10) EGLContext.getEGL();
eglContext = EGL10.EGL_NO_CONTEXT;
eglDisplay = EGL10.EGL_NO_DISPLAY;
eglConfig = null;
refCountDelegate = new RefCountDelegate(() -> {});
}
@Override
public void retain() {
refCountDelegate.retain();
}
@Override
public void release() {
refCountDelegate.release();
}
@Override
public EGL10 getEgl() {
return egl;
}
@Override
public EGLContext getContext() {
return eglContext;
}
@Override
public EGLDisplay getDisplay() {
return eglDisplay;
}
@Override
public EGLConfig getConfig() {
return eglConfig;
}
public void makeCurrent(EGLSurface eglSurface) {
if (egl.eglGetCurrentContext() == eglContext && currentSurface == eglSurface) {
return;
}
synchronized (EglBase.lock) {
if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
throw new GLException(egl.eglGetError(),
"eglMakeCurrent failed: 0x" + Integer.toHexString(egl.eglGetError()));
}
}
currentSurface = eglSurface;
}
public void detachCurrent() {
synchronized (EglBase.lock) {
if (!egl.eglMakeCurrent(
eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
throw new GLException(egl.eglGetError(),
"eglDetachCurrent failed: 0x" + Integer.toHexString(egl.eglGetError()));
}
}
currentSurface = EGL10.EGL_NO_SURFACE;
}
}
// Create a new context with the specified config type, sharing data with sharedContext.
public EglBase10Impl(EGLContext sharedContext, int[] configAttributes) {
this.eglConnection = new EglConnection(sharedContext, configAttributes);
}
public EglBase10Impl(EglConnection eglConnection) {
this.eglConnection = eglConnection;
this.eglConnection.retain();
}
@Override
public void createSurface(Surface surface) {
/**
* We have to wrap Surface in a SurfaceHolder because for some reason eglCreateWindowSurface
* couldn't actually take a Surface object until API 17. Older versions fortunately just call
* SurfaceHolder.getSurface(), so we'll do that. No other methods are relevant.
*/
class FakeSurfaceHolder implements SurfaceHolder {
private final Surface surface;
FakeSurfaceHolder(Surface surface) {
this.surface = surface;
}
@Override
public void addCallback(Callback callback) {}
@Override
public void removeCallback(Callback callback) {}
@Override
public boolean isCreating() {
return false;
}
@Deprecated
@Override
public void setType(int i) {}
@Override
public void setFixedSize(int i, int i2) {}
@Override
public void setSizeFromLayout() {}
@Override
public void setFormat(int i) {}
@Override
public void setKeepScreenOn(boolean b) {}
@Nullable
@Override
public Canvas lockCanvas() {
return null;
}
@Nullable
@Override
public Canvas lockCanvas(Rect rect) {
return null;
}
@Override
public void unlockCanvasAndPost(Canvas canvas) {}
@Nullable
@Override
public Rect getSurfaceFrame() {
return null;
}
@Override
public Surface getSurface() {
return surface;
}
}
createSurfaceInternal(new FakeSurfaceHolder(surface));
}
// Create EGLSurface from the Android SurfaceTexture.
@Override
public void createSurface(SurfaceTexture surfaceTexture) {
createSurfaceInternal(surfaceTexture);
}
// Create EGLSurface from either a SurfaceHolder or a SurfaceTexture.
private void createSurfaceInternal(Object nativeWindow) {
if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) {
throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture");
}
checkIsNotReleased();
if (eglSurface != EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("Already has an EGLSurface");
}
EGL10 egl = eglConnection.getEgl();
int[] surfaceAttribs = {EGL10.EGL_NONE};
eglSurface = egl.eglCreateWindowSurface(
eglConnection.getDisplay(), eglConnection.getConfig(), nativeWindow, surfaceAttribs);
if (eglSurface == EGL10.EGL_NO_SURFACE) {
throw new GLException(egl.eglGetError(),
"Failed to create window surface: 0x" + Integer.toHexString(egl.eglGetError()));
}
}
// Create dummy 1x1 pixel buffer surface so the context can be made current.
@Override
public void createDummyPbufferSurface() {
createPbufferSurface(1, 1);
}
@Override
public void createPbufferSurface(int width, int height) {
checkIsNotReleased();
if (eglSurface != EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("Already has an EGLSurface");
}
EGL10 egl = eglConnection.getEgl();
int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
eglSurface = egl.eglCreatePbufferSurface(
eglConnection.getDisplay(), eglConnection.getConfig(), surfaceAttribs);
if (eglSurface == EGL10.EGL_NO_SURFACE) {
throw new GLException(egl.eglGetError(),
"Failed to create pixel buffer surface with size " + width + "x" + height + ": 0x"
+ Integer.toHexString(egl.eglGetError()));
}
}
@Override
public org.webrtc.EglBase.Context getEglBaseContext() {
return new Context(
eglConnection.getEgl(), eglConnection.getContext(), eglConnection.getConfig());
}
@Override
public boolean hasSurface() {
return eglSurface != EGL10.EGL_NO_SURFACE;
}
@Override
public int surfaceWidth() {
final int[] widthArray = new int[1];
eglConnection.getEgl().eglQuerySurface(
eglConnection.getDisplay(), eglSurface, EGL10.EGL_WIDTH, widthArray);
return widthArray[0];
}
@Override
public int surfaceHeight() {
final int[] heightArray = new int[1];
eglConnection.getEgl().eglQuerySurface(
eglConnection.getDisplay(), eglSurface, EGL10.EGL_HEIGHT, heightArray);
return heightArray[0];
}
@Override
public void releaseSurface() {
if (eglSurface != EGL10.EGL_NO_SURFACE) {
eglConnection.getEgl().eglDestroySurface(eglConnection.getDisplay(), eglSurface);
eglSurface = EGL10.EGL_NO_SURFACE;
}
}
private void checkIsNotReleased() {
if (eglConnection == EGL_NO_CONNECTION) {
throw new RuntimeException("This object has been released");
}
}
@Override
public void release() {
checkIsNotReleased();
releaseSurface();
eglConnection.release();
eglConnection = EGL_NO_CONNECTION;
}
@Override
public void makeCurrent() {
checkIsNotReleased();
if (eglSurface == EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("No EGLSurface - can't make current");
}
eglConnection.makeCurrent(eglSurface);
}
// Detach the current EGL context, so that it can be made current on another thread.
@Override
public void detachCurrent() {
eglConnection.detachCurrent();
}
@Override
public void swapBuffers() {
checkIsNotReleased();
if (eglSurface == EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("No EGLSurface - can't swap buffers");
}
synchronized (EglBase.lock) {
eglConnection.getEgl().eglSwapBuffers(eglConnection.getDisplay(), eglSurface);
}
}
@Override
public void swapBuffers(long timeStampNs) {
// Setting presentation time is not supported for EGL 1.0.
swapBuffers();
}
// Return an EGLDisplay, or die trying.
private static EGLDisplay getEglDisplay(EGL10 egl) {
EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
throw new GLException(egl.eglGetError(),
"Unable to get EGL10 display: 0x" + Integer.toHexString(egl.eglGetError()));
}
int[] version = new int[2];
if (!egl.eglInitialize(eglDisplay, version)) {
throw new GLException(egl.eglGetError(),
"Unable to initialize EGL10: 0x" + Integer.toHexString(egl.eglGetError()));
}
return eglDisplay;
}
// Return an EGLConfig, or die trying.
private static EGLConfig getEglConfig(EGL10 egl, EGLDisplay eglDisplay, int[] configAttributes) {
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!egl.eglChooseConfig(eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
throw new GLException(
egl.eglGetError(), "eglChooseConfig failed: 0x" + Integer.toHexString(egl.eglGetError()));
}
if (numConfigs[0] <= 0) {
throw new RuntimeException("Unable to find any matching EGL config");
}
final EGLConfig eglConfig = configs[0];
if (eglConfig == null) {
throw new RuntimeException("eglChooseConfig returned null");
}
return eglConfig;
}
// Return an EGLContext, or die trying.
private static EGLContext createEglContext(EGL10 egl, @Nullable EGLContext sharedContext,
EGLDisplay eglDisplay, EGLConfig eglConfig, int openGlesVersion) {
if (sharedContext != null && sharedContext == EGL10.EGL_NO_CONTEXT) {
throw new RuntimeException("Invalid sharedContext");
}
int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, openGlesVersion, EGL10.EGL_NONE};
EGLContext rootContext = sharedContext == null ? EGL10.EGL_NO_CONTEXT : sharedContext;
final EGLContext eglContext;
synchronized (EglBase.lock) {
eglContext = egl.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes);
}
if (eglContext == EGL10.EGL_NO_CONTEXT) {
throw new GLException(egl.eglGetError(),
"Failed to create EGL context: 0x" + Integer.toHexString(egl.eglGetError()));
}
return eglContext;
}
private static native long nativeGetCurrentNativeEGLContext();
}
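A short sketch of context sharing on this EGL 1.0 path (names illustrative): textures created under the root context become visible to the second instance, which dispatches back through EglBase.create(Context, int[]) because getEglBaseContext() returns an EglBase10.Context.

static EglBase createSharedWithRoot() {
  EglBase10 root = EglBase.createEgl10(EglBase.CONFIG_PIXEL_BUFFER);
  root.createDummyPbufferSurface();
  // The shared instance still needs its own surface, on its own thread,
  // before makeCurrent() may be called on it.
  return EglBase.create(root.getEglBaseContext(), EglBase.CONFIG_PLAIN);
}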

View File

@@ -0,0 +1,30 @@
/*
* Copyright 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
/** EGL 1.4 implementation of EglBase. */
public interface EglBase14 extends EglBase {
interface Context extends EglBase.Context {
EGLContext getRawContext();
}
interface EglConnection extends EglBase.EglConnection {
EGLContext getContext();
EGLDisplay getDisplay();
EGLConfig getConfig();
}
}
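As a hypothetical example, these accessors let callers drop down to the android.opengl.EGL14 API directly, e.g. to query the vendor string of the display owned by a connection (assumes an android.opengl.EGL14 import):

static String eglVendor(EglBase14.EglConnection conn) {
  return EGL14.eglQueryString(conn.getDisplay(), EGL14.EGL_VENDOR);
}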

View File

@@ -0,0 +1,340 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.opengl.GLException;
import android.view.Surface;
import androidx.annotation.Nullable;
/**
* Holds EGL state and utility methods for handling an EGL14 EGLContext, an EGLDisplay,
* and an EGLSurface.
*/
@SuppressWarnings("ReferenceEquality") // We want to compare to EGL14 constants.
class EglBase14Impl implements EglBase14 {
private static final String TAG = "EglBase14Impl";
private static final EglConnection EGL_NO_CONNECTION = new EglConnection();
private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
private EglConnection eglConnection;
public static class Context implements EglBase14.Context {
private final EGLContext egl14Context;
@Override
public EGLContext getRawContext() {
return egl14Context;
}
@Override
public long getNativeEglContext() {
return egl14Context.getNativeHandle();
}
public Context(android.opengl.EGLContext eglContext) {
this.egl14Context = eglContext;
}
}
public static class EglConnection implements EglBase14.EglConnection {
private final EGLContext eglContext;
private final EGLDisplay eglDisplay;
private final EGLConfig eglConfig;
private final RefCountDelegate refCountDelegate;
private EGLSurface currentSurface = EGL14.EGL_NO_SURFACE;
public EglConnection(EGLContext sharedContext, int[] configAttributes) {
eglDisplay = getEglDisplay();
eglConfig = getEglConfig(eglDisplay, configAttributes);
final int openGlesVersion = EglBase.getOpenGlesVersionFromConfig(configAttributes);
Logging.d(TAG, "Using OpenGL ES version " + openGlesVersion);
eglContext = createEglContext(sharedContext, eglDisplay, eglConfig, openGlesVersion);
// Ref count delegate with release callback.
refCountDelegate = new RefCountDelegate(() -> {
synchronized (EglBase.lock) {
EGL14.eglMakeCurrent(
eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
EGL14.eglDestroyContext(eglDisplay, eglContext);
}
EGL14.eglReleaseThread();
EGL14.eglTerminate(eglDisplay);
currentSurface = EGL14.EGL_NO_SURFACE;
});
}
// Returns a "null" EglConnection. Useful to represent a released instance with default values.
private EglConnection() {
eglContext = EGL14.EGL_NO_CONTEXT;
eglDisplay = EGL14.EGL_NO_DISPLAY;
eglConfig = null;
refCountDelegate = new RefCountDelegate(() -> {});
}
@Override
public void retain() {
refCountDelegate.retain();
}
@Override
public void release() {
refCountDelegate.release();
}
@Override
public EGLContext getContext() {
return eglContext;
}
@Override
public EGLDisplay getDisplay() {
return eglDisplay;
}
@Override
public EGLConfig getConfig() {
return eglConfig;
}
public void makeCurrent(EGLSurface eglSurface) {
if (EGL14.eglGetCurrentContext() == eglContext && currentSurface == eglSurface) {
return;
}
synchronized (EglBase.lock) {
if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
throw new GLException(EGL14.eglGetError(),
"eglMakeCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
}
}
currentSurface = eglSurface;
}
public void detachCurrent() {
synchronized (EglBase.lock) {
if (!EGL14.eglMakeCurrent(
eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
throw new GLException(EGL14.eglGetError(),
"eglDetachCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
}
}
currentSurface = EGL14.EGL_NO_SURFACE;
}
}
// Create a new context with the specified config type, sharing data with sharedContext.
// `sharedContext` may be null.
public EglBase14Impl(EGLContext sharedContext, int[] configAttributes) {
this.eglConnection = new EglConnection(sharedContext, configAttributes);
}
// Create a new EglBase using an existing, possibly externally managed, EglConnection.
public EglBase14Impl(EglConnection eglConnection) {
this.eglConnection = eglConnection;
this.eglConnection.retain();
}
// Create EGLSurface from the Android Surface.
@Override
public void createSurface(Surface surface) {
createSurfaceInternal(surface);
}
// Create EGLSurface from the Android SurfaceTexture.
@Override
public void createSurface(SurfaceTexture surfaceTexture) {
createSurfaceInternal(surfaceTexture);
}
// Create EGLSurface from either Surface or SurfaceTexture.
private void createSurfaceInternal(Object surface) {
if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
throw new IllegalStateException("Input must be either a Surface or SurfaceTexture");
}
checkIsNotReleased();
if (eglSurface != EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("Already has an EGLSurface");
}
int[] surfaceAttribs = {EGL14.EGL_NONE};
eglSurface = EGL14.eglCreateWindowSurface(
eglConnection.getDisplay(), eglConnection.getConfig(), surface, surfaceAttribs, 0);
if (eglSurface == EGL14.EGL_NO_SURFACE) {
throw new GLException(EGL14.eglGetError(),
"Failed to create window surface: 0x" + Integer.toHexString(EGL14.eglGetError()));
}
}
@Override
public void createDummyPbufferSurface() {
createPbufferSurface(1, 1);
}
@Override
public void createPbufferSurface(int width, int height) {
checkIsNotReleased();
if (eglSurface != EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("Already has an EGLSurface");
}
int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
eglSurface = EGL14.eglCreatePbufferSurface(
eglConnection.getDisplay(), eglConnection.getConfig(), surfaceAttribs, 0);
if (eglSurface == EGL14.EGL_NO_SURFACE) {
throw new GLException(EGL14.eglGetError(),
"Failed to create pixel buffer surface with size " + width + "x" + height + ": 0x"
+ Integer.toHexString(EGL14.eglGetError()));
}
}
@Override
public Context getEglBaseContext() {
return new Context(eglConnection.getContext());
}
@Override
public boolean hasSurface() {
return eglSurface != EGL14.EGL_NO_SURFACE;
}
@Override
public int surfaceWidth() {
final int[] widthArray = new int[1];
EGL14.eglQuerySurface(eglConnection.getDisplay(), eglSurface, EGL14.EGL_WIDTH, widthArray, 0);
return widthArray[0];
}
@Override
public int surfaceHeight() {
final int[] heightArray = new int[1];
EGL14.eglQuerySurface(eglConnection.getDisplay(), eglSurface, EGL14.EGL_HEIGHT, heightArray, 0);
return heightArray[0];
}
@Override
public void releaseSurface() {
if (eglSurface != EGL14.EGL_NO_SURFACE) {
EGL14.eglDestroySurface(eglConnection.getDisplay(), eglSurface);
eglSurface = EGL14.EGL_NO_SURFACE;
}
}
private void checkIsNotReleased() {
if (eglConnection == EGL_NO_CONNECTION) {
throw new RuntimeException("This object has been released");
}
}
@Override
public void release() {
checkIsNotReleased();
releaseSurface();
eglConnection.release();
eglConnection = EGL_NO_CONNECTION;
}
@Override
public void makeCurrent() {
checkIsNotReleased();
if (eglSurface == EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("No EGLSurface - can't make current");
}
eglConnection.makeCurrent(eglSurface);
}
// Detach the current EGL context, so that it can be made current on another thread.
@Override
public void detachCurrent() {
eglConnection.detachCurrent();
}
@Override
public void swapBuffers() {
checkIsNotReleased();
if (eglSurface == EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("No EGLSurface - can't swap buffers");
}
synchronized (EglBase.lock) {
EGL14.eglSwapBuffers(eglConnection.getDisplay(), eglSurface);
}
}
@Override
public void swapBuffers(long timeStampNs) {
checkIsNotReleased();
if (eglSurface == EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("No EGLSurface - can't swap buffers");
}
synchronized (EglBase.lock) {
// See
// https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
EGLExt.eglPresentationTimeANDROID(eglConnection.getDisplay(), eglSurface, timeStampNs);
EGL14.eglSwapBuffers(eglConnection.getDisplay(), eglSurface);
}
}
// Return an EGLDisplay, or die trying.
private static EGLDisplay getEglDisplay() {
EGLDisplay eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
throw new GLException(EGL14.eglGetError(),
"Unable to get EGL14 display: 0x" + Integer.toHexString(EGL14.eglGetError()));
}
int[] version = new int[2];
if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
throw new GLException(EGL14.eglGetError(),
"Unable to initialize EGL14: 0x" + Integer.toHexString(EGL14.eglGetError()));
}
return eglDisplay;
}
// Return an EGLConfig, or die trying.
private static EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!EGL14.eglChooseConfig(
eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
throw new GLException(EGL14.eglGetError(),
"eglChooseConfig failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
}
if (numConfigs[0] <= 0) {
throw new RuntimeException("Unable to find any matching EGL config");
}
final EGLConfig eglConfig = configs[0];
if (eglConfig == null) {
throw new RuntimeException("eglChooseConfig returned null");
}
return eglConfig;
}
// Return an EGLContext, or die trying.
private static EGLContext createEglContext(@Nullable EGLContext sharedContext,
EGLDisplay eglDisplay, EGLConfig eglConfig, int openGlesVersion) {
if (sharedContext != null && sharedContext == EGL14.EGL_NO_CONTEXT) {
throw new RuntimeException("Invalid sharedContext");
}
int[] contextAttributes = {EGL14.EGL_CONTEXT_CLIENT_VERSION, openGlesVersion, EGL14.EGL_NONE};
EGLContext rootContext = sharedContext == null ? EGL14.EGL_NO_CONTEXT : sharedContext;
final EGLContext eglContext;
synchronized (EglBase.lock) {
eglContext = EGL14.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes, 0);
}
if (eglContext == EGL14.EGL_NO_CONTEXT) {
throw new GLException(EGL14.eglGetError(),
"Failed to create EGL context: 0x" + Integer.toHexString(EGL14.eglGetError()));
}
return eglContext;
}
}
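A sketch of the presentation-time path above, e.g. when drawing into a MediaCodec encoder surface. Here encoderSurface, frameTimestampNs and drawFrame are placeholders supplied by the caller:

static void drawForRecording(Surface encoderSurface, long frameTimestampNs, Runnable drawFrame) {
  EglBase14 egl = EglBase.createEgl14(EglBase.CONFIG_RECORDABLE);
  egl.createSurface(encoderSurface);
  egl.makeCurrent();
  drawFrame.run(); // GLES drawing goes here
  // Forwards the timestamp to EGLExt.eglPresentationTimeANDROID before eglSwapBuffers.
  egl.swapBuffers(frameTimestampNs);
  egl.release();
}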

View File

@@ -0,0 +1,776 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
import android.view.Surface;
import androidx.annotation.GuardedBy;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
/**
* Implements VideoSink by displaying the video stream on an EGL Surface. This class is intended to
* be used as a helper class for rendering on SurfaceViews and TextureViews.
*/
public class EglRenderer implements VideoSink {
private static final String TAG = "EglRenderer";
private static final long LOG_INTERVAL_SEC = 4;
public interface FrameListener { void onFrame(Bitmap frame); }
/** Callback for clients to be notified about errors encountered during rendering. */
public interface ErrorCallback {
/** Called if GLES20.GL_OUT_OF_MEMORY is encountered during rendering. */
void onGlOutOfMemory();
}
private static class FrameListenerAndParams {
public final FrameListener listener;
public final float scale;
public final RendererCommon.GlDrawer drawer;
public final boolean applyFpsReduction;
public FrameListenerAndParams(FrameListener listener, float scale,
RendererCommon.GlDrawer drawer, boolean applyFpsReduction) {
this.listener = listener;
this.scale = scale;
this.drawer = drawer;
this.applyFpsReduction = applyFpsReduction;
}
}
private class EglSurfaceCreation implements Runnable {
private Object surface;
// TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
@SuppressWarnings("NoSynchronizedMethodCheck")
public synchronized void setSurface(Object surface) {
this.surface = surface;
}
@Override
// TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
@SuppressWarnings("NoSynchronizedMethodCheck")
public synchronized void run() {
if (surface != null && eglBase != null && !eglBase.hasSurface()) {
if (surface instanceof Surface) {
eglBase.createSurface((Surface) surface);
} else if (surface instanceof SurfaceTexture) {
eglBase.createSurface((SurfaceTexture) surface);
} else {
throw new IllegalStateException("Invalid surface: " + surface);
}
eglBase.makeCurrent();
// Necessary for YUV frames with odd width.
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
}
}
}
protected final String name;
// `eglThread` is used for rendering, and is synchronized on `threadLock`.
private final Object threadLock = new Object();
@GuardedBy("threadLock") @Nullable private EglThread eglThread;
private final Runnable eglExceptionCallback = new Runnable() {
@Override
public void run() {
synchronized (threadLock) {
eglThread = null;
}
}
};
private final ArrayList<FrameListenerAndParams> frameListeners = new ArrayList<>();
private volatile ErrorCallback errorCallback;
// Variables for fps reduction.
private final Object fpsReductionLock = new Object();
// Time at which the next frame should be rendered.
private long nextFrameTimeNs;
// Minimum duration between frames when fps reduction is active, or Long.MAX_VALUE if video is
// completely paused.
private long minRenderPeriodNs;
// EGL and GL resources for drawing YUV/OES textures. After initialization, these are only
// accessed from the render thread.
@Nullable private EglBase eglBase;
private final VideoFrameDrawer frameDrawer;
@Nullable private RendererCommon.GlDrawer drawer;
private boolean usePresentationTimeStamp;
private final Matrix drawMatrix = new Matrix();
// Pending frame to render. Serves as a queue with size 1. Synchronized on `frameLock`.
private final Object frameLock = new Object();
@Nullable private VideoFrame pendingFrame;
// These variables are synchronized on `layoutLock`.
private final Object layoutLock = new Object();
private float layoutAspectRatio;
// If true, mirrors the video stream horizontally.
private boolean mirrorHorizontally;
// If true, mirrors the video stream vertically.
private boolean mirrorVertically;
// These variables are synchronized on `statisticsLock`.
private final Object statisticsLock = new Object();
// Total number of video frames received by onFrame().
private int framesReceived;
// Number of video frames dropped by onFrame() because the previous frame has not been rendered
// yet.
private int framesDropped;
// Number of rendered video frames.
private int framesRendered;
// Start time for counting these statistics, or 0 if we haven't started measuring yet.
private long statisticsStartTimeNs;
// Time in ns spent in renderFrameOnRenderThread() function.
private long renderTimeNs;
// Time in ns spent by the render thread in the swapBuffers() function.
private long renderSwapBufferTimeNs;
// Used for bitmap capturing.
private final GlTextureFrameBuffer bitmapTextureFramebuffer =
new GlTextureFrameBuffer(GLES20.GL_RGBA);
private final Runnable logStatisticsRunnable = new Runnable() {
@Override
public void run() {
logStatistics();
synchronized (threadLock) {
if (eglThread != null) {
eglThread.getHandler().removeCallbacks(logStatisticsRunnable);
eglThread.getHandler().postDelayed(
logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC));
}
}
}
};
private final EglSurfaceCreation eglSurfaceCreationRunnable = new EglSurfaceCreation();
/**
* Standard constructor. The name will be included when logging. In order to render something,
* you must first call init() and createEglSurface.
*/
public EglRenderer(String name) {
this(name, new VideoFrameDrawer());
}
public EglRenderer(String name, VideoFrameDrawer videoFrameDrawer) {
this.name = name;
this.frameDrawer = videoFrameDrawer;
}
public void init(
EglThread eglThread, RendererCommon.GlDrawer drawer, boolean usePresentationTimeStamp) {
synchronized (threadLock) {
if (this.eglThread != null) {
throw new IllegalStateException(name + "Already initialized");
}
logD("Initializing EglRenderer");
this.eglThread = eglThread;
this.drawer = drawer;
this.usePresentationTimeStamp = usePresentationTimeStamp;
eglThread.addExceptionCallback(eglExceptionCallback);
eglBase = eglThread.createEglBaseWithSharedConnection();
eglThread.getHandler().post(eglSurfaceCreationRunnable);
final long currentTimeNs = System.nanoTime();
resetStatistics(currentTimeNs);
eglThread.getHandler().postDelayed(
logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC));
}
}
/**
* Initialize this class, sharing resources with `sharedContext`. The custom `drawer` will be used
* for drawing frames on the EGLSurface. This class is responsible for calling release() on
* `drawer`. It is allowed to call init() to reinitialize the renderer after a previous
* init()/release() cycle. If usePresentationTimeStamp is true, eglPresentationTimeANDROID will be
* set with the frame timestamps, which specifies desired presentation time and might be useful
* for e.g. syncing audio and video.
*/
public void init(@Nullable final EglBase.Context sharedContext, final int[] configAttributes,
RendererCommon.GlDrawer drawer, boolean usePresentationTimeStamp) {
EglThread thread =
EglThread.create(/* releaseMonitor= */ null, sharedContext, configAttributes);
init(thread, drawer, usePresentationTimeStamp);
}
/**
* Same as above with usePresentationTimeStamp set to false.
*
* @see #init(EglBase.Context, int[], RendererCommon.GlDrawer, boolean)
*/
public void init(@Nullable final EglBase.Context sharedContext, final int[] configAttributes,
RendererCommon.GlDrawer drawer) {
init(sharedContext, configAttributes, drawer, /* usePresentationTimeStamp= */ false);
}
public void createEglSurface(Surface surface) {
createEglSurfaceInternal(surface);
}
public void createEglSurface(SurfaceTexture surfaceTexture) {
createEglSurfaceInternal(surfaceTexture);
}
private void createEglSurfaceInternal(Object surface) {
eglSurfaceCreationRunnable.setSurface(surface);
postToRenderThread(eglSurfaceCreationRunnable);
}
/**
* Block until any pending frame is returned and all GL resources released, even if an interrupt
* occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function
* should be called before the Activity is destroyed and the EGLContext is still valid. If you
* don't call this function, the GL resources might leak.
*/
public void release() {
logD("Releasing.");
final CountDownLatch eglCleanupBarrier = new CountDownLatch(1);
synchronized (threadLock) {
if (eglThread == null) {
logD("Already released");
return;
}
eglThread.getHandler().removeCallbacks(logStatisticsRunnable);
eglThread.removeExceptionCallback(eglExceptionCallback);
// Release EGL and GL resources on render thread.
eglThread.getHandler().postAtFrontOfQueue(() -> {
// Detach current shader program.
synchronized (EglBase.lock) {
GLES20.glUseProgram(/* program= */ 0);
}
if (drawer != null) {
drawer.release();
drawer = null;
}
frameDrawer.release();
bitmapTextureFramebuffer.release();
if (eglBase != null) {
logD("eglBase detach and release.");
eglBase.detachCurrent();
eglBase.release();
eglBase = null;
}
frameListeners.clear();
eglCleanupBarrier.countDown();
});
// Don't accept any more frames or messages to the render thread.
eglThread.release();
eglThread = null;
}
// Make sure the EGL/GL cleanup posted above is executed.
ThreadUtils.awaitUninterruptibly(eglCleanupBarrier);
synchronized (frameLock) {
if (pendingFrame != null) {
pendingFrame.release();
pendingFrame = null;
}
}
logD("Releasing done.");
}
/**
* Reset the statistics logged in logStatistics().
*/
private void resetStatistics(long currentTimeNs) {
synchronized (statisticsLock) {
statisticsStartTimeNs = currentTimeNs;
framesReceived = 0;
framesDropped = 0;
framesRendered = 0;
renderTimeNs = 0;
renderSwapBufferTimeNs = 0;
}
}
public void printStackTrace() {
synchronized (threadLock) {
final Thread renderThread =
(eglThread == null) ? null : eglThread.getHandler().getLooper().getThread();
if (renderThread != null) {
final StackTraceElement[] renderStackTrace = renderThread.getStackTrace();
if (renderStackTrace.length > 0) {
logW("EglRenderer stack trace:");
for (StackTraceElement traceElem : renderStackTrace) {
logW(traceElem.toString());
}
}
}
}
}
/**
* Set if the video stream should be mirrored horizontally or not.
*/
public void setMirror(final boolean mirror) {
synchronized (layoutLock) {
this.mirrorHorizontally = mirror;
}
}
/**
* Set if the video stream should be mirrored vertically or not.
*/
public void setMirrorVertically(final boolean mirrorVertically) {
synchronized (layoutLock) {
this.mirrorVertically = mirrorVertically;
}
}
/**
* Set layout aspect ratio. This is used to crop frames when rendering to avoid stretched video.
* Set this to 0 to disable cropping.
*/
public void setLayoutAspectRatio(float layoutAspectRatio) {
synchronized (layoutLock) {
this.layoutAspectRatio = layoutAspectRatio;
}
}
/**
* Limit render framerate.
*
* @param fps Limit render framerate to this value, or use Float.POSITIVE_INFINITY to disable fps
* reduction.
*/
public void setFpsReduction(float fps) {
synchronized (fpsReductionLock) {
final long previousRenderPeriodNs = minRenderPeriodNs;
if (fps <= 0) {
minRenderPeriodNs = Long.MAX_VALUE;
} else {
minRenderPeriodNs = (long) (TimeUnit.SECONDS.toNanos(1) / fps);
}
if (minRenderPeriodNs != previousRenderPeriodNs) {
// Fps reduction changed - reset frame time.
nextFrameTimeNs = System.nanoTime();
}
}
}
public void disableFpsReduction() {
setFpsReduction(Float.POSITIVE_INFINITY /* fps */);
}
public void pauseVideo() {
setFpsReduction(0 /* fps */);
}
/**
* Register a callback to be invoked when a new video frame has been received. This version uses
* the drawer of the EglRenderer that was passed in init.
*
* @param listener The callback to be invoked. The callback will be invoked on the render thread.
* It should be lightweight and must not call removeFrameListener.
* @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
* required.
*/
public void addFrameListener(final FrameListener listener, final float scale) {
addFrameListener(listener, scale, null, false /* applyFpsReduction */);
}
/**
* Register a callback to be invoked when a new video frame has been received.
*
* @param listener The callback to be invoked. The callback will be invoked on the render thread.
* It should be lightweight and must not call removeFrameListener.
* @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
* required.
* @param drawer Custom drawer to use for this frame listener or null to use the default one.
*/
public void addFrameListener(
final FrameListener listener, final float scale, final RendererCommon.GlDrawer drawerParam) {
addFrameListener(listener, scale, drawerParam, false /* applyFpsReduction */);
}
/**
* Register a callback to be invoked when a new video frame has been received.
*
* @param listener The callback to be invoked. The callback will be invoked on the render thread.
* It should be lightweight and must not call removeFrameListener.
* @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
* required.
* @param drawer Custom drawer to use for this frame listener or null to use the default one.
* @param applyFpsReduction This callback will not be called for frames that have been dropped by
* FPS reduction.
*/
public void addFrameListener(final FrameListener listener, final float scale,
@Nullable final RendererCommon.GlDrawer drawerParam, final boolean applyFpsReduction) {
postToRenderThread(() -> {
final RendererCommon.GlDrawer listenerDrawer = drawerParam == null ? drawer : drawerParam;
frameListeners.add(
new FrameListenerAndParams(listener, scale, listenerDrawer, applyFpsReduction));
});
}
/**
* Remove any pending callback that was added with addFrameListener. If the callback is not in
* the queue, nothing happens. It is guaranteed that the callback won't be called after this
* method returns.
*
* @param listener The callback to remove.
*/
public void removeFrameListener(final FrameListener listener) {
final CountDownLatch latch = new CountDownLatch(1);
synchronized (threadLock) {
if (eglThread == null) {
return;
}
if (Thread.currentThread() == eglThread.getHandler().getLooper().getThread()) {
throw new RuntimeException("removeFrameListener must not be called on the render thread.");
}
postToRenderThread(() -> {
latch.countDown();
final Iterator<FrameListenerAndParams> iter = frameListeners.iterator();
while (iter.hasNext()) {
if (iter.next().listener == listener) {
iter.remove();
}
}
});
}
ThreadUtils.awaitUninterruptibly(latch);
}
/** Can be set in order to be notified about errors encountered during rendering. */
public void setErrorCallback(ErrorCallback errorCallback) {
this.errorCallback = errorCallback;
}
// VideoSink interface.
@Override
public void onFrame(VideoFrame frame) {
synchronized (statisticsLock) {
++framesReceived;
}
final boolean dropOldFrame;
synchronized (threadLock) {
if (eglThread == null) {
logD("Dropping frame - Not initialized or already released.");
return;
}
synchronized (frameLock) {
dropOldFrame = (pendingFrame != null);
if (dropOldFrame) {
pendingFrame.release();
}
pendingFrame = frame;
pendingFrame.retain();
eglThread.getHandler().post(this::renderFrameOnRenderThread);
}
}
if (dropOldFrame) {
synchronized (statisticsLock) {
++framesDropped;
}
}
}
/**
* Release EGL surface. The completionCallback is invoked once the render thread is done
* touching the EGL surface.
*/
public void releaseEglSurface(final Runnable completionCallback) {
// Stop the creation runnable from re-creating the surface, then post the cleanup;
// completionCallback runs once the render thread is no longer touching the Surface.
eglSurfaceCreationRunnable.setSurface(null /* surface */);
synchronized (threadLock) {
if (eglThread != null) {
eglThread.getHandler().removeCallbacks(eglSurfaceCreationRunnable);
eglThread.getHandler().postAtFrontOfQueue(() -> {
if (eglBase != null) {
eglBase.detachCurrent();
eglBase.releaseSurface();
}
completionCallback.run();
});
return;
}
}
completionCallback.run();
}
/**
* Private helper function to post tasks safely.
*/
private void postToRenderThread(Runnable runnable) {
synchronized (threadLock) {
if (eglThread != null) {
eglThread.getHandler().post(runnable);
}
}
}
private void clearSurfaceOnRenderThread(float r, float g, float b, float a) {
if (eglBase != null && eglBase.hasSurface()) {
logD("clearSurface");
eglBase.makeCurrent();
GLES20.glClearColor(r, g, b, a);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
eglBase.swapBuffers();
}
}
/**
* Post a task to clear the surface to a transparent uniform color.
*/
public void clearImage() {
clearImage(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
}
/**
* Post a task to clear the surface to a specific color.
*/
public void clearImage(final float r, final float g, final float b, final float a) {
synchronized (threadLock) {
if (eglThread == null) {
return;
}
eglThread.getHandler().postAtFrontOfQueue(() -> clearSurfaceOnRenderThread(r, g, b, a));
}
}
private void swapBuffersOnRenderThread(final VideoFrame frame, long swapBuffersStartTimeNs) {
synchronized (threadLock) {
if (eglThread != null) {
eglThread.scheduleRenderUpdate(
runsInline -> {
if (!runsInline) {
if (eglBase == null || !eglBase.hasSurface()) {
return;
}
eglBase.makeCurrent();
}
if (usePresentationTimeStamp) {
eglBase.swapBuffers(frame.getTimestampNs());
} else {
eglBase.swapBuffers();
}
synchronized (statisticsLock) {
renderSwapBufferTimeNs += (System.nanoTime() - swapBuffersStartTimeNs);
}
});
}
}
}
/**
* Renders and releases `pendingFrame`.
*/
private void renderFrameOnRenderThread() {
// Fetch and render `pendingFrame`.
final VideoFrame frame;
synchronized (frameLock) {
if (pendingFrame == null) {
return;
}
frame = pendingFrame;
pendingFrame = null;
}
if (eglBase == null || !eglBase.hasSurface()) {
logD("Dropping frame - No surface");
frame.release();
return;
}
eglBase.makeCurrent();
// Check if fps reduction is active.
final boolean shouldRenderFrame;
synchronized (fpsReductionLock) {
if (minRenderPeriodNs == Long.MAX_VALUE) {
// Rendering is paused.
shouldRenderFrame = false;
} else if (minRenderPeriodNs <= 0) {
// FPS reduction is disabled.
shouldRenderFrame = true;
} else {
final long currentTimeNs = System.nanoTime();
if (currentTimeNs < nextFrameTimeNs) {
logD("Skipping frame rendering - fps reduction is active.");
shouldRenderFrame = false;
} else {
nextFrameTimeNs += minRenderPeriodNs;
// The time for the next frame should always be in the future.
nextFrameTimeNs = Math.max(nextFrameTimeNs, currentTimeNs);
shouldRenderFrame = true;
}
}
}
final long startTimeNs = System.nanoTime();
final float frameAspectRatio = frame.getRotatedWidth() / (float) frame.getRotatedHeight();
final float drawnAspectRatio;
synchronized (layoutLock) {
drawnAspectRatio = layoutAspectRatio != 0f ? layoutAspectRatio : frameAspectRatio;
}
final float scaleX;
final float scaleY;
if (frameAspectRatio > drawnAspectRatio) {
scaleX = drawnAspectRatio / frameAspectRatio;
scaleY = 1f;
} else {
scaleX = 1f;
scaleY = frameAspectRatio / drawnAspectRatio;
}
drawMatrix.reset();
drawMatrix.preTranslate(0.5f, 0.5f);
drawMatrix.preScale(mirrorHorizontally ? -1f : 1f, mirrorVertically ? -1f : 1f);
drawMatrix.preScale(scaleX, scaleY);
drawMatrix.preTranslate(-0.5f, -0.5f);
try {
if (shouldRenderFrame) {
GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
frameDrawer.drawFrame(frame, drawer, drawMatrix, 0 /* viewportX */, 0 /* viewportY */,
eglBase.surfaceWidth(), eglBase.surfaceHeight());
final long swapBuffersStartTimeNs = System.nanoTime();
swapBuffersOnRenderThread(frame, swapBuffersStartTimeNs);
synchronized (statisticsLock) {
++framesRendered;
renderTimeNs += (swapBuffersStartTimeNs - startTimeNs);
}
}
notifyCallbacks(frame, shouldRenderFrame);
} catch (GlUtil.GlOutOfMemoryException e) {
logE("Error while drawing frame", e);
final ErrorCallback errorCallback = this.errorCallback;
if (errorCallback != null) {
errorCallback.onGlOutOfMemory();
}
// Attempt to free up some resources.
drawer.release();
frameDrawer.release();
bitmapTextureFramebuffer.release();
// Continue on purpose and retry for the next frame. In the worst case, this is a
// continuous problem and no more frames will be drawn.
} finally {
frame.release();
}
}
private void notifyCallbacks(VideoFrame frame, boolean wasRendered) {
if (frameListeners.isEmpty())
return;
drawMatrix.reset();
drawMatrix.preTranslate(0.5f, 0.5f);
drawMatrix.preScale(mirrorHorizontally ? -1f : 1f, mirrorVertically ? -1f : 1f);
drawMatrix.preScale(1f, -1f); // We want the output to be upside down for Bitmap.
drawMatrix.preTranslate(-0.5f, -0.5f);
Iterator<FrameListenerAndParams> it = frameListeners.iterator();
while (it.hasNext()) {
FrameListenerAndParams listenerAndParams = it.next();
if (!wasRendered && listenerAndParams.applyFpsReduction) {
continue;
}
it.remove();
final int scaledWidth = (int) (listenerAndParams.scale * frame.getRotatedWidth());
final int scaledHeight = (int) (listenerAndParams.scale * frame.getRotatedHeight());
if (scaledWidth == 0 || scaledHeight == 0) {
listenerAndParams.listener.onFrame(null);
continue;
}
bitmapTextureFramebuffer.setSize(scaledWidth, scaledHeight);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, bitmapTextureFramebuffer.getFrameBufferId());
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
GLES20.GL_TEXTURE_2D, bitmapTextureFramebuffer.getTextureId(), 0);
GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
frameDrawer.drawFrame(frame, listenerAndParams.drawer, drawMatrix, 0 /* viewportX */,
0 /* viewportY */, scaledWidth, scaledHeight);
final ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(scaledWidth * scaledHeight * 4);
GLES20.glViewport(0, 0, scaledWidth, scaledHeight);
GLES20.glReadPixels(
0, 0, scaledWidth, scaledHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, bitmapBuffer);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GlUtil.checkNoGLES2Error("EglRenderer.notifyCallbacks");
final Bitmap bitmap = Bitmap.createBitmap(scaledWidth, scaledHeight, Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(bitmapBuffer);
listenerAndParams.listener.onFrame(bitmap);
}
}
private String averageTimeAsString(long sumTimeNs, int count) {
return (count <= 0) ? "NA" : TimeUnit.NANOSECONDS.toMicros(sumTimeNs / count) + " us";
}
private void logStatistics() {
final DecimalFormat fpsFormat = new DecimalFormat("#.0");
final long currentTimeNs = System.nanoTime();
synchronized (statisticsLock) {
final long elapsedTimeNs = currentTimeNs - statisticsStartTimeNs;
if (elapsedTimeNs <= 0 || (minRenderPeriodNs == Long.MAX_VALUE && framesReceived == 0)) {
return;
}
final float renderFps = framesRendered * TimeUnit.SECONDS.toNanos(1) / (float) elapsedTimeNs;
logD("Duration: " + TimeUnit.NANOSECONDS.toMillis(elapsedTimeNs) + " ms."
+ " Frames received: " + framesReceived + "."
+ " Dropped: " + framesDropped + "."
+ " Rendered: " + framesRendered + "."
+ " Render fps: " + fpsFormat.format(renderFps) + "."
+ " Average render time: " + averageTimeAsString(renderTimeNs, framesRendered) + "."
+ " Average swapBuffer time: "
+ averageTimeAsString(renderSwapBufferTimeNs, framesRendered) + ".");
resetStatistics(currentTimeNs);
}
}
private void logE(String string, Throwable e) {
Logging.e(TAG, name + string, e);
}
private void logD(String string) {
Logging.d(TAG, name + string);
}
private void logW(String string) {
Logging.w(TAG, name + string);
}
}
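A hypothetical wiring of EglRenderer as a VideoSink; the surface and snapshot listener are supplied by the caller, and GlRectDrawer is the stock org.webrtc drawer:

static EglRenderer startRenderer(
    EglBase.Context sharedContext, Surface surface, EglRenderer.FrameListener snapshotListener) {
  EglRenderer renderer = new EglRenderer("remote: ");
  renderer.init(sharedContext, EglBase.CONFIG_PLAIN, new GlRectDrawer());
  renderer.createEglSurface(surface);
  // One-shot half-resolution Bitmap capture; the listener is removed after it
  // fires (note the it.remove() in notifyCallbacks above).
  renderer.addFrameListener(snapshotListener, 0.5f);
  return renderer; // attach it as a sink, e.g. videoTrack.addSink(renderer)
}

The returned renderer must eventually get release(), ideally before the Activity owning the surface is destroyed.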

View File

@@ -0,0 +1,216 @@
/*
* Copyright 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import androidx.annotation.GuardedBy;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
import org.webrtc.EglBase.EglConnection;
/** EGL graphics thread that allows multiple clients to share the same underlying EGLContext. */
public class EglThread implements RenderSynchronizer.Listener {
/** Callback for externally managed reference count. */
public interface ReleaseMonitor {
/**
* Called by EglThread when a client releases its reference. Returns true when there are no more
* references and resources should be released.
*/
boolean onRelease(EglThread eglThread);
}
/** Interface for clients to schedule rendering updates that will run synchronized. */
public interface RenderUpdate {
/**
* Called by EglThread when the rendering window is open. `runsInline` is true when the update
* is executed directly while the client schedules the update.
*/
void update(boolean runsInline);
}
public static EglThread create(
@Nullable ReleaseMonitor releaseMonitor,
@Nullable final EglBase.Context sharedContext,
final int[] configAttributes,
@Nullable RenderSynchronizer renderSynchronizer) {
final HandlerThread renderThread = new HandlerThread("EglThread");
renderThread.start();
HandlerWithExceptionCallbacks handler =
new HandlerWithExceptionCallbacks(renderThread.getLooper());
// Not creating the EGLContext on the thread it will be used on seems to cause issues with
// creating window surfaces on certain devices. So keep the same legacy behavior as EglRenderer
// and create the context on the render thread.
EglConnection eglConnection = ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
// If sharedContext is null, then texture frames are disabled. This is typically for old
// devices that might not be fully spec compliant, so force EGL 1.0 since EGL 1.4 has
// caused trouble on some weird devices.
if (sharedContext == null) {
return EglConnection.createEgl10(configAttributes);
} else {
return EglConnection.create(sharedContext, configAttributes);
}
});
return new EglThread(
releaseMonitor != null ? releaseMonitor : eglThread -> true,
handler,
eglConnection,
renderSynchronizer);
}
public static EglThread create(
@Nullable ReleaseMonitor releaseMonitor,
@Nullable final EglBase.Context sharedContext,
final int[] configAttributes) {
return create(releaseMonitor, sharedContext, configAttributes, /* renderSynchronizer= */ null);
}
/**
* Handler that triggers callbacks when an uncaught exception happens when handling a message.
*/
private static class HandlerWithExceptionCallbacks extends Handler {
private final Object callbackLock = new Object();
@GuardedBy("callbackLock") private final List<Runnable> exceptionCallbacks = new ArrayList<>();
public HandlerWithExceptionCallbacks(Looper looper) {
super(looper);
}
@Override
public void dispatchMessage(Message msg) {
try {
super.dispatchMessage(msg);
} catch (Exception e) {
Logging.e("EglThread", "Exception on EglThread", e);
synchronized (callbackLock) {
for (Runnable callback : exceptionCallbacks) {
callback.run();
}
}
throw e;
}
}
public void addExceptionCallback(Runnable callback) {
synchronized (callbackLock) {
exceptionCallbacks.add(callback);
}
}
public void removeExceptionCallback(Runnable callback) {
synchronized (callbackLock) {
exceptionCallbacks.remove(callback);
}
}
}
private final ReleaseMonitor releaseMonitor;
private final HandlerWithExceptionCallbacks handler;
private final EglConnection eglConnection;
private final RenderSynchronizer renderSynchronizer;
private final List<RenderUpdate> pendingRenderUpdates = new ArrayList<>();
private boolean renderWindowOpen = true;
private EglThread(
ReleaseMonitor releaseMonitor,
HandlerWithExceptionCallbacks handler,
EglConnection eglConnection,
RenderSynchronizer renderSynchronizer) {
this.releaseMonitor = releaseMonitor;
this.handler = handler;
this.eglConnection = eglConnection;
this.renderSynchronizer = renderSynchronizer;
if (renderSynchronizer != null) {
renderSynchronizer.registerListener(this);
}
}
public void release() {
if (!releaseMonitor.onRelease(this)) {
// Thread is still in use, do not release yet.
return;
}
if (renderSynchronizer != null) {
renderSynchronizer.removeListener(this);
}
handler.post(eglConnection::release);
handler.getLooper().quitSafely();
}
/**
* Creates an EglBase instance with the EglThread's EglConnection. This method can be called on
* any thread, but the returned EglBase instance should only be used on this EglThread's Handler.
*/
public EglBase createEglBaseWithSharedConnection() {
return EglBase.create(eglConnection);
}
/**
* Returns the Handler to interact with Gl/EGL on. Callers need to make sure that their own
* EglBase is current on the handler before running any graphics operations since the EglThread
* can be shared by multiple clients.
*/
public Handler getHandler() {
return handler;
}
/**
* Adds a callback that will be called on the EGL thread if there is an exception on the thread.
*/
public void addExceptionCallback(Runnable callback) {
handler.addExceptionCallback(callback);
}
/**
* Removes a previously added exception callback.
*/
public void removeExceptionCallback(Runnable callback) {
handler.removeExceptionCallback(callback);
}
/**
* Schedules a render update (like swapBuffers) to be run in sync with other updates on the next
* open render window. If the render window is currently open the update will run immediately.
* This method must be called on the EglThread during a render pass.
*/
public void scheduleRenderUpdate(RenderUpdate update) {
if (renderWindowOpen) {
update.update(/* runsInline = */true);
} else {
pendingRenderUpdates.add(update);
}
}
@Override
public void onRenderWindowOpen() {
handler.post(
() -> {
renderWindowOpen = true;
for (RenderUpdate update : pendingRenderUpdates) {
update.update(/* runsInline = */false);
}
pendingRenderUpdates.clear();
});
}
@Override
public void onRenderWindowClose() {
handler.post(() -> renderWindowOpen = false);
}
}
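// --- Editor's illustrative sketch; not part of the upstream file. ---
// One plausible single-client use of EglThread; all names besides the org.webrtc
// types are hypothetical. With a null ReleaseMonitor, the default monitor tears
// the thread down on the first release() call.
final class EglThreadUsageSketch {
  static void runGlWorkOnce(EglBase.Context sharedContext) {
    EglThread eglThread =
        EglThread.create(/* releaseMonitor= */ null, sharedContext, EglBase.CONFIG_PLAIN);
    eglThread.getHandler().post(() -> {
      // Each client wraps the shared EGL connection in its own EglBase and must
      // make it current before issuing GL calls, since the thread may be shared.
      EglBase eglBase = eglThread.createEglBaseWithSharedConnection();
      eglBase.createDummyPbufferSurface();
      eglBase.makeCurrent();
      // ... GL draw calls ...
      eglBase.release();
    });
    eglThread.release(); // Already-posted work still runs; the looper then quits safely.
  }
}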

View File

@ -0,0 +1,17 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Empty class for use in libjingle_peerconnection_java because all targets require at least one
* Java file.
*/
class Empty {}

View File

@ -0,0 +1,183 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;
import java.util.concurrent.TimeUnit;
/**
* An encoded frame from a video stream. Used as an input for decoders and as an output for
* encoders.
*/
public class EncodedImage implements RefCounted {
// Must be kept in sync with common_types.h FrameType.
public enum FrameType {
EmptyFrame(0),
VideoFrameKey(3),
VideoFrameDelta(4);
private final int nativeIndex;
private FrameType(int nativeIndex) {
this.nativeIndex = nativeIndex;
}
public int getNative() {
return nativeIndex;
}
@CalledByNative("FrameType")
static FrameType fromNativeIndex(int nativeIndex) {
for (FrameType type : FrameType.values()) {
if (type.getNative() == nativeIndex) {
return type;
}
}
throw new IllegalArgumentException("Unknown native frame type: " + nativeIndex);
}
}
private final RefCountDelegate refCountDelegate;
public final ByteBuffer buffer;
public final int encodedWidth;
public final int encodedHeight;
public final long captureTimeMs; // Deprecated
public final long captureTimeNs;
public final FrameType frameType;
public final int rotation;
public final @Nullable Integer qp;
// TODO(bugs.webrtc.org/9378): Use retain and release from jni code.
@Override
public void retain() {
refCountDelegate.retain();
}
@Override
public void release() {
refCountDelegate.release();
}
@CalledByNative
private EncodedImage(ByteBuffer buffer, @Nullable Runnable releaseCallback, int encodedWidth,
int encodedHeight, long captureTimeNs, FrameType frameType, int rotation,
@Nullable Integer qp) {
this.buffer = buffer;
this.encodedWidth = encodedWidth;
this.encodedHeight = encodedHeight;
this.captureTimeMs = TimeUnit.NANOSECONDS.toMillis(captureTimeNs);
this.captureTimeNs = captureTimeNs;
this.frameType = frameType;
this.rotation = rotation;
this.qp = qp;
this.refCountDelegate = new RefCountDelegate(releaseCallback);
}
@CalledByNative
private ByteBuffer getBuffer() {
return buffer;
}
@CalledByNative
private int getEncodedWidth() {
return encodedWidth;
}
@CalledByNative
private int getEncodedHeight() {
return encodedHeight;
}
@CalledByNative
private long getCaptureTimeNs() {
return captureTimeNs;
}
@CalledByNative
private int getFrameType() {
return frameType.getNative();
}
@CalledByNative
private int getRotation() {
return rotation;
}
@CalledByNative
private @Nullable Integer getQp() {
return qp;
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private ByteBuffer buffer;
private @Nullable Runnable releaseCallback;
private int encodedWidth;
private int encodedHeight;
private long captureTimeNs;
private EncodedImage.FrameType frameType;
private int rotation;
private @Nullable Integer qp;
private Builder() {}
public Builder setBuffer(ByteBuffer buffer, @Nullable Runnable releaseCallback) {
this.buffer = buffer;
this.releaseCallback = releaseCallback;
return this;
}
public Builder setEncodedWidth(int encodedWidth) {
this.encodedWidth = encodedWidth;
return this;
}
public Builder setEncodedHeight(int encodedHeight) {
this.encodedHeight = encodedHeight;
return this;
}
@Deprecated
public Builder setCaptureTimeMs(long captureTimeMs) {
this.captureTimeNs = TimeUnit.MILLISECONDS.toNanos(captureTimeMs);
return this;
}
public Builder setCaptureTimeNs(long captureTimeNs) {
this.captureTimeNs = captureTimeNs;
return this;
}
public Builder setFrameType(EncodedImage.FrameType frameType) {
this.frameType = frameType;
return this;
}
public Builder setRotation(int rotation) {
this.rotation = rotation;
return this;
}
public Builder setQp(@Nullable Integer qp) {
this.qp = qp;
return this;
}
public EncodedImage createEncodedImage() {
return new EncodedImage(buffer, releaseCallback, encodedWidth, encodedHeight, captureTimeNs,
frameType, rotation, qp);
}
}
}
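// --- Editor's illustrative sketch; not part of the upstream file. ---
// Wraps an already-encoded frame in an EncodedImage. The release callback is
// invoked once the last retain()ed reference is release()d; the dimensions used
// here are placeholder values.
final class EncodedImageBuilderSketch {
  static EncodedImage wrapKeyFrame(java.nio.ByteBuffer encodedData, long captureTimeNs) {
    return EncodedImage.builder()
        .setBuffer(encodedData, () -> { /* e.g. return the buffer to a pool */ })
        .setEncodedWidth(1280)
        .setEncodedHeight(720)
        .setCaptureTimeNs(captureTimeNs)
        .setFrameType(EncodedImage.FrameType.VideoFrameKey)
        .setRotation(0)
        .setQp(null) // QP is optional; null means "not provided".
        .createEncodedImage();
  }
}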

View File

@ -0,0 +1,22 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Factory for creating webrtc::FecControllerFactory instances.
*/
public interface FecControllerFactoryFactoryInterface {
/**
* Dynamically allocates a webrtc::FecControllerFactory instance and returns a pointer to it.
* The caller takes ownership of the object.
*/
public long createNative();
}

View File

@ -0,0 +1,201 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
import android.os.SystemClock;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.charset.Charset;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.TimeUnit;
public class FileVideoCapturer implements VideoCapturer {
private interface VideoReader {
VideoFrame getNextFrame();
void close();
}
/**
* Reads video data from a file in the .y4m container format.
*/
@SuppressWarnings("StringSplitter")
private static class VideoReaderY4M implements VideoReader {
private static final String TAG = "VideoReaderY4M";
private static final String Y4M_FRAME_DELIMETER = "FRAME";
private static final int FRAME_DELIMETER_LENGTH = Y4M_FRAME_DELIMETER.length() + 1;
private final int frameWidth;
private final int frameHeight;
// First char after header
private final long videoStart;
private final RandomAccessFile mediaFile;
private final FileChannel mediaFileChannel;
public VideoReaderY4M(String file) throws IOException {
mediaFile = new RandomAccessFile(file, "r");
mediaFileChannel = mediaFile.getChannel();
StringBuilder builder = new StringBuilder();
for (;;) {
int c = mediaFile.read();
if (c == -1) {
// End of file reached.
throw new RuntimeException("Found end of file before end of header for file: " + file);
}
if (c == '\n') {
// End of header found.
break;
}
builder.append((char) c);
}
videoStart = mediaFileChannel.position();
String header = builder.toString();
String[] headerTokens = header.split("[ ]");
int w = 0;
int h = 0;
String colorSpace = "420";
for (String tok : headerTokens) {
char c = tok.charAt(0);
switch (c) {
case 'W':
w = Integer.parseInt(tok.substring(1));
break;
case 'H':
h = Integer.parseInt(tok.substring(1));
break;
case 'C':
colorSpace = tok.substring(1);
break;
}
}
Logging.d(TAG, "Color space: " + colorSpace);
if (!colorSpace.equals("420") && !colorSpace.equals("420mpeg2")) {
throw new IllegalArgumentException(
"Does not support any other color space than I420 or I420mpeg2");
}
if ((w % 2) == 1 || (h % 2) == 1) {
throw new IllegalArgumentException("Does not support odd width or height");
}
frameWidth = w;
frameHeight = h;
Logging.d(TAG, "frame dim: (" + w + ", " + h + ")");
}
@Override
public VideoFrame getNextFrame() {
final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
final JavaI420Buffer buffer = JavaI420Buffer.allocate(frameWidth, frameHeight);
final ByteBuffer dataY = buffer.getDataY();
final ByteBuffer dataU = buffer.getDataU();
final ByteBuffer dataV = buffer.getDataV();
final int chromaHeight = (frameHeight + 1) / 2;
final int sizeY = frameHeight * buffer.getStrideY();
final int sizeU = chromaHeight * buffer.getStrideU();
final int sizeV = chromaHeight * buffer.getStrideV();
try {
ByteBuffer frameDelim = ByteBuffer.allocate(FRAME_DELIMETER_LENGTH);
if (mediaFileChannel.read(frameDelim) < FRAME_DELIMETER_LENGTH) {
// We reached the end of the file; loop back to the start of the video.
mediaFileChannel.position(videoStart);
if (mediaFileChannel.read(frameDelim) < FRAME_DELIMETER_LENGTH) {
throw new RuntimeException("Error looping video");
}
}
String frameDelimStr = new String(frameDelim.array(), Charset.forName("US-ASCII"));
if (!frameDelimStr.equals(Y4M_FRAME_DELIMETER + "\n")) {
throw new RuntimeException(
"Frames should be delimited by FRAME plus newline, found delimter was: '"
+ frameDelimStr + "'");
}
mediaFileChannel.read(dataY);
mediaFileChannel.read(dataU);
mediaFileChannel.read(dataV);
} catch (IOException e) {
throw new RuntimeException(e);
}
return new VideoFrame(buffer, 0 /* rotation */, captureTimeNs);
}
@Override
public void close() {
try {
// Closing a file also closes the channel.
mediaFile.close();
} catch (IOException e) {
Logging.e(TAG, "Problem closing file", e);
}
}
}
private final static String TAG = "FileVideoCapturer";
private final VideoReader videoReader;
private CapturerObserver capturerObserver;
private final Timer timer = new Timer();
private final TimerTask tickTask = new TimerTask() {
@Override
public void run() {
tick();
}
};
public FileVideoCapturer(String inputFile) throws IOException {
try {
videoReader = new VideoReaderY4M(inputFile);
} catch (IOException e) {
Logging.d(TAG, "Could not open video file: " + inputFile);
throw e;
}
}
public void tick() {
VideoFrame videoFrame = videoReader.getNextFrame();
capturerObserver.onFrameCaptured(videoFrame);
videoFrame.release();
}
@Override
public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
CapturerObserver capturerObserver) {
this.capturerObserver = capturerObserver;
}
@Override
public void startCapture(int width, int height, int framerate) {
timer.schedule(tickTask, 0, 1000 / framerate);
}
@Override
public void stopCapture() throws InterruptedException {
timer.cancel();
}
@Override
public void changeCaptureFormat(int width, int height, int framerate) {
// Empty on purpose
}
@Override
public void dispose() {
videoReader.close();
}
@Override
public boolean isScreencast() {
return false;
}
}
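// --- Editor's illustrative sketch; not part of the upstream file. ---
// Wires a FileVideoCapturer into a VideoSource. The factory, EglBase, and file
// path are assumptions supplied by the caller; the width/height passed to
// startCapture() are ignored because the .y4m header defines the frame size.
final class FileVideoCapturerSketch {
  static VideoCapturer startFileCapture(android.content.Context appContext,
      PeerConnectionFactory factory, EglBase eglBase) throws java.io.IOException {
    VideoCapturer capturer = new FileVideoCapturer("/sdcard/test.y4m");
    VideoSource source = factory.createVideoSource(capturer.isScreencast());
    SurfaceTextureHelper helper =
        SurfaceTextureHelper.create("FileCaptureThread", eglBase.getEglBaseContext());
    capturer.initialize(helper, appContext, source.getCapturerObserver());
    capturer.startCapture(/* width= */ 0, /* height= */ 0, /* framerate= */ 30);
    return capturer;
  }
}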

View File

@ -0,0 +1,26 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* The FrameDecryptor interface allows Java API users to provide a
* pointer to their native implementation of the FrameDecryptorInterface.
* FrameDecryptors are extremely performance sensitive as they must process all
* incoming video and audio frames. Due to this reason they should always be
* backed by a native implementation.
* @note Not ready for production use.
*/
public interface FrameDecryptor {
/**
* @return A FrameDecryptorInterface pointer.
*/
long getNativeFrameDecryptor();
}

View File

@ -0,0 +1,26 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* The FrameEncryptor interface allows Java API users to provide a pointer to
* their native implementation of the FrameEncryptorInterface.
* FrameEncryptors are extremely performance sensitive as they must process all
* outgoing video and audio frames. Due to this reason they should always be
* backed by a native implementation.
* @note Not ready for production use.
*/
public interface FrameEncryptor {
/**
* @return A FrameEncryptorInterface pointer.
*/
long getNativeFrameEncryptor();
}

View File

@ -0,0 +1,26 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* BitrateAdjuster that adjusts the bitrate to compensate for changes in the framerate. Used with
* hardware codecs that assume the framerate never changes.
*/
class FramerateBitrateAdjuster extends BaseBitrateAdjuster {
private static final int DEFAULT_FRAMERATE_FPS = 30;
@Override
public void setTargets(int targetBitrateBps, double targetFramerateFps) {
// Report the fixed default framerate to the codec, and scale the bitrate to compensate.
this.targetFramerateFps = DEFAULT_FRAMERATE_FPS;
this.targetBitrateBps = (int) (targetBitrateBps * DEFAULT_FRAMERATE_FPS / targetFramerateFps);
}
}
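// Editor's worked example; not part of the upstream file: setTargets(1_000_000, 15)
// stores 30 fps and 1_000_000 * 30 / 15 = 2_000_000 bps. The codec, which assumes a
// fixed 30 fps, then budgets ~66.7 kbit per frame; at the real 15 fps this averages
// back to the requested 1 Mbps.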

View File

@ -0,0 +1,284 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import androidx.annotation.Nullable;
import java.nio.FloatBuffer;
import org.webrtc.GlShader;
import org.webrtc.GlUtil;
import org.webrtc.RendererCommon;
/**
* Helper class to implement an instance of RendererCommon.GlDrawer that can accept multiple input
* sources (OES, RGB, or YUV) using a generic fragment shader as input. The generic fragment shader
* should sample pixel values from the function "sample", which is provided by this class and
* abstracts over the input source type (OES, RGB, or YUV). The texture coordinate
* variable name will be "tc" and the texture matrix in the vertex shader will be "tex_mat". The
* simplest possible generic shader that just draws pixels from the frame unmodified looks like:
* void main() {
* gl_FragColor = sample(tc);
* }
* This class covers the cases for most simple shaders and generates the necessary boilerplate.
* Advanced shaders can always implement RendererCommon.GlDrawer directly.
*/
class GlGenericDrawer implements RendererCommon.GlDrawer {
/**
* The different shader types representing different input sources. YUV here represents three
* separate Y, U, V textures.
*/
public static enum ShaderType { OES, RGB, YUV }
/**
* The shader callbacks are used to customize behavior for a GlDrawer. They provide a hook to set
* uniform variables in the shader before a frame is drawn.
*/
public static interface ShaderCallbacks {
/**
* This callback is called when a new shader has been compiled and created. It will be called
* for the first frame as well as when the shader type is changed. This callback can be used to
* do custom initialization of the shader that only needs to happen once.
*/
void onNewShader(GlShader shader);
/**
* This callback is called before rendering a frame. It can be used to do custom preparation of
* the shader that needs to happen every frame.
*/
void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth, int frameHeight,
int viewportWidth, int viewportHeight);
}
private static final String INPUT_VERTEX_COORDINATE_NAME = "in_pos";
private static final String INPUT_TEXTURE_COORDINATE_NAME = "in_tc";
private static final String TEXTURE_MATRIX_NAME = "tex_mat";
private static final String DEFAULT_VERTEX_SHADER_STRING = "varying vec2 tc;\n"
+ "attribute vec4 in_pos;\n"
+ "attribute vec4 in_tc;\n"
+ "uniform mat4 tex_mat;\n"
+ "void main() {\n"
+ " gl_Position = in_pos;\n"
+ " tc = (tex_mat * in_tc).xy;\n"
+ "}\n";
// Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1)
// is top-right.
private static final FloatBuffer FULL_RECTANGLE_BUFFER = GlUtil.createFloatBuffer(new float[] {
-1.0f, -1.0f, // Bottom left.
1.0f, -1.0f, // Bottom right.
-1.0f, 1.0f, // Top left.
1.0f, 1.0f, // Top right.
});
// Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
private static final FloatBuffer FULL_RECTANGLE_TEXTURE_BUFFER =
GlUtil.createFloatBuffer(new float[] {
0.0f, 0.0f, // Bottom left.
1.0f, 0.0f, // Bottom right.
0.0f, 1.0f, // Top left.
1.0f, 1.0f, // Top right.
});
static String createFragmentShaderString(String genericFragmentSource, ShaderType shaderType) {
final StringBuilder stringBuilder = new StringBuilder();
if (shaderType == ShaderType.OES) {
stringBuilder.append("#extension GL_OES_EGL_image_external : require\n");
}
stringBuilder.append("precision mediump float;\n");
stringBuilder.append("varying vec2 tc;\n");
if (shaderType == ShaderType.YUV) {
stringBuilder.append("uniform sampler2D y_tex;\n");
stringBuilder.append("uniform sampler2D u_tex;\n");
stringBuilder.append("uniform sampler2D v_tex;\n");
// Add separate function for sampling texture.
// The coefficients below (yuv_to_rgb_mat) are the inverse of the matrix defined in YuvConverter.
stringBuilder.append("vec4 sample(vec2 p) {\n");
stringBuilder.append(" float y = texture2D(y_tex, p).r * 1.16438;\n");
stringBuilder.append(" float u = texture2D(u_tex, p).r;\n");
stringBuilder.append(" float v = texture2D(v_tex, p).r;\n");
stringBuilder.append(" return vec4(y + 1.59603 * v - 0.874202,\n");
stringBuilder.append(" y - 0.391762 * u - 0.812968 * v + 0.531668,\n");
stringBuilder.append(" y + 2.01723 * u - 1.08563, 1);\n");
stringBuilder.append("}\n");
stringBuilder.append(genericFragmentSource);
} else {
final String samplerName = shaderType == ShaderType.OES ? "samplerExternalOES" : "sampler2D";
stringBuilder.append("uniform ").append(samplerName).append(" tex;\n");
// Update the sampling function in-place.
stringBuilder.append(genericFragmentSource.replace("sample(", "texture2D(tex, "));
}
return stringBuilder.toString();
}
private final String genericFragmentSource;
private final String vertexShader;
private final ShaderCallbacks shaderCallbacks;
@Nullable private ShaderType currentShaderType;
@Nullable private GlShader currentShader;
private int inPosLocation;
private int inTcLocation;
private int texMatrixLocation;
public GlGenericDrawer(String genericFragmentSource, ShaderCallbacks shaderCallbacks) {
this(DEFAULT_VERTEX_SHADER_STRING, genericFragmentSource, shaderCallbacks);
}
public GlGenericDrawer(
String vertexShader, String genericFragmentSource, ShaderCallbacks shaderCallbacks) {
this.vertexShader = vertexShader;
this.genericFragmentSource = genericFragmentSource;
this.shaderCallbacks = shaderCallbacks;
}
// Visible for testing.
GlShader createShader(ShaderType shaderType) {
return new GlShader(
vertexShader, createFragmentShaderString(genericFragmentSource, shaderType));
}
/**
* Draw an OES texture frame with specified texture transformation matrix. Required resources are
* allocated at the first call to this function.
*/
@Override
public void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
prepareShader(
ShaderType.OES, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
// Bind the texture.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId);
// Draw the texture.
GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
// Unbind the texture as a precaution.
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
}
/**
* Draw a RGB(A) texture frame with specified texture transformation matrix. Required resources
* are allocated at the first call to this function.
*/
@Override
public void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight,
int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
prepareShader(
ShaderType.RGB, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
// Bind the texture.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
// Draw the texture.
GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
// Unbind the texture as a precaution.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
}
/**
* Draw a YUV frame with specified texture transformation matrix. Required resources are allocated
* at the first call to this function.
*/
@Override
public void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
prepareShader(
ShaderType.YUV, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
// Bind the textures.
for (int i = 0; i < 3; ++i) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
}
// Draw the textures.
GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
// Unbind the textures as a precaution.
for (int i = 0; i < 3; ++i) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
}
}
private void prepareShader(ShaderType shaderType, float[] texMatrix, int frameWidth,
int frameHeight, int viewportWidth, int viewportHeight) {
final GlShader shader;
if (shaderType.equals(currentShaderType)) {
// Same shader type as before, reuse the existing shader.
shader = currentShader;
} else {
// Allocate new shader.
currentShaderType = null;
if (currentShader != null) {
currentShader.release();
currentShader = null;
}
shader = createShader(shaderType);
currentShaderType = shaderType;
currentShader = shader;
shader.useProgram();
// Set input texture units.
if (shaderType == ShaderType.YUV) {
GLES20.glUniform1i(shader.getUniformLocation("y_tex"), 0);
GLES20.glUniform1i(shader.getUniformLocation("u_tex"), 1);
GLES20.glUniform1i(shader.getUniformLocation("v_tex"), 2);
} else {
GLES20.glUniform1i(shader.getUniformLocation("tex"), 0);
}
GlUtil.checkNoGLES2Error("Create shader");
shaderCallbacks.onNewShader(shader);
texMatrixLocation = shader.getUniformLocation(TEXTURE_MATRIX_NAME);
inPosLocation = shader.getAttribLocation(INPUT_VERTEX_COORDINATE_NAME);
inTcLocation = shader.getAttribLocation(INPUT_TEXTURE_COORDINATE_NAME);
}
shader.useProgram();
// Upload the vertex coordinates.
GLES20.glEnableVertexAttribArray(inPosLocation);
GLES20.glVertexAttribPointer(inPosLocation, /* size= */ 2,
/* type= */ GLES20.GL_FLOAT, /* normalized= */ false, /* stride= */ 0,
FULL_RECTANGLE_BUFFER);
// Upload the texture coordinates.
GLES20.glEnableVertexAttribArray(inTcLocation);
GLES20.glVertexAttribPointer(inTcLocation, /* size= */ 2,
/* type= */ GLES20.GL_FLOAT, /* normalized= */ false, /* stride= */ 0,
FULL_RECTANGLE_TEXTURE_BUFFER);
// Upload the texture transformation matrix.
GLES20.glUniformMatrix4fv(
texMatrixLocation, 1 /* count= */, false /* transpose= */, texMatrix, 0 /* offset= */);
// Do custom per-frame shader preparation.
shaderCallbacks.onPrepareShader(
shader, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
GlUtil.checkNoGLES2Error("Prepare shader");
}
/**
* Release all GLES resources. This needs to be done manually, otherwise the resources are leaked.
*/
@Override
public void release() {
if (currentShader != null) {
currentShader.release();
currentShader = null;
currentShaderType = null;
}
}
}
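// --- Editor's illustrative sketch; not part of the upstream file. ---
// A grayscale drawer built on the generic "sample"/"tc" contract described in the
// class comment; the fragment source is hypothetical.
final class GrayscaleDrawerSketch {
  private static final String GRAYSCALE_FRAGMENT_SHADER = "void main() {\n"
      + "  vec4 c = sample(tc);\n"
      + "  float y = dot(c.rgb, vec3(0.299, 0.587, 0.114));\n"
      + "  gl_FragColor = vec4(y, y, y, c.a);\n"
      + "}\n";

  static RendererCommon.GlDrawer create() {
    return new GlGenericDrawer(GRAYSCALE_FRAGMENT_SHADER, new GlGenericDrawer.ShaderCallbacks() {
      @Override
      public void onNewShader(GlShader shader) {}
      @Override
      public void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth,
          int frameHeight, int viewportWidth, int viewportHeight) {}
    });
  }
}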

View File

@ -0,0 +1,31 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Simplest possible GL shader that just draws frames as opaque quads. */
public class GlRectDrawer extends GlGenericDrawer {
private static final String FRAGMENT_SHADER = "void main() {\n"
+ " gl_FragColor = sample(tc);\n"
+ "}\n";
private static class ShaderCallbacks implements GlGenericDrawer.ShaderCallbacks {
@Override
public void onNewShader(GlShader shader) {}
@Override
public void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth, int frameHeight,
int viewportWidth, int viewportHeight) {}
}
public GlRectDrawer() {
super(FRAGMENT_SHADER, new ShaderCallbacks());
}
}

View File

@ -0,0 +1,131 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.opengl.GLES20;
import java.nio.FloatBuffer;
// Helper class for handling OpenGL shaders and shader programs.
public class GlShader {
private static final String TAG = "GlShader";
private static int compileShader(int shaderType, String source) {
final int shader = GLES20.glCreateShader(shaderType);
if (shader == 0) {
throw new RuntimeException("glCreateShader() failed. GLES20 error: " + GLES20.glGetError());
}
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compileStatus = new int[] {GLES20.GL_FALSE};
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
if (compileStatus[0] != GLES20.GL_TRUE) {
Logging.e(
TAG, "Compile error " + GLES20.glGetShaderInfoLog(shader) + " in shader:\n" + source);
throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
}
GlUtil.checkNoGLES2Error("compileShader");
return shader;
}
private int program;
public GlShader(String vertexSource, String fragmentSource) {
final int vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexSource);
final int fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
program = GLES20.glCreateProgram();
if (program == 0) {
throw new RuntimeException("glCreateProgram() failed. GLES20 error: " + GLES20.glGetError());
}
GLES20.glAttachShader(program, vertexShader);
GLES20.glAttachShader(program, fragmentShader);
GLES20.glLinkProgram(program);
int[] linkStatus = new int[] {GLES20.GL_FALSE};
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Logging.e(TAG, "Could not link program: " + GLES20.glGetProgramInfoLog(program));
throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
}
// According to the documentation of glLinkProgram():
// "After the link operation, applications are free to modify attached shader objects, compile
// attached shader objects, detach shader objects, delete shader objects, and attach additional
// shader objects. None of these operations affects the information log or the program that is
// part of the program object."
// But in practice, detaching shaders from the program seems to break some devices. Deleting the
// shaders is fine, however; they are deleted once they are no longer attached to any program.
GLES20.glDeleteShader(vertexShader);
GLES20.glDeleteShader(fragmentShader);
GlUtil.checkNoGLES2Error("Creating GlShader");
}
public int getAttribLocation(String label) {
if (program == -1) {
throw new RuntimeException("The program has been released");
}
int location = GLES20.glGetAttribLocation(program, label);
if (location < 0) {
throw new RuntimeException("Could not locate '" + label + "' in program");
}
return location;
}
/**
* Enable and upload a vertex array for attribute `label`. The vertex data is specified in
* `buffer` with `dimension` number of components per vertex.
*/
public void setVertexAttribArray(String label, int dimension, FloatBuffer buffer) {
setVertexAttribArray(label, dimension, 0 /* stride */, buffer);
}
/**
* Enable and upload a vertex array for attribute `label`. The vertex data is specified in
* `buffer` with `dimension` number of components per vertex and specified `stride`.
*/
public void setVertexAttribArray(String label, int dimension, int stride, FloatBuffer buffer) {
if (program == -1) {
throw new RuntimeException("The program has been released");
}
int location = getAttribLocation(label);
GLES20.glEnableVertexAttribArray(location);
GLES20.glVertexAttribPointer(location, dimension, GLES20.GL_FLOAT, false, stride, buffer);
GlUtil.checkNoGLES2Error("setVertexAttribArray");
}
public int getUniformLocation(String label) {
if (program == -1) {
throw new RuntimeException("The program has been released");
}
int location = GLES20.glGetUniformLocation(program, label);
if (location < 0) {
throw new RuntimeException("Could not locate uniform '" + label + "' in program");
}
return location;
}
public void useProgram() {
if (program == -1) {
throw new RuntimeException("The program has been released");
}
synchronized (EglBase.lock) {
GLES20.glUseProgram(program);
}
GlUtil.checkNoGLES2Error("glUseProgram");
}
public void release() {
Logging.d(TAG, "Deleting shader.");
// Delete program, automatically detaching any shaders from it.
if (program != -1) {
GLES20.glDeleteProgram(program);
program = -1;
}
}
}
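// --- Editor's illustrative sketch; not part of the upstream file. ---
// Compiles a minimal passthrough program and draws a full-screen quad. Must run
// on a thread with a current EGL context; the shader sources are hypothetical.
final class GlShaderSketch {
  static void drawWhiteQuad() {
    GlShader shader = new GlShader(
        "attribute vec4 in_pos;\nvoid main() { gl_Position = in_pos; }\n",
        "precision mediump float;\nvoid main() { gl_FragColor = vec4(1.0); }\n");
    shader.useProgram();
    shader.setVertexAttribArray("in_pos", /* dimension= */ 2,
        GlUtil.createFloatBuffer(new float[] {-1f, -1f, 1f, -1f, -1f, 1f, 1f, 1f}));
    android.opengl.GLES20.glDrawArrays(android.opengl.GLES20.GL_TRIANGLE_STRIP, 0, 4);
    shader.release();
  }
}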

View File

@ -0,0 +1,122 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.opengl.GLES20;
/**
* Helper class for handling OpenGL framebuffer with only color attachment and no depth or stencil
* buffer. Intended for simple tasks such as texture copy, texture downscaling, and texture color
* conversion. This class is not thread safe and must be used by a thread with an active GL context.
*/
// TODO(magjed): Add unittests for this class.
public class GlTextureFrameBuffer {
private final int pixelFormat;
private int frameBufferId;
private int textureId;
private int width;
private int height;
/**
* Generate texture and framebuffer resources. An EGLContext must be bound on the current thread
* when calling this function. The framebuffer is not complete until setSize() is called.
*/
public GlTextureFrameBuffer(int pixelFormat) {
switch (pixelFormat) {
case GLES20.GL_LUMINANCE:
case GLES20.GL_RGB:
case GLES20.GL_RGBA:
this.pixelFormat = pixelFormat;
break;
default:
throw new IllegalArgumentException("Invalid pixel format: " + pixelFormat);
}
this.width = 0;
this.height = 0;
}
/**
* (Re)allocate texture. Will do nothing if the requested size equals the current size. An
* EGLContext must be bound on the current thread when calling this function. Must be called at
* least once before using the framebuffer. May be called multiple times to change size.
*/
public void setSize(int width, int height) {
if (width <= 0 || height <= 0) {
throw new IllegalArgumentException("Invalid size: " + width + "x" + height);
}
if (width == this.width && height == this.height) {
return;
}
this.width = width;
this.height = height;
// Lazily allocate the texture and framebuffer the first time setSize() is called.
if (textureId == 0) {
textureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
}
if (frameBufferId == 0) {
final int frameBuffers[] = new int[1];
GLES20.glGenFramebuffers(1, frameBuffers, 0);
frameBufferId = frameBuffers[0];
}
// Allocate texture.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, pixelFormat, width, height, 0, pixelFormat,
GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
GlUtil.checkNoGLES2Error("GlTextureFrameBuffer setSize");
// Attach the texture to the framebuffer as color attachment.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
GLES20.glFramebufferTexture2D(
GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureId, 0);
// Check that the framebuffer is in a good state.
final int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
throw new IllegalStateException("Framebuffer not complete, status: " + status);
}
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}
public int getWidth() {
return width;
}
public int getHeight() {
return height;
}
/** Gets the OpenGL frame buffer id. This value is only valid after setSize() has been called. */
public int getFrameBufferId() {
return frameBufferId;
}
/** Gets the OpenGL texture id. This value is only valid after setSize() has been called. */
public int getTextureId() {
return textureId;
}
/**
* Release texture and framebuffer. An EGLContext must be bound on the current thread when calling
* this function. This object should not be used after this call.
*/
public void release() {
GLES20.glDeleteTextures(1, new int[] {textureId}, 0);
textureId = 0;
GLES20.glDeleteFramebuffers(1, new int[] {frameBufferId}, 0);
frameBufferId = 0;
width = 0;
height = 0;
}
}
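// --- Editor's illustrative sketch; not part of the upstream file. ---
// Renders arbitrary draw commands into an offscreen RGBA texture. Requires a
// current EGL context; the caller reads getTextureId() and must call release().
final class OffscreenRenderSketch {
  static GlTextureFrameBuffer renderOffscreen(int width, int height, Runnable drawCommands) {
    GlTextureFrameBuffer fbo = new GlTextureFrameBuffer(android.opengl.GLES20.GL_RGBA);
    fbo.setSize(width, height); // Allocates the texture and framebuffer on first use.
    android.opengl.GLES20.glBindFramebuffer(
        android.opengl.GLES20.GL_FRAMEBUFFER, fbo.getFrameBufferId());
    drawCommands.run(); // Draw calls now target the texture instead of the screen.
    android.opengl.GLES20.glBindFramebuffer(android.opengl.GLES20.GL_FRAMEBUFFER, 0);
    return fbo;
  }
}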

View File

@ -0,0 +1,66 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.opengl.GLES20;
import android.opengl.GLException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
/**
* Some OpenGL static utility functions.
*/
public class GlUtil {
private GlUtil() {}
public static class GlOutOfMemoryException extends GLException {
public GlOutOfMemoryException(int error, String msg) {
super(error, msg);
}
}
// Assert that no OpenGL ES 2.0 error has been raised.
public static void checkNoGLES2Error(String msg) {
int error = GLES20.glGetError();
if (error != GLES20.GL_NO_ERROR) {
throw error == GLES20.GL_OUT_OF_MEMORY
? new GlOutOfMemoryException(error, msg)
: new GLException(error, msg + ": GLES20 error: " + error);
}
}
public static FloatBuffer createFloatBuffer(float[] coords) {
// Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * 4);
bb.order(ByteOrder.nativeOrder());
FloatBuffer fb = bb.asFloatBuffer();
fb.put(coords);
fb.position(0);
return fb;
}
/**
* Generate texture with standard parameters.
*/
public static int generateTexture(int target) {
final int textureArray[] = new int[1];
GLES20.glGenTextures(1, textureArray, 0);
final int textureId = textureArray[0];
GLES20.glBindTexture(target, textureId);
GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
checkNoGLES2Error("generateTexture");
return textureId;
}
}

View File

@ -0,0 +1,52 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.util.Map;
import java.util.HashMap;
/** Container for static helper functions related to dealing with H264 codecs. */
class H264Utils {
public static final String H264_FMTP_PROFILE_LEVEL_ID = "profile-level-id";
public static final String H264_FMTP_LEVEL_ASYMMETRY_ALLOWED = "level-asymmetry-allowed";
public static final String H264_FMTP_PACKETIZATION_MODE = "packetization-mode";
public static final String H264_PROFILE_CONSTRAINED_BASELINE = "42e0";
public static final String H264_PROFILE_CONSTRAINED_HIGH = "640c";
public static final String H264_LEVEL_3_1 = "1f"; // 31 in hex.
public static final String H264_CONSTRAINED_HIGH_3_1 =
H264_PROFILE_CONSTRAINED_HIGH + H264_LEVEL_3_1;
public static final String H264_CONSTRAINED_BASELINE_3_1 =
H264_PROFILE_CONSTRAINED_BASELINE + H264_LEVEL_3_1;
public static Map<String, String> getDefaultH264Params(boolean isHighProfile) {
final Map<String, String> params = new HashMap<>();
params.put(VideoCodecInfo.H264_FMTP_LEVEL_ASYMMETRY_ALLOWED, "1");
params.put(VideoCodecInfo.H264_FMTP_PACKETIZATION_MODE, "1");
params.put(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID,
isHighProfile ? VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1
: VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1);
return params;
}
public static VideoCodecInfo DEFAULT_H264_BASELINE_PROFILE_CODEC =
new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ false));
public static VideoCodecInfo DEFAULT_H264_HIGH_PROFILE_CODEC =
new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ true));
public static boolean isSameH264Profile(
Map<String, String> params1, Map<String, String> params2) {
return nativeIsSameH264Profile(params1, params2);
}
private static native boolean nativeIsSameH264Profile(
Map<String, String> params1, Map<String, String> params2);
}
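// Editor's worked example; not part of the upstream file:
// getDefaultH264Params(/* isHighProfile= */ false) yields the SDP fmtp map
//   {level-asymmetry-allowed=1, packetization-mode=1, profile-level-id=42e01f}
// i.e. Constrained Baseline ("42e0") at level 3.1 ("1f"), per the constants above.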

View File

@ -0,0 +1,57 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.media.MediaCodecInfo;
import androidx.annotation.Nullable;
import java.util.Arrays;
/** Factory for Android hardware VideoDecoders. */
public class HardwareVideoDecoderFactory extends MediaCodecVideoDecoderFactory {
private final static Predicate<MediaCodecInfo> defaultAllowedPredicate =
new Predicate<MediaCodecInfo>() {
@Override
public boolean test(MediaCodecInfo arg) {
return MediaCodecUtils.isHardwareAccelerated(arg);
}
};
/** Creates a HardwareVideoDecoderFactory that does not use surface textures. */
@Deprecated // Not removed yet to avoid breaking callers.
public HardwareVideoDecoderFactory() {
this(null);
}
/**
* Creates a HardwareVideoDecoderFactory that supports surface texture rendering.
*
* @param sharedContext The textures generated will be accessible from this context. May be null,
* this disables texture support.
*/
public HardwareVideoDecoderFactory(@Nullable EglBase.Context sharedContext) {
this(sharedContext, /* codecAllowedPredicate= */ null);
}
/**
* Creates a HardwareVideoDecoderFactory that supports surface texture rendering.
*
* @param sharedContext The textures generated will be accessible from this context. May be null,
* this disables texture support.
* @param codecAllowedPredicate predicate to filter codecs. It is combined with the default
* predicate that only allows hardware codecs.
*/
public HardwareVideoDecoderFactory(@Nullable EglBase.Context sharedContext,
@Nullable Predicate<MediaCodecInfo> codecAllowedPredicate) {
super(sharedContext,
(codecAllowedPredicate == null ? defaultAllowedPredicate
: codecAllowedPredicate.and(defaultAllowedPredicate)));
}
}
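// --- Editor's illustrative sketch; not part of the upstream file. ---
// Typical wiring into a PeerConnectionFactory; assumes PeerConnectionFactory has
// already been initialized and that `eglBase` exists.
final class DecoderFactorySketch {
  static PeerConnectionFactory build(EglBase eglBase) {
    VideoDecoderFactory decoderFactory =
        new HardwareVideoDecoderFactory(eglBase.getEglBaseContext());
    return PeerConnectionFactory.builder()
        .setVideoDecoderFactory(decoderFactory)
        .createPeerConnectionFactory();
  }
}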

View File

@ -0,0 +1,810 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static android.media.MediaCodecInfo.CodecProfileLevel.AVCLevel3;
import static android.media.MediaCodecInfo.CodecProfileLevel.AVCProfileHigh;
import static android.media.MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaFormat;
import android.opengl.GLES20;
import android.os.Build;
import android.os.Bundle;
import android.view.Surface;
import androidx.annotation.Nullable;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Map;
import java.util.concurrent.BlockingDeque;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;
import org.webrtc.ThreadUtils.ThreadChecker;
/**
* Android hardware video encoder.
*/
class HardwareVideoEncoder implements VideoEncoder {
private static final String TAG = "HardwareVideoEncoder";
private static final int MAX_VIDEO_FRAMERATE = 30;
// See MAX_ENCODER_Q_SIZE in androidmediaencoder.cc.
private static final int MAX_ENCODER_Q_SIZE = 2;
private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
// The size of the input frames must be a multiple of 16 for the H/W encoder.
private static final int REQUIRED_RESOLUTION_ALIGNMENT = 16;
/**
* Keeps track of the number of output buffers that have been passed down the pipeline and not yet
* released. We need to wait for this to go down to zero before operations invalidating the output
* buffers, i.e., stop() and getOutputBuffer().
*/
private static class BusyCount {
private final Object countLock = new Object();
private int count;
public void increment() {
synchronized (countLock) {
count++;
}
}
// This method may be called on an arbitrary thread.
public void decrement() {
synchronized (countLock) {
count--;
if (count == 0) {
countLock.notifyAll();
}
}
}
// The increment and waitForZero methods are called on the same thread (deliverEncodedImage,
// running on the output thread). Hence, after waitForZero returns, the count will stay zero
// until the same thread calls increment.
public void waitForZero() {
boolean wasInterrupted = false;
synchronized (countLock) {
while (count > 0) {
try {
countLock.wait();
} catch (InterruptedException e) {
Logging.e(TAG, "Interrupted while waiting on busy count", e);
wasInterrupted = true;
}
}
}
if (wasInterrupted) {
Thread.currentThread().interrupt();
}
}
}
// --- Initialized on construction.
private final MediaCodecWrapperFactory mediaCodecWrapperFactory;
private final String codecName;
private final VideoCodecMimeType codecType;
private final Integer surfaceColorFormat;
private final Integer yuvColorFormat;
private final Map<String, String> params;
private final int keyFrameIntervalSec; // Base interval for generating key frames.
// Interval at which to force a key frame. Used to reduce color distortions caused by some
// Qualcomm video encoders.
private final long forcedKeyFrameNs;
private final BitrateAdjuster bitrateAdjuster;
// EGL context shared with the application. Used to access texture inputs.
private final EglBase14.Context sharedContext;
// Drawer used to draw input textures onto the codec's input surface.
private final GlRectDrawer textureDrawer = new GlRectDrawer();
private final VideoFrameDrawer videoFrameDrawer = new VideoFrameDrawer();
// A queue of EncodedImage.Builders that correspond to frames in the codec. These builders are
// pre-populated with all the information that can't be sent through MediaCodec.
private final BlockingDeque<EncodedImage.Builder> outputBuilders = new LinkedBlockingDeque<>();
private final ThreadChecker encodeThreadChecker = new ThreadChecker();
private final ThreadChecker outputThreadChecker = new ThreadChecker();
private final BusyCount outputBuffersBusyCount = new BusyCount();
// --- Set on initialize and immutable until release.
private Callback callback;
private boolean automaticResizeOn;
// --- Valid and immutable while an encoding session is running.
@Nullable
private MediaCodecWrapper codec;
// Thread that delivers encoded frames to the user callback.
@Nullable
private Thread outputThread;
// EGL base wrapping the shared texture context. Holds hooks to both the shared context and the
// input surface. Making this base current allows textures from the context to be drawn onto the
// surface.
@Nullable
private EglBase14 textureEglBase;
// Input surface for the codec. The encoder will draw input textures onto this surface.
@Nullable
private Surface textureInputSurface;
private int width;
private int height;
// Y-plane stride in the encoder's input
private int stride;
// Y-plane slice-height in the encoder's input
private int sliceHeight;
// True if encoder input color format is semi-planar (NV12).
private boolean isSemiPlanar;
// Size of frame for current color format and stride, in bytes.
private int frameSizeBytes;
private boolean useSurfaceMode;
// --- Only accessed from the encoding thread.
// Presentation timestamp of next frame to encode.
private long nextPresentationTimestampUs;
// Presentation timestamp of the last requested (or forced) key frame.
private long lastKeyFrameNs;
// --- Only accessed on the output thread.
// Contents of the last observed config frame output by the MediaCodec. Used by H.264.
@Nullable
private ByteBuffer configBuffer;
private int adjustedBitrate;
// Whether the encoder is running. Volatile so that the output thread can watch this value and
// exit when the encoder stops.
private volatile boolean running;
// Any exception thrown during shutdown. The output thread releases the MediaCodec and uses this
// value to send exceptions thrown during release back to the encoder thread.
@Nullable
private volatile Exception shutdownException;
// True if collection of encoding statistics is enabled.
private boolean isEncodingStatisticsEnabled;
/**
* Creates a new HardwareVideoEncoder with the given codecName, codecType, colorFormat, key frame
* intervals, and bitrateAdjuster.
*
* @param codecName the hardware codec implementation to use
* @param codecType the type of the given video codec (eg. VP8, VP9, H264, H265, AV1)
* @param surfaceColorFormat color format for surface mode or null if not available
* @param yuvColorFormat color format for bytebuffer mode
* @param keyFrameIntervalSec interval in seconds between key frames; used to initialize the codec
* @param forceKeyFrameIntervalMs interval at which to force a key frame if one is not requested;
* used to reduce distortion caused by some codec implementations
* @param bitrateAdjuster algorithm used to correct codec implementations that do not produce the
* desired bitrates
* @throws IllegalArgumentException if colorFormat is unsupported
*/
public HardwareVideoEncoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName,
VideoCodecMimeType codecType, Integer surfaceColorFormat, Integer yuvColorFormat,
Map<String, String> params, int keyFrameIntervalSec, int forceKeyFrameIntervalMs,
BitrateAdjuster bitrateAdjuster, EglBase14.Context sharedContext) {
this.mediaCodecWrapperFactory = mediaCodecWrapperFactory;
this.codecName = codecName;
this.codecType = codecType;
this.surfaceColorFormat = surfaceColorFormat;
this.yuvColorFormat = yuvColorFormat;
this.params = params;
this.keyFrameIntervalSec = keyFrameIntervalSec;
this.forcedKeyFrameNs = TimeUnit.MILLISECONDS.toNanos(forceKeyFrameIntervalMs);
this.bitrateAdjuster = bitrateAdjuster;
this.sharedContext = sharedContext;
// Allow construction on a different thread.
encodeThreadChecker.detachThread();
}
@Override
public VideoCodecStatus initEncode(Settings settings, Callback callback) {
encodeThreadChecker.checkIsOnValidThread();
this.callback = callback;
automaticResizeOn = settings.automaticResizeOn;
this.width = settings.width;
this.height = settings.height;
useSurfaceMode = canUseSurface();
if (settings.startBitrate != 0 && settings.maxFramerate != 0) {
bitrateAdjuster.setTargets(settings.startBitrate * 1000, settings.maxFramerate);
}
adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();
Logging.d(TAG,
"initEncode name: " + codecName + " type: " + codecType + " width: " + width
+ " height: " + height + " framerate_fps: " + settings.maxFramerate
+ " bitrate_kbps: " + settings.startBitrate + " surface mode: " + useSurfaceMode);
return initEncodeInternal();
}
private VideoCodecStatus initEncodeInternal() {
encodeThreadChecker.checkIsOnValidThread();
nextPresentationTimestampUs = 0;
lastKeyFrameNs = -1;
isEncodingStatisticsEnabled = false;
try {
codec = mediaCodecWrapperFactory.createByCodecName(codecName);
} catch (IOException | IllegalArgumentException e) {
Logging.e(TAG, "Cannot create media encoder " + codecName);
return VideoCodecStatus.FALLBACK_SOFTWARE;
}
final int colorFormat = useSurfaceMode ? surfaceColorFormat : yuvColorFormat;
try {
MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height);
format.setInteger(MediaFormat.KEY_BIT_RATE, adjustedBitrate);
format.setInteger(MediaFormat.KEY_BITRATE_MODE, BITRATE_MODE_CBR);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
format.setFloat(
MediaFormat.KEY_FRAME_RATE, (float) bitrateAdjuster.getAdjustedFramerateFps());
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
if (codecType == VideoCodecMimeType.H264) {
String profileLevelId = params.get(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID);
if (profileLevelId == null) {
profileLevelId = VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1;
}
switch (profileLevelId) {
case VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1:
format.setInteger("profile", AVCProfileHigh);
format.setInteger("level", AVCLevel3);
break;
case VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1:
break;
default:
Logging.w(TAG, "Unknown profile level id: " + profileLevelId);
}
}
if (codecName.equals("c2.google.av1.encoder")) {
// Enable RTC mode in AV1 HW encoder.
format.setInteger("vendor.google-av1enc.encoding-preset.int32.value", 1);
}
if (isEncodingStatisticsSupported()) {
format.setInteger(MediaFormat.KEY_VIDEO_ENCODING_STATISTICS_LEVEL,
MediaFormat.VIDEO_ENCODING_STATISTICS_LEVEL_1);
isEncodingStatisticsEnabled = true;
}
Logging.d(TAG, "Format: " + format);
codec.configure(
format, null /* surface */, null /* crypto */, MediaCodec.CONFIGURE_FLAG_ENCODE);
if (useSurfaceMode) {
textureEglBase = EglBase.createEgl14(sharedContext, EglBase.CONFIG_RECORDABLE);
textureInputSurface = codec.createInputSurface();
textureEglBase.createSurface(textureInputSurface);
textureEglBase.makeCurrent();
}
updateInputFormat(codec.getInputFormat());
codec.start();
} catch (IllegalArgumentException | IllegalStateException e) {
Logging.e(TAG, "initEncodeInternal failed", e);
release();
return VideoCodecStatus.FALLBACK_SOFTWARE;
}
running = true;
outputThreadChecker.detachThread();
outputThread = createOutputThread();
outputThread.start();
return VideoCodecStatus.OK;
}
@Override
public VideoCodecStatus release() {
encodeThreadChecker.checkIsOnValidThread();
final VideoCodecStatus returnValue;
if (outputThread == null) {
returnValue = VideoCodecStatus.OK;
} else {
// The outputThread actually stops and releases the codec once running is false.
running = false;
if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
Logging.e(TAG, "Media encoder release timeout");
returnValue = VideoCodecStatus.TIMEOUT;
} else if (shutdownException != null) {
// Log the exception and turn it into an error.
Logging.e(TAG, "Media encoder release exception", shutdownException);
returnValue = VideoCodecStatus.ERROR;
} else {
returnValue = VideoCodecStatus.OK;
}
}
textureDrawer.release();
videoFrameDrawer.release();
if (textureEglBase != null) {
textureEglBase.release();
textureEglBase = null;
}
if (textureInputSurface != null) {
textureInputSurface.release();
textureInputSurface = null;
}
outputBuilders.clear();
codec = null;
outputThread = null;
// Allow changing thread after release.
encodeThreadChecker.detachThread();
return returnValue;
}
@Override
public VideoCodecStatus encode(VideoFrame videoFrame, EncodeInfo encodeInfo) {
encodeThreadChecker.checkIsOnValidThread();
if (codec == null) {
return VideoCodecStatus.UNINITIALIZED;
}
final boolean isTextureBuffer = videoFrame.getBuffer() instanceof VideoFrame.TextureBuffer;
// If input resolution changed, restart the codec with the new resolution.
final int frameWidth = videoFrame.getBuffer().getWidth();
final int frameHeight = videoFrame.getBuffer().getHeight();
final boolean shouldUseSurfaceMode = canUseSurface() && isTextureBuffer;
if (frameWidth != width || frameHeight != height || shouldUseSurfaceMode != useSurfaceMode) {
VideoCodecStatus status = resetCodec(frameWidth, frameHeight, shouldUseSurfaceMode);
if (status != VideoCodecStatus.OK) {
return status;
}
}
if (outputBuilders.size() > MAX_ENCODER_Q_SIZE) {
      // Too many frames in the encoder. Drop this frame and reset the codec; see
      // https://github.com/open-webrtc-toolkit/owt-deps-webrtc/issues/117.
      Logging.e(TAG, "Dropped frame, encoder queue full");
VideoCodecStatus status = resetCodec(frameWidth, frameHeight, shouldUseSurfaceMode);
if (status != VideoCodecStatus.OK) {
return status;
}
return VideoCodecStatus.NO_OUTPUT; // See webrtc bug 2887.
}
boolean requestedKeyFrame = false;
for (EncodedImage.FrameType frameType : encodeInfo.frameTypes) {
if (frameType == EncodedImage.FrameType.VideoFrameKey) {
requestedKeyFrame = true;
}
}
if (requestedKeyFrame || shouldForceKeyFrame(videoFrame.getTimestampNs())) {
requestKeyFrame(videoFrame.getTimestampNs());
}
EncodedImage.Builder builder = EncodedImage.builder()
.setCaptureTimeNs(videoFrame.getTimestampNs())
.setEncodedWidth(videoFrame.getBuffer().getWidth())
.setEncodedHeight(videoFrame.getBuffer().getHeight())
.setRotation(videoFrame.getRotation());
outputBuilders.offer(builder);
long presentationTimestampUs = nextPresentationTimestampUs;
// Round frame duration down to avoid bitrate overshoot.
long frameDurationUs =
(long) (TimeUnit.SECONDS.toMicros(1) / bitrateAdjuster.getAdjustedFramerateFps());
nextPresentationTimestampUs += frameDurationUs;
final VideoCodecStatus returnValue;
if (useSurfaceMode) {
returnValue = encodeTextureBuffer(videoFrame, presentationTimestampUs);
} else {
returnValue = encodeByteBuffer(videoFrame, presentationTimestampUs);
}
// Check if the queue was successful.
if (returnValue != VideoCodecStatus.OK) {
// Keep the output builders in sync with buffers in the codec.
outputBuilders.pollLast();
}
return returnValue;
}
private VideoCodecStatus encodeTextureBuffer(
VideoFrame videoFrame, long presentationTimestampUs) {
encodeThreadChecker.checkIsOnValidThread();
try {
// TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
// but it's a workaround for bug webrtc:5147.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// It is not necessary to release this frame because it doesn't own the buffer.
VideoFrame derotatedFrame =
new VideoFrame(videoFrame.getBuffer(), 0 /* rotation */, videoFrame.getTimestampNs());
videoFrameDrawer.drawFrame(derotatedFrame, textureDrawer, null /* additionalRenderMatrix */);
textureEglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
} catch (RuntimeException e) {
Logging.e(TAG, "encodeTexture failed", e);
return VideoCodecStatus.ERROR;
}
return VideoCodecStatus.OK;
}
private VideoCodecStatus encodeByteBuffer(VideoFrame videoFrame, long presentationTimestampUs) {
encodeThreadChecker.checkIsOnValidThread();
// No timeout. Don't block for an input buffer, drop frames if the encoder falls behind.
int index;
try {
index = codec.dequeueInputBuffer(0 /* timeout */);
} catch (IllegalStateException e) {
Logging.e(TAG, "dequeueInputBuffer failed", e);
return VideoCodecStatus.ERROR;
}
if (index == -1) {
// Encoder is falling behind. No input buffers available. Drop the frame.
Logging.d(TAG, "Dropped frame, no input buffers available");
return VideoCodecStatus.NO_OUTPUT; // See webrtc bug 2887.
}
ByteBuffer buffer;
try {
buffer = codec.getInputBuffer(index);
} catch (IllegalStateException e) {
Logging.e(TAG, "getInputBuffer with index=" + index + " failed", e);
return VideoCodecStatus.ERROR;
}
if (buffer.capacity() < frameSizeBytes) {
Logging.e(TAG,
"Input buffer size: " + buffer.capacity()
+ " is smaller than frame size: " + frameSizeBytes);
return VideoCodecStatus.ERROR;
}
fillInputBuffer(buffer, videoFrame.getBuffer());
try {
codec.queueInputBuffer(
index, 0 /* offset */, frameSizeBytes, presentationTimestampUs, 0 /* flags */);
} catch (IllegalStateException e) {
Logging.e(TAG, "queueInputBuffer failed", e);
// IllegalStateException thrown when the codec is in the wrong state.
return VideoCodecStatus.ERROR;
}
return VideoCodecStatus.OK;
}
@Override
public VideoCodecStatus setRateAllocation(BitrateAllocation bitrateAllocation, int framerate) {
encodeThreadChecker.checkIsOnValidThread();
if (framerate > MAX_VIDEO_FRAMERATE) {
framerate = MAX_VIDEO_FRAMERATE;
}
bitrateAdjuster.setTargets(bitrateAllocation.getSum(), framerate);
return VideoCodecStatus.OK;
}
@Override
public VideoCodecStatus setRates(RateControlParameters rcParameters) {
encodeThreadChecker.checkIsOnValidThread();
bitrateAdjuster.setTargets(rcParameters.bitrate.getSum(), rcParameters.framerateFps);
return VideoCodecStatus.OK;
}
@Override
public ScalingSettings getScalingSettings() {
if (automaticResizeOn) {
if (codecType == VideoCodecMimeType.VP8) {
final int kLowVp8QpThreshold = 29;
final int kHighVp8QpThreshold = 95;
return new ScalingSettings(kLowVp8QpThreshold, kHighVp8QpThreshold);
} else if (codecType == VideoCodecMimeType.H264) {
final int kLowH264QpThreshold = 24;
final int kHighH264QpThreshold = 37;
return new ScalingSettings(kLowH264QpThreshold, kHighH264QpThreshold);
}
}
return ScalingSettings.OFF;
}
@Override
public String getImplementationName() {
return codecName;
}
@Override
public EncoderInfo getEncoderInfo() {
    // Since our MediaCodec is guaranteed to encode 16-pixel-aligned frames only, we set the
    // alignment value to 16. Additionally, this encoder produces a single stream, so it should
    // not require alignment for all layers.
return new EncoderInfo(
/* requestedResolutionAlignment= */ REQUIRED_RESOLUTION_ALIGNMENT,
/* applyAlignmentToAllSimulcastLayers= */ false);
}
private VideoCodecStatus resetCodec(int newWidth, int newHeight, boolean newUseSurfaceMode) {
encodeThreadChecker.checkIsOnValidThread();
VideoCodecStatus status = release();
if (status != VideoCodecStatus.OK) {
return status;
}
width = newWidth;
height = newHeight;
useSurfaceMode = newUseSurfaceMode;
return initEncodeInternal();
}
private boolean shouldForceKeyFrame(long presentationTimestampNs) {
encodeThreadChecker.checkIsOnValidThread();
return forcedKeyFrameNs > 0 && presentationTimestampNs > lastKeyFrameNs + forcedKeyFrameNs;
}
private void requestKeyFrame(long presentationTimestampNs) {
encodeThreadChecker.checkIsOnValidThread();
// Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
// indicate this in queueInputBuffer() below and guarantee _this_ frame
// be encoded as a key frame, but sadly that flag is ignored. Instead,
// we request a key frame "soon".
try {
Bundle b = new Bundle();
b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
codec.setParameters(b);
} catch (IllegalStateException e) {
Logging.e(TAG, "requestKeyFrame failed", e);
return;
}
lastKeyFrameNs = presentationTimestampNs;
}
private Thread createOutputThread() {
return new Thread() {
@Override
public void run() {
while (running) {
deliverEncodedImage();
}
releaseCodecOnOutputThread();
}
};
}
// Visible for testing.
protected void deliverEncodedImage() {
outputThreadChecker.checkIsOnValidThread();
try {
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
if (index < 0) {
if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffersBusyCount.waitForZero();
}
return;
}
ByteBuffer outputBuffer = codec.getOutputBuffer(index);
outputBuffer.position(info.offset);
outputBuffer.limit(info.offset + info.size);
if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size);
if (info.size > 0
&& (codecType == VideoCodecMimeType.H264 || codecType == VideoCodecMimeType.H265)) {
        // For H264 and H265 the config buffer contains the SPS and PPS headers. The presence of
        // these headers makes an IDR frame a true keyframe. Some encoders issue IDR frames without
        // SPS and PPS, so we save the config buffer here and prepend it to every IDR frame the
        // encoder delivers.
configBuffer = ByteBuffer.allocateDirect(info.size);
configBuffer.put(outputBuffer);
}
return;
}
bitrateAdjuster.reportEncodedFrame(info.size);
if (adjustedBitrate != bitrateAdjuster.getAdjustedBitrateBps()) {
updateBitrate();
}
final boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
if (isKeyFrame) {
Logging.d(TAG, "Sync frame generated");
}
// Extract QP before releasing output buffer.
Integer qp = null;
if (isEncodingStatisticsEnabled) {
MediaFormat format = codec.getOutputFormat(index);
if (format != null && format.containsKey(MediaFormat.KEY_VIDEO_QP_AVERAGE)) {
qp = format.getInteger(MediaFormat.KEY_VIDEO_QP_AVERAGE);
}
}
final ByteBuffer frameBuffer;
final Runnable releaseCallback;
if (isKeyFrame && configBuffer != null) {
Logging.d(TAG,
"Prepending config buffer of size " + configBuffer.capacity()
+ " to output buffer with offset " + info.offset + ", size " + info.size);
frameBuffer = ByteBuffer.allocateDirect(info.size + configBuffer.capacity());
configBuffer.rewind();
frameBuffer.put(configBuffer);
frameBuffer.put(outputBuffer);
frameBuffer.rewind();
codec.releaseOutputBuffer(index, /* render= */ false);
releaseCallback = null;
} else {
frameBuffer = outputBuffer.slice();
outputBuffersBusyCount.increment();
releaseCallback = () -> {
// This callback should not throw any exceptions since
// it may be called on an arbitrary thread.
// Check bug webrtc:11230 for more details.
try {
codec.releaseOutputBuffer(index, /* render= */ false);
} catch (Exception e) {
Logging.e(TAG, "releaseOutputBuffer failed", e);
}
outputBuffersBusyCount.decrement();
};
}
final EncodedImage.FrameType frameType = isKeyFrame ? EncodedImage.FrameType.VideoFrameKey
: EncodedImage.FrameType.VideoFrameDelta;
EncodedImage.Builder builder = outputBuilders.poll();
builder.setBuffer(frameBuffer, releaseCallback);
builder.setFrameType(frameType);
builder.setQp(qp);
EncodedImage encodedImage = builder.createEncodedImage();
// TODO(mellem): Set codec-specific info.
callback.onEncodedFrame(encodedImage, new CodecSpecificInfo());
// Note that the callback may have retained the image.
encodedImage.release();
} catch (IllegalStateException e) {
Logging.e(TAG, "deliverOutput failed", e);
}
}
private void releaseCodecOnOutputThread() {
outputThreadChecker.checkIsOnValidThread();
Logging.d(TAG, "Releasing MediaCodec on output thread");
outputBuffersBusyCount.waitForZero();
try {
codec.stop();
} catch (Exception e) {
Logging.e(TAG, "Media encoder stop failed", e);
}
try {
codec.release();
} catch (Exception e) {
Logging.e(TAG, "Media encoder release failed", e);
// Propagate exceptions caught during release back to the main thread.
shutdownException = e;
}
configBuffer = null;
Logging.d(TAG, "Release on output thread done");
}
private VideoCodecStatus updateBitrate() {
outputThreadChecker.checkIsOnValidThread();
adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();
try {
Bundle params = new Bundle();
params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, adjustedBitrate);
codec.setParameters(params);
return VideoCodecStatus.OK;
} catch (IllegalStateException e) {
Logging.e(TAG, "updateBitrate failed", e);
return VideoCodecStatus.ERROR;
}
}
private boolean canUseSurface() {
return sharedContext != null && surfaceColorFormat != null;
}
/**
* Fetches stride and slice height from input media format
*/
private void updateInputFormat(MediaFormat format) {
stride = width;
sliceHeight = height;
if (format != null) {
if (format.containsKey(MediaFormat.KEY_STRIDE)) {
stride = format.getInteger(MediaFormat.KEY_STRIDE);
stride = Math.max(stride, width);
}
if (format.containsKey(MediaFormat.KEY_SLICE_HEIGHT)) {
sliceHeight = format.getInteger(MediaFormat.KEY_SLICE_HEIGHT);
sliceHeight = Math.max(sliceHeight, height);
}
}
isSemiPlanar = isSemiPlanar(yuvColorFormat);
if (isSemiPlanar) {
int chromaHeight = (height + 1) / 2;
frameSizeBytes = sliceHeight * stride + chromaHeight * stride;
} else {
int chromaStride = (stride + 1) / 2;
int chromaSliceHeight = (sliceHeight + 1) / 2;
frameSizeBytes = sliceHeight * stride + chromaSliceHeight * chromaStride * 2;
}
Logging.d(TAG,
"updateInputFormat format: " + format + " stride: " + stride
+ " sliceHeight: " + sliceHeight + " isSemiPlanar: " + isSemiPlanar
+ " frameSizeBytes: " + frameSizeBytes);
}
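  // Worked example for the computation above (illustrative numbers, not taken from any
  // particular device): a 640x360 frame where the codec reports stride = 640 and
  // sliceHeight = 368 gives chromaHeight = (360 + 1) / 2 = 180, so the semi-planar (NV12)
  // case yields frameSizeBytes = 368 * 640 + 180 * 640 = 350720 bytes. In the planar (I420)
  // case, chromaStride = 320 and chromaSliceHeight = 184, so
  // frameSizeBytes = 368 * 640 + 184 * 320 * 2 = 353280 bytes.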
protected boolean isEncodingStatisticsSupported() {
    // WebRTC quality scaler, which adjusts resolution and/or frame rate based on encoded QP,
    // expects QP to be in the native bitstream range for the given codec. The native QP range
    // for VP8 is [0, 127] and for VP9 it is [0, 255]. MediaCodec VP8 and VP9 encoders (perhaps
    // not all) return QP in the range [0, 64], which is a libvpx-specific API range. Due to
    // this mismatch we can't use QP feedback from these codecs.
if (codecType == VideoCodecMimeType.VP8 || codecType == VideoCodecMimeType.VP9) {
return false;
}
MediaCodecInfo codecInfo = codec.getCodecInfo();
if (codecInfo == null) {
return false;
}
CodecCapabilities codecCaps = codecInfo.getCapabilitiesForType(codecType.mimeType());
if (codecCaps == null) {
return false;
}
return codecCaps.isFeatureSupported(CodecCapabilities.FEATURE_EncodingStatistics);
}
// Visible for testing.
protected void fillInputBuffer(ByteBuffer buffer, VideoFrame.Buffer frame) {
VideoFrame.I420Buffer i420 = frame.toI420();
if (isSemiPlanar) {
YuvHelper.I420ToNV12(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(),
i420.getDataV(), i420.getStrideV(), buffer, i420.getWidth(), i420.getHeight(), stride,
sliceHeight);
} else {
YuvHelper.I420Copy(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(),
i420.getDataV(), i420.getStrideV(), buffer, i420.getWidth(), i420.getHeight(), stride,
sliceHeight);
}
i420.release();
}
protected boolean isSemiPlanar(int colorFormat) {
switch (colorFormat) {
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
return false;
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar:
case MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
return true;
default:
throw new IllegalArgumentException("Unsupported colorFormat: " + colorFormat);
}
}
}

View File

@ -0,0 +1,280 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static org.webrtc.MediaCodecUtils.EXYNOS_PREFIX;
import static org.webrtc.MediaCodecUtils.HISI_PREFIX;
import static org.webrtc.MediaCodecUtils.INTEL_PREFIX;
import static org.webrtc.MediaCodecUtils.MTK_PREFIX;
import static org.webrtc.MediaCodecUtils.QCOM_PREFIX;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.os.Build;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/** Factory for android hardware video encoders. */
@SuppressWarnings("deprecation") // API 16 requires the use of deprecated methods.
public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
private static final String TAG = "HardwareVideoEncoderFactory";
  // We don't need periodic keyframes. But some HW encoders, Exynos in particular, fail to
  // initialize with the value -1, which should disable periodic keyframes according to the
  // spec. Set it to 1 hour instead.
private static final int PERIODIC_KEY_FRAME_INTERVAL_S = 3600;
// Forced key frame interval - used to reduce color distortions on Qualcomm platforms.
private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS = 15000;
private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS = 20000;
private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS = 15000;
  // List of devices with poor H.264 encoder quality.
  // The HW H.264 encoder on the devices below has poor bitrate control - the actual
  // bitrate deviates a lot from the target value.
private static final List<String> H264_HW_EXCEPTION_MODELS =
Arrays.asList("SAMSUNG-SGH-I337", "Nexus 7", "Nexus 4");
@Nullable private final EglBase14.Context sharedContext;
private final boolean enableIntelVp8Encoder;
private final boolean enableH264HighProfile;
@Nullable private final Predicate<MediaCodecInfo> codecAllowedPredicate;
/**
* Creates a HardwareVideoEncoderFactory that supports surface texture encoding.
*
* @param sharedContext The textures generated will be accessible from this context. May be null,
* this disables texture support.
   * @param enableIntelVp8Encoder true if Intel's VP8 encoder is enabled.
   * @param enableH264HighProfile true if H264 High Profile is enabled.
*/
public HardwareVideoEncoderFactory(
EglBase.Context sharedContext, boolean enableIntelVp8Encoder, boolean enableH264HighProfile) {
this(sharedContext, enableIntelVp8Encoder, enableH264HighProfile,
/* codecAllowedPredicate= */ null);
}
/**
* Creates a HardwareVideoEncoderFactory that supports surface texture encoding.
*
* @param sharedContext The textures generated will be accessible from this context. May be null,
* this disables texture support.
   * @param enableIntelVp8Encoder true if Intel's VP8 encoder is enabled.
   * @param enableH264HighProfile true if H264 High Profile is enabled.
   * @param codecAllowedPredicate optional predicate to filter codecs. All codecs are allowed
   *     when the predicate is not provided.
*/
public HardwareVideoEncoderFactory(EglBase.Context sharedContext, boolean enableIntelVp8Encoder,
boolean enableH264HighProfile, @Nullable Predicate<MediaCodecInfo> codecAllowedPredicate) {
// Texture mode requires EglBase14.
if (sharedContext instanceof EglBase14.Context) {
this.sharedContext = (EglBase14.Context) sharedContext;
} else {
Logging.w(TAG, "No shared EglBase.Context. Encoders will not use texture mode.");
this.sharedContext = null;
}
this.enableIntelVp8Encoder = enableIntelVp8Encoder;
this.enableH264HighProfile = enableH264HighProfile;
this.codecAllowedPredicate = codecAllowedPredicate;
}
@Deprecated
public HardwareVideoEncoderFactory(boolean enableIntelVp8Encoder, boolean enableH264HighProfile) {
this(null, enableIntelVp8Encoder, enableH264HighProfile);
}
@Nullable
@Override
public VideoEncoder createEncoder(VideoCodecInfo input) {
VideoCodecMimeType type = VideoCodecMimeType.valueOf(input.getName());
MediaCodecInfo info = findCodecForType(type);
if (info == null) {
return null;
}
String codecName = info.getName();
String mime = type.mimeType();
Integer surfaceColorFormat = MediaCodecUtils.selectColorFormat(
MediaCodecUtils.TEXTURE_COLOR_FORMATS, info.getCapabilitiesForType(mime));
Integer yuvColorFormat = MediaCodecUtils.selectColorFormat(
MediaCodecUtils.ENCODER_COLOR_FORMATS, info.getCapabilitiesForType(mime));
if (type == VideoCodecMimeType.H264) {
boolean isHighProfile = H264Utils.isSameH264Profile(
input.params, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true));
boolean isBaselineProfile = H264Utils.isSameH264Profile(
input.params, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false));
if (!isHighProfile && !isBaselineProfile) {
return null;
}
if (isHighProfile && !isH264HighProfileSupported(info)) {
return null;
}
}
return new HardwareVideoEncoder(new MediaCodecWrapperFactoryImpl(), codecName, type,
surfaceColorFormat, yuvColorFormat, input.params, PERIODIC_KEY_FRAME_INTERVAL_S,
getForcedKeyFrameIntervalMs(type, codecName), createBitrateAdjuster(type, codecName),
sharedContext);
}
@Override
public VideoCodecInfo[] getSupportedCodecs() {
List<VideoCodecInfo> supportedCodecInfos = new ArrayList<VideoCodecInfo>();
// Generate a list of supported codecs in order of preference:
// VP8, VP9, H264 (high profile), H264 (baseline profile), AV1 and H265.
for (VideoCodecMimeType type :
new VideoCodecMimeType[] {VideoCodecMimeType.VP8, VideoCodecMimeType.VP9,
VideoCodecMimeType.H264, VideoCodecMimeType.AV1, VideoCodecMimeType.H265}) {
MediaCodecInfo codec = findCodecForType(type);
if (codec != null) {
String name = type.name();
// TODO(sakal): Always add H264 HP once WebRTC correctly removes codecs that are not
// supported by the decoder.
if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) {
supportedCodecInfos.add(new VideoCodecInfo(
name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true)));
}
supportedCodecInfos.add(new VideoCodecInfo(
name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false)));
}
}
return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]);
}
private @Nullable MediaCodecInfo findCodecForType(VideoCodecMimeType type) {
for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
MediaCodecInfo info = null;
try {
info = MediaCodecList.getCodecInfoAt(i);
} catch (IllegalArgumentException e) {
Logging.e(TAG, "Cannot retrieve encoder codec info", e);
}
if (info == null || !info.isEncoder()) {
continue;
}
if (isSupportedCodec(info, type)) {
return info;
}
}
return null; // No support for this type.
}
// Returns true if the given MediaCodecInfo indicates a supported encoder for the given type.
private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecMimeType type) {
if (!MediaCodecUtils.codecSupportsType(info, type)) {
return false;
}
// Check for a supported color format.
if (MediaCodecUtils.selectColorFormat(
MediaCodecUtils.ENCODER_COLOR_FORMATS, info.getCapabilitiesForType(type.mimeType()))
== null) {
return false;
}
return isHardwareSupportedInCurrentSdk(info, type) && isMediaCodecAllowed(info);
}
// Returns true if the given MediaCodecInfo indicates a hardware module that is supported on the
// current SDK.
private boolean isHardwareSupportedInCurrentSdk(MediaCodecInfo info, VideoCodecMimeType type) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
return info.isHardwareAccelerated();
}
switch (type) {
case VP8:
return isHardwareSupportedInCurrentSdkVp8(info);
case VP9:
return isHardwareSupportedInCurrentSdkVp9(info);
case H264:
return isHardwareSupportedInCurrentSdkH264(info);
case H265:
case AV1:
return false;
}
return false;
}
private boolean isHardwareSupportedInCurrentSdkVp8(MediaCodecInfo info) {
String name = info.getName();
// QCOM Vp8 encoder is always supported.
return name.startsWith(QCOM_PREFIX)
// Exynos VP8 encoder is supported in M or later.
|| (name.startsWith(EXYNOS_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M)
        // Intel VP8 encoder is supported when the Intel encoder is enabled.
|| (name.startsWith(INTEL_PREFIX) && enableIntelVp8Encoder);
}
private boolean isHardwareSupportedInCurrentSdkVp9(MediaCodecInfo info) {
String name = info.getName();
return (name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX))
// Both QCOM and Exynos VP9 encoders are supported in N or later.
&& Build.VERSION.SDK_INT >= Build.VERSION_CODES.N;
}
private boolean isHardwareSupportedInCurrentSdkH264(MediaCodecInfo info) {
// First, H264 hardware might perform poorly on this model.
if (H264_HW_EXCEPTION_MODELS.contains(Build.MODEL)) {
return false;
}
String name = info.getName();
    // QCOM, Exynos, HiSilicon and MediaTek H264 encoders are always supported.
    return name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX)
        || name.startsWith(HISI_PREFIX) || name.startsWith(MTK_PREFIX);
}
private boolean isMediaCodecAllowed(MediaCodecInfo info) {
if (codecAllowedPredicate == null) {
return true;
}
return codecAllowedPredicate.test(info);
}
private int getForcedKeyFrameIntervalMs(VideoCodecMimeType type, String codecName) {
if (type == VideoCodecMimeType.VP8 && codecName.startsWith(QCOM_PREFIX)) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS;
}
if (Build.VERSION.SDK_INT == Build.VERSION_CODES.M) {
return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS;
}
return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS;
}
// Other codecs don't need key frame forcing.
return 0;
}
private BitrateAdjuster createBitrateAdjuster(VideoCodecMimeType type, String codecName) {
if (codecName.startsWith(EXYNOS_PREFIX)) {
if (type == VideoCodecMimeType.VP8) {
// Exynos VP8 encoders need dynamic bitrate adjustment.
return new DynamicBitrateAdjuster();
} else {
// Exynos VP9 and H264 encoders need framerate-based bitrate adjustment.
return new FramerateBitrateAdjuster();
}
}
// Other codecs don't need bitrate adjustment.
return new BaseBitrateAdjuster();
}
private boolean isH264HighProfileSupported(MediaCodecInfo info) {
return enableH264HighProfile && Build.VERSION.SDK_INT > Build.VERSION_CODES.M
&& info.getName().startsWith(EXYNOS_PREFIX);
}
}
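A minimal usage sketch for orientation (not part of this file): the factory takes a shared EGL context so encoders can run in texture mode, and is handed to the PeerConnectionFactory builder. The eglBase variable and the builder calls below are assumptions based on the public WebRTC Android API rather than anything this commit adds.

// Sketch: wiring the factory into a PeerConnectionFactory (assumed public API).
EglBase eglBase = EglBase.create();
VideoEncoderFactory encoderFactory = new HardwareVideoEncoderFactory(
    eglBase.getEglBaseContext(), /* enableIntelVp8Encoder= */ true,
    /* enableH264HighProfile= */ true);
PeerConnectionFactory factory = PeerConnectionFactory.builder()
    .setVideoEncoderFactory(encoderFactory)
    .createPeerConnectionFactory();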

View File

@ -0,0 +1,44 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Class for holding the native pointer of a histogram. Since there is no way to destroy a
* histogram, please don't create unnecessary instances of this object. This class is thread safe.
*
* Usage example:
* private static final Histogram someMetricHistogram =
* Histogram.createCounts("WebRTC.Video.SomeMetric", 1, 10000, 50);
* someMetricHistogram.addSample(someVariable);
*/
class Histogram {
private final long handle;
private Histogram(long handle) {
this.handle = handle;
}
static public Histogram createCounts(String name, int min, int max, int bucketCount) {
return new Histogram(nativeCreateCounts(name, min, max, bucketCount));
}
static public Histogram createEnumeration(String name, int max) {
return new Histogram(nativeCreateEnumeration(name, max));
}
public void addSample(int sample) {
nativeAddSample(handle, sample);
}
private static native long nativeCreateCounts(String name, int min, int max, int bucketCount);
private static native long nativeCreateEnumeration(String name, int max);
private static native void nativeAddSample(long handle, int sample);
}

View File

@ -0,0 +1,86 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import androidx.annotation.Nullable;
import java.util.Arrays;
import org.webrtc.PeerConnection;
/**
* Representation of a single ICE Candidate, mirroring
* {@code IceCandidateInterface} in the C++ API.
*/
public class IceCandidate {
public final String sdpMid;
public final int sdpMLineIndex;
public final String sdp;
public final String serverUrl;
public final PeerConnection.AdapterType adapterType;
public IceCandidate(String sdpMid, int sdpMLineIndex, String sdp) {
this.sdpMid = sdpMid;
this.sdpMLineIndex = sdpMLineIndex;
this.sdp = sdp;
this.serverUrl = "";
this.adapterType = PeerConnection.AdapterType.UNKNOWN;
}
@CalledByNative
IceCandidate(String sdpMid, int sdpMLineIndex, String sdp, String serverUrl,
PeerConnection.AdapterType adapterType) {
this.sdpMid = sdpMid;
this.sdpMLineIndex = sdpMLineIndex;
this.sdp = sdp;
this.serverUrl = serverUrl;
this.adapterType = adapterType;
}
@Override
public String toString() {
return sdpMid + ":" + sdpMLineIndex + ":" + sdp + ":" + serverUrl + ":"
+ adapterType.toString();
}
@CalledByNative
String getSdpMid() {
return sdpMid;
}
@CalledByNative
String getSdp() {
return sdp;
}
/** equals() checks sdpMid, sdpMLineIndex, and sdp for equality. */
@Override
public boolean equals(@Nullable Object object) {
if (!(object instanceof IceCandidate)) {
return false;
}
IceCandidate that = (IceCandidate) object;
return objectEquals(this.sdpMid, that.sdpMid) && this.sdpMLineIndex == that.sdpMLineIndex
&& objectEquals(this.sdp, that.sdp);
}
@Override
public int hashCode() {
Object[] values = {sdpMid, sdpMLineIndex, sdp};
return Arrays.hashCode(values);
}
private static boolean objectEquals(Object o1, Object o2) {
if (o1 == null) {
return o2 == null;
}
return o1.equals(o2);
}
}
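A minimal sketch of the usual flow when a candidate arrives over signaling; the msg fields and the peerConnection variable are hypothetical names, not part of this commit.

// Sketch: turning a signaled candidate into an IceCandidate (hypothetical msg fields).
IceCandidate candidate =
    new IceCandidate(msg.sdpMid, msg.sdpMLineIndex, msg.candidateSdp);
peerConnection.addIceCandidate(candidate);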

View File

@ -0,0 +1,43 @@
/*
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
public final class IceCandidateErrorEvent {
/** The local IP address used to communicate with the STUN or TURN server. */
public final String address;
/** The port used to communicate with the STUN or TURN server. */
public final int port;
/**
* The STUN or TURN URL that identifies the STUN or TURN server for which the failure occurred.
*/
public final String url;
/**
* The numeric STUN error code returned by the STUN or TURN server. If no host candidate can reach
   * the server, errorCode will be set to the value 701, which is outside the STUN error code
   * range.
* This error is only fired once per server URL while in the RTCIceGatheringState of "gathering".
*/
public final int errorCode;
/**
* The STUN reason text returned by the STUN or TURN server. If the server could not be reached,
* errorText will be set to an implementation-specific value providing details about the error.
*/
public final String errorText;
@CalledByNative
public IceCandidateErrorEvent(
String address, int port, String url, int errorCode, String errorText) {
this.address = address;
this.port = port;
this.url = url;
this.errorCode = errorCode;
this.errorText = errorText;
}
}

View File

@ -0,0 +1,28 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import org.webrtc.CalledByNative;
import org.webrtc.Loggable;
import org.webrtc.Logging.Severity;
class JNILogging {
private final Loggable loggable;
public JNILogging(Loggable loggable) {
this.loggable = loggable;
}
@CalledByNative
public void logToInjectable(String message, Integer severity, String tag) {
loggable.onLogMessage(message, Severity.values()[severity], tag);
}
}

View File

@ -0,0 +1,200 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;
import org.webrtc.VideoFrame.I420Buffer;
/** Implementation of VideoFrame.I420Buffer backed by Java direct byte buffers. */
public class JavaI420Buffer implements VideoFrame.I420Buffer {
private final int width;
private final int height;
private final ByteBuffer dataY;
private final ByteBuffer dataU;
private final ByteBuffer dataV;
private final int strideY;
private final int strideU;
private final int strideV;
private final RefCountDelegate refCountDelegate;
private JavaI420Buffer(int width, int height, ByteBuffer dataY, int strideY, ByteBuffer dataU,
int strideU, ByteBuffer dataV, int strideV, @Nullable Runnable releaseCallback) {
this.width = width;
this.height = height;
this.dataY = dataY;
this.dataU = dataU;
this.dataV = dataV;
this.strideY = strideY;
this.strideU = strideU;
this.strideV = strideV;
this.refCountDelegate = new RefCountDelegate(releaseCallback);
}
private static void checkCapacity(ByteBuffer data, int width, int height, int stride) {
// The last row does not necessarily need padding.
final int minCapacity = stride * (height - 1) + width;
if (data.capacity() < minCapacity) {
throw new IllegalArgumentException(
"Buffer must be at least " + minCapacity + " bytes, but was " + data.capacity());
}
}
/** Wraps existing ByteBuffers into JavaI420Buffer object without copying the contents. */
public static JavaI420Buffer wrap(int width, int height, ByteBuffer dataY, int strideY,
ByteBuffer dataU, int strideU, ByteBuffer dataV, int strideV,
@Nullable Runnable releaseCallback) {
if (dataY == null || dataU == null || dataV == null) {
throw new IllegalArgumentException("Data buffers cannot be null.");
}
if (!dataY.isDirect() || !dataU.isDirect() || !dataV.isDirect()) {
throw new IllegalArgumentException("Data buffers must be direct byte buffers.");
}
// Slice the buffers to prevent external modifications to the position / limit of the buffer.
// Note that this doesn't protect the contents of the buffers from modifications.
dataY = dataY.slice();
dataU = dataU.slice();
dataV = dataV.slice();
final int chromaWidth = (width + 1) / 2;
final int chromaHeight = (height + 1) / 2;
checkCapacity(dataY, width, height, strideY);
checkCapacity(dataU, chromaWidth, chromaHeight, strideU);
checkCapacity(dataV, chromaWidth, chromaHeight, strideV);
return new JavaI420Buffer(
width, height, dataY, strideY, dataU, strideU, dataV, strideV, releaseCallback);
}
/** Allocates an empty I420Buffer suitable for an image of the given dimensions. */
public static JavaI420Buffer allocate(int width, int height) {
int chromaHeight = (height + 1) / 2;
int strideUV = (width + 1) / 2;
int yPos = 0;
int uPos = yPos + width * height;
int vPos = uPos + strideUV * chromaHeight;
ByteBuffer buffer =
JniCommon.nativeAllocateByteBuffer(width * height + 2 * strideUV * chromaHeight);
buffer.position(yPos);
buffer.limit(uPos);
ByteBuffer dataY = buffer.slice();
buffer.position(uPos);
buffer.limit(vPos);
ByteBuffer dataU = buffer.slice();
buffer.position(vPos);
buffer.limit(vPos + strideUV * chromaHeight);
ByteBuffer dataV = buffer.slice();
return new JavaI420Buffer(width, height, dataY, width, dataU, strideUV, dataV, strideUV,
() -> { JniCommon.nativeFreeByteBuffer(buffer); });
}
@Override
public int getWidth() {
return width;
}
@Override
public int getHeight() {
return height;
}
@Override
public ByteBuffer getDataY() {
// Return a slice to prevent relative reads from changing the position.
return dataY.slice();
}
@Override
public ByteBuffer getDataU() {
// Return a slice to prevent relative reads from changing the position.
return dataU.slice();
}
@Override
public ByteBuffer getDataV() {
// Return a slice to prevent relative reads from changing the position.
return dataV.slice();
}
@Override
public int getStrideY() {
return strideY;
}
@Override
public int getStrideU() {
return strideU;
}
@Override
public int getStrideV() {
return strideV;
}
@Override
public I420Buffer toI420() {
retain();
return this;
}
@Override
public void retain() {
refCountDelegate.retain();
}
@Override
public void release() {
refCountDelegate.release();
}
@Override
public VideoFrame.Buffer cropAndScale(
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
return cropAndScaleI420(this, cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
}
public static VideoFrame.Buffer cropAndScaleI420(final I420Buffer buffer, int cropX, int cropY,
int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
if (cropWidth == scaleWidth && cropHeight == scaleHeight) {
// No scaling.
ByteBuffer dataY = buffer.getDataY();
ByteBuffer dataU = buffer.getDataU();
ByteBuffer dataV = buffer.getDataV();
dataY.position(cropX + cropY * buffer.getStrideY());
dataU.position(cropX / 2 + cropY / 2 * buffer.getStrideU());
dataV.position(cropX / 2 + cropY / 2 * buffer.getStrideV());
buffer.retain();
return JavaI420Buffer.wrap(scaleWidth, scaleHeight, dataY.slice(), buffer.getStrideY(),
dataU.slice(), buffer.getStrideU(), dataV.slice(), buffer.getStrideV(), buffer::release);
}
JavaI420Buffer newBuffer = JavaI420Buffer.allocate(scaleWidth, scaleHeight);
nativeCropAndScaleI420(buffer.getDataY(), buffer.getStrideY(), buffer.getDataU(),
buffer.getStrideU(), buffer.getDataV(), buffer.getStrideV(), cropX, cropY, cropWidth,
cropHeight, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(),
newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV(), scaleWidth,
scaleHeight);
return newBuffer;
}
private static native void nativeCropAndScaleI420(ByteBuffer srcY, int srcStrideY,
ByteBuffer srcU, int srcStrideU, ByteBuffer srcV, int srcStrideV, int cropX, int cropY,
int cropWidth, int cropHeight, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU,
int dstStrideU, ByteBuffer dstV, int dstStrideV, int scaleWidth, int scaleHeight);
}
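A minimal sketch of allocating a buffer and handing it to a VideoFrame; the dimensions and timestamp are illustrative values, not from this commit.

// Sketch: allocate an I420 buffer and wrap it in a VideoFrame (illustrative values).
JavaI420Buffer buffer = JavaI420Buffer.allocate(640, 360);
// ... write pixel data via buffer.getDataY(), buffer.getDataU(), buffer.getDataV() ...
VideoFrame frame = new VideoFrame(buffer, /* rotation= */ 0, /* timestampNs= */ 0);
// Releasing the frame releases the single reference created by allocate().
frame.release();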

View File

@ -0,0 +1,23 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.nio.ByteBuffer;
/** Class with static JNI helper functions that are used in many places. */
public class JniCommon {
/** Functions to increment/decrement an rtc::RefCountInterface pointer. */
public static native void nativeAddRef(long refCountedPointer);
public static native void nativeReleaseRef(long refCountedPointer);
public static native ByteBuffer nativeAllocateByteBuffer(int size);
public static native void nativeFreeByteBuffer(ByteBuffer buffer);
}
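The allocate/free pair above must be balanced, and both calls require the native library to be loaded; a sketch of the usual pattern:

// Sketch: balanced native buffer allocation (assumes the native library is loaded).
ByteBuffer buffer = JniCommon.nativeAllocateByteBuffer(4096);
try {
  // ... pass the direct buffer to native code ...
} finally {
  JniCommon.nativeFreeByteBuffer(buffer);
}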

View File

@ -0,0 +1,48 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.io.UnsupportedEncodingException;
import java.util.Map;
/**
 * This class is only used from jni_helper.cc to provide some Java functionality that was not
 * possible to generate in other ways due to bugs.webrtc.org/8606 and bugs.webrtc.org/8632.
*/
class JniHelper {
// TODO(bugs.webrtc.org/8632): Remove.
@CalledByNative
static byte[] getStringBytes(String s) {
try {
return s.getBytes("ISO-8859-1");
} catch (UnsupportedEncodingException e) {
throw new RuntimeException("ISO-8859-1 is unsupported");
}
}
// TODO(bugs.webrtc.org/8632): Remove.
@CalledByNative
static Object getStringClass() {
return String.class;
}
// TODO(bugs.webrtc.org/8606): Remove.
@CalledByNative
static Object getKey(Map.Entry entry) {
return entry.getKey();
}
// TODO(bugs.webrtc.org/8606): Remove.
@CalledByNative
static Object getValue(Map.Entry entry) {
return entry.getValue();
}
}

View File

@ -0,0 +1,25 @@
/*
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
public class LibaomAv1Encoder extends WrappedNativeVideoEncoder {
@Override
public long createNativeVideoEncoder() {
return nativeCreateEncoder();
}
static native long nativeCreateEncoder();
@Override
public boolean isHardwareEncoder() {
return false;
}
}

View File

@ -0,0 +1,20 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
public class LibvpxVp8Decoder extends WrappedNativeVideoDecoder {
@Override
public long createNativeVideoDecoder() {
return nativeCreateDecoder();
}
static native long nativeCreateDecoder();
}

View File

@ -0,0 +1,25 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
public class LibvpxVp8Encoder extends WrappedNativeVideoEncoder {
@Override
public long createNativeVideoEncoder() {
return nativeCreateEncoder();
}
static native long nativeCreateEncoder();
@Override
public boolean isHardwareEncoder() {
return false;
}
}

View File

@ -0,0 +1,22 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
public class LibvpxVp9Decoder extends WrappedNativeVideoDecoder {
@Override
public long createNativeVideoDecoder() {
return nativeCreateDecoder();
}
static native long nativeCreateDecoder();
static native boolean nativeIsSupported();
}

View File

@ -0,0 +1,27 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
public class LibvpxVp9Encoder extends WrappedNativeVideoEncoder {
@Override
public long createNativeVideoEncoder() {
return nativeCreateEncoder();
}
static native long nativeCreateEncoder();
@Override
public boolean isHardwareEncoder() {
return false;
}
static native boolean nativeIsSupported();
}

View File

@ -0,0 +1,22 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import org.webrtc.Logging.Severity;
/**
* Java interface for WebRTC logging. The default implementation uses webrtc.Logging.
*
* When injected, the Loggable will receive logging from both Java and native.
*/
public interface Loggable {
public void onLogMessage(String message, Severity severity, String tag);
}

View File

@ -0,0 +1,201 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import androidx.annotation.Nullable;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.EnumSet;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.webrtc.Loggable;
/**
* Java wrapper for WebRTC logging. Logging defaults to java.util.logging.Logger, but a custom
* logger implementing the Loggable interface can be injected along with a Severity. All subsequent
* log messages will then be redirected to the injected Loggable, except those with a severity lower
* than the specified severity, which will be discarded.
*
* It is also possible to switch to native logging (rtc::LogMessage) if one of the following static
* functions are called from the app:
* - Logging.enableLogThreads
* - Logging.enableLogTimeStamps
* - Logging.enableLogToDebugOutput
*
* The priority goes:
* 1. Injected loggable
* 2. Native logging
* 3. Fallback logging.
* Only one method will be used at a time.
*
* Injecting a Loggable or using any of the enable... methods requires that the native library is
* loaded, using PeerConnectionFactory.initialize.
*/
public class Logging {
private static final Logger fallbackLogger = createFallbackLogger();
private static volatile boolean loggingEnabled;
@Nullable private static Loggable loggable;
private static Severity loggableSeverity;
private static Logger createFallbackLogger() {
final Logger fallbackLogger = Logger.getLogger("org.webrtc.Logging");
fallbackLogger.setLevel(Level.ALL);
return fallbackLogger;
}
static void injectLoggable(Loggable injectedLoggable, Severity severity) {
if (injectedLoggable != null) {
loggable = injectedLoggable;
loggableSeverity = severity;
}
}
static void deleteInjectedLoggable() {
loggable = null;
}
// TODO(solenberg): Remove once dependent projects updated.
@Deprecated
public enum TraceLevel {
TRACE_NONE(0x0000),
TRACE_STATEINFO(0x0001),
TRACE_WARNING(0x0002),
TRACE_ERROR(0x0004),
TRACE_CRITICAL(0x0008),
TRACE_APICALL(0x0010),
TRACE_DEFAULT(0x00ff),
TRACE_MODULECALL(0x0020),
TRACE_MEMORY(0x0100),
TRACE_TIMER(0x0200),
TRACE_STREAM(0x0400),
TRACE_DEBUG(0x0800),
TRACE_INFO(0x1000),
TRACE_TERSEINFO(0x2000),
TRACE_ALL(0xffff);
public final int level;
TraceLevel(int level) {
this.level = level;
}
}
// Keep in sync with webrtc/rtc_base/logging.h:LoggingSeverity.
public enum Severity { LS_VERBOSE, LS_INFO, LS_WARNING, LS_ERROR, LS_NONE }
public static void enableLogThreads() {
nativeEnableLogThreads();
}
public static void enableLogTimeStamps() {
nativeEnableLogTimeStamps();
}
// TODO(solenberg): Remove once dependent projects updated.
@Deprecated
public static void enableTracing(String path, EnumSet<TraceLevel> levels) {}
// Enable diagnostic logging for messages of `severity` to the platform debug
// output. On Android, the output will be directed to Logcat.
// Note: this function starts collecting the output of the RTC_LOG() macros.
// TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
@SuppressWarnings("NoSynchronizedMethodCheck")
public static synchronized void enableLogToDebugOutput(Severity severity) {
if (loggable != null) {
throw new IllegalStateException(
"Logging to native debug output not supported while Loggable is injected. "
+ "Delete the Loggable before calling this method.");
}
nativeEnableLogToDebugOutput(severity.ordinal());
loggingEnabled = true;
}
public static void log(Severity severity, String tag, String message) {
if (tag == null || message == null) {
throw new IllegalArgumentException("Logging tag or message may not be null.");
}
if (loggable != null) {
// Filter log messages below loggableSeverity.
if (severity.ordinal() < loggableSeverity.ordinal()) {
return;
}
loggable.onLogMessage(message, severity, tag);
return;
}
// Try native logging if no loggable is injected.
if (loggingEnabled) {
nativeLog(severity.ordinal(), tag, message);
return;
}
// Fallback to system log.
Level level;
switch (severity) {
case LS_ERROR:
level = Level.SEVERE;
break;
case LS_WARNING:
level = Level.WARNING;
break;
case LS_INFO:
level = Level.INFO;
break;
default:
level = Level.FINE;
break;
}
fallbackLogger.log(level, tag + ": " + message);
}
public static void d(String tag, String message) {
log(Severity.LS_INFO, tag, message);
}
public static void e(String tag, String message) {
log(Severity.LS_ERROR, tag, message);
}
public static void w(String tag, String message) {
log(Severity.LS_WARNING, tag, message);
}
public static void e(String tag, String message, Throwable e) {
log(Severity.LS_ERROR, tag, message);
log(Severity.LS_ERROR, tag, e.toString());
log(Severity.LS_ERROR, tag, getStackTraceString(e));
}
public static void w(String tag, String message, Throwable e) {
log(Severity.LS_WARNING, tag, message);
log(Severity.LS_WARNING, tag, e.toString());
log(Severity.LS_WARNING, tag, getStackTraceString(e));
}
public static void v(String tag, String message) {
log(Severity.LS_VERBOSE, tag, message);
}
private static String getStackTraceString(Throwable e) {
if (e == null) {
return "";
}
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
e.printStackTrace(pw);
return sw.toString();
}
private static native void nativeEnableLogToDebugOutput(int nativeSeverity);
private static native void nativeEnableLogThreads();
private static native void nativeEnableLogTimeStamps();
private static native void nativeLog(int severity, String tag, String message);
}
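A sketch of injecting a custom logger, which per the class comment takes priority over native and fallback logging. The setInjectableLogger call on InitializationOptions.Builder is an assumption based on the public WebRTC Android API; it is not defined in this commit.

// Sketch: routing WebRTC logs into Logcat via an injected Loggable (assumed API).
Loggable logcatLoggable = (message, severity, tag) -> android.util.Log.d(tag, message);
PeerConnectionFactory.initialize(
    PeerConnectionFactory.InitializationOptions.builder(appContext)
        .setInjectableLogger(logcatLoggable, Logging.Severity.LS_INFO)
        .createInitializationOptions());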

View File

@ -0,0 +1,135 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.annotation.TargetApi;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.os.Build;
import androidx.annotation.Nullable;
import java.util.HashMap;
import java.util.Map;
/** Container class for static constants and helpers used with MediaCodec. */
// We are forced to use the old API because we want to support API level < 21.
@SuppressWarnings("deprecation")
class MediaCodecUtils {
private static final String TAG = "MediaCodecUtils";
// Prefixes for supported hardware encoder/decoder component names.
static final String EXYNOS_PREFIX = "OMX.Exynos.";
static final String INTEL_PREFIX = "OMX.Intel.";
static final String NVIDIA_PREFIX = "OMX.Nvidia.";
static final String QCOM_PREFIX = "OMX.qcom.";
static final String HISI_PREFIX = "OMX.hisi.";
static final String MTK_PREFIX = "OMX.mtk.";
static final String[] SOFTWARE_IMPLEMENTATION_PREFIXES = {
"OMX.google.", "OMX.SEC.", "c2.android"};
// NV12 color format supported by QCOM codec, but not declared in MediaCodec -
// see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka = 0x7FA30C01;
static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka = 0x7FA30C02;
static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka = 0x7FA30C03;
static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
// Color formats supported by hardware decoder - in order of preference.
static final int[] DECODER_COLOR_FORMATS = new int[] {CodecCapabilities.COLOR_FormatYUV420Planar,
CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka,
MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka,
MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka,
MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};
// Color formats supported by hardware encoder - in order of preference.
static final int[] ENCODER_COLOR_FORMATS = {
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};
// Color formats supported by texture mode encoding - in order of preference.
static final int[] TEXTURE_COLOR_FORMATS =
new int[] {MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface};
static @Nullable Integer selectColorFormat(
int[] supportedColorFormats, CodecCapabilities capabilities) {
for (int supportedColorFormat : supportedColorFormats) {
for (int codecColorFormat : capabilities.colorFormats) {
if (codecColorFormat == supportedColorFormat) {
return codecColorFormat;
}
}
}
return null;
}
static boolean codecSupportsType(MediaCodecInfo info, VideoCodecMimeType type) {
for (String mimeType : info.getSupportedTypes()) {
if (type.mimeType().equals(mimeType)) {
return true;
}
}
return false;
}
static Map<String, String> getCodecProperties(VideoCodecMimeType type, boolean highProfile) {
switch (type) {
case VP8:
case VP9:
case AV1:
case H265:
return new HashMap<String, String>();
case H264:
return H264Utils.getDefaultH264Params(highProfile);
default:
throw new IllegalArgumentException("Unsupported codec: " + type);
}
}
static boolean isHardwareAccelerated(MediaCodecInfo info) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
return isHardwareAcceleratedQOrHigher(info);
}
return !isSoftwareOnly(info);
}
@TargetApi(29)
private static boolean isHardwareAcceleratedQOrHigher(android.media.MediaCodecInfo codecInfo) {
return codecInfo.isHardwareAccelerated();
}
static boolean isSoftwareOnly(android.media.MediaCodecInfo codecInfo) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
return isSoftwareOnlyQOrHigher(codecInfo);
}
String name = codecInfo.getName();
for (String prefix : SOFTWARE_IMPLEMENTATION_PREFIXES) {
if (name.startsWith(prefix)) {
return true;
}
}
return false;
}
@TargetApi(29)
private static boolean isSoftwareOnlyQOrHigher(android.media.MediaCodecInfo codecInfo) {
return codecInfo.isSoftwareOnly();
}
private MediaCodecUtils() {
// This class should not be instantiated.
}
}

View File

@ -0,0 +1,141 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static org.webrtc.MediaCodecUtils.EXYNOS_PREFIX;
import static org.webrtc.MediaCodecUtils.HISI_PREFIX;
import static org.webrtc.MediaCodecUtils.QCOM_PREFIX;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList;
import android.os.Build;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
/** Factory for decoders backed by Android MediaCodec API. */
@SuppressWarnings("deprecation") // API level 16 requires use of deprecated methods.
class MediaCodecVideoDecoderFactory implements VideoDecoderFactory {
private static final String TAG = "MediaCodecVideoDecoderFactory";
private final @Nullable EglBase.Context sharedContext;
private final @Nullable Predicate<MediaCodecInfo> codecAllowedPredicate;
/**
* MediaCodecVideoDecoderFactory with support of codecs filtering.
*
* @param sharedContext The textures generated will be accessible from this context. May be null,
* this disables texture support.
   * @param codecAllowedPredicate optional predicate to test if a codec is allowed. All codecs
   *     are allowed when the predicate is not provided.
*/
public MediaCodecVideoDecoderFactory(@Nullable EglBase.Context sharedContext,
@Nullable Predicate<MediaCodecInfo> codecAllowedPredicate) {
this.sharedContext = sharedContext;
this.codecAllowedPredicate = codecAllowedPredicate;
}
@Nullable
@Override
public VideoDecoder createDecoder(VideoCodecInfo codecType) {
VideoCodecMimeType type = VideoCodecMimeType.valueOf(codecType.getName());
MediaCodecInfo info = findCodecForType(type);
if (info == null) {
return null;
}
CodecCapabilities capabilities = info.getCapabilitiesForType(type.mimeType());
return new AndroidVideoDecoder(new MediaCodecWrapperFactoryImpl(), info.getName(), type,
MediaCodecUtils.selectColorFormat(MediaCodecUtils.DECODER_COLOR_FORMATS, capabilities),
sharedContext);
}
@Override
public VideoCodecInfo[] getSupportedCodecs() {
List<VideoCodecInfo> supportedCodecInfos = new ArrayList<VideoCodecInfo>();
// Generate a list of supported codecs in order of preference:
// VP8, VP9, H264 (high profile), H264 (baseline profile), AV1 and H265.
for (VideoCodecMimeType type :
new VideoCodecMimeType[] {VideoCodecMimeType.VP8, VideoCodecMimeType.VP9,
VideoCodecMimeType.H264, VideoCodecMimeType.AV1, VideoCodecMimeType.H265}) {
MediaCodecInfo codec = findCodecForType(type);
if (codec != null) {
String name = type.name();
if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) {
supportedCodecInfos.add(new VideoCodecInfo(
name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true)));
}
supportedCodecInfos.add(new VideoCodecInfo(
name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false)));
}
}
return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]);
}
private @Nullable MediaCodecInfo findCodecForType(VideoCodecMimeType type) {
for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
MediaCodecInfo info = null;
try {
info = MediaCodecList.getCodecInfoAt(i);
} catch (IllegalArgumentException e) {
Logging.e(TAG, "Cannot retrieve decoder codec info", e);
}
if (info == null || info.isEncoder()) {
continue;
}
if (isSupportedCodec(info, type)) {
return info;
}
}
return null; // No support for this type.
}
// Returns true if the given MediaCodecInfo indicates a supported decoder for the given type.
private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecMimeType type) {
if (!MediaCodecUtils.codecSupportsType(info, type)) {
return false;
}
// Check for a supported color format.
if (MediaCodecUtils.selectColorFormat(
MediaCodecUtils.DECODER_COLOR_FORMATS, info.getCapabilitiesForType(type.mimeType()))
== null) {
return false;
}
return isCodecAllowed(info);
}
private boolean isCodecAllowed(MediaCodecInfo info) {
if (codecAllowedPredicate == null) {
return true;
}
return codecAllowedPredicate.test(info);
}
private boolean isH264HighProfileSupported(MediaCodecInfo info) {
String name = info.getName();
// Support H.264 HP decoding on QCOM chips.
if (name.startsWith(QCOM_PREFIX)) {
return true;
}
// Support H.264 HP decoding on Exynos chips for Android M and above.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && name.startsWith(EXYNOS_PREFIX)) {
return true;
}
return false;
}
}
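
Applications do not construct this package-private factory directly. A usage sketch, under the assumption that (as in upstream WebRTC) the public HardwareVideoDecoderFactory delegates here, including the optional codec-allowed predicate:

// PeerConnectionFactory.initialize(...) is assumed to have run earlier.
EglBase eglBase = EglBase.create();
VideoDecoderFactory decoderFactory = new HardwareVideoDecoderFactory(
    eglBase.getEglBaseContext(),
    /* codecAllowedPredicate= */ info -> !info.getName().startsWith("OMX.google."));
PeerConnectionFactory peerConnectionFactory = PeerConnectionFactory.builder()
    .setVideoDecoderFactory(decoderFactory)
    .createPeerConnectionFactory();

The predicate shown simply rejects the classic software OMX decoders; a real filter would be driven by per-device testing.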

View File

@ -0,0 +1,60 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCrypto;
import android.media.MediaFormat;
import android.os.Bundle;
import android.view.Surface;
import java.nio.ByteBuffer;
/**
* Subset of methods defined in {@link android.media.MediaCodec} needed by
* {@link HardwareVideoEncoder} and {@link AndroidVideoDecoder}. This interface
* exists to allow mocking and using a fake implementation in tests.
*/
interface MediaCodecWrapper {
void configure(MediaFormat format, Surface surface, MediaCrypto crypto, int flags);
void start();
void flush();
void stop();
void release();
int dequeueInputBuffer(long timeoutUs);
void queueInputBuffer(int index, int offset, int size, long presentationTimeUs, int flags);
int dequeueOutputBuffer(MediaCodec.BufferInfo info, long timeoutUs);
void releaseOutputBuffer(int index, boolean render);
MediaFormat getInputFormat();
MediaFormat getOutputFormat();
MediaFormat getOutputFormat(int index);
ByteBuffer getInputBuffer(int index);
ByteBuffer getOutputBuffer(int index);
Surface createInputSurface();
void setParameters(Bundle params);
MediaCodecInfo getCodecInfo();
}
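
A sketch of the test seam the javadoc describes, assuming Mockito on the test classpath (it is not part of this commit) and a test class living in org.webrtc, since the interface is package-private:

MediaCodecWrapper codec = Mockito.mock(MediaCodecWrapper.class);
Mockito.when(codec.dequeueInputBuffer(Mockito.anyLong())).thenReturn(0);
Mockito.when(codec.getInputBuffer(0)).thenReturn(ByteBuffer.allocateDirect(4096));
// Code under test can now drive the codec path without touching real hardware.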

View File

@ -0,0 +1,22 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.io.IOException;
interface MediaCodecWrapperFactory {
/**
* Creates a new {@link MediaCodecWrapper} by codec name.
*
* <p>For additional information see {@link android.media.MediaCodec#createByCodecName}.
*/
MediaCodecWrapper createByCodecName(String name) throws IOException;
}
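
A hypothetical companion for tests (not in this commit): a factory that ignores the codec name and hands back a canned wrapper, such as the mock above.

class FakeMediaCodecWrapperFactory implements MediaCodecWrapperFactory {
  private final MediaCodecWrapper fake;

  FakeMediaCodecWrapperFactory(MediaCodecWrapper fake) {
    this.fake = fake;
  }

  @Override
  public MediaCodecWrapper createByCodecName(String name) {
    return fake; // a real factory may throw IOException here
  }
}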

View File

@ -0,0 +1,126 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCodecInfo;
import android.media.MediaCrypto;
import android.media.MediaFormat;
import android.os.Bundle;
import android.view.Surface;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
* Implementation of MediaCodecWrapperFactory that returns {@link MediaCodecWrapper} instances
* wrapping {@link android.media.MediaCodec} objects.
*/
class MediaCodecWrapperFactoryImpl implements MediaCodecWrapperFactory {
private static class MediaCodecWrapperImpl implements MediaCodecWrapper {
private final MediaCodec mediaCodec;
public MediaCodecWrapperImpl(MediaCodec mediaCodec) {
this.mediaCodec = mediaCodec;
}
@Override
public void configure(MediaFormat format, Surface surface, MediaCrypto crypto, int flags) {
mediaCodec.configure(format, surface, crypto, flags);
}
@Override
public void start() {
mediaCodec.start();
}
@Override
public void flush() {
mediaCodec.flush();
}
@Override
public void stop() {
mediaCodec.stop();
}
@Override
public void release() {
mediaCodec.release();
}
@Override
public int dequeueInputBuffer(long timeoutUs) {
return mediaCodec.dequeueInputBuffer(timeoutUs);
}
@Override
public void queueInputBuffer(
int index, int offset, int size, long presentationTimeUs, int flags) {
mediaCodec.queueInputBuffer(index, offset, size, presentationTimeUs, flags);
}
@Override
public int dequeueOutputBuffer(BufferInfo info, long timeoutUs) {
return mediaCodec.dequeueOutputBuffer(info, timeoutUs);
}
@Override
public void releaseOutputBuffer(int index, boolean render) {
mediaCodec.releaseOutputBuffer(index, render);
}
@Override
public MediaFormat getInputFormat() {
return mediaCodec.getInputFormat();
}
@Override
public MediaFormat getOutputFormat() {
return mediaCodec.getOutputFormat();
}
@Override
public MediaFormat getOutputFormat(int index) {
return mediaCodec.getOutputFormat(index);
}
@Override
public ByteBuffer getInputBuffer(int index) {
return mediaCodec.getInputBuffer(index);
}
@Override
public ByteBuffer getOutputBuffer(int index) {
return mediaCodec.getOutputBuffer(index);
}
@Override
public Surface createInputSurface() {
return mediaCodec.createInputSurface();
}
@Override
public void setParameters(Bundle params) {
mediaCodec.setParameters(params);
}
@Override
public MediaCodecInfo getCodecInfo() {
return mediaCodec.getCodecInfo();
}
}
@Override
public MediaCodecWrapper createByCodecName(String name) throws IOException {
return new MediaCodecWrapperImpl(MediaCodec.createByCodecName(name));
}
}
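
A minimal lifecycle sketch using the factory. It must live in org.webrtc because both types are package-private, and the codec name is illustrative (enumerate MediaCodecList for the names actually present on a device):

static void openAndClose() throws IOException {
  MediaCodecWrapperFactory factory = new MediaCodecWrapperFactoryImpl();
  MediaCodecWrapper codec = factory.createByCodecName("c2.android.avc.decoder");
  MediaFormat format =
      MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, 640, 480);
  codec.configure(format, /* surface= */ null, /* crypto= */ null, /* flags= */ 0);
  codec.start();
  // ... queue input buffers, drain output buffers ...
  codec.stop();
  codec.release();
}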

View File

@ -0,0 +1,99 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
/**
* Description of media constraints for {@code MediaStream} and
* {@code PeerConnection}.
*/
public class MediaConstraints {
/** Simple String key/value pair. */
public static class KeyValuePair {
private final String key;
private final String value;
public KeyValuePair(String key, String value) {
this.key = key;
this.value = value;
}
@CalledByNative("KeyValuePair")
public String getKey() {
return key;
}
@CalledByNative("KeyValuePair")
public String getValue() {
return value;
}
@Override
public String toString() {
return key + ": " + value;
}
@Override
public boolean equals(@Nullable Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
KeyValuePair that = (KeyValuePair) other;
return key.equals(that.key) && value.equals(that.value);
}
@Override
public int hashCode() {
return key.hashCode() + value.hashCode();
}
}
public final List<KeyValuePair> mandatory;
public final List<KeyValuePair> optional;
public MediaConstraints() {
mandatory = new ArrayList<KeyValuePair>();
optional = new ArrayList<KeyValuePair>();
}
private static String stringifyKeyValuePairList(List<KeyValuePair> list) {
StringBuilder builder = new StringBuilder("[");
for (KeyValuePair pair : list) {
if (builder.length() > 1) {
builder.append(", ");
}
builder.append(pair.toString());
}
return builder.append("]").toString();
}
@Override
public String toString() {
return "mandatory: " + stringifyKeyValuePairList(mandatory) + ", optional: "
+ stringifyKeyValuePairList(optional);
}
@CalledByNative
List<KeyValuePair> getMandatory() {
return mandatory;
}
@CalledByNative
List<KeyValuePair> getOptional() {
return optional;
}
}
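
Typical construction, e.g. for PeerConnection.createOffer(). The key names below are long-standing libwebrtc conventions rather than a stable API, and peerConnection/sdpObserver are assumed to exist in the caller:

MediaConstraints constraints = new MediaConstraints();
constraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
constraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
constraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
peerConnection.createOffer(sdpObserver, constraints);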

View File

@ -0,0 +1,74 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Java wrapper for a C++ MediaSourceInterface. */
public class MediaSource {
/** Tracks MediaSourceInterface.SourceState */
public enum State {
INITIALIZING,
LIVE,
ENDED,
MUTED;
@CalledByNative("State")
static State fromNativeIndex(int nativeIndex) {
return values()[nativeIndex];
}
}
private final RefCountDelegate refCountDelegate;
private long nativeSource;
public MediaSource(long nativeSource) {
refCountDelegate = new RefCountDelegate(() -> JniCommon.nativeReleaseRef(nativeSource));
this.nativeSource = nativeSource;
}
public State state() {
checkMediaSourceExists();
return nativeGetState(nativeSource);
}
public void dispose() {
checkMediaSourceExists();
refCountDelegate.release();
nativeSource = 0;
}
/** Returns a pointer to webrtc::MediaSourceInterface. */
protected long getNativeMediaSource() {
checkMediaSourceExists();
return nativeSource;
}
/**
* Runs code in {@code runnable} holding a reference to the media source. If the object has
* already been released, does nothing.
*/
void runWithReference(Runnable runnable) {
if (refCountDelegate.safeRetain()) {
try {
runnable.run();
} finally {
refCountDelegate.release();
}
}
}
private void checkMediaSourceExists() {
if (nativeSource == 0) {
throw new IllegalStateException("MediaSource has been disposed.");
}
}
private static native State nativeGetState(long pointer);
}
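
A lifecycle sketch: VideoSource extends MediaSource, so the same state()/dispose() contract applies; peerConnectionFactory is assumed to exist in the caller.

VideoSource source = peerConnectionFactory.createVideoSource(/* isScreencast= */ false);
if (source.state() == MediaSource.State.LIVE) {
  // Safe to attach a capturer and create a VideoTrack here.
}
source.dispose(); // releases the native reference; nativeSource becomes 0
// Any further source.state() call would throw IllegalStateException.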

Some files were not shown because too many files have changed in this diff.