Merge branch 'ZLMediaKit:master' into master

This commit is contained in:
gongluck 2022-09-29 21:17:57 -05:00 committed by GitHub
commit 63745acef0
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
62 changed files with 1917 additions and 124 deletions

View File

@ -90,8 +90,12 @@ if(ENABLE_RTPPROXY OR ENABLE_HLS)
aux_source_directory(${MediaServer_MPEG_ROOT}/source MPEG_SRC_LIST)
add_library(mpeg STATIC ${MPEG_SRC_LIST})
add_library(MediaServer::mpeg ALIAS mpeg)
# media-server
# MPEG_H26X_VERIFY -
# MPEG_ZERO_PAYLOAD_LENGTH - hik
# MPEG_DAHUA_AAC_FROM_G711 - dahua
target_compile_options(mpeg
PRIVATE ${COMPILE_OPTIONS_DEFAULT})
PRIVATE ${COMPILE_OPTIONS_DEFAULT} -DMPEG_H26X_VERIFY -DMPEG_ZERO_PAYLOAD_LENGTH -DMPEG_DAHUA_AAC_FROM_G711)
target_include_directories(mpeg
PRIVATE
"$<BUILD_INTERFACE:${MediaServer_MPEG_ROOT}/include>"

View File

@ -299,7 +299,7 @@ if(ENABLE_FFMPEG)
endif()
if(ENABLE_MEM_DEBUG)
list(APPEND COMPILE_OPTIONS_DEFAULT
update_cached_list(MK_LINK_LIBRARIES
"-Wl,-wrap,free;-Wl,-wrap,malloc;-Wl,-wrap,realloc;-Wl,-wrap,calloc")
update_cached_list(MK_COMPILE_DEFINITIONS ENABLE_MEM_DEBUG)
message(STATUS "已启用内存调试功能")
@ -308,6 +308,10 @@ endif()
if(ENABLE_ASAN)
list(APPEND COMPILE_OPTIONS_DEFAULT
"-fsanitize=address;-fno-omit-frame-pointer")
# https://github.com/google/sanitizers/wiki/AddressSanitizer#using-addresssanitizer
# > In order to use AddressSanitizer you will need to
# > compile and link your program using clang with the -fsanitize=address switch.
update_cached_list(MK_LINK_LIBRARIES "-fsanitize=address")
message(STATUS "已启用 Address Sanitize")
endif()
@ -413,6 +417,9 @@ endif()
# for version.h
include_directories(${CMAKE_CURRENT_BINARY_DIR})
# for assert.h
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/3rdpart)
add_subdirectory(3rdpart)
add_subdirectory(src)

View File

@ -92,10 +92,11 @@
- 支持http文件访问鉴权
- GB28181与RTP推流
- 支持UDP/TCP国标RTP(PS或TS)推流服务器可以转换成RTSP/RTMP/HLS等协议
- 支持RTSP/RTMP/HLS转国标推流客户端支持TCP/UDP模式提供相应restful api
- 支持UDP/TCP RTP(PS/TS/ES)推流服务器可以转换成RTSP/RTMP/HLS等协议
- 支持RTSP/RTMP/HLS等协议转rtp推流客户端支持TCP/UDP模式提供相应restful api,支持主动被动方式。
- 支持H264/H265/AAC/G711/OPUS编码
- 支持海康ehome推流
- 支持GB28181主动拉流模式
- MP4点播与录制
- 支持录制为FLV/HLS/MP4

View File

@ -162,11 +162,18 @@ API_EXPORT uint16_t API_CALL mk_rtp_server_start(uint16_t port);
/**
* rtc服务器
* @param port rtp监听端口
* @param port rtc监听端口
* @return 0:,0:
*/
API_EXPORT uint16_t API_CALL mk_rtc_server_start(uint16_t port);
/**
* srt服务器
* @param port srt监听端口
* @return 0:,0:
*/
API_EXPORT uint16_t API_CALL mk_srt_server_start(uint16_t port);
/**
* shell服务器

View File

@ -40,6 +40,11 @@ static std::shared_ptr<RtpServer> rtpServer;
static std::shared_ptr<UdpServer> rtcServer;
#endif
#if defined(ENABLE_SRT)
#include "../srt/SrtSession.hpp"
static std::shared_ptr<UdpServer> srtServer;
#endif
//////////////////////////environment init///////////////////////////
API_EXPORT void API_CALL mk_env_init(const mk_config *cfg) {
@ -62,8 +67,15 @@ API_EXPORT void API_CALL mk_stop_all_server(){
CLEAR_ARR(rtsp_server);
CLEAR_ARR(rtmp_server);
CLEAR_ARR(http_server);
shell_server = nullptr;
#ifdef ENABLE_RTPPROXY
rtpServer = nullptr;
#endif
#ifdef ENABLE_WEBRTC
rtcServer = nullptr;
#endif
#ifdef ENABLE_SRT
srtServer = nullptr;
#endif
stopAllTcpServer();
}
@ -254,6 +266,36 @@ API_EXPORT uint16_t API_CALL mk_rtc_server_start(uint16_t port) {
#endif
}
API_EXPORT uint16_t API_CALL mk_srt_server_start(uint16_t port) {
#ifdef ENABLE_SRT
try {
srtServer = std::make_shared<UdpServer>();
srtServer->setOnCreateSocket([](const EventPoller::Ptr &poller, const Buffer::Ptr &buf, struct sockaddr *, int) {
if (!buf) {
return Socket::createSocket(poller, false);
}
auto new_poller = SRT::SrtSession::queryPoller(buf);
if (!new_poller) {
//握手第一阶段
return Socket::createSocket(poller, false);
}
return Socket::createSocket(new_poller, false);
});
srtServer->start<SRT::SrtSession>(port);
return srtServer->getPort();
} catch (std::exception &ex) {
srtServer.reset();
WarnL << ex.what();
return 0;
}
#else
WarnL << "未启用该功能!";
return 0;
#endif
}
API_EXPORT uint16_t API_CALL mk_shell_server_start(uint16_t port){
try {
shell_server = std::make_shared<TcpServer>();

View File

@ -416,6 +416,7 @@ int main(int argc, char *argv[]) {
mk_shell_server_start(9000);
mk_rtp_server_start(10000);
mk_rtc_server_start(8000);
mk_srt_server_start(9000);
mk_events events = {
.on_mk_media_changed = on_mk_media_changed,

View File

@ -336,12 +336,9 @@ int start_main(int argc,char *argv[]) {
if (rtcPort) { rtcSrv->start<WebRtcSession>(rtcPort); }
#endif//defined(ENABLE_WEBRTC)
#if defined(ENABLE_SRT)
// srt udp服务器
if(srtPort){
srtSrv->start<SRT::SrtSession>(srtPort);
}
if(srtPort) { srtSrv->start<SRT::SrtSession>(srtPort); }
#endif//defined(ENABLE_SRT)
} catch (std::exception &ex) {

View File

@ -312,10 +312,6 @@ EventPoller::Ptr MultiMediaSourceMuxer::getOwnerPoller(MediaSource &sender) {
}
bool MultiMediaSourceMuxer::onTrackReady(const Track::Ptr &track) {
if (CodecL16 == track->getCodecId()) {
WarnL << "L16音频格式目前只支持RTSP协议推流拉流!!!";
return false;
}
bool ret = false;
if (_rtmp) {

View File

@ -80,8 +80,6 @@ public:
GET_OPT_VALUE(modify_stamp);
}
ProtocolOption(const ProtocolOption &) = default;
private:
template <typename MAP, typename KEY, typename TYPE>
static void getArgsValue(const MAP &allArgs, const KEY &key, TYPE &value) {

View File

@ -41,6 +41,8 @@ void RtpSender::startSend(const MediaSourceEvent::SendRtpArgs &args, const funct
if (args.passive) {
// tcp被动发流模式
_args.is_udp = false;
// 默认等待链接
bool is_wait = true;
try {
auto tcp_listener = Socket::createSocket(_poller, false);
if (args.src_port) {
@ -49,18 +51,22 @@ void RtpSender::startSend(const MediaSourceEvent::SendRtpArgs &args, const funct
throw std::invalid_argument(StrPrinter << "open tcp passive server failed on port:" << args.src_port
<< ", err:" << get_uv_errmsg(true));
}
is_wait = true;
} else {
auto pr = std::make_pair(tcp_listener, Socket::createSocket(_poller, false));
//从端口池获取随机端口
makeSockPair(pr, "::", false, false);
// 随机端口不等待,保证调用者可以知道端口
is_wait = false;
}
// tcp服务器默认开启5秒
auto delay_task = _poller->doDelayTask(_args.tcp_passive_close_delay_ms, [tcp_listener, cb]() mutable {
cb(0, SockException(Err_timeout, "wait tcp connection timeout"));
auto delay_task = _poller->doDelayTask(_args.tcp_passive_close_delay_ms, [tcp_listener, cb,is_wait]() mutable {
if(is_wait)
cb(0, SockException(Err_timeout, "wait tcp connection timeout"));
tcp_listener = nullptr;
return 0;
});
tcp_listener->setOnAccept([weak_self, cb, delay_task](Socket::Ptr &sock, std::shared_ptr<void> &complete) {
tcp_listener->setOnAccept([weak_self, cb, delay_task,is_wait](Socket::Ptr &sock, std::shared_ptr<void> &complete) {
auto strong_self = weak_self.lock();
if (!strong_self) {
return;
@ -69,10 +75,15 @@ void RtpSender::startSend(const MediaSourceEvent::SendRtpArgs &args, const funct
delay_task->cancel();
strong_self->_socket_rtp = sock;
strong_self->onConnect();
cb(sock->get_local_port(), SockException());
if(is_wait)
cb(sock->get_local_port(), SockException());
InfoL << "accept connection from:" << sock->get_peer_ip() << ":" << sock->get_peer_port();
});
InfoL << "start tcp passive server on:" << tcp_listener->get_local_port();
if(!is_wait){
// 随机端口马上返回端口,保证调用者知道端口
cb(tcp_listener->get_local_port(), SockException());
}
} catch (std::exception &ex) {
cb(0, SockException(Err_other, ex.what()));
return;

View File

@ -60,6 +60,10 @@ public:
* @param packet
*/
void sortPacket(SEQ seq, T packet) {
if(!_is_inited && _next_seq_out == 0){
_next_seq_out = seq;
_is_inited = true;
}
if (seq < _next_seq_out) {
if (_next_seq_out < seq + kMax) {
//过滤seq回退包(回环包除外)
@ -147,6 +151,9 @@ private:
}
private:
//第一个包是已经进入
bool _is_inited = false;
//下次应该输出的SEQ
SEQ _next_seq_out = 0;
//seq回环次数计数

View File

@ -59,10 +59,10 @@ bool ACKPacket::storeToData() {
storeUint32(ptr, rtt_variance);
ptr += 4;
storeUint32(ptr, pkt_recv_rate);
storeUint32(ptr, available_buf_size);
ptr += 4;
storeUint32(ptr, available_buf_size);
storeUint32(ptr, pkt_recv_rate);
ptr += 4;
storeUint32(ptr, estimated_link_capacity);

View File

@ -13,8 +13,9 @@
#endif // defined(_WIN32)
#include <chrono>
#define MAX_SEQ 0x7fffffff
#define MAX_TS 0xffffffff
#define MAX_SEQ 0x7fffffff
#define SEQ_NONE 0xffffffff
#define MAX_TS 0xffffffff
namespace SRT {
using SteadyClock = std::chrono::steady_clock;
@ -35,6 +36,25 @@ static inline uint16_t loadUint16(uint8_t *ptr) {
return ptr[0] << 8 | ptr[1];
}
inline static int64_t seqCmp(uint32_t seq1, uint32_t seq2) {
if(seq1 > seq2){
if((seq1 - seq2) >(MAX_SEQ>>1)){
return (int64_t)seq1 - (int64_t)(seq2+MAX_SEQ);
}else{
return (int64_t)seq1 - (int64_t)seq2;
}
}else{
if((seq2-seq1) >(MAX_SEQ>>1)){
return (int64_t)(seq1+MAX_SEQ) - (int64_t)seq2;
}else{
return (int64_t)seq1 - (int64_t)seq2;
}
}
}
inline static uint32_t incSeq(int32_t seq) {
return (seq == MAX_SEQ) ? 0 : seq + 1;
}
static inline void storeUint32(uint8_t *buf, uint32_t val) {
buf[0] = val >> 24;
buf[1] = (val >> 16) & 0xff;

View File

@ -14,6 +14,20 @@
namespace SRT {
using namespace toolkit;
static const size_t HDR_SIZE = 16; // packet header size = SRT_PH_E_SIZE * sizeof(uint32_t)
// Can also be calculated as: sizeof(struct ether_header) + sizeof(struct ip) + sizeof(struct udphdr).
static const size_t UDP_HDR_SIZE = 28; // 20 bytes IPv4 + 8 bytes of UDP { u16 sport, dport, len, csum }.
static const size_t SRT_DATA_HDR_SIZE = UDP_HDR_SIZE + HDR_SIZE;
// Maximum transmission unit size. 1500 in case of Ethernet II (RFC 1191).
static const size_t ETH_MAX_MTU_SIZE = 1500;
// Maximum payload size of an SRT packet.
static const size_t SRT_MAX_PAYLOAD_SIZE = ETH_MAX_MTU_SIZE - SRT_DATA_HDR_SIZE;
/*
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1

View File

@ -128,9 +128,9 @@ void SrtSession::onError(const SockException &err) {
// 防止互相引用导致不释放
auto transport = std::move(_transport);
getPoller()->async(
[transport, err] {
[transport] {
//延时减引用防止使用transport对象时销毁对象
transport->onShutdown(err);
//transport->onShutdown(err);
},
false);
}

View File

@ -1,4 +1,6 @@
#include "Util/onceToken.h"
#include "Util/mini.h"
#include <iterator>
#include <stdlib.h>
@ -15,6 +17,13 @@ const std::string kPort = SRT_FIELD "port";
const std::string kLatencyMul = SRT_FIELD "latencyMul";
const std::string kPktBufSize = SRT_FIELD "pktBufSize";
static onceToken token([]() {
mINI::Instance()[kTimeOutSec] = 5;
mINI::Instance()[kPort] = 9000;
mINI::Instance()[kLatencyMul] = 4;
mINI::Instance()[kPktBufSize] = 8192;
});
static std::atomic<uint32_t> s_srt_socket_id_generate { 125 };
//////////// SrtTransport //////////////////////////
SrtTransport::SrtTransport(const EventPoller::Ptr &poller)
@ -22,7 +31,7 @@ SrtTransport::SrtTransport(const EventPoller::Ptr &poller)
_start_timestamp = SteadyClock::now();
_socket_id = s_srt_socket_id_generate.fetch_add(1);
_pkt_recv_rate_context = std::make_shared<PacketRecvRateContext>(_start_timestamp);
_recv_rate_context = std::make_shared<RecvRateContext>(_start_timestamp);
//_recv_rate_context = std::make_shared<RecvRateContext>(_start_timestamp);
_estimated_link_capacity_context = std::make_shared<EstimatedLinkCapacityContext>(_start_timestamp);
}
@ -60,7 +69,29 @@ void SrtTransport::switchToOtherTransport(uint8_t *buf, int len, uint32_t socket
}
}
void SrtTransport::createTimerForCheckAlive(){
std::weak_ptr<SrtTransport> weak_self = std::static_pointer_cast<SrtTransport>(shared_from_this());
auto timeoutSec = getTimeOutSec();
_timer = std::make_shared<Timer>(
timeoutSec/ 2,
[weak_self,timeoutSec]() {
auto strong_self = weak_self.lock();
if (!strong_self) {
return false;
}
if (strong_self->_alive_ticker.elapsedTime() > timeoutSec * 1000) {
strong_self->onShutdown(SockException(Err_timeout, "接收srt数据超时"));
}
return true;
},
getPoller());
}
void SrtTransport::inputSockData(uint8_t *buf, int len, struct sockaddr_storage *addr) {
_alive_ticker.resetTime();
if(!_timer){
createTimerForCheckAlive();
}
using srt_control_handler = void (SrtTransport::*)(uint8_t * buf, int len, struct sockaddr_storage *addr);
static std::unordered_map<uint16_t, srt_control_handler> s_control_functions;
static onceToken token([]() {
@ -83,11 +114,11 @@ void SrtTransport::inputSockData(uint8_t *buf, int len, struct sockaddr_storage
if(_handleshake_timer){
_handleshake_timer.reset();
}
_pkt_recv_rate_context->inputPacket(_now);
_estimated_link_capacity_context->inputPacket(_now);
_recv_rate_context->inputPacket(_now, len);
_pkt_recv_rate_context->inputPacket(_now,len+UDP_HDR_SIZE);
//_recv_rate_context->inputPacket(_now, len);
handleDataPacket(buf, len, addr);
checkAndSendAckNak();
} else {
WarnL<<"DataPacket switch to other transport: "<<socketId;
switchToOtherTransport(buf, len, socketId, addr);
@ -102,9 +133,10 @@ void SrtTransport::inputSockData(uint8_t *buf, int len, struct sockaddr_storage
switchToOtherTransport(buf, len, socketId, addr);
return;
}
_pkt_recv_rate_context->inputPacket(_now);
_estimated_link_capacity_context->inputPacket(_now);
_recv_rate_context->inputPacket(_now, len);
//_pkt_recv_rate_context->inputPacket(_now,len);
//_estimated_link_capacity_context->inputPacket(_now);
//_recv_rate_context->inputPacket(_now, len);
auto it = s_control_functions.find(type);
if (it == s_control_functions.end()) {
@ -113,6 +145,9 @@ void SrtTransport::inputSockData(uint8_t *buf, int len, struct sockaddr_storage
} else {
(this->*(it->second))(buf, len, addr);
}
if(_is_handleshake_finished && isPusher()){
checkAndSendAckNak();
}
} else {
// not reach
WarnL << "not reach this";
@ -152,6 +187,7 @@ void SrtTransport::handleHandshakeInduction(HandshakePacket &pkt, struct sockadd
_mtu = pkt.mtu;
_last_pkt_seq = _init_seq_number - 1;
_estimated_link_capacity_context->setLastSeq(_last_pkt_seq);
_peer_socket_id = pkt.srt_socket_id;
HandshakePacket::Ptr res = std::make_shared<HandshakePacket>();
@ -173,8 +209,7 @@ void SrtTransport::handleHandshakeInduction(HandshakePacket &pkt, struct sockadd
registerSelfHandshake();
sendControlPacket(res, true);
_handleshake_timer = std::make_shared<Timer>(0.02,[this]()->bool{
_handleshake_timer = std::make_shared<Timer>(0.2,[this]()->bool{
sendControlPacket(_handleshake_res, true);
return true;
},getPoller());
@ -266,7 +301,7 @@ void SrtTransport::handleHandshakeConclusion(HandshakePacket &pkt, struct sockad
return;
}
_last_ack_pkt_seq_num = _init_seq_number;
_last_ack_pkt_seq = _init_seq_number;
}
void SrtTransport::handleHandshake(uint8_t *buf, int len, struct sockaddr_storage *addr) {
@ -368,6 +403,7 @@ void SrtTransport::handleDropReq(uint8_t *buf, int len, struct sockaddr_storage
std::list<DataPacket::Ptr> list;
// TraceL<<"drop "<<pkt.first_pkt_seq_num<<" last "<<pkt.last_pkt_seq_num;
_recv_buf->drop(pkt.first_pkt_seq_num, pkt.last_pkt_seq_num, list);
//checkAndSendAckNak();
if (list.empty()) {
return;
}
@ -392,7 +428,8 @@ void SrtTransport::handleDropReq(uint8_t *buf, int len, struct sockaddr_storage
// TraceL << "check lost send nack";
}
*/
}
void SrtTransport::checkAndSendAckNak(){
auto nak_interval = (_rtt + _rtt_variance * 4) / 2;
if (nak_interval <= 20 * 1000) {
nak_interval = 20 * 1000;
@ -409,7 +446,13 @@ void SrtTransport::handleDropReq(uint8_t *buf, int len, struct sockaddr_storage
_light_ack_pkt_count = 0;
_ack_ticker.resetTime(_now);
// send a ack per 10 ms for receiver
sendACKPacket();
if(_last_ack_pkt_seq != _recv_buf->getExpectedSeq()){
//TraceL<<"send a ack packet";
sendACKPacket();
}else{
//TraceL<<" ignore repeate ack packet";
}
} else {
if (_light_ack_pkt_count >= 64) {
// for high bitrate stream send light ack
@ -421,7 +464,6 @@ void SrtTransport::handleDropReq(uint8_t *buf, int len, struct sockaddr_storage
}
_light_ack_pkt_count++;
}
void SrtTransport::handleUserDefinedType(uint8_t *buf, int len, struct sockaddr_storage *addr) {
TraceL;
}
@ -431,12 +473,34 @@ void SrtTransport::handleACKACK(uint8_t *buf, int len, struct sockaddr_storage *
ACKACKPacket::Ptr pkt = std::make_shared<ACKACKPacket>();
pkt->loadFromData(buf, len);
uint32_t rtt = DurationCountMicroseconds(_now - _ack_send_timestamp[pkt->ack_number]);
_rtt_variance = (3 * _rtt_variance + abs((long)_rtt - (long)rtt)) / 4;
_rtt = (7 * rtt + _rtt) / 8;
if(_ack_send_timestamp.find(pkt->ack_number)!=_ack_send_timestamp.end()){
uint32_t rtt = DurationCountMicroseconds(_now - _ack_send_timestamp[pkt->ack_number]);
_rtt_variance = (3 * _rtt_variance + abs((long)_rtt - (long)rtt)) / 4;
_rtt = (7 * rtt + _rtt) / 8;
// TraceL<<" rtt:"<<_rtt<<" rtt variance:"<<_rtt_variance;
_ack_send_timestamp.erase(pkt->ack_number);
// TraceL<<" rtt:"<<_rtt<<" rtt variance:"<<_rtt_variance;
_ack_send_timestamp.erase(pkt->ack_number);
if(_last_recv_ackack_seq_num < pkt->ack_number){
_last_recv_ackack_seq_num = pkt->ack_number;
}else{
if((_last_recv_ackack_seq_num-pkt->ack_number)>(MAX_TS>>1)){
_last_recv_ackack_seq_num = pkt->ack_number;
}
}
if(_ack_send_timestamp.size()>1000){
// clear data
for(auto it = _ack_send_timestamp.begin(); it != _ack_send_timestamp.end();){
if(DurationCountMicroseconds(_now-it->second)>5e6){
// 超过五秒没有ackack 丢弃
it = _ack_send_timestamp.erase(it);
}else{
it++;
}
}
}
}
}
void SrtTransport::handlePeerError(uint8_t *buf, int len, struct sockaddr_storage *addr) {
@ -444,6 +508,8 @@ void SrtTransport::handlePeerError(uint8_t *buf, int len, struct sockaddr_storag
}
void SrtTransport::sendACKPacket() {
uint32_t recv_rate = 0;
ACKPacket::Ptr pkt = std::make_shared<ACKPacket>();
pkt->dst_socket_id = _peer_socket_id;
pkt->timestamp = DurationCountMicroseconds(_now - _start_timestamp);
@ -452,12 +518,23 @@ void SrtTransport::sendACKPacket() {
pkt->rtt = _rtt;
pkt->rtt_variance = _rtt_variance;
pkt->available_buf_size = _recv_buf->getAvailableBufferSize();
pkt->pkt_recv_rate = _pkt_recv_rate_context->getPacketRecvRate();
pkt->pkt_recv_rate = _pkt_recv_rate_context->getPacketRecvRate(recv_rate);
pkt->estimated_link_capacity = _estimated_link_capacity_context->getEstimatedLinkCapacity();
pkt->recv_rate = _recv_rate_context->getRecvRate();
pkt->recv_rate = recv_rate;
if(0){
TraceL<<pkt->pkt_recv_rate<<" pkt/s "<<recv_rate<<" byte/s "<<pkt->estimated_link_capacity<<" pkt/s (cap) "<<pkt->available_buf_size<<" available buf";
//TraceL<<_pkt_recv_rate_context->dump();
//TraceL<<"recv estimated:";
//TraceL<< _pkt_recv_rate_context->dump();
//TraceL<<"recv queue:";
//TraceL<<_recv_buf->dump();
}
if(pkt->available_buf_size<2){
pkt->available_buf_size = 2;
}
pkt->storeToData();
_ack_send_timestamp[pkt->ack_number] = _now;
_last_ack_pkt_seq_num = pkt->last_ack_pkt_seq_number;
_last_ack_pkt_seq = pkt->last_ack_pkt_seq_number;
sendControlPacket(pkt, true);
// TraceL<<"send ack "<<pkt->dump();
// TraceL<<_recv_buf->dump();
@ -477,7 +554,7 @@ void SrtTransport::sendLightACKPacket() {
pkt->estimated_link_capacity = 0;
pkt->recv_rate = 0;
pkt->storeToData();
_last_ack_pkt_seq_num = pkt->last_ack_pkt_seq_number;
_last_ack_pkt_seq = pkt->last_ack_pkt_seq_number;
sendControlPacket(pkt, true);
TraceL << "send ack " << pkt->dump();
}
@ -534,6 +611,8 @@ void SrtTransport::handleDataPacket(uint8_t *buf, int len, struct sockaddr_stora
DataPacket::Ptr pkt = std::make_shared<DataPacket>();
pkt->loadFromData(buf, len);
_estimated_link_capacity_context->inputPacket(_now,pkt);
std::list<DataPacket::Ptr> list;
//TraceL<<" seq="<< pkt->packet_seq_number<<" ts="<<pkt->timestamp<<" size="<<pkt->payloadSize()<<\
//" PP="<<(int)pkt->PP<<" O="<<(int)pkt->O<<" kK="<<(int)pkt->KK<<" R="<<(int)pkt->R;
@ -563,7 +642,7 @@ void SrtTransport::handleDataPacket(uint8_t *buf, int len, struct sockaddr_stora
sendNAKPacket(lost);
}
*/
/*
auto nak_interval = (_rtt + _rtt_variance * 4) / 2;
if (nak_interval <= 20 * 1000) {
nak_interval = 20 * 1000;
@ -596,6 +675,7 @@ void SrtTransport::handleDataPacket(uint8_t *buf, int len, struct sockaddr_stora
_light_ack_pkt_count = 0;
}
_light_ack_pkt_count++;
*/
// bufCheckInterval();
}

View File

@ -53,10 +53,13 @@ protected:
virtual bool isPusher() { return true; };
virtual void onSRTData(DataPacket::Ptr pkt) {};
virtual void onShutdown(const SockException &ex);
virtual void onHandShakeFinished(std::string &streamid, struct sockaddr_storage *addr) {};
virtual void onHandShakeFinished(std::string &streamid, struct sockaddr_storage *addr) {
_is_handleshake_finished = true;
};
virtual void sendPacket(Buffer::Ptr pkt, bool flush = true);
virtual int getLatencyMul() { return 4; };
virtual int getPktBufSize() { return 8192; };
virtual float getTimeOutSec(){return 5.0;};
private:
void registerSelf();
@ -88,6 +91,10 @@ private:
size_t getPayloadSize();
void createTimerForCheckAlive();
void checkAndSendAckNak();
protected:
void sendDataPacket(DataPacket::Ptr pkt, char *buf, int len, bool flush = false);
void sendControlPacket(ControlPacket::Ptr pkt, bool flush = true);
@ -126,7 +133,8 @@ private:
uint32_t _rtt_variance = 50 * 1000;
uint32_t _light_ack_pkt_count = 0;
uint32_t _ack_number_count = 0;
uint32_t _last_ack_pkt_seq_num = 0;
uint32_t _last_ack_pkt_seq = 0;
uint32_t _last_recv_ackack_seq_num = 0;
uint32_t _last_pkt_seq = 0;
UTicker _ack_ticker;
@ -134,7 +142,7 @@ private:
std::shared_ptr<PacketRecvRateContext> _pkt_recv_rate_context;
std::shared_ptr<EstimatedLinkCapacityContext> _estimated_link_capacity_context;
std::shared_ptr<RecvRateContext> _recv_rate_context;
//std::shared_ptr<RecvRateContext> _recv_rate_context;
UTicker _nak_ticker;
@ -144,6 +152,13 @@ private:
Timer::Ptr _handleshake_timer;
ResourcePool<BufferRaw> _packet_pool;
//检测超时的定时器
Timer::Ptr _timer;
//刷新计时器
Ticker _alive_ticker;
bool _is_handleshake_finished = false;
};
class SrtTransportManager {

View File

@ -22,6 +22,7 @@ SrtTransportImp::~SrtTransportImp() {
}
void SrtTransportImp::onHandShakeFinished(std::string &streamid, struct sockaddr_storage *addr) {
SrtTransport::onHandShakeFinished(streamid,addr);
// TODO parse stream id like this zlmediakit.com/live/test?token=1213444&type=push
if (!_addr) {
_addr.reset(new sockaddr_storage(*((sockaddr_storage *)addr)));
@ -100,6 +101,7 @@ void SrtTransportImp::onSRTData(DataPacket::Ptr pkt) {
}
if (_decoder) {
_decoder->input(reinterpret_cast<const uint8_t *>(pkt->payloadData()), pkt->payloadSize());
//TraceL<<" size "<<pkt->payloadSize();
} else {
WarnP(this) << " not reach this";
}
@ -340,6 +342,15 @@ int SrtTransportImp::getLatencyMul() {
return latencyMul;
}
float SrtTransportImp::getTimeOutSec() {
GET_CONFIG(float, timeOutSec, kTimeOutSec);
if (timeOutSec <= 0) {
WarnL << "config srt " << kTimeOutSec << " not vaild";
return 5.0;
}
return timeOutSec;
}
int SrtTransportImp::getPktBufSize() {
// kPktBufSize
GET_CONFIG(int, pktBufSize, kPktBufSize);

View File

@ -37,6 +37,7 @@ protected:
///////SrtTransport override///////
int getLatencyMul() override;
int getPktBufSize() override;
float getTimeOutSec() override;
void onSRTData(DataPacket::Ptr pkt) override;
void onShutdown(const SockException &ex) override;
void onHandShakeFinished(std::string &streamid, struct sockaddr_storage *addr) override;

View File

@ -1,81 +1,211 @@
#include <algorithm>
#include <math.h>
#include "Statistic.hpp"
namespace SRT {
void PacketRecvRateContext::inputPacket(TimePoint &ts) {
if (_pkt_map.size() > 100) {
_pkt_map.erase(_pkt_map.begin());
PacketRecvRateContext::PacketRecvRateContext(TimePoint start)
: _last_arrive_time(start) {
for (size_t i = 0; i < SIZE; i++) {
_ts_arr[i] = 1000000;
_size_arr[i] = SRT_MAX_PAYLOAD_SIZE;
}
auto tmp = DurationCountMicroseconds(ts - _start);
_pkt_map.emplace(tmp, tmp);
_cur_idx = 0;
};
void PacketRecvRateContext::inputPacket(TimePoint &ts,size_t len) {
auto tmp = DurationCountMicroseconds(ts - _last_arrive_time);
_ts_arr[_cur_idx] = tmp;
_size_arr[_cur_idx] = len;
_cur_idx = (1+_cur_idx)%SIZE;
_last_arrive_time = ts;
}
uint32_t PacketRecvRateContext::getPacketRecvRate() {
if (_pkt_map.size() < 2) {
return 50000;
}
int64_t dur = 1000;
for (auto it = _pkt_map.begin(); it != _pkt_map.end(); ++it) {
auto next = it;
++next;
if (next == _pkt_map.end()) {
break;
}
uint32_t PacketRecvRateContext::getPacketRecvRate(uint32_t &bytesps) {
int64_t tmp_arry[SIZE];
std::copy(_ts_arr, _ts_arr + SIZE, tmp_arry);
std::nth_element(tmp_arry, tmp_arry + (SIZE / 2), tmp_arry + SIZE);
int64_t median = tmp_arry[SIZE / 2];
if ((next->first - it->first) < dur) {
dur = next->first - it->first;
unsigned count = 0;
int sum = 0;
int64_t upper = median << 3;
int64_t lower = median >> 3;
int64_t min = median;
int64_t min_size = 0;
bytesps = 0;
size_t bytes = 0;
const size_t *bp = _size_arr;
// median filtering
const int64_t *p = _ts_arr;
for (int i = 0, n = SIZE; i < n; ++i) {
if ((*p < upper) && (*p > lower)) {
++count; // packet counter
sum += *p; // usec counter
bytes += *bp; // byte counter
}
if(*p < min){
min = *p;
min_size = *bp;
}
++p; // advance packet pointer
++bp; // advance bytes pointer
}
double rate = 1e6 / (double)dur;
if (rate <= 1000) {
return 50000;
uint32_t max_ret = (uint32_t)ceil(1e6/min);
uint32_t max_byteps = (uint32_t)ceil(1e6*min_size/min);
if(count>(SIZE>>1)){
bytesps = (uint32_t)ceil(1000000.0 / (double(sum) / double(bytes)));
auto ret = (uint32_t)ceil(1000000.0 / (double(sum) / double(count)));
//bytesps = max_byteps;
return max_ret;
}else{
//TraceL<<max_ret<<" pkt/s "<<max_byteps<<" byte/s";
bytesps = 0;
return 0;
}
return rate;
bytesps = 0;
return 0;
// claculate speed, or return 0 if not enough valid value
}
void EstimatedLinkCapacityContext::inputPacket(TimePoint &ts) {
if (_pkt_map.size() > 16) {
_pkt_map.erase(_pkt_map.begin());
std::string PacketRecvRateContext::dump(){
_StrPrinter printer;
printer <<"dur array : ";
for (size_t i = 0; i < SIZE; i++)
{
printer<<_ts_arr[i]<<" ";
}
auto tmp = DurationCountMicroseconds(ts - _start);
_pkt_map.emplace(tmp, tmp);
printer <<"\r\n";
printer <<"size array : ";
for (size_t i = 0; i < SIZE; i++)
{
printer<<_size_arr[i]<<" ";
}
printer <<"\r\n";
return std::move(printer);
}
EstimatedLinkCapacityContext::EstimatedLinkCapacityContext(TimePoint start) : _start(start) {
for (size_t i = 0; i < SIZE; i++) {
_dur_probe_arr[i] = 1000;
}
_cur_idx = 0;
};
void EstimatedLinkCapacityContext::inputPacket(TimePoint &ts,DataPacket::Ptr& pkt) {
uint32_t seq = pkt->packet_seq_number;
auto diff = seqCmp(seq,_last_seq);
const bool retransmitted = pkt->R == 1;
const bool unordered = diff<=0;
uint32_t one = seq&0xf;
if(one == 0){
probe1Arrival(ts,pkt,unordered || retransmitted);
}
if(diff>0){
_last_seq = seq;
}
if(unordered || retransmitted){
return;
}
if(one == 1){
probe2Arrival(ts,pkt);
}
}
/// Record the arrival time of the first probing packet.
void EstimatedLinkCapacityContext::probe1Arrival(TimePoint &ts, const DataPacket::Ptr &pkt, bool unordered) {
if (unordered && pkt->packet_seq_number == _probe1_seq) {
// Reset the starting probe into "undefined", when
// a packet has come as retransmitted before the
// measurement at arrival of 17th could be taken.
_probe1_seq = SEQ_NONE;
return;
}
_ts_probe_time = ts;
_probe1_seq = pkt->packet_seq_number; // Record the sequence where 16th packet probe was taken
}
/// Record the arrival time of the second probing packet and the interval between packet pairs.
void EstimatedLinkCapacityContext::probe2Arrival(TimePoint &ts, const DataPacket::Ptr &pkt) {
// Reject probes that don't refer to the very next packet
// towards the one that was lately notified by probe1Arrival.
// Otherwise the result can be stupid.
// Simply, in case when this wasn't called exactly for the
// expected packet pair, behave as if the 17th packet was lost.
// no start point yet (or was reset) OR not very next packet
if (_probe1_seq == SEQ_NONE || incSeq(_probe1_seq) != pkt->packet_seq_number)
return;
// Reset the starting probe to prevent checking if the
// measurement was already taken.
_probe1_seq = SEQ_NONE;
// record the probing packets interval
// Adjust the time for what a complete packet would have take
const int64_t timediff = DurationCountMicroseconds(ts - _ts_probe_time);
const int64_t timediff_times_pl_size = timediff * SRT_MAX_PAYLOAD_SIZE;
// Let's take it simpler than it is coded here:
// (stating that a packet has never zero size)
//
// probe_case = (now - previous_packet_time) * SRT_MAX_PAYLOAD_SIZE / pktsz;
//
// Meaning: if the packet is fully packed, probe_case = timediff.
// Otherwise the timediff will be "converted" to a time that a fully packed packet "would take",
// provided the arrival time is proportional to the payload size and skipping
// the ETH+IP+UDP+SRT header part elliminates the constant packet delivery time influence.
//
const size_t pktsz = pkt->payloadSize();
_dur_probe_arr[_cur_idx] = pktsz ? int64_t(timediff_times_pl_size / pktsz) : int64_t(timediff);
// the window is logically circular
_cur_idx = (_cur_idx + 1) % SIZE;
}
uint32_t EstimatedLinkCapacityContext::getEstimatedLinkCapacity() {
decltype(_pkt_map.begin()) next;
std::vector<int64_t> tmp;
int64_t tmp[SIZE];
std::copy(_dur_probe_arr, _dur_probe_arr + SIZE , tmp);
std::nth_element(tmp, tmp + (SIZE / 2), tmp + SIZE);
int64_t median = tmp[SIZE / 2];
for (auto it = _pkt_map.begin(); it != _pkt_map.end(); ++it) {
next = it;
++next;
if (next != _pkt_map.end()) {
tmp.push_back(next->first - it->first);
} else {
break;
}
}
std::sort(tmp.begin(), tmp.end());
if (tmp.empty()) {
return 1000;
}
int64_t count = 1;
int64_t sum = median;
int64_t upper = median << 3; // median*8
int64_t lower = median >> 3; // median/8
if (tmp.size() < 16) {
return 1000;
}
// median filtering
const int64_t* p = _dur_probe_arr;
for (int i = 0, n = SIZE; i < n; ++ i)
{
if ((*p < upper) && (*p > lower))
{
++ count;
sum += *p;
}
++ p;
}
double dur = tmp[0] / 1e6;
return (uint32_t)(1.0 / dur);
return (uint32_t)ceil(1000000.0 / (double(sum) / double(count)));
}
/*
void RecvRateContext::inputPacket(TimePoint &ts, size_t size) {
if (_pkt_map.size() > 100) {
_pkt_map.erase(_pkt_map.begin());
}
auto tmp = DurationCountMicroseconds(ts - _start);
_pkt_map.emplace(tmp, tmp);
_pkt_map.emplace(tmp, size);
}
uint32_t RecvRateContext::getRecvRate() {
@ -94,5 +224,5 @@ uint32_t RecvRateContext::getRecvRate() {
double rate = (double)bytes / dur;
return (uint32_t)rate;
}
*/
} // namespace SRT

View File

@ -6,32 +6,46 @@
#include "Packet.hpp"
namespace SRT {
class PacketRecvRateContext {
public:
PacketRecvRateContext(TimePoint start)
: _start(start) {};
PacketRecvRateContext(TimePoint start);
~PacketRecvRateContext() = default;
void inputPacket(TimePoint &ts);
uint32_t getPacketRecvRate();
void inputPacket(TimePoint &ts,size_t len = 0);
uint32_t getPacketRecvRate(uint32_t& bytesps);
std::string dump();
static const int SIZE = 16;
private:
TimePoint _start;
std::map<int64_t, int64_t> _pkt_map;
TimePoint _last_arrive_time;
int64_t _ts_arr[SIZE];
size_t _size_arr[SIZE];
size_t _cur_idx;
//std::map<int64_t, int64_t> _pkt_map;
};
class EstimatedLinkCapacityContext {
public:
EstimatedLinkCapacityContext(TimePoint start) : _start(start) {};
EstimatedLinkCapacityContext(TimePoint start);
~EstimatedLinkCapacityContext() = default;
void inputPacket(TimePoint &ts);
void setLastSeq(uint32_t seq){
_last_seq = seq;
}
void inputPacket(TimePoint &ts,DataPacket::Ptr& pkt);
uint32_t getEstimatedLinkCapacity();
static const int SIZE = 64;
private:
void probe1Arrival(TimePoint &ts,const DataPacket::Ptr& pkt, bool unordered);
void probe2Arrival(TimePoint &ts,const DataPacket::Ptr& pkt);
private:
TimePoint _start;
std::map<int64_t, int64_t> _pkt_map;
TimePoint _ts_probe_time;
int64_t _dur_probe_arr[SIZE];
size_t _cur_idx;
uint32_t _last_seq = 0;
uint32_t _probe1_seq = SEQ_NONE;
//std::map<int64_t, int64_t> _pkt_map;
};
/*
class RecvRateContext {
public:
RecvRateContext(TimePoint start)
@ -44,6 +58,6 @@ private:
TimePoint _start;
std::map<int64_t, size_t> _pkt_map;
};
*/
} // namespace SRT
#endif // ZLMEDIAKIT_SRT_STATISTIC_H

16
webrtc_player/android/.gitignore vendored Normal file
View File

@ -0,0 +1,16 @@
*.iml
.gradle
/local.properties
/.idea/caches
/.idea/libraries
/.idea/modules.xml
/.idea/workspace.xml
/.idea/navEditor.xml
/.idea/assetWizardSettings.xml
.DS_Store
/build
/captures
.externalNativeBuild
.cxx
local.properties
/.idea/

Binary file not shown.

2
webrtc_player/android/app/.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
/build
.cxx

View File

@ -0,0 +1,54 @@
plugins {
id 'com.android.application'
id 'org.jetbrains.kotlin.android'
id 'kotlin-android-extensions'
id 'kotlin-kapt'
}
apply plugin: 'kotlin-android'
android {
compileSdk 32
defaultConfig {
applicationId "com.zlmediakit.webrtc"
minSdk 21
targetSdk 32
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
kotlinOptions {
jvmTarget = '1.8'
}
}
dependencies {
implementation 'androidx.core:core-ktx:1.7.0'
implementation 'androidx.appcompat:appcompat:1.5.1'
implementation 'com.google.android.material:material:1.6.1'
implementation 'androidx.constraintlayout:constraintlayout:2.1.4'
testImplementation 'junit:junit:4.13.2'
androidTestImplementation 'androidx.test.ext:junit:1.1.3'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0'
implementation 'com.google.code.gson:gson:2.8.9'
implementation("com.squareup.okhttp3:okhttp:4.10.0")
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
implementation 'org.webrtc:google-webrtc:1.0.32006'
}

View File

@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

View File

@ -0,0 +1,24 @@
package com.zlmediakit.webrtc
import androidx.test.platform.app.InstrumentationRegistry
import androidx.test.ext.junit.runners.AndroidJUnit4
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.Assert.*
/**
* Instrumented test, which will execute on an Android device.
*
* See [testing documentation](http://d.android.com/tools/testing).
*/
@RunWith(AndroidJUnit4::class)
class ExampleInstrumentedTest {
@Test
fun useAppContext() {
// Context of the app under test.
val appContext = InstrumentationRegistry.getInstrumentation().targetContext
assertEquals("com.zlmediakit.webrtc", appContext.packageName)
}
}

View File

@ -0,0 +1,46 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
package="com.zlmediakit.webrtc">
<uses-feature android:name="android.hardware.camera"/>
<uses-feature android:name="android.hardware.camera.autofocus"/>
<uses-feature
android:glEsVersion="0x00020000"
android:required="true"/>
<uses-permission android:name="android.permission.CAMERA"/>
<uses-permission android:name="android.permission.CHANGE_NETWORK_STATE"/>
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS"/>
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>
<uses-permission android:name="android.permission.CAPTURE_VIDEO_OUTPUT"/>
<uses-permission android:name="android.permission.READ_PHONE_STATE"/>
<application
android:allowBackup="true"
android:dataExtractionRules="@xml/data_extraction_rules"
android:fullBackupContent="@xml/backup_rules"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/Theme.AndroidWebRTC"
android:usesCleartextTraffic="true"
tools:targetApi="31">
<activity
android:name=".MainActivity"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

View File

@ -0,0 +1,79 @@
package com.zlmediakit.webrtc
import android.annotation.SuppressLint
import android.graphics.drawable.BitmapDrawable
import android.graphics.drawable.Drawable
import android.os.Bundle
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import kotlinx.android.synthetic.main.activity_main.*
import kotlinx.android.synthetic.main.activity_main.view.*
class MainActivity : AppCompatActivity() {
private var isSpeaker = true
@SuppressLint("SetTextI18n")
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
lifecycle.addObserver(web_rtc_sv)
//http://124.223.98.45/index/api/webrtc?app=live&stream=test&type=play
url.setText("http://124.223.98.45/index/api/webrtc?app=live&stream=test&type=play")
//http://192.168.1.17/index/api/webrtc?app=live&stream=test&type=play
btn_play.setOnClickListener {
web_rtc_sv?.setVideoPath(url.text.toString())
web_rtc_sv.start()
}
web_rtc_sv.setOnErrorListener { errorCode, errorMsg ->
runOnUiThread {
Toast.makeText(this, "errorCode:$errorCode,errorMsg:$errorMsg", Toast.LENGTH_SHORT)
.show()
}
}
btn_pause.setOnClickListener {
web_rtc_sv?.pause()
}
btn_resume.setOnClickListener {
web_rtc_sv?.resume()
}
btn_screenshot.setOnClickListener {
web_rtc_sv?.screenshot {
runOnUiThread {
iv_screen.setImageDrawable(BitmapDrawable(it))
}
}
}
btn_mute.setOnClickListener {
web_rtc_sv.mute(true)
}
selectAudio()
btn_speaker.setOnClickListener {
selectAudio()
}
}
fun selectAudio(){
if (isSpeaker){
btn_speaker.setText("扬声器")
web_rtc_sv.setSpeakerphoneOn(isSpeaker)
}else{
btn_speaker.setText("话筒")
web_rtc_sv.setSpeakerphoneOn(isSpeaker)
}
isSpeaker=!isSpeaker
}
}

View File

@ -0,0 +1,439 @@
package com.zlmediakit.webrtc
import android.content.Context
import android.graphics.Bitmap
import android.media.AudioManager
import android.util.AttributeSet
import android.util.Log
import android.view.LayoutInflater
import android.widget.RelativeLayout
import androidx.lifecycle.DefaultLifecycleObserver
import androidx.lifecycle.LifecycleOwner
import com.google.gson.Gson
import okhttp3.*
import okhttp3.MediaType.Companion.toMediaType
import okhttp3.MediaType.Companion.toMediaTypeOrNull
import org.webrtc.*
import org.webrtc.RendererCommon.ScalingType
import org.webrtc.audio.AudioDeviceModule
import org.webrtc.audio.JavaAudioDeviceModule
import java.io.IOException
import java.util.*
public class WebRTCSurfaceView(context: Context, attrs: AttributeSet?) :
RelativeLayout(context, attrs), DefaultLifecycleObserver, RendererCommon.RendererEvents {
private data class sdp(var sdp: String, var username: String, var password: String)
private data class SdpResponse(var code: Int, var id: String, var sdp: String, var type: String)
private enum class ErrorCode(val errorCode: Int) {
SUCCESS(0x00),
GET_REMOTE_SDP_ERROR(0x01);
}
companion object {
private val TAG = "WebRTCSurfaceView"
}
private var mContext: Context = context
private val eglBase: EglBase = EglBase.create()
private var mEGLBaseContext: EglBase.Context = eglBase.eglBaseContext
private lateinit var videoUrl: String;
private var mPeerConnectionFactory: PeerConnectionFactory? = null
private var mLocalMediaStream: MediaStream? = null
private var mLocalAudioTrack: AudioTrack? = null
private var mAudioSource: AudioSource? = null
private var mLocalSessionDescription: SessionDescription? = null
private var mRemoteSessionDescription: SessionDescription? = null
private var mLocalPeer: Peer? = null
private var mSurfaceViewRenderer: SurfaceViewRenderer
private lateinit var OnErrorListener: (errorCode: Int, errorMsg: String) -> Unit?
fun setOnErrorListener(listener: (errorCode: Int, errorMsg: String) -> Unit) {
this.OnErrorListener = listener
}
private lateinit var OnPreparedListener: () -> Unit?
fun setOnPreparedListener(listener: () -> Unit) {
this.OnPreparedListener = listener
}
private val audioManager: AudioManager
init {
val view = LayoutInflater.from(mContext).inflate(R.layout.layout_videoview, this)
mPeerConnectionFactory = createConnectionFactory()
mSurfaceViewRenderer = view.findViewById(R.id.surface_view_renderer)
mSurfaceViewRenderer.init(mEGLBaseContext, this)
mSurfaceViewRenderer.setScalingType(ScalingType.SCALE_ASPECT_FILL)
mSurfaceViewRenderer.setEnableHardwareScaler(true)
//创建媒体流
mLocalMediaStream = mPeerConnectionFactory?.createLocalMediaStream("ARDAMS")
//采集音频
mAudioSource = mPeerConnectionFactory?.createAudioSource(createAudioConstraints())
mLocalAudioTrack = mPeerConnectionFactory?.createAudioTrack("ARDAMSa0", mAudioSource)
//添加Tracks
mLocalMediaStream?.addTrack(mLocalAudioTrack)
audioManager = context.getSystemService(Context.AUDIO_SERVICE) as AudioManager
audioManager.isSpeakerphoneOn = false
}
private fun set(width: Int, height: Int) {
layoutParams.width = width
layoutParams.height = height
}
private fun createConnectionFactory(): PeerConnectionFactory? {
val options = PeerConnectionFactory.InitializationOptions.builder(mContext)
.setEnableInternalTracer(false)
.createInitializationOptions()
PeerConnectionFactory.initialize(options)
val videoEncoderFactory = DefaultVideoEncoderFactory(
mEGLBaseContext,
true,
true
)
val videoDecoderFactory = DefaultVideoDecoderFactory(mEGLBaseContext)
val audioDevice = createJavaAudioDevice()
val peerConnectionFactory = PeerConnectionFactory.builder()
.setAudioDeviceModule(audioDevice)
.setVideoEncoderFactory(videoEncoderFactory)
.setVideoDecoderFactory(videoDecoderFactory)
.createPeerConnectionFactory()
audioDevice.release()
return peerConnectionFactory
}
private fun createAudioConstraints(): MediaConstraints {
val audioConstraints = MediaConstraints()
audioConstraints.mandatory.add(
MediaConstraints.KeyValuePair(
"googEchoCancellation",
"true"
)
)
audioConstraints.mandatory.add(
MediaConstraints.KeyValuePair(
"googAutoGainControl",
"false"
)
)
audioConstraints.mandatory.add(
MediaConstraints.KeyValuePair(
"googHighpassFilter",
"true"
)
)
audioConstraints.mandatory.add(
MediaConstraints.KeyValuePair(
"googNoiseSuppression",
"true"
)
)
return audioConstraints
}
private fun offerOrAnswerConstraint(): MediaConstraints {
val mediaConstraints = MediaConstraints()
val keyValuePairs = java.util.ArrayList<MediaConstraints.KeyValuePair>()
keyValuePairs.add(MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"))
keyValuePairs.add(MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"))
mediaConstraints.mandatory.addAll(keyValuePairs)
return mediaConstraints
}
private fun createJavaAudioDevice(): AudioDeviceModule {
val audioTrackErrorCallback: JavaAudioDeviceModule.AudioTrackErrorCallback = object :
JavaAudioDeviceModule.AudioTrackErrorCallback {
override fun onWebRtcAudioTrackInitError(errorMessage: String) {
Log.i(TAG, "onWebRtcAudioTrackInitError ============> $errorMessage")
}
override fun onWebRtcAudioTrackStartError(
errorCode: JavaAudioDeviceModule.AudioTrackStartErrorCode, errorMessage: String
) {
Log.i(TAG, "onWebRtcAudioTrackStartError ============> $errorCode:$errorMessage")
}
override fun onWebRtcAudioTrackError(errorMessage: String) {
Log.i(TAG, "onWebRtcAudioTrackError ============> $errorMessage")
}
}
// Set audio track state callbacks.
val audioTrackStateCallback: JavaAudioDeviceModule.AudioTrackStateCallback = object :
JavaAudioDeviceModule.AudioTrackStateCallback {
override fun onWebRtcAudioTrackStart() {
Log.i(TAG, "onWebRtcAudioTrackStart ============>")
}
override fun onWebRtcAudioTrackStop() {
Log.i(TAG, "onWebRtcAudioTrackStop ============>")
}
}
return JavaAudioDeviceModule.builder(mContext)
.setUseHardwareAcousticEchoCanceler(true)
.setUseHardwareNoiseSuppressor(true)
.setAudioTrackErrorCallback(audioTrackErrorCallback)
.setAudioTrackStateCallback(audioTrackStateCallback)
.setUseStereoOutput(true) //立体声
.createAudioDeviceModule()
}
fun setVideoPath(url: String) {
videoUrl = url
}
fun start() {
mLocalPeer = Peer {
val okHttpClient = OkHttpClient.Builder().build()
val body = RequestBody.create("text/plain; charset=utf-8".toMediaType(), it!!)
val request: Request = Request.Builder()
.url(videoUrl)
.post(body)
.build()
val call: Call = okHttpClient.newCall(request)
call.enqueue(object : Callback {
override fun onFailure(call: Call, e: IOException) {
Log.i(TAG, "onFailure")
OnErrorListener?.invoke(
ErrorCode.GET_REMOTE_SDP_ERROR.errorCode,
e.message.toString()
)
}
override fun onResponse(call: Call, response: Response) {
val body = response.body?.string()
val sdpResponse = Gson().fromJson(body, SdpResponse::class.java)
try {
mRemoteSessionDescription = SessionDescription(
SessionDescription.Type.fromCanonicalForm("answer"),
sdpResponse.sdp
)
Log.i(
TAG,
"RemoteSdpObserver onCreateSuccess:[SessionDescription[type=${mRemoteSessionDescription?.type?.name},description=${mRemoteSessionDescription?.description}]]"
)
mLocalPeer?.setRemoteDescription(mRemoteSessionDescription!!)
} catch (e: Exception) {
Log.i(TAG, e.toString())
OnErrorListener.invoke(
ErrorCode.GET_REMOTE_SDP_ERROR.errorCode,
e.localizedMessage
)
}
}
})
}
}
fun pause() {
mSurfaceViewRenderer.pauseVideo()
//mSurfaceViewRenderer.disableFpsReduction()
}
fun resume() {
mSurfaceViewRenderer.setFpsReduction(15f)
}
fun screenshot(listener: (bitmap: Bitmap) -> Unit) {
mSurfaceViewRenderer.addFrameListener({
listener.invoke(it)
}, 1f)
}
fun setSpeakerphoneOn(on: Boolean) {
audioManager.isSpeakerphoneOn = on
}
fun mute(on:Boolean) {
audioManager.isMicrophoneMute=on
}
override fun onDestroy(owner: LifecycleOwner) {
super.onDestroy(owner)
mSurfaceViewRenderer.release()
mLocalPeer?.mPeerConnection?.dispose()
mAudioSource?.dispose()
mPeerConnectionFactory?.dispose()
}
override fun onMeasure(widthMeasureSpec: Int, heightMeasureSpec: Int) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec)
}
inner class Peer(var sdp: (String?) -> Unit = {}) : PeerConnection.Observer, SdpObserver {
var mPeerConnection: PeerConnection? = null
init {
mPeerConnection = createPeerConnection()
mPeerConnection?.createOffer(this, offerOrAnswerConstraint())
}
//初始化 RTCPeerConnection 连接管道
private fun createPeerConnection(): PeerConnection? {
if (mPeerConnectionFactory == null) {
mPeerConnectionFactory = createConnectionFactory()
}
// 管道连接抽象类实现方法
val ICEServers = LinkedList<PeerConnection.IceServer>()
val rtcConfig = PeerConnection.RTCConfiguration(ICEServers)
//修改模式 PlanB无法使用仅接收音视频的配置
//rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.PLAN_B
return mPeerConnectionFactory?.createPeerConnection(rtcConfig, this)
}
fun setRemoteDescription(sdp: SessionDescription) {
mPeerConnection?.setRemoteDescription(this, sdp)
}
override fun onCreateSuccess(sessionDescription: SessionDescription?) {
mPeerConnection?.setLocalDescription(this, sessionDescription)
mPeerConnection?.addStream(mLocalMediaStream)
sdp.invoke(sessionDescription?.description)
}
override fun onSetSuccess() {
}
override fun onCreateFailure(p0: String?) {
}
override fun onSetFailure(p0: String?) {
}
override fun onSignalingChange(signalingState: PeerConnection.SignalingState?) {
Log.i(TAG, "onSignalingChange ============> " + signalingState.toString())
}
override fun onIceConnectionChange(iceConnectionState: PeerConnection.IceConnectionState?) {
Log.i(TAG, "onIceConnectionChange ============> " + iceConnectionState.toString())
}
override fun onIceConnectionReceivingChange(p0: Boolean) {
Log.i(TAG, "onIceConnectionReceivingChange ============> $p0")
}
override fun onIceGatheringChange(iceGatheringState: PeerConnection.IceGatheringState?) {
Log.i(TAG, "onIceGatheringChange ============> ${iceGatheringState.toString()}")
}
override fun onIceCandidate(iceCandidate: IceCandidate?) {
Log.i(TAG, "onIceCandidate ============> ${iceCandidate.toString()}")
}
override fun onIceCandidatesRemoved(p0: Array<out IceCandidate>?) {
Log.i(TAG, "onIceCandidatesRemoved ============> ${p0.toString()}")
}
override fun onAddStream(mediaStream: MediaStream?) {
Log.i(TAG, "onAddStream ============> ${mediaStream?.toString()}")
if (mediaStream?.videoTracks?.isEmpty() != true) {
val remoteVideoTrack = mediaStream?.videoTracks?.get(0)
remoteVideoTrack?.setEnabled(true)
remoteVideoTrack?.addSink(mSurfaceViewRenderer)
}
if (mediaStream?.audioTracks?.isEmpty() != true) {
val remoteAudioTrack = mediaStream?.audioTracks?.get(0)
remoteAudioTrack?.setEnabled(true)
remoteAudioTrack?.setVolume(1.0)
}
}
override fun onRemoveStream(mediaStream: MediaStream?) {
Log.i(TAG, "onRemoveStream ============> ${mediaStream.toString()}")
}
override fun onDataChannel(dataChannel: DataChannel?) {
Log.i(TAG, "onDataChannel ============> ${dataChannel.toString()}")
}
override fun onRenegotiationNeeded() {
Log.i(TAG, "onRenegotiationNeeded ============>")
}
override fun onAddTrack(rtpReceiver: RtpReceiver?, p1: Array<out MediaStream>?) {
Log.i(TAG, "onAddTrack ============>" + rtpReceiver?.track())
Log.i(TAG, "onAddTrack ============>" + p1?.size)
}
}
override fun onFirstFrameRendered() {
Log.i(TAG, "onFirstFrameRendered ============>")
}
override fun onFrameResolutionChanged(frameWidth: Int, frameHeight: Int, rotation: Int) {
Log.i(TAG, "onFrameResolutionChanged ============> $frameWidth:$frameHeight:$rotation")
//set(frameWidth,frameHeight)
}
}

View File

@ -0,0 +1,30 @@
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
<aapt:attr name="android:fillColor">
<gradient
android:endX="85.84757"
android:endY="92.4963"
android:startX="42.9492"
android:startY="49.59793"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>

View File

@ -0,0 +1,170 @@
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillColor="#3DDC84"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
</vector>

View File

@ -0,0 +1,93 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<com.zlmediakit.webrtc.WebRTCSurfaceView
android:id="@+id/web_rtc_sv"
android:layout_width="match_parent"
android:layout_height="200dp"
app:layout_constraintTop_toTopOf="parent" />
<androidx.appcompat.widget.AppCompatEditText
android:id="@+id/url"
android:layout_width="match_parent"
android:layout_height="wrap_content"
app:layout_constraintTop_toBottomOf="@+id/web_rtc_sv"
android:text=""/>
<LinearLayout
android:id="@+id/ll"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:layout_marginTop="30dp"
app:layout_constraintTop_toBottomOf="@+id/url">
<androidx.appcompat.widget.AppCompatButton
android:id="@+id/btn_play"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="播放" />
<androidx.appcompat.widget.AppCompatButton
android:id="@+id/btn_pause"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="暂停" />
<androidx.appcompat.widget.AppCompatButton
android:id="@+id/btn_resume"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="恢复" />
<androidx.appcompat.widget.AppCompatButton
android:id="@+id/btn_speaker"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="扬声器" />
<androidx.appcompat.widget.AppCompatButton
android:id="@+id/btn_mute"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="静音" />
</LinearLayout>
<LinearLayout
android:id="@+id/ll2"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:layout_marginTop="10dp"
app:layout_constraintTop_toBottomOf="@+id/ll">
<androidx.appcompat.widget.AppCompatButton
android:id="@+id/btn_screenshot"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="截图" />
<androidx.appcompat.widget.AppCompatButton
android:id="@+id/btn_screen_record"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="录制" />
</LinearLayout>
<androidx.appcompat.widget.AppCompatImageView
android:id="@+id/iv_screen"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
app:layout_constraintBottom_toBottomOf="parent"
tools:ignore="MissingConstraints" />
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@ -0,0 +1,13 @@
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
xmlns:app="http://schemas.android.com/apk/res-auto">
<org.webrtc.SurfaceViewRenderer
android:id="@+id/surface_view_renderer"
android:layout_width="wrap_content"
android:layout_height="wrap_content"/>
</RelativeLayout>

View File

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

View File

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 982 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.6 KiB

View File

@ -0,0 +1,16 @@
<resources xmlns:tools="http://schemas.android.com/tools">
<!-- Base application theme. -->
<style name="Theme.AndroidWebRTC" parent="Theme.MaterialComponents.DayNight.DarkActionBar">
<!-- Primary brand color. -->
<item name="colorPrimary">@color/purple_200</item>
<item name="colorPrimaryVariant">@color/purple_700</item>
<item name="colorOnPrimary">@color/black</item>
<!-- Secondary brand color. -->
<item name="colorSecondary">@color/teal_200</item>
<item name="colorSecondaryVariant">@color/teal_200</item>
<item name="colorOnSecondary">@color/black</item>
<!-- Status bar color. -->
<item name="android:statusBarColor" tools:targetApi="l">?attr/colorPrimaryVariant</item>
<!-- Customize your theme here. -->
</style>
</resources>

View File

@ -0,0 +1,10 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="purple_200">#FFBB86FC</color>
<color name="purple_500">#FF6200EE</color>
<color name="purple_700">#FF3700B3</color>
<color name="teal_200">#FF03DAC5</color>
<color name="teal_700">#FF018786</color>
<color name="black">#FF000000</color>
<color name="white">#FFFFFFFF</color>
</resources>

View File

@ -0,0 +1,3 @@
<resources>
<string name="app_name">AndroidWebRTC</string>
</resources>

View File

@ -0,0 +1,16 @@
<resources xmlns:tools="http://schemas.android.com/tools">
<!-- Base application theme. -->
<style name="Theme.AndroidWebRTC" parent="Theme.MaterialComponents.DayNight.DarkActionBar">
<!-- Primary brand color. -->
<item name="colorPrimary">@color/purple_500</item>
<item name="colorPrimaryVariant">@color/purple_700</item>
<item name="colorOnPrimary">@color/white</item>
<!-- Secondary brand color. -->
<item name="colorSecondary">@color/teal_200</item>
<item name="colorSecondaryVariant">@color/teal_700</item>
<item name="colorOnSecondary">@color/black</item>
<!-- Status bar color. -->
<item name="android:statusBarColor" tools:targetApi="l">?attr/colorPrimaryVariant</item>
<!-- Customize your theme here. -->
</style>
</resources>

View File

@ -0,0 +1,13 @@
<?xml version="1.0" encoding="utf-8"?><!--
Sample backup rules file; uncomment and customize as necessary.
See https://developer.android.com/guide/topics/data/autobackup
for details.
Note: This file is ignored for devices older that API 31
See https://developer.android.com/about/versions/12/backup-restore
-->
<full-backup-content>
<!--
<include domain="sharedpref" path="."/>
<exclude domain="sharedpref" path="device.xml"/>
-->
</full-backup-content>

View File

@ -0,0 +1,19 @@
<?xml version="1.0" encoding="utf-8"?><!--
Sample data extraction rules file; uncomment and customize as necessary.
See https://developer.android.com/about/versions/12/backup-restore#xml-changes
for details.
-->
<data-extraction-rules>
<cloud-backup>
<!-- TODO: Use <include> and <exclude> to control what is backed up.
<include .../>
<exclude .../>
-->
</cloud-backup>
<!--
<device-transfer>
<include .../>
<exclude .../>
</device-transfer>
-->
</data-extraction-rules>

View File

@ -0,0 +1,17 @@
package com.zlmediakit.webrtc
import org.junit.Test
import org.junit.Assert.*
/**
* Example local unit test, which will execute on the development machine (host).
*
* See [testing documentation](http://d.android.com/tools/testing).
*/
class ExampleUnitTest {
@Test
fun addition_isCorrect() {
assertEquals(4, 2 + 2)
}
}

View File

@ -0,0 +1,19 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
ext.kotlin_version = '1.7.10'
repositories {
mavenCentral()
}
dependencies {
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
}
}
plugins {
id 'com.android.application' version '7.2.1' apply false
id 'com.android.library' version '7.2.1' apply false
id 'org.jetbrains.kotlin.android' version '1.7.10' apply false
}
task clean(type: Delete) {
delete rootProject.buildDir
}

View File

@ -0,0 +1,23 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Kotlin code style for this project: "official" or "obsolete":
kotlin.code.style=official
# Enables namespacing of each library's R class so that its R class includes only the
# resources declared in the library itself and none from the library's dependencies,
# thereby reducing the size of the R class for that library
android.nonTransitiveRClass=true

Binary file not shown.

View File

@ -0,0 +1,6 @@
#Mon Sep 19 22:08:36 CST 2022
distributionBase=GRADLE_USER_HOME
distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.3-bin.zip
distributionPath=wrapper/dists
zipStorePath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME

185
webrtc_player/android/gradlew vendored Normal file
View File

@ -0,0 +1,185 @@
#!/usr/bin/env sh
#
# Copyright 2015 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# NOTE(review): this is a vendored, Gradle-generated wrapper script; prefer
# regenerating it via `gradle wrapper` over editing it by hand.
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
# warn: print a message without aborting the script.
warn () {
echo "$*"
}
# die: print a message and terminate with a non-zero exit status.
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
# The wrapper jar bootstraps the Gradle distribution named in gradle-wrapper.properties.
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin or MSYS, switch paths to Windows format before running java
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=`expr $i + 1`
done
case $i in
0) set -- ;;
1) set -- "$args0" ;;
2) set -- "$args0" "$args1" ;;
3) set -- "$args0" "$args1" "$args2" ;;
4) set -- "$args0" "$args1" "$args2" "$args3" ;;
5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
# save: single-quote each argument so the later `eval set --` re-splits them safely.
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=`save "$@"`
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
exec "$JAVACMD" "$@"

89
webrtc_player/android/gradlew.bat vendored Normal file
View File

@ -0,0 +1,89 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem NOTE(review): vendored, Gradle-generated wrapper script; regenerate with
@rem `gradle wrapper` rather than editing by hand.
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
@rem Strip any embedded quotes from JAVA_HOME before building the java.exe path.
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

Binary file not shown.

After

Width:  |  Height:  |  Size: 635 KiB

View File

@ -0,0 +1,24 @@
// Repositories used to resolve Gradle plugins themselves (settings-level).
pluginManagement {
repositories {
gradlePluginPortal()
google()
mavenCentral()
jcenter() // Warning: this repository is going to shut down soon
maven { url 'https://jitpack.io' }
}
}
// Repositories for project dependencies; FAIL_ON_PROJECT_REPOS rejects any
// per-module repositories {} block so resolution stays centralized here.
dependencyResolutionManagement {
repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS)
repositories {
google()
mavenCentral()
jcenter() // Warning: this repository is going to shut down soon
maven { url 'https://jitpack.io' }
}
}
rootProject.name = "android"
// Modules included in this build.
include ':app'

View File

@ -46,14 +46,14 @@
</p>
<p>
<label for="methond">methond(play or push or echo):</label>
<input type="radio" name="methond" value="echo" >echo
<input type="radio" name="methond" value="push" >push
<input type="radio" name="methond" value="play" checked = true>play
<label for="method">method(play or push or echo):</label>
<input type="radio" name="method" value="echo" >echo
<input type="radio" name="method" value="push" >push
<input type="radio" name="method" value="play" checked = true>play
</p>
<p>
<label for="resilution">resolution:</label>
<select id="resilution">
<label for="resolution">resolution:</label>
<select id="resolution">
</select>
</p>
<p>
@ -98,7 +98,7 @@
url = "http://127.0.0.1"+"/index/api/webrtc?app=live&stream=test&type=play"
}
document.getElementById('streamUrl').value = url
document.getElementsByName("methond").forEach((el,idx)=>{
document.getElementsByName("method").forEach((el,idx)=>{
el.onclick=function(e){
let url = new URL(document.getElementById('streamUrl').value);
url.searchParams.set("type",el.value)
@ -118,14 +118,14 @@
opt = document.createElement('option');
opt.text = r.label +"("+r.width+"x"+r.height+")";
opt.value = r;
document.getElementById("resilution").add(opt,null)
document.getElementById("resolution").add(opt,null)
//console.log(opt.text.match(/\d+/g))
})
function start_play(){
let elr = document.getElementById("resilution");
let elr = document.getElementById("resolution");
let res = elr.options[elr.selectedIndex].text.match(/\d+/g);
let h = parseInt(res.pop());
let w = parseInt(res.pop());
@ -204,7 +204,7 @@
function start()
{
stop();
let elr = document.getElementById("resilution");
let elr = document.getElementById("resolution");
let res = elr.options[elr.selectedIndex].text.match(/\d+/g);
let h = parseInt(res.pop());
let w = parseInt(res.pop());