Merge pull request #29 from ZLMediaKit/master

[pull] master from ZLMediaKit:master
This commit is contained in:
waken 2024-05-23 15:57:02 +08:00 committed by GitHub
commit 47a4543283
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
80 changed files with 2364 additions and 1068 deletions

@ -1 +1 @@
Subproject commit 04d1c47d2568f5ce1ff84260cefaf2754e514a5e
Subproject commit 26d54bbc7b1860a450434dce49bbc8fcbcbae88b

@ -1 +1 @@
Subproject commit 8bc32a516b279414f749d0dead8bdc2837d3c527
Subproject commit 527c0f5117b489fda78fcd123d446370ddd9ec9a

View File

@ -44,7 +44,6 @@ Xinghua Zhao <(holychaossword@hotmail.com>
[Dw9](https://github.com/Dw9)
明月惊鹊 <mingyuejingque@gmail.com>
cgm <2958580318@qq.com>
hejilin <1724010622@qq.com>
alexliyu7352 <liyu7352@gmail.com>
cgm <2958580318@qq.com>
[haorui wang](https://github.com/HaoruiWang)
@ -104,3 +103,8 @@ WuPeng <wp@zafu.edu.cn>
[sandro-qiang](https://github.com/sandro-qiang)
[Paul Philippov](https://github.com/themactep)
[张传峰](https://github.com/zhang-chuanfeng)
[lidaofu-hub](https://github.com/lidaofu-hub)
[huangcaichun](https://github.com/huangcaichun)
[jamesZHANG500](https://github.com/jamesZHANG500)
[weidelong](https://github.com/wdl1697454803)
[小强先生](https://github.com/linshangqiang)

View File

@ -57,7 +57,8 @@ option(ENABLE_SCTP "Enable SCTP" ON)
option(ENABLE_WEBRTC "Enable WebRTC" ON)
option(ENABLE_X264 "Enable x264" OFF)
option(ENABLE_WEPOLL "Enable wepoll" ON)
option(DISABLE_REPORT "Disable report to report.zlmediakit.com" off)
option(ENABLE_VIDEOSTACK "Enable video stack" OFF)
option(DISABLE_REPORT "Disable report to report.zlmediakit.com" OFF)
option(USE_SOLUTION_FOLDERS "Enable solution dir supported" ON)
##############################################################################
# 默认socket缓存大小为256k,如果设置为0则使用系统内核默认值(设置为0仅对linux有效)
@ -141,8 +142,8 @@ if(GIT_FOUND)
endif()
configure_file(
${CMAKE_CURRENT_SOURCE_DIR}/version.h.ini
${CMAKE_CURRENT_BINARY_DIR}/version.h
${CMAKE_CURRENT_SOURCE_DIR}/ZLMVersion.h.ini
${CMAKE_CURRENT_BINARY_DIR}/ZLMVersion.h
@ONLY)
message(STATUS "Git version is ${BRANCH_NAME} ${COMMIT_HASH}/${COMMIT_TIME} ${BUILD_TIME}")
@ -191,7 +192,7 @@ if(UNIX)
set(COMPILE_OPTIONS_DEFAULT
"-fPIC"
"-Wall;-Wextra"
"-Wno-unused-function;-Wno-unused-parameter;-Wno-unused-variable"
"-Wno-unused-function;-Wno-unused-parameter;-Wno-unused-variable;-Wno-deprecated-declarations"
"-Wno-error=extra;-Wno-error=missing-field-initializers;-Wno-error=type-limits")
if("${CMAKE_BUILD_TYPE}" STREQUAL "Debug")
@ -203,7 +204,7 @@ elseif(WIN32)
if (MSVC)
set(COMPILE_OPTIONS_DEFAULT
# TODO: /wd4819
"/wd4566;/wd4819"
"/wd4566;/wd4819;/utf-8"
# warning C4530: C++ exception handler used, but unwind semantics are not enabled.
"/EHsc")
# disable Windows logo
@ -402,6 +403,8 @@ if(OPENSSL_FOUND AND ENABLE_OPENSSL)
update_cached_list(MK_LINK_LIBRARIES ${OPENSSL_LIBRARIES})
if(CMAKE_SYSTEM_NAME MATCHES "Linux" AND OPENSSL_USE_STATIC_LIBS)
update_cached_list(MK_LINK_LIBRARIES ${CMAKE_DL_LIBS})
elseif(CMAKE_SYSTEM_NAME MATCHES "Windows" AND OPENSSL_USE_STATIC_LIBS)
update_cached_list(MK_LINK_LIBRARIES Crypt32)
endif()
else()
set(ENABLE_OPENSSL OFF)
@ -532,3 +535,9 @@ endif ()
file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/www" DESTINATION ${EXECUTABLE_OUTPUT_PATH})
file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/conf/config.ini" DESTINATION ${EXECUTABLE_OUTPUT_PATH})
file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/default.pem" DESTINATION ${EXECUTABLE_OUTPUT_PATH})
# VideoStack
# Copy the default background image used by VideoStack when there is no video stream
if (ENABLE_VIDEOSTACK AND ENABLE_FFMPEG AND ENABLE_X264)
file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/conf/novideo.yuv" DESTINATION ${EXECUTABLE_OUTPUT_PATH})
endif ()

View File

@ -222,7 +222,7 @@ bash build_docker_images.sh
- 请关注微信公众号获取最新消息推送:
<img src=https://user-images.githubusercontent.com/11495632/232451702-4c50bc72-84d8-4c94-af2b-57290088ba7a.png width=15% />
- 也可以自愿有偿加入知识星球咨询和获取资料
- 也可以自愿有偿加入知识星球咨询、获取资料以及加入微信技术群
<img src= https://user-images.githubusercontent.com/11495632/231946329-aa8517b0-3cf5-49cf-8c75-a93ed58cb9d2.png width=30% />
@ -358,6 +358,11 @@ bash build_docker_images.sh
[sandro-qiang](https://github.com/sandro-qiang)
[Paul Philippov](https://github.com/themactep)
[张传峰](https://github.com/zhang-chuanfeng)
[lidaofu-hub](https://github.com/lidaofu-hub)
[huangcaichun](https://github.com/huangcaichun)
[jamesZHANG500](https://github.com/jamesZHANG500)
[weidelong](https://github.com/wdl1697454803)
[小强先生](https://github.com/linshangqiang)
同时感谢JetBrains对开源项目的支持本项目使用CLion开发与调试

View File

@ -516,6 +516,11 @@ Thanks to all those who have supported this project in various ways, including b
[sandro-qiang](https://github.com/sandro-qiang)
[Paul Philippov](https://github.com/themactep)
[张传峰](https://github.com/zhang-chuanfeng)
[lidaofu-hub](https://github.com/lidaofu-hub)
[huangcaichun](https://github.com/huangcaichun)
[jamesZHANG500](https://github.com/jamesZHANG500)
[weidelong](https://github.com/wdl1697454803)
[小强先生](https://github.com/linshangqiang)
Also thank to JetBrains for their support for open source project, we developed and debugged zlmediakit with CLion:

View File

@ -24,6 +24,10 @@
# define API_CALL
#endif
#ifndef _WIN32
#define _strdup strdup
#endif
#if defined(_WIN32) && defined(_MSC_VER)
# if !defined(GENERATE_EXPORT)
# if defined(MediaKitApi_EXPORTS)

View File

@ -132,7 +132,12 @@ typedef struct {
/**
* mp4分片文件成功后广播
*/
void (API_CALL *on_mk_record_mp4)(const mk_mp4_info mp4);
void (API_CALL *on_mk_record_mp4)(const mk_record_info mp4);
/**
* ts分片文件成功后广播
*/
void (API_CALL *on_mk_record_ts)(const mk_record_info ts);
/**
* shell登录鉴权
@ -175,16 +180,16 @@ typedef struct {
* @param err
* @param msg
*/
void(API_CALL *on_mk_media_send_rtp_stop)(const char *vhost, const char *app, const char *stream, const char *ssrc, int err, const char *msg);
void (API_CALL *on_mk_media_send_rtp_stop)(const char *vhost, const char *app, const char *stream, const char *ssrc, int err, const char *msg);
/**
* rtc sctp连接中///
* @param rtc_transport
*/
void(API_CALL *on_mk_rtc_sctp_connecting)(mk_rtc_transport rtc_transport);
void(API_CALL *on_mk_rtc_sctp_connected)(mk_rtc_transport rtc_transport);
void(API_CALL *on_mk_rtc_sctp_failed)(mk_rtc_transport rtc_transport);
void(API_CALL *on_mk_rtc_sctp_closed)(mk_rtc_transport rtc_transport);
void (API_CALL *on_mk_rtc_sctp_connecting)(mk_rtc_transport rtc_transport);
void (API_CALL *on_mk_rtc_sctp_connected)(mk_rtc_transport rtc_transport);
void (API_CALL *on_mk_rtc_sctp_failed)(mk_rtc_transport rtc_transport);
void (API_CALL *on_mk_rtc_sctp_closed)(mk_rtc_transport rtc_transport);
/**
* rtc数据通道发送数据回调
@ -192,7 +197,7 @@ typedef struct {
* @param msg
* @param len
*/
void(API_CALL *on_mk_rtc_sctp_send)(mk_rtc_transport rtc_transport, const uint8_t *msg, size_t len);
void (API_CALL *on_mk_rtc_sctp_send)(mk_rtc_transport rtc_transport, const uint8_t *msg, size_t len);
/**
* rtc数据通道接收数据回调
@ -202,7 +207,7 @@ typedef struct {
* @param msg
* @param len
*/
void(API_CALL *on_mk_rtc_sctp_received)(mk_rtc_transport rtc_transport, uint16_t streamId, uint32_t ppid, const uint8_t *msg, size_t len);
void (API_CALL *on_mk_rtc_sctp_received)(mk_rtc_transport rtc_transport, uint16_t streamId, uint32_t ppid, const uint8_t *msg, size_t len);
} mk_events;

View File

@ -18,29 +18,42 @@
extern "C" {
#endif
///////////////////////////////////////////MP4Info/////////////////////////////////////////////
//MP4Info对象的C映射
typedef struct mk_mp4_info_t *mk_mp4_info;
///////////////////////////////////////////RecordInfo/////////////////////////////////////////////
//RecordInfo对象的C映射
typedef struct mk_record_info_t *mk_record_info;
// GMT 标准时间,单位秒
API_EXPORT uint64_t API_CALL mk_mp4_info_get_start_time(const mk_mp4_info ctx);
API_EXPORT uint64_t API_CALL mk_record_info_get_start_time(const mk_record_info ctx);
// 录像长度,单位秒
API_EXPORT float API_CALL mk_mp4_info_get_time_len(const mk_mp4_info ctx);
API_EXPORT float API_CALL mk_record_info_get_time_len(const mk_record_info ctx);
// 文件大小,单位 BYTE
API_EXPORT size_t API_CALL mk_mp4_info_get_file_size(const mk_mp4_info ctx);
API_EXPORT size_t API_CALL mk_record_info_get_file_size(const mk_record_info ctx);
// 文件路径
API_EXPORT const char* API_CALL mk_mp4_info_get_file_path(const mk_mp4_info ctx);
API_EXPORT const char *API_CALL mk_record_info_get_file_path(const mk_record_info ctx);
// 文件名称
API_EXPORT const char* API_CALL mk_mp4_info_get_file_name(const mk_mp4_info ctx);
API_EXPORT const char *API_CALL mk_record_info_get_file_name(const mk_record_info ctx);
// 文件夹路径
API_EXPORT const char* API_CALL mk_mp4_info_get_folder(const mk_mp4_info ctx);
API_EXPORT const char *API_CALL mk_record_info_get_folder(const mk_record_info ctx);
// 播放路径
API_EXPORT const char* API_CALL mk_mp4_info_get_url(const mk_mp4_info ctx);
API_EXPORT const char *API_CALL mk_record_info_get_url(const mk_record_info ctx);
// 应用名称
API_EXPORT const char* API_CALL mk_mp4_info_get_vhost(const mk_mp4_info ctx);
API_EXPORT const char *API_CALL mk_record_info_get_vhost(const mk_record_info ctx);
// 流 ID
API_EXPORT const char* API_CALL mk_mp4_info_get_app(const mk_mp4_info ctx);
API_EXPORT const char *API_CALL mk_record_info_get_app(const mk_record_info ctx);
// 虚拟主机
API_EXPORT const char* API_CALL mk_mp4_info_get_stream(const mk_mp4_info ctx);
API_EXPORT const char *API_CALL mk_record_info_get_stream(const mk_record_info ctx);
//// 下面宏保障用户代码兼容性, 二进制abi不兼容用户需要重新编译链接 /////
#define mk_mp4_info mk_record_info
#define mk_mp4_info_get_start_time mk_record_info_get_start_time
#define mk_mp4_info_get_time_len mk_record_info_get_time_len
#define mk_mp4_info_get_file_size mk_record_info_get_file_size
#define mk_mp4_info_get_file_path mk_record_info_get_file_path
#define mk_mp4_info_get_file_name mk_record_info_get_file_name
#define mk_mp4_info_get_folder mk_record_info_get_folder
#define mk_mp4_info_get_url mk_record_info_get_url
#define mk_mp4_info_get_vhost mk_record_info_get_vhost
#define mk_mp4_info_get_app mk_record_info_get_app
#define mk_mp4_info_get_stream mk_record_info_get_stream
///////////////////////////////////////////Parser/////////////////////////////////////////////
//Parser对象的C映射
@ -103,6 +116,16 @@ API_EXPORT int API_CALL mk_media_source_get_track_count(const mk_media_source ct
API_EXPORT mk_track API_CALL mk_media_source_get_track(const mk_media_source ctx, int index);
// MediaSource::broadcastMessage
API_EXPORT int API_CALL mk_media_source_broadcast_msg(const mk_media_source ctx, const char *msg, size_t len);
// MediaSource::getOriginUrl()
API_EXPORT const char* API_CALL mk_media_source_get_origin_url(const mk_media_source ctx);
// MediaSource::getOriginType()
API_EXPORT int API_CALL mk_media_source_get_origin_type(const mk_media_source ctx);
// MediaSource::getCreateStamp()
API_EXPORT uint64_t API_CALL mk_media_source_get_create_stamp(const mk_media_source ctx);
// MediaSource::isRecording() 0:hls,1:MP4
API_EXPORT int API_CALL mk_media_source_is_recording(const mk_media_source ctx, int type);
/**
* ZLMediaKit中被称作为MediaSource
@ -142,11 +165,11 @@ API_EXPORT void API_CALL mk_media_source_find(const char *schema,
void *user_data,
on_mk_media_source_find_cb cb);
API_EXPORT const mk_media_source API_CALL mk_media_source_find2(const char *schema,
const char *vhost,
const char *app,
const char *stream,
int from_mp4);
API_EXPORT mk_media_source API_CALL mk_media_source_find2(const char *schema,
const char *vhost,
const char *app,
const char *stream,
int from_mp4);
//MediaSource::for_each_media()
API_EXPORT void API_CALL mk_media_source_for_each(void *user_data, on_mk_media_source_find_cb cb, const char *schema,
const char *vhost, const char *app, const char *stream);

View File

@ -128,6 +128,15 @@ API_EXPORT char *API_CALL mk_ini_dump_string(mk_ini ini);
* @param file
*/
API_EXPORT void API_CALL mk_ini_dump_file(mk_ini ini, const char *file);
///////////////////////////////////////////统计/////////////////////////////////////////////
typedef void(API_CALL *on_mk_get_statistic_cb)(void *user_data, mk_ini ini);
/**
 * Asynchronously collect server statistics and deliver them as an ini object.
 * @param cb callback receiving the statistics ini
 * @param user_data user pointer passed back to cb
 * @param free_cb invoked to release user_data when no longer needed
 */
API_EXPORT void API_CALL mk_get_statistic(on_mk_get_statistic_cb cb, void *user_data, on_user_data_free free_cb);
///////////////////////////////////////////日志/////////////////////////////////////////////

View File

@ -304,10 +304,10 @@ API_EXPORT void API_CALL mk_webrtc_get_answer_sdp2(void *user_data, on_user_data
std::string offer_str = offer;
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
auto args = std::make_shared<WebRtcArgsUrl>(url);
WebRtcPluginManager::Instance().getAnswerSdp(*session, type, *args,
[offer_str, session, ptr, cb](const WebRtcInterface &exchanger) mutable {
WebRtcPluginManager::Instance().negotiateSdp(*session, type, *args, [offer_str, session, ptr, cb](const WebRtcInterface &exchanger) mutable {
auto &handler = const_cast<WebRtcInterface &>(exchanger);
try {
auto sdp_answer = exchangeSdp(exchanger, offer_str);
auto sdp_answer = handler.getAnswerSdp(offer_str);
cb(ptr.get(), sdp_answer.data(), nullptr);
} catch (std::exception &ex) {
cb(ptr.get(), nullptr, ex.what());

View File

@ -14,7 +14,10 @@
#include "Http/HttpSession.h"
#include "Rtsp/RtspSession.h"
#include "Record/MP4Recorder.h"
#ifdef ENABLE_WEBRTC
#include "webrtc/WebRtcTransport.h"
#endif
using namespace toolkit;
using namespace mediakit;
@ -39,7 +42,13 @@ API_EXPORT void API_CALL mk_events_listen(const mk_events *events){
NoticeCenter::Instance().addListener(&s_tag,Broadcast::kBroadcastRecordMP4,[](BroadcastRecordMP4Args){
if(s_events.on_mk_record_mp4){
s_events.on_mk_record_mp4((mk_mp4_info)&info);
s_events.on_mk_record_mp4((mk_record_info)&info);
}
});
NoticeCenter::Instance().addListener(&s_tag, Broadcast::kBroadcastRecordTs, [](BroadcastRecordTsArgs) {
if (s_events.on_mk_record_ts) {
s_events.on_mk_record_ts((mk_record_info)&info);
}
});
@ -168,7 +177,7 @@ API_EXPORT void API_CALL mk_events_listen(const mk_events *events){
sender.getMediaTuple().stream.c_str(), ssrc.c_str(), ex.getErrCode(), ex.what());
}
});
#ifdef ENABLE_WEBRTC
NoticeCenter::Instance().addListener(&s_tag, Broadcast::kBroadcastRtcSctpConnecting,[](BroadcastRtcSctpConnectArgs){
if (s_events.on_mk_rtc_sctp_connecting) {
s_events.on_mk_rtc_sctp_connecting((mk_rtc_transport)&sender);
@ -204,6 +213,7 @@ API_EXPORT void API_CALL mk_events_listen(const mk_events *events){
s_events.on_mk_rtc_sctp_received((mk_rtc_transport)&sender, streamId, ppid, msg, len);
}
});
#endif
});
}

View File

@ -17,67 +17,70 @@
#include "Http/HttpClient.h"
#include "Rtsp/RtspSession.h"
#ifdef ENABLE_WEBRTC
#include "webrtc/WebRtcTransport.h"
#endif
using namespace toolkit;
using namespace mediakit;
///////////////////////////////////////////RecordInfo/////////////////////////////////////////////
API_EXPORT uint64_t API_CALL mk_mp4_info_get_start_time(const mk_mp4_info ctx){
API_EXPORT uint64_t API_CALL mk_record_info_get_start_time(const mk_record_info ctx) {
assert(ctx);
RecordInfo *info = (RecordInfo *)ctx;
return info->start_time;
}
API_EXPORT float API_CALL mk_mp4_info_get_time_len(const mk_mp4_info ctx){
API_EXPORT float API_CALL mk_record_info_get_time_len(const mk_record_info ctx) {
assert(ctx);
RecordInfo *info = (RecordInfo *)ctx;
return info->time_len;
}
API_EXPORT size_t API_CALL mk_mp4_info_get_file_size(const mk_mp4_info ctx){
API_EXPORT size_t API_CALL mk_record_info_get_file_size(const mk_record_info ctx) {
assert(ctx);
RecordInfo *info = (RecordInfo *)ctx;
return info->file_size;
}
API_EXPORT const char* API_CALL mk_mp4_info_get_file_path(const mk_mp4_info ctx){
API_EXPORT const char *API_CALL mk_record_info_get_file_path(const mk_record_info ctx) {
assert(ctx);
RecordInfo *info = (RecordInfo *)ctx;
return info->file_path.c_str();
}
API_EXPORT const char* API_CALL mk_mp4_info_get_file_name(const mk_mp4_info ctx){
API_EXPORT const char *API_CALL mk_record_info_get_file_name(const mk_record_info ctx) {
assert(ctx);
RecordInfo *info = (RecordInfo *)ctx;
return info->file_name.c_str();
}
API_EXPORT const char* API_CALL mk_mp4_info_get_folder(const mk_mp4_info ctx){
API_EXPORT const char *API_CALL mk_record_info_get_folder(const mk_record_info ctx) {
assert(ctx);
RecordInfo *info = (RecordInfo *)ctx;
return info->folder.c_str();
}
API_EXPORT const char* API_CALL mk_mp4_info_get_url(const mk_mp4_info ctx){
API_EXPORT const char *API_CALL mk_record_info_get_url(const mk_record_info ctx) {
assert(ctx);
RecordInfo *info = (RecordInfo *)ctx;
return info->url.c_str();
}
API_EXPORT const char* API_CALL mk_mp4_info_get_vhost(const mk_mp4_info ctx){
API_EXPORT const char *API_CALL mk_record_info_get_vhost(const mk_record_info ctx) {
assert(ctx);
RecordInfo *info = (RecordInfo *)ctx;
return info->vhost.c_str();
}
API_EXPORT const char* API_CALL mk_mp4_info_get_app(const mk_mp4_info ctx){
API_EXPORT const char *API_CALL mk_record_info_get_app(const mk_record_info ctx) {
assert(ctx);
RecordInfo *info = (RecordInfo *)ctx;
return info->app.c_str();
}
API_EXPORT const char* API_CALL mk_mp4_info_get_stream(const mk_mp4_info ctx){
API_EXPORT const char *API_CALL mk_record_info_get_stream(const mk_record_info ctx) {
assert(ctx);
RecordInfo *info = (RecordInfo *)ctx;
return info->stream.c_str();
@ -127,7 +130,7 @@ API_EXPORT const char* API_CALL mk_parser_get_content(const mk_parser ctx, size_
API_EXPORT const char* API_CALL mk_media_info_get_params(const mk_media_info ctx){
assert(ctx);
MediaInfo *info = (MediaInfo *)ctx;
return info->param_strs.c_str();
return info->params.c_str();
}
API_EXPORT const char* API_CALL mk_media_info_get_schema(const mk_media_info ctx){
@ -225,6 +228,30 @@ API_EXPORT int API_CALL mk_media_source_broadcast_msg(const mk_media_source ctx,
return src->broadcastMessage(any);
}
API_EXPORT const char* API_CALL mk_media_source_get_origin_url(const mk_media_source ctx) {
assert(ctx);
MediaSource *src = (MediaSource *)ctx;
return _strdup(src->getOriginUrl().c_str());
}
API_EXPORT int API_CALL mk_media_source_get_origin_type(const mk_media_source ctx) {
assert(ctx);
MediaSource *src = (MediaSource *)ctx;
return static_cast<int>(src->getOriginType());
}
API_EXPORT uint64_t API_CALL mk_media_source_get_create_stamp(const mk_media_source ctx) {
assert(ctx);
MediaSource *src = (MediaSource *)ctx;
return src->getCreateStamp();
}
API_EXPORT int API_CALL mk_media_source_is_recording(const mk_media_source ctx,int type) {
assert(ctx);
MediaSource *src = (MediaSource *)ctx;
return src->isRecording((Recorder::type)type);
}
API_EXPORT int API_CALL mk_media_source_close(const mk_media_source ctx,int force){
assert(ctx);
MediaSource *src = (MediaSource *)ctx;
@ -275,11 +302,11 @@ API_EXPORT void API_CALL mk_media_source_find(const char *schema,
cb(user_data, (mk_media_source)src.get());
}
API_EXPORT const mk_media_source API_CALL mk_media_source_find2(const char *schema,
const char *vhost,
const char *app,
const char *stream,
int from_mp4) {
API_EXPORT mk_media_source API_CALL mk_media_source_find2(const char *schema,
const char *vhost,
const char *app,
const char *stream,
int from_mp4) {
assert(schema && vhost && app && stream);
auto src = MediaSource::find(schema, vhost, app, stream, from_mp4);
return (mk_media_source)src.get();
@ -501,7 +528,7 @@ API_EXPORT void API_CALL mk_auth_invoker_clone_release(const mk_auth_invoker ctx
}
///////////////////////////////////////////WebRtcTransport/////////////////////////////////////////////
API_EXPORT void API_CALL mk_rtc_sendDatachannel(const mk_rtc_transport ctx, uint16_t streamId, uint32_t ppid, const char *msg, size_t len) {
API_EXPORT void API_CALL mk_rtc_send_datachannel(const mk_rtc_transport ctx, uint16_t streamId, uint32_t ppid, const char *msg, size_t len) {
#ifdef ENABLE_WEBRTC
assert(ctx && msg);
WebRtcTransport *transport = (WebRtcTransport *)ctx;

View File

@ -27,8 +27,9 @@ protected:
private:
bool _h265 = false;
bool _have_decode_frame = false;
onH264 _cb;
size_t _search_pos = 0;
toolkit::BufferLikeString _buffer;
};
void H264Splitter::setOnSplitted(H264Splitter::onH264 cb) {
@ -42,11 +43,21 @@ H264Splitter::~H264Splitter() {
}
ssize_t H264Splitter::onRecvHeader(const char *data, size_t len) {
_cb(data, len);
auto frame = Factory::getFrameFromPtr(_h265 ? CodecH265 : CodecH264, (char *)data, len, 0, 0);
if (_have_decode_frame && (frame->decodeAble() || frame->configFrame())) {
// 缓存中存在可解码帧且下一帧是可解码帧或者配置帧那么flush缓存
_cb(_buffer.data(), _buffer.size());
_buffer.assign(data, len);
_have_decode_frame = frame->decodeAble();
} else {
// 还需要缓存
_buffer.append(data, len);
_have_decode_frame = _have_decode_frame || frame->decodeAble();
}
return 0;
}
static const char *onSearchPacketTail_l(const char *data, size_t len) {
const char *H264Splitter::onSearchPacketTail(const char *data, size_t len) {
for (size_t i = 2; len > 2 && i < len - 2; ++i) {
//判断0x00 00 01
if (data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 1) {
@ -60,28 +71,6 @@ static const char *onSearchPacketTail_l(const char *data, size_t len) {
return nullptr;
}
const char *H264Splitter::onSearchPacketTail(const char *data, size_t len) {
auto last_frame = data + _search_pos;
auto next_frame = onSearchPacketTail_l(last_frame, len - _search_pos);
if (!next_frame) {
return nullptr;
}
auto last_frame_len = next_frame - last_frame;
Frame::Ptr frame;
if (_h265) {
frame = Factory::getFrameFromPtr(CodecH265, (char *)last_frame, last_frame_len, 0, 0);
} else {
frame = Factory::getFrameFromPtr(CodecH264, (char *)last_frame, last_frame_len, 0, 0);
}
if (frame->decodeAble()) {
_search_pos = 0;
return next_frame;
}
_search_pos += last_frame_len;
return nullptr;
}
////////////////////////////////////////////////////////////////////////////////////////////////////////
API_EXPORT mk_h264_splitter API_CALL mk_h264_splitter_create(on_mk_h264_splitter_frame cb, void *user_data, int is_h265) {

View File

@ -15,16 +15,15 @@
#include "Util/util.h"
#include "Util/mini.h"
#include "Util/logger.h"
#include "Util/TimeTicker.h"
#include "Poller/EventPoller.h"
#include "Thread/WorkThreadPool.h"
#include "Common/config.h"
using namespace std;
using namespace toolkit;
using namespace mediakit;
#ifndef _WIN32
#define _strdup strdup
#endif
API_EXPORT void API_CALL mk_free(void *ptr) {
free(ptr);
}
@ -136,6 +135,129 @@ API_EXPORT void API_CALL mk_ini_dump_file(mk_ini ini, const char *file) {
ptr->dumpFile(file);
}
extern uint64_t getTotalMemUsage();
extern uint64_t getTotalMemBlock();
extern uint64_t getThisThreadMemUsage();
extern uint64_t getThisThreadMemBlock();
extern std::vector<size_t> getBlockTypeSize();
extern uint64_t getTotalMemBlockByType(int type);
extern uint64_t getThisThreadMemBlockByType(int type);
namespace mediakit {
class MediaSource;
class MultiMediaSourceMuxer;
class FrameImp;
class Frame;
class RtpPacket;
class RtmpPacket;
} // namespace mediakit
namespace toolkit {
class TcpServer;
class TcpSession;
class UdpServer;
class UdpSession;
class TcpClient;
class Socket;
class Buffer;
class BufferRaw;
class BufferLikeString;
class BufferList;
} // namespace toolkit
API_EXPORT void API_CALL mk_get_statistic(on_mk_get_statistic_cb func, void *user_data, on_user_data_free free_cb) {
assert(func);
std::shared_ptr<void> data(user_data, free_cb);
auto cb = [func, data](const toolkit::mINI &ini) { func(data.get(), (mk_ini)&ini); };
auto obj = std::make_shared<toolkit::mINI>();
auto &val = *obj;
val["object.MediaSource"] = ObjectStatistic<MediaSource>::count();
val["object.MultiMediaSourceMuxer"] = ObjectStatistic<MultiMediaSourceMuxer>::count();
val["object.TcpServer"] = ObjectStatistic<TcpServer>::count();
val["object.TcpSession"] = ObjectStatistic<TcpSession>::count();
val["object.UdpServer"] = ObjectStatistic<UdpServer>::count();
val["object.UdpSession"] = ObjectStatistic<UdpSession>::count();
val["object.TcpClient"] = ObjectStatistic<TcpClient>::count();
val["object.Socket"] = ObjectStatistic<Socket>::count();
val["object.FrameImp"] = ObjectStatistic<FrameImp>::count();
val["object.Frame"] = ObjectStatistic<Frame>::count();
val["object.Buffer"] = ObjectStatistic<Buffer>::count();
val["object.BufferRaw"] = ObjectStatistic<BufferRaw>::count();
val["object.BufferLikeString"] = ObjectStatistic<BufferLikeString>::count();
val["object.BufferList"] = ObjectStatistic<BufferList>::count();
val["object.RtpPacket"] = ObjectStatistic<RtpPacket>::count();
val["object.RtmpPacket"] = ObjectStatistic<RtmpPacket>::count();
#ifdef ENABLE_MEM_DEBUG
auto bytes = getTotalMemUsage();
val["memory.memUsage"] = bytes;
val["memory.memUsageMB"] = (int)(bytes / 1024 / 1024);
val["memory.memBlock"] = getTotalMemBlock();
static auto block_type_size = getBlockTypeSize();
{
int i = 0;
string str;
size_t last = 0;
for (auto sz : block_type_size) {
str.append(to_string(last) + "~" + to_string(sz) + ":" + to_string(getTotalMemBlockByType(i++)) + ";");
last = sz;
}
str.pop_back();
val["memory.memBlockTypeCount"] = str;
}
#endif
auto thread_size = EventPollerPool::Instance().getExecutorSize() + WorkThreadPool::Instance().getExecutorSize();
std::shared_ptr<vector<toolkit::mINI>> thread_mem_info = std::make_shared<vector<toolkit::mINI>>(thread_size);
shared_ptr<void> finished(nullptr, [thread_mem_info, cb, obj](void *) {
for (auto &val : *thread_mem_info) {
auto thread_name = val["name"];
replace(thread_name, "...", "~~~");
auto prefix = "thread-" + thread_name + ".";
for (auto &pr : val) {
(*obj).emplace(prefix + pr.first, std::move(pr.second));
}
}
// 触发回调
cb(*obj);
});
auto pos = 0;
auto lambda = [&](const TaskExecutor::Ptr &executor) {
auto &val = (*thread_mem_info)[pos++];
val["load"] = executor->load();
Ticker ticker;
executor->async([finished, &val, ticker]() {
val["name"] = getThreadName();
val["delay"] = ticker.elapsedTime();
#ifdef ENABLE_MEM_DEBUG
auto bytes = getThisThreadMemUsage();
val["memUsage"] = bytes;
val["memUsageMB"] = bytes / 1024 / 1024;
val["memBlock"] = getThisThreadMemBlock();
{
int i = 0;
string str;
size_t last = 0;
for (auto sz : block_type_size) {
str.append(to_string(last) + "~" + to_string(sz) + ":" + to_string(getThisThreadMemBlockByType(i++)) + ";");
last = sz;
}
str.pop_back();
val["memBlockTypeCount"] = str;
}
#endif
});
};
EventPollerPool::Instance().for_each(lambda);
WorkThreadPool::Instance().for_each(lambda);
}
API_EXPORT void API_CALL mk_log_printf(int level, const char *file, const char *function, int line, const char *fmt, ...) {
va_list ap;
va_start(ap, fmt);

152
api/tests/h264_pusher.c Normal file
View File

@ -0,0 +1,152 @@
/*
* Copyright (c) 2016-present The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/ZLMediaKit/ZLMediaKit).
*
* Use of this source code is governed by MIT-like license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#include <signal.h>
#include <stdio.h>
#include <string.h>
#ifdef _WIN32
#include "windows.h"
#else
#include "unistd.h"
#endif
#include "mk_mediakit.h"
// Set by the signal handlers to request a clean shutdown of the main loop.
// `volatile sig_atomic_t` is the only type ISO C guarantees can be safely
// written from a signal handler and read from normal code; a plain int is not.
static volatile sig_atomic_t exit_flag = 0;

/**
 * SIGINT/SIGTERM handler: flag the main read loop to exit.
 * @param sig received signal number (unused)
 */
static void s_on_exit(int sig) {
    (void)sig;
    exit_flag = 1;
}
/**
 * Callback invoked by the H.264 splitter once per split frame.
 * Throttles to one frame every 40 ms (~25 fps) to simulate a realtime
 * stream, wraps the raw data in an mk_frame with a monotonically
 * increasing timestamp, and feeds it into the media passed as user_data.
 */
static void on_h264_frame(void *user_data, mk_h264_splitter splitter, const char *data, int size) {
    // Pace the input like a live 25 fps source.
#ifdef _WIN32
    Sleep(40);
#else
    usleep(40 * 1000);
#endif
    static int s_stamp = 0;
    mk_frame out = mk_frame_create(MKCodecH264, s_stamp, s_stamp, data, size, NULL, NULL);
    s_stamp += 40;
    mk_media_input_frame((mk_media)user_data, out);
    mk_frame_unref(out);
}
// Per-push state shared between on_regist and the pusher result/shutdown
// callbacks; owned by the media object and destroyed via release_context.
typedef struct {
    mk_pusher pusher; // active pusher; NULL while no matching source is registered
    char *url;        // heap-owned push destination url (freed in release_context)
} Context;
/**
 * Destructor for the Context attached to the media's regist callback.
 * Releases the pusher if one is still alive, then frees the url string
 * and the Context itself.
 */
void release_context(void *user_data) {
    Context *ctx = (Context *)user_data;
    if (ctx->pusher != NULL) {
        mk_pusher_release(ctx->pusher);
    }
    free(ctx->url);
    free(ctx);
    log_info("停止推流");
}
/**
 * Pusher publish-result callback: logs success or failure.
 * @param user_data the Context created in main
 * @param err_code 0 on success, otherwise an error code
 * @param err_msg human-readable error description
 */
void on_push_result(void *user_data, int err_code, const char *err_msg) {
    Context *ctx = (Context *)user_data;
    if (err_code != 0) {
        log_warn("推流%s失败: %d(%s)", ctx->url, err_code, err_msg);
        return;
    }
    log_info("推流成功: %s", ctx->url);
}
/**
 * Pusher shutdown callback: the active push was interrupted.
 * @param user_data the Context created in main
 * @param err_code error code explaining the interruption
 * @param err_msg human-readable error description
 */
void on_push_shutdown(void *user_data, int err_code, const char *err_msg) {
    Context *ctx = (Context *)user_data;
    log_warn("推流%s中断: %d(%s)", ctx->url, err_code, err_msg);
}
/**
 * Media source regist/unregist notification.
 * When a source whose schema is a prefix of the configured push url is
 * registered, creates a pusher and starts publishing; when the source is
 * unregistered, releases the pusher.
 */
void API_CALL on_regist(void *user_data, mk_media_source sender, int regist) {
    Context *ctx = (Context *)user_data;
    const char *schema = mk_media_source_get_schema(sender);
    // The push url must start with this source's schema (e.g. "rtmp", "rtsp").
    if (strncmp(ctx->url, schema, strlen(schema)) != 0) {
        return;
    }
    if (regist) {
        // Source came online: start pushing once.
        if (ctx->pusher == NULL) {
            ctx->pusher = mk_pusher_create_src(sender);
            mk_pusher_set_on_result2(ctx->pusher, on_push_result, ctx, NULL);
            mk_pusher_set_on_shutdown2(ctx->pusher, on_push_shutdown, ctx, NULL);
            // Kick off the publish.
            mk_pusher_publish(ctx->pusher, ctx->url);
        }
    } else if (ctx->pusher != NULL) {
        // Source went offline: tear the pusher down.
        mk_pusher_release(ctx->pusher);
        ctx->pusher = NULL;
    }
}
int main(int argc, char *argv[]) {
if (argc < 3) {
log_error("Usage: /path/to/h264/file rtsp_or_rtmp_url");
return -1;
}
mk_config config = { .ini = NULL,
.ini_is_path = 1,
.log_level = 0,
.log_mask = LOG_CONSOLE,
.log_file_path = NULL,
.log_file_days = 0,
.ssl = NULL,
.ssl_is_path = 1,
.ssl_pwd = NULL,
.thread_num = 0 };
mk_env_init(&config);
FILE *fp = fopen(argv[1], "rb");
if (!fp) {
log_error("打开文件失败!");
return -1;
}
mk_media media = mk_media_create("__defaultVhost__", "live", "test", 0, 0, 0);
// h264的codec
codec_args v_args = { 0 };
mk_track v_track = mk_track_create(MKCodecH264, &v_args);
mk_media_init_track(media, v_track);
mk_media_init_complete(media);
mk_track_unref(v_track);
Context *ctx = (Context *)malloc(sizeof(Context));
memset(ctx, 0, sizeof(Context));
ctx->url = strdup(argv[2]);
mk_media_set_on_regist2(media, on_regist, ctx, release_context);
// 创建h264分帧器
mk_h264_splitter splitter = mk_h264_splitter_create(on_h264_frame, media, 0);
signal(SIGINT, s_on_exit); // 设置退出信号
signal(SIGTERM, s_on_exit); // 设置退出信号
char buf[1024];
while (!exit_flag) {
int size = fread(buf, 1, sizeof(buf) - 1, fp);
if (size > 0) {
mk_h264_splitter_input_data(splitter, buf, size);
} else {
// 文件读完了,重新开始
fseek(fp, 0, SEEK_SET);
}
}
log_info("文件读取完毕");
mk_h264_splitter_release(splitter);
mk_media_release(media);
fclose(fp);
return 0;
}

View File

@ -189,6 +189,14 @@ static void on_mk_webrtc_get_answer_sdp_func(void *user_data, const char *answer
free((void *)answer);
}
}
/**
 * Completion callback for mk_get_statistic: serializes the statistics ini
 * and answers the pending http request (invoker passed via user_data) with
 * an http 200 response.
 */
void API_CALL on_get_statistic_cb(void *user_data, mk_ini ini) {
    const char *response_header[] = { NULL };
    char *body = mk_ini_dump_string(ini);
    mk_http_response_invoker_do_string(user_data, 200, response_header, body);
    mk_free(body);
}
/**
* http api请求广播(GET/POST)
* @param parser http请求内容对象
@ -247,6 +255,9 @@ void API_CALL on_mk_http_request(const mk_parser parser,
mk_webrtc_get_answer_sdp(mk_http_response_invoker_clone(invoker), on_mk_webrtc_get_answer_sdp_func,
mk_parser_get_url_param(parser, "type"), mk_parser_get_content(parser, NULL), rtc_url);
} else if (strcmp(url, "/index/api/getStatistic") == 0) {
//拦截api: /index/api/getStatistic
mk_get_statistic(on_get_statistic_cb, mk_http_response_invoker_clone(invoker), (on_user_data_free) mk_http_response_invoker_clone_release);
} else {
*consumed = 0;
return;
@ -387,7 +398,7 @@ void API_CALL on_mk_rtsp_auth(const mk_media_info url_info,
/**
* mp4分片文件成功后广播
*/
void API_CALL on_mk_record_mp4(const mk_mp4_info mp4) {
void API_CALL on_mk_record_mp4(const mk_record_info mp4) {
log_printf(LOG_LEV,
"\nstart_time: %d\n"
"time_len: %d\n"
@ -399,16 +410,16 @@ void API_CALL on_mk_record_mp4(const mk_mp4_info mp4) {
"vhost: %s\n"
"app: %s\n"
"stream: %s\n",
mk_mp4_info_get_start_time(mp4),
mk_mp4_info_get_time_len(mp4),
mk_mp4_info_get_file_size(mp4),
mk_mp4_info_get_file_path(mp4),
mk_mp4_info_get_file_name(mp4),
mk_mp4_info_get_folder(mp4),
mk_mp4_info_get_url(mp4),
mk_mp4_info_get_vhost(mp4),
mk_mp4_info_get_app(mp4),
mk_mp4_info_get_stream(mp4));
mk_record_info_get_start_time(mp4),
mk_record_info_get_time_len(mp4),
mk_record_info_get_file_size(mp4),
mk_record_info_get_file_path(mp4),
mk_record_info_get_file_name(mp4),
mk_record_info_get_folder(mp4),
mk_record_info_get_url(mp4),
mk_record_info_get_vhost(mp4),
mk_record_info_get_app(mp4),
mk_record_info_get_stream(mp4));
}
/**

View File

@ -126,6 +126,8 @@ wait_track_ready_ms=10000
wait_add_track_ms=3000
#如果track未就绪我们先缓存帧数据但是有最大个数限制防止内存溢出
unready_frame_cache=100
#是否启用观看人数变化事件广播置1则启用置0则关闭
broadcast_player_count_changed=0
[hls]
#hls写文件的buf大小调整参数可以提高文件io性能
@ -219,7 +221,7 @@ timeout_sec=15
retry_count=3
[http]
#http服务器字符编码windows上默认gb2312
#http服务器字符编码
charSet=utf-8
#http链接超时时间
keepAliveSecond=30
@ -357,7 +359,7 @@ tcpPort = 8000
rembBitRate=0
#rtc支持的音频codec类型,在前面的优先级更高
#以下范例为所有支持的音频codec
preferredCodecA=PCMU,PCMA,opus,mpeg4-generic
preferredCodecA=PCMA,PCMU,opus,mpeg4-generic
#rtc支持的视频codec类型,在前面的优先级更高
#以下范例为所有支持的视频codec
preferredCodecV=H264,H265,AV1,VP9,VP8
@ -367,6 +369,24 @@ start_bitrate=0
max_bitrate=0
min_bitrate=0
#nack接收端
#Nack缓存包最早时间间隔
maxNackMS=5000
#Nack包检查间隔(包数量)
rtpCacheCheckInterval=100
#nack发送端
#最大保留的rtp丢包状态个数
nackMaxSize=2048
#rtp丢包状态最长保留时间
nackMaxMS=3000
#nack最多请求重传次数
nackMaxCount=15
#nack重传频率rtt的倍数
nackIntervalRatio=1.0
#nack包中rtp个数减小此值可以让nack包响应更灵敏
nackRtpSize=8
[srt]
#srt播放推流、播放超时时间,单位秒
timeoutSec=5

1
conf/novideo.yuv Normal file

File diff suppressed because one or more lines are too long

View File

@ -17,7 +17,7 @@ using namespace toolkit;
namespace mediakit {
void AACRtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt) {
CHECK(pkt->size() > 2);
CHECK_RET(pkt->size() > 2);
if (pkt->isConfigFrame()) {
getTrack()->setExtraData((uint8_t *)pkt->data() + 2, pkt->size() - 2);
return;

View File

@ -46,7 +46,7 @@ bool G711RtpEncoder::inputFrame(const Frame::Ptr &frame) {
const size_t rtp_size = max_size;
n++;
stamp += _pkt_dur_ms;
RtpCodec::inputRtp(getRtpInfo().makeRtp(TrackAudio, ptr, rtp_size, mark, stamp), true);
RtpCodec::inputRtp(getRtpInfo().makeRtp(TrackAudio, ptr, rtp_size, mark, stamp), false);
ptr += rtp_size;
remain_size -= rtp_size;
}

View File

@ -14,14 +14,6 @@
using namespace std;
using namespace toolkit;
#define CHECK_RET(...) \
try { \
CHECK(__VA_ARGS__); \
} catch (AssertFailedException & ex) { \
WarnL << ex.what(); \
return; \
}
namespace mediakit {
void H264RtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt) {

View File

@ -18,14 +18,6 @@
using namespace std;
using namespace toolkit;
#define CHECK_RET(...) \
try { \
CHECK(__VA_ARGS__); \
} catch (AssertFailedException & ex) { \
WarnL << ex.what(); \
return; \
}
namespace mediakit {
void H265RtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt) {

View File

@ -1,11 +1,10 @@
{
"info": {
"_postman_id": "08c77fc3-7670-428c-bde4-80c8cc9f389f",
"_postman_id": "8b3cdc62-3e18-4700-9ddd-dc9f58ebce83",
"name": "ZLMediaKit",
"description": "媒体服务器",
"schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json",
"_exporter_id": "29185956",
"_collection_link": "https://lively-station-598157.postman.co/workspace/%E6%B5%81%E5%AA%92%E4%BD%93%E6%9C%8D%E5%8A%A1~1e119172-45b0-4ed6-b1fc-8a15d0e2d5f8/collection/29185956-08c77fc3-7670-428c-bde4-80c8cc9f389f?action=share&source=collection_link&creator=29185956"
"_exporter_id": "26338564"
},
"item": [
{
@ -34,6 +33,72 @@
},
"response": []
},
{
"name": "关闭多屏拼接(stack/stop)",
"request": {
"method": "GET",
"header": [],
"url": {
"raw": "{{ZLMediaKit_URL}}/index/api/getApiList?secret={{ZLMediaKit_secret}}&id=stack_test",
"host": [
"{{ZLMediaKit_URL}}"
],
"path": [
"index",
"api",
"getApiList"
],
"query": [
{
"key": "secret",
"value": "{{ZLMediaKit_secret}}",
"description": "api操作密钥(配置文件配置)"
},
{
"key": "id",
"value": "stack_test"
}
]
}
},
"response": []
},
{
"name": "添加多屏拼接(stack/start)",
"request": {
"method": "POST",
"header": [],
"body": {
"mode": "raw",
"raw": "{\r\n \"gapv\": 0.002,\r\n \"gaph\": 0.001,\r\n \"width\": 1920,\r\n \"url\": [\r\n [\r\n \"rtsp://kkem.me/live/test3\",\r\n \"rtsp://kkem.me/live/cy1\",\r\n \"rtsp://kkem.me/live/cy1\",\r\n \"rtsp://kkem.me/live/cy2\"\r\n ],\r\n [\r\n \"rtsp://kkem.me/live/cy1\",\r\n \"rtsp://kkem.me/live/cy5\",\r\n \"rtsp://kkem.me/live/cy3\",\r\n \"rtsp://kkem.me/live/cy4\"\r\n ],\r\n [\r\n \"rtsp://kkem.me/live/cy5\",\r\n \"rtsp://kkem.me/live/cy6\",\r\n \"rtsp://kkem.me/live/cy7\",\r\n \"rtsp://kkem.me/live/cy8\"\r\n ],\r\n [\r\n \"rtsp://kkem.me/live/cy9\",\r\n \"rtsp://kkem.me/live/cy10\",\r\n \"rtsp://kkem.me/live/cy11\",\r\n \"rtsp://kkem.me/live/cy12\"\r\n ]\r\n ],\r\n \"id\": \"89\",\r\n \"row\": 4,\r\n \"col\": 4,\r\n \"height\": 1080,\r\n \"span\": [\r\n [\r\n [\r\n 0,\r\n 0\r\n ],\r\n [\r\n 1,\r\n 1\r\n ]\r\n ],\r\n [\r\n [\r\n 3,\r\n 0\r\n ],\r\n [\r\n 3,\r\n 1\r\n ]\r\n ],\r\n [\r\n [\r\n 2,\r\n 3\r\n ],\r\n [\r\n 3,\r\n 3\r\n ]\r\n ]\r\n ]\r\n}",
"options": {
"raw": {
"language": "json"
}
}
},
"url": {
"raw": "{{ZLMediaKit_URL}}/index/api/stack/start?secret={{ZLMediaKit_secret}}",
"host": [
"{{ZLMediaKit_URL}}"
],
"path": [
"index",
"api",
"stack",
"start"
],
"query": [
{
"key": "secret",
"value": "{{ZLMediaKit_secret}}",
"description": "api操作密钥(配置文件配置)"
}
]
}
},
"response": []
},
{
"name": "获取网络线程负载(getThreadsLoad)",
"request": {
@ -1979,6 +2044,47 @@
},
"response": []
},
{
"name": "获取rtp发送列表(listRtpSender)",
"request": {
"method": "GET",
"header": [],
"url": {
"raw": "{{ZLMediaKit_URL}}/index/api/listRtpSender?secret={{ZLMediaKit_secret}}&vhost={{defaultVhost}}&app=live&stream=test",
"host": [
"{{ZLMediaKit_URL}}"
],
"path": [
"index",
"api",
"listRtpSender"
],
"query": [
{
"key": "secret",
"value": "{{ZLMediaKit_secret}}",
"description": "api操作密钥(配置文件配置)"
},
{
"key": "vhost",
"value": "{{defaultVhost}}",
"description": "虚拟主机例如__defaultVhost__"
},
{
"key": "app",
"value": "live",
"description": "应用名,例如 live"
},
{
"key": "stream",
"value": "test",
"description": "流id例如 obs"
}
]
}
},
"response": []
},
{
"name": "获取版本信息(version)",
"request": {

592
server/VideoStack.cpp Normal file
View File

@ -0,0 +1,592 @@
#if defined(ENABLE_VIDEOSTACK) && defined(ENABLE_X264) && defined(ENABLE_FFMPEG)
#include "VideoStack.h"
#include "Codec/Transcode.h"
#include "Common/Device.h"
#include "Util/logger.h"
#include "Util/util.h"
#include "json/value.h"
#include <Thread/WorkThreadPool.h>
#include <chrono>
#include <fstream>
#include <libavutil/pixfmt.h>
#include <memory>
#include <mutex>
#include <thread>
// ITU-R BT.601
// #define RGB_TO_Y(R, G, B) ((( 66 * (R) + 129 * (G) + 25 * (B)+128) >> 8)+16)
// #define RGB_TO_U(R, G, B) (((-38 * (R) - 74 * (G) + 112 * (B)+128) >> 8)+128)
// #define RGB_TO_V(R, G, B) (((112 * (R) - 94 * (G) - 18 * (B)+128) >> 8)+128)
// ITU-R BT.709
#define RGB_TO_Y(R, G, B) (((47 * (R) + 157 * (G) + 16 * (B) + 128) >> 8) + 16)
#define RGB_TO_U(R, G, B) (((-26 * (R)-87 * (G) + 112 * (B) + 128) >> 8) + 128)
#define RGB_TO_V(R, G, B) (((112 * (R)-102 * (G)-10 * (B) + 128) >> 8) + 128)
INSTANCE_IMP(VideoStackManager)
// Drops this region's reference on its backing channel when the param dies,
// letting VideoStackManager tear the channel down at zero references.
Param::~Param() {
    VideoStackManager::Instance().unrefChannel(id, width, height, pixfmt);
}
// Creates a scaler channel for stream `id`, producing frames of
// (width x height) in `pixfmt`. The working frame starts out as the shared
// background image so output regions are never left uninitialized.
Channel::Channel(const std::string& id, int width, int height, AVPixelFormat pixfmt)
    : _id(id)
    , _width(width)
    , _height(height)
    , _pixfmt(pixfmt)
{
    // Pre-allocate a zeroed frame of the target geometry.
    // NOTE(review): this buffer is discarded a few lines below when _tmp is
    // replaced by the scaled background image — looks like dead work; confirm.
    _tmp = std::make_shared<mediakit::FFmpegFrame>();
    _tmp->get()->width = _width;
    _tmp->get()->height = _height;
    _tmp->get()->format = _pixfmt;
    av_frame_get_buffer(_tmp->get(), 32);
    memset(_tmp->get()->data[0], 0, _tmp->get()->linesize[0] * _height);
    memset(_tmp->get()->data[1], 0, _tmp->get()->linesize[1] * _height / 2);
    memset(_tmp->get()->data[2], 0, _tmp->get()->linesize[2] * _height / 2);
    // Start from the placeholder background, scaled to this channel's geometry.
    auto frame = VideoStackManager::Instance().getBgImg();
    _sws = std::make_shared<mediakit::FFmpegSws>(_pixfmt, _width, _height);
    _tmp = _sws->inputFrame(frame);
}
void Channel::addParam(const std::weak_ptr<Param>& p)
{
std::lock_guard<std::recursive_mutex> lock(_mx);
_params.push_back(p);
}
// Scales a decoded frame into this channel's working buffer on a worker
// poller thread, then repaints every registered output region.
void Channel::onFrame(const mediakit::FFmpegFrame::Ptr& frame) {
    if (!_poller) {
        // Lazily bind a worker poller on first frame.
        _poller = toolkit::WorkThreadPool::Instance().getPoller();
    }
    std::weak_ptr<Channel> weak_self = shared_from_this();
    _poller->async([weak_self, frame]() {
        auto strong_self = weak_self.lock();
        if (!strong_self) {
            return;
        }
        strong_self->_tmp = strong_self->_sws->inputFrame(frame);
        strong_self->forEachParam([strong_self](const Param::Ptr& p) { strong_self->fillBuffer(p); });
    });
}
/**
 * Applies `func` to every still-alive registered region.
 *
 * Fix: acquire _mx while iterating — addParam() mutates _params under the
 * same lock from other threads, so iterating unlocked was a data race
 * (possible iterator invalidation mid-walk). The mutex is recursive, so
 * callers already holding it are unaffected.
 */
void Channel::forEachParam(const std::function<void(const Param::Ptr&)>& func)
{
    std::lock_guard<std::recursive_mutex> lock(_mx);
    for (auto& wp : _params) {
        if (auto sp = wp.lock()) {
            func(sp);
        }
    }
}
void Channel::fillBuffer(const Param::Ptr& p)
{
if (auto buf = p->weak_buf.lock()) {
copyData(buf, p);
}
}
// Blits this channel's current picture (_tmp) into the composed output frame
// `buf` at the region described by `p` (posX/posY, width/height), honoring
// the destination pixel format's plane layout.
void Channel::copyData(const mediakit::FFmpegFrame::Ptr& buf, const Param::Ptr& p)
{
    switch (p->pixfmt) {
    case AV_PIX_FMT_YUV420P: {
        // Y plane: full-resolution rows, offset by (posX, posY).
        for (int i = 0; i < p->height; i++) {
            memcpy(buf->get()->data[0] + buf->get()->linesize[0] * (i + p->posY) + p->posX,
                _tmp->get()->data[0] + _tmp->get()->linesize[0] * i,
                _tmp->get()->width);
        }
        // Round the row count up so the last UV row is also copied when height is odd.
        for (int i = 0; i < (p->height + 1) / 2; i++) {
            // U plane (half resolution in both dimensions)
            memcpy(buf->get()->data[1] + buf->get()->linesize[1] * (i + p->posY / 2) + p->posX / 2,
                _tmp->get()->data[1] + _tmp->get()->linesize[1] * i,
                _tmp->get()->width / 2);
            // V plane
            memcpy(buf->get()->data[2] + buf->get()->linesize[2] * (i + p->posY / 2) + p->posX / 2,
                _tmp->get()->data[2] + _tmp->get()->linesize[2] * i,
                _tmp->get()->width / 2);
        }
        break;
    }
    case AV_PIX_FMT_NV12: {
        // TODO: not implemented yet
        break;
    }
    default:
        WarnL << "No support pixformat: " << av_get_pix_fmt_name(p->pixfmt);
        break;
    }
}
// Attaches a channel that will receive this player's decoded frames.
void StackPlayer::addChannel(const std::weak_ptr<Channel>& chn) {
    std::lock_guard<std::recursive_mutex> guard(_mx);
    _channels.emplace_back(chn);
}
// Creates the underlying MediaPlayer and starts pulling _url over RTSP/TCP
// without waiting for tracks to be ready. On success the retry timer is
// cancelled; on failure or shutdown the placeholder background is pushed and
// a delayed replay is scheduled. Decoded video frames fan out via onFrame().
void StackPlayer::play()
{
    auto url = _url;
    // Create the pull-stream / decode objects.
    _player = std::make_shared<mediakit::MediaPlayer>();
    std::weak_ptr<mediakit::MediaPlayer> weakPlayer = _player;
    std::weak_ptr<StackPlayer> weakSelf = shared_from_this();
    (*_player)[mediakit::Client::kWaitTrackReady] = false;
    (*_player)[mediakit::Client::kRtpType] = mediakit::Rtsp::RTP_TCP;
    _player->setOnPlayResult([weakPlayer, weakSelf, url](const toolkit::SockException& ex) mutable {
        TraceL << "StackPlayer: " << url << " OnPlayResult: " << ex.what();
        auto strongPlayer = weakPlayer.lock();
        if (!strongPlayer) {
            return;
        }
        auto self = weakSelf.lock();
        if (!self) {
            return;
        }
        if (!ex) {
            // Play succeeded: cancel the retry timer and reset the back-off.
            self->_timer.reset();
            self->_failedCount = 0;
        } else {
            // Failed: show the background and schedule a reconnect.
            self->onDisconnect();
            self->rePlay(url);
        }
        auto videoTrack = std::dynamic_pointer_cast<mediakit::VideoTrack>(strongPlayer->getTrack(mediakit::TrackVideo, false));
        //auto audioTrack = std::dynamic_pointer_cast<mediakit::AudioTrack>(strongPlayer->getTrack(mediakit::TrackAudio, false));
        if (videoTrack) {
            // TODO: decide between GPU and CPU decoding here.
            //auto decoder = std::make_shared<FFmpegDecoder>(videoTrack, 1, std::vector<std::string>{ "hevc_cuvid", "h264_cuvid"});
            auto decoder = std::make_shared<mediakit::FFmpegDecoder>(videoTrack, 0, std::vector<std::string> { "h264", "hevc" });
            // Fan decoded frames out to every attached channel.
            decoder->setOnDecode([weakSelf](const mediakit::FFmpegFrame::Ptr& frame) mutable {
                auto self = weakSelf.lock();
                if (!self) {
                    return;
                }
                self->onFrame(frame);
            });
            // Feed compressed frames from the track into the decoder.
            videoTrack->addDelegate([decoder](const mediakit::Frame::Ptr& frame) {
                return decoder->inputFrame(frame, false, true);
            });
        }
    });
    _player->setOnShutdown([weakPlayer, url, weakSelf](const toolkit::SockException& ex) {
        TraceL << "StackPlayer: " << url << " OnShutdown: " << ex.what();
        auto strongPlayer = weakPlayer.lock();
        if (!strongPlayer) {
            return;
        }
        auto self = weakSelf.lock();
        if (!self) {
            return;
        }
        // Stream dropped: show the background and schedule a reconnect.
        self->onDisconnect();
        self->rePlay(url);
    });
    _player->play(url);
}
// Fans a decoded frame out to every channel still attached to this player.
void StackPlayer::onFrame(const mediakit::FFmpegFrame::Ptr& frame) {
    std::lock_guard<std::recursive_mutex> guard(_mx);
    for (auto& weak_chn : _channels) {
        auto chn = weak_chn.lock();
        if (chn) {
            chn->onFrame(frame);
        }
    }
}
// Replaces every attached channel's picture with the shared placeholder
// background (shown while the source stream is offline).
void StackPlayer::onDisconnect() {
    std::lock_guard<std::recursive_mutex> guard(_mx);
    auto bg = VideoStackManager::Instance().getBgImg();
    for (auto& weak_chn : _channels) {
        auto chn = weak_chn.lock();
        if (chn) {
            chn->onFrame(bg);
        }
    }
}
/**
 * Schedules a delayed reconnect after a failure.
 * Uses a stepped back-off: 3s per consecutive failure, clamped to [2s, 60s].
 *
 * Fix: the timer lambda's `if (!self)` branch was empty — when the player
 * had already been destroyed, the null `self` was dereferenced right after,
 * crashing the process. Bail out instead.
 *
 * @param url stream url to replay
 */
void StackPlayer::rePlay(const std::string& url)
{
    _failedCount++;
    // Stepped retry delay, clamped between 2s and 60s.
    auto delay = MAX(2 * 1000, MIN(_failedCount * 3 * 1000, 60 * 1000));
    std::weak_ptr<StackPlayer> weakSelf = shared_from_this();
    _timer = std::make_shared<toolkit::Timer>(
        delay / 1000.0f, [weakSelf, url]() {
            auto self = weakSelf.lock();
            if (!self) {
                // Player already destroyed: stop retrying.
                return false;
            }
            WarnL << "replay [" << self->_failedCount << "]:" << url;
            self->_player->play(url);
            return false;
        },
        nullptr);
}
// Builds a composition stack: allocates the composed output frame buffer and
// registers a DevChannel media source (vhost DEFAULT_VHOST, app "live",
// stream = id) encoded as H.264 at the requested size/fps/bitrate.
VideoStack::VideoStack(const std::string& id, int width, int height, AVPixelFormat pixfmt, float fps, int bitRate)
    : _id(id)
    , _width(width)
    , _height(height)
    , _pixfmt(pixfmt)
    , _fps(fps)
    , _bitRate(bitRate)
{
    // Composition buffer every channel paints its region into.
    _buffer = std::make_shared<mediakit::FFmpegFrame>();
    _buffer->get()->width = _width;
    _buffer->get()->height = _height;
    _buffer->get()->format = _pixfmt;
    av_frame_get_buffer(_buffer->get(), 32);
    _dev = std::make_shared<mediakit::DevChannel>(mediakit::MediaTuple { DEFAULT_VHOST, "live", _id });
    mediakit::VideoInfo info;
    info.codecId = mediakit::CodecH264;
    info.iWidth = _width;
    info.iHeight = _height;
    info.iFrameRate = _fps;
    info.iBitRate = _bitRate;
    _dev->initVideo(info);
    //dev->initAudio(); //TODO: audio support
    _dev->addTrackCompleted();
    _isExit = false;
}
// Signals the encode loop to stop and waits for the worker thread to finish.
VideoStack::~VideoStack()
{
    _isExit = true;
    if (!_thread.joinable()) {
        return;
    }
    _thread.join();
}
// Swaps in a new layout: detaches the old regions from the output buffer,
// repaints the background, then attaches each new region to its channel so
// frames start landing in the right place.
void VideoStack::setParam(const Params& params)
{
    if (_params) {
        // Detach old regions so their channels stop writing into _buffer.
        for (auto& p : (*_params)) {
            if (!p)
                continue;
            p->weak_buf.reset();
        }
    }
    // Clear the canvas before the new layout is painted.
    initBgColor();
    for (auto& p : (*params)) {
        if (!p)
            continue;
        p->weak_buf = _buffer;
        if (auto chn = p->weak_chn.lock()) {
            chn->addParam(p);
            // Paint immediately so the region isn't blank until the next frame.
            chn->fillBuffer(p);
        }
    }
    _params = params;
}
void VideoStack::start()
{
_thread = std::thread([&]() {
uint64_t pts = 0;
int frameInterval = 1000 / _fps;
auto lastEncTP = std::chrono::steady_clock::now();
while (!_isExit) {
if (std::chrono::steady_clock::now() - lastEncTP > std::chrono::milliseconds(frameInterval)) {
lastEncTP = std::chrono::steady_clock::now();
_dev->inputYUV((char**)_buffer->get()->data, _buffer->get()->linesize, pts);
pts += frameInterval;
}
}
});
}
// Paints the whole composition buffer with a uniform dark-grey background
// (RGB 20,20,20 converted via the ITU-R BT.709 RGB_TO_* macros).
void VideoStack::initBgColor()
{
    const int R = 20;
    const int G = 20;
    const int B = 20;
    const int Y = RGB_TO_Y(R, G, B);
    const int U = RGB_TO_U(R, G, B);
    const int V = RGB_TO_V(R, G, B);
    auto* frame = _buffer->get();
    memset(frame->data[0], Y, frame->linesize[0] * _height);
    memset(frame->data[1], U, frame->linesize[1] * _height / 2);
    memset(frame->data[2], V, frame->linesize[2] * _height / 2);
}
// Returns a shared channel for (id, size, pixfmt), creating it on first use.
// Each call takes one reference; pair with unrefChannel().
Channel::Ptr VideoStackManager::getChannel(const std::string& id,
    int width,
    int height,
    AVPixelFormat pixfmt)
{
    std::lock_guard<std::recursive_mutex> guard(_mx);
    const auto key = id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt);
    auto found = _channelMap.find(key);
    if (found == _channelMap.end()) {
        return createChannel(id, width, height, pixfmt);
    }
    return found->second->acquire();
}
// Drops one reference on the channel identified by (id, size, pixfmt).
// When the channel's refcount hits zero it is destroyed, and the player's
// refcount is dropped in turn — the pull-stream player dies with the last
// channel derived from it.
void VideoStackManager::unrefChannel(const std::string& id,
    int width,
    int height,
    AVPixelFormat pixfmt)
{
    std::lock_guard<std::recursive_mutex> lock(_mx);
    auto key = id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt);
    auto chn_it = _channelMap.find(key);
    if (chn_it != _channelMap.end() && chn_it->second->dispose()) {
        _channelMap.erase(chn_it);
        // Channel is gone; release its pull-stream player as well.
        auto player_it = _playerMap.find(id);
        if (player_it != _playerMap.end() && player_it->second->dispose()) {
            _playerMap.erase(player_it);
        }
    }
}
// Creates, wires up and starts a new video stack described by `json`.
// Returns 0 on success, -1 when the description cannot be parsed.
int VideoStackManager::startVideoStack(const Json::Value& json)
{
    std::string id;
    int width;
    int height;
    auto params = parseParams(json, id, width, height);
    if (!params) {
        ErrorL << "Videostack parse params failed!";
        return -1;
    }
    auto stack = std::make_shared<VideoStack>(id, width, height);
    // Bind each visible region to its (shared) source channel.
    for (auto& param : *params) {
        if (param) {
            param->weak_chn = getChannel(param->id, param->width, param->height, param->pixfmt);
        }
    }
    stack->setParam(params);
    stack->start();
    std::lock_guard<std::recursive_mutex> guard(_mx);
    _stackMap[id] = stack;
    return 0;
}
// Re-lays-out an existing stack identified by json["id"].
// Returns 0 on success, -1 on parse failure, -2 when no such stack exists.
int VideoStackManager::resetVideoStack(const Json::Value& json)
{
    std::string id;
    int width;
    int height;
    auto params = parseParams(json, id, width, height);
    if (!params) {
        return -1;
    }
    VideoStack::Ptr stack;
    {
        std::lock_guard<std::recursive_mutex> guard(_mx);
        auto found = _stackMap.find(id);
        if (found == _stackMap.end()) {
            return -2;
        }
        stack = found->second;
    }
    // Bind each region of the new layout to its source channel.
    for (auto& param : *params) {
        if (param) {
            param->weak_chn = getChannel(param->id, param->width, param->height, param->pixfmt);
        }
    }
    stack->setParam(params);
    return 0;
}
// Removes and destroys the stack with the given id.
// Returns 0 on success, -1 when the id is unknown.
int VideoStackManager::stopVideoStack(const std::string& id)
{
    std::lock_guard<std::recursive_mutex> guard(_mx);
    if (_stackMap.erase(id)) {
        InfoL << "VideoStack stop: " << id;
        return 0;
    }
    return -1;
}
// Returns the shared placeholder/background frame (loaded via loadBgImg).
mediakit::FFmpegFrame::Ptr VideoStackManager::getBgImg() { return _bgImg; }
/**
 * Parses a stack layout description.
 *
 * Outputs id/width/height through the reference parameters and returns one
 * Param per grid cell (row-major). "span" entries merge rectangular cell
 * ranges ([[startRow,startCol],[endRow,endCol]], inclusive); covered cells
 * other than the top-left become nullptr.
 *
 * Fix: in the span merge the horizontal and vertical gaps were swapped —
 * merged width grew by gapvPix and merged height by gaphPix, the opposite
 * of how gridWidth/gridHeight are derived. Width now uses gaphPix (gaps
 * between columns) and height uses gapvPix (gaps between rows).
 *
 * @return the parsed layout, or nullptr on any parse error.
 */
Params VideoStackManager::parseParams(const Json::Value& json,
    std::string& id,
    int& width,
    int& height)
{
    try {
        id = json["id"].asString();
        width = json["width"].asInt();
        height = json["height"].asInt();
        int rows = json["row"].asInt(); // number of stacked rows
        int cols = json["col"].asInt(); // number of stacked columns
        float gapv = json["gapv"].asFloat(); // vertical gap, as a fraction of total height
        float gaph = json["gaph"].asFloat(); // horizontal gap, as a fraction of total width

        // Gap sizes in pixels.
        int gaphPix = static_cast<int>(round(width * gaph));
        int gapvPix = static_cast<int>(round(height * gapv));

        // Cell geometry after accounting for the gaps.
        int gridWidth = cols > 1 ? (width - gaphPix * (cols - 1)) / cols : width;
        int gridHeight = rows > 1 ? (height - gapvPix * (rows - 1)) / rows : height;

        auto params = std::make_shared<std::vector<Param::Ptr>>(rows * cols);
        for (int row = 0; row < rows; row++) {
            for (int col = 0; col < cols; col++) {
                std::string url = json["url"][row][col].asString();
                auto param = std::make_shared<Param>();
                param->posX = gridWidth * col + col * gaphPix;
                param->posY = gridHeight * row + row * gapvPix;
                param->width = gridWidth;
                param->height = gridHeight;
                param->id = url;
                (*params)[row * cols + col] = param;
            }
        }

        // Merge cells if requested (focus screen).
        if (json.isMember("span") && !json["span"].empty()) {
            for (const auto& subArray : json["span"]) {
                if (!subArray.isArray() || subArray.size() != 2) {
                    throw Json::LogicError("Incorrect 'span' sub-array format in JSON");
                }
                std::array<int, 4> mergePos;
                size_t index = 0; // size_t avoids the signed/unsigned compare below
                for (const auto& innerArray : subArray) {
                    if (!innerArray.isArray() || innerArray.size() != 2) {
                        throw Json::LogicError("Incorrect 'span' inner-array format in JSON");
                    }
                    for (const auto& number : innerArray) {
                        if (index < mergePos.size()) {
                            mergePos[index++] = number.asInt();
                        }
                    }
                }
                for (int i = mergePos[0]; i <= mergePos[2]; i++) {
                    for (int j = mergePos[1]; j <= mergePos[3]; j++) {
                        if (i == mergePos[0] && j == mergePos[1]) {
                            // Width spans columns -> horizontal gap; height spans rows -> vertical gap.
                            (*params)[i * cols + j]->width = (mergePos[3] - mergePos[1] + 1) * gridWidth + (mergePos[3] - mergePos[1]) * gaphPix;
                            (*params)[i * cols + j]->height = (mergePos[2] - mergePos[0] + 1) * gridHeight + (mergePos[2] - mergePos[0]) * gapvPix;
                        } else {
                            // Cell hidden by the merge.
                            (*params)[i * cols + j] = nullptr;
                        }
                    }
                }
            }
        }
        return params;
    } catch (const std::exception& e) {
        ErrorL << "Videostack parse params failed! " << e.what();
        return nullptr;
    }
}
/**
 * Loads the offline-placeholder image from a raw 1280x720 YUV420P file.
 *
 * Fix: the three plane reads were never checked — a missing or truncated
 * file previously returned true with a garbage background. Now returns
 * false unless every plane was read in full.
 *
 * @param path path to the raw YUV file (e.g. conf/novideo.yuv)
 * @return true when the image was fully loaded.
 */
bool VideoStackManager::loadBgImg(const std::string& path)
{
    _bgImg = std::make_shared<mediakit::FFmpegFrame>();
    _bgImg->get()->width = 1280; // file is assumed to be raw 1280x720 YUV420P
    _bgImg->get()->height = 720;
    _bgImg->get()->format = AV_PIX_FMT_YUV420P;
    av_frame_get_buffer(_bgImg->get(), 32);
    std::ifstream file(path, std::ios::binary);
    if (!file.is_open()) {
        return false;
    }
    file.read((char*)_bgImg->get()->data[0], _bgImg->get()->linesize[0] * _bgImg->get()->height); // Y
    file.read((char*)_bgImg->get()->data[1], _bgImg->get()->linesize[1] * _bgImg->get()->height / 2); // U
    file.read((char*)_bgImg->get()->data[2], _bgImg->get()->linesize[2] * _bgImg->get()->height / 2); // V
    // fail() is set if any read came up short.
    return !file.fail();
}
// Builds a new ref-counted channel bound to the pull-stream player for `id`
// and registers it under the (id, size, pixfmt) key.
Channel::Ptr VideoStackManager::createChannel(const std::string& id,
    int width,
    int height,
    AVPixelFormat pixfmt)
{
    std::lock_guard<std::recursive_mutex> guard(_mx);
    // Reuse the existing player for this url if one is alive.
    StackPlayer::Ptr player;
    auto found = _playerMap.find(id);
    player = (found != _playerMap.end()) ? found->second->acquire() : createPlayer(id);
    auto wrapper = std::make_shared<RefWrapper<Channel::Ptr>>(std::make_shared<Channel>(id, width, height, pixfmt));
    auto channel = wrapper->acquire();
    player->addChannel(channel);
    const auto key = id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt);
    _channelMap[key] = wrapper;
    return channel;
}
// Creates and registers a ref-counted pull-stream player for `id`,
// starting playback immediately unless the url/id is empty.
StackPlayer::Ptr VideoStackManager::createPlayer(const std::string& id)
{
    std::lock_guard<std::recursive_mutex> guard(_mx);
    auto wrapper = std::make_shared<RefWrapper<StackPlayer::Ptr>>(std::make_shared<StackPlayer>(id));
    _playerMap[id] = wrapper;
    auto player = wrapper->acquire();
    if (!id.empty()) {
        player->play();
    }
    return player;
}
#endif

208
server/VideoStack.h Normal file
View File

@ -0,0 +1,208 @@
#pragma once
#if defined(ENABLE_VIDEOSTACK) && defined(ENABLE_X264) && defined(ENABLE_FFMPEG)
#include "Codec/Transcode.h"
#include "Common/Device.h"
#include "Player/MediaPlayer.h"
#include "json/json.h"
#include <mutex>
// Pairs an entity with an atomic reference count. acquire() hands out the
// entity and bumps the count; dispose() drops one reference and reports
// whether the entity should be destroyed.
//
// Fix: the constructor's init list ordered _rc before _entity while the
// members are declared the other way round (-Wreorder; members always
// initialize in declaration order). _rc now uses an in-class initializer,
// removing the mismatch.
template <typename T>
class RefWrapper {
public:
    using Ptr = std::shared_ptr<RefWrapper<T>>;

    // Forwards its arguments to T's constructor; count starts at zero.
    template <typename... Args>
    explicit RefWrapper(Args&&... args)
        : _entity(std::forward<Args>(args)...)
    {
    }

    // Takes one reference and returns the wrapped entity.
    T acquire()
    {
        ++_rc;
        return _entity;
    }

    // Drops one reference; true when no references remain.
    bool dispose() { return --_rc <= 0; }

private:
    T _entity;
    std::atomic<int> _rc { 0 };
};
class Channel;

// Describes one display region inside a composed output frame: where the
// sub-picture goes (posX/posY) and how big it is, plus runtime links to the
// channel that paints it and the buffer it paints into.
struct Param {
    using Ptr = std::shared_ptr<Param>;

    int posX = 0; // left edge inside the output buffer, in pixels
    int posY = 0; // top edge inside the output buffer, in pixels
    int width = 0;
    int height = 0;
    AVPixelFormat pixfmt = AV_PIX_FMT_YUV420P;
    std::string id {}; // source stream url; doubles as the channel/player id

    // runtime
    std::weak_ptr<Channel> weak_chn; // channel that paints this region
    std::weak_ptr<mediakit::FFmpegFrame> weak_buf; // output buffer being painted

    // Releases this region's channel reference (VideoStackManager::unrefChannel).
    ~Param();
};

// One layout: all regions of a stack, row-major.
// Null entries are cells hidden by a "span" merge.
using Params = std::shared_ptr<std::vector<Param::Ptr>>;
// One decoded video source scaled to a fixed size/format. Receives frames
// from a StackPlayer, rescales them on a worker poller, and copies the
// result into every registered output region (Param).
class Channel : public std::enable_shared_from_this<Channel> {
public:
    using Ptr = std::shared_ptr<Channel>;

    Channel(const std::string& id, int width, int height, AVPixelFormat pixfmt);

    // Registers a region to repaint whenever a new frame arrives.
    void addParam(const std::weak_ptr<Param>& p);

    // Entry point for decoded frames; the work is moved onto _poller.
    void onFrame(const mediakit::FFmpegFrame::Ptr& frame);

    // Paints the channel's current picture into the region's output buffer.
    void fillBuffer(const Param::Ptr& p);

protected:
    void forEachParam(const std::function<void(const Param::Ptr&)>& func);
    void copyData(const mediakit::FFmpegFrame::Ptr& buf, const Param::Ptr& p);

private:
    std::string _id;
    int _width;
    int _height;
    AVPixelFormat _pixfmt;

    mediakit::FFmpegFrame::Ptr _tmp; // latest frame, scaled to (_width, _height, _pixfmt)

    std::recursive_mutex _mx; // guards _params
    std::vector<std::weak_ptr<Param>> _params;

    mediakit::FFmpegSws::Ptr _sws;
    toolkit::EventPoller::Ptr _poller; // lazily bound worker for scaling/painting
};
// Pulls one stream, decodes its video, and fans frames out to every channel
// derived from it. Reconnects automatically with a stepped back-off.
class StackPlayer : public std::enable_shared_from_this<StackPlayer> {
public:
    using Ptr = std::shared_ptr<StackPlayer>;

    StackPlayer(const std::string& url)
        : _url(url)
    {
    }

    // Attaches a channel that will receive this player's decoded frames.
    void addChannel(const std::weak_ptr<Channel>& chn);

    // Starts pulling and decoding the stream.
    void play();

    // Distributes a decoded frame to all attached channels.
    void onFrame(const mediakit::FFmpegFrame::Ptr& frame);

    // Pushes the placeholder background to all channels.
    void onDisconnect();

protected:
    // Schedules a delayed reconnect (stepped back-off).
    void rePlay(const std::string& url);

private:
    std::string _url;
    mediakit::MediaPlayer::Ptr _player;

    // Used for reconnecting after a drop.
    toolkit::Timer::Ptr _timer;
    int _failedCount = 0; // consecutive failures; drives the retry back-off

    std::recursive_mutex _mx; // guards _channels
    std::vector<std::weak_ptr<Channel>> _channels;
};
// One composed output: owns the YUV composition buffer that the channels
// paint into, encodes it at a fixed frame rate, and publishes it as a live
// stream whose name is the first constructor argument.
class VideoStack {
public:
    using Ptr = std::shared_ptr<VideoStack>;

    // NOTE(review): the first parameter is used as the stack id / stream
    // name (the .cpp definition names it `id`) — the name `url` here is
    // misleading.
    VideoStack(const std::string& url,
        int width = 1920,
        int height = 1080,
        AVPixelFormat pixfmt = AV_PIX_FMT_YUV420P,
        float fps = 25.0,
        int bitRate = 2 * 1024 * 1024);

    // Stops and joins the encode thread.
    ~VideoStack();

    // Replaces the current layout; detaches old regions, attaches new ones.
    void setParam(const Params& params);

    // Launches the encode-loop thread.
    void start();

protected:
    // Fills the composition buffer with the background color.
    void initBgColor();

public:
    Params _params;

    mediakit::FFmpegFrame::Ptr _buffer; // composed output frame, written by channels

private:
    std::string _id;
    int _width;
    int _height;
    AVPixelFormat _pixfmt;
    float _fps;
    int _bitRate;

    mediakit::DevChannel::Ptr _dev;

    bool _isExit; // NOTE(review): shared with the encode thread — consider std::atomic<bool>
    std::thread _thread;
};
// Singleton that owns all stacks, channels and players, sharing one decoder
// per source url between stacks via ref-counted wrappers.
// All three maps are guarded by _mx.
class VideoStackManager {
public:
    static VideoStackManager& Instance();

    // Returns (creating on demand) a shared channel for (id, size, pixfmt);
    // each call takes one reference — pair with unrefChannel().
    Channel::Ptr getChannel(const std::string& id,
        int width,
        int height,
        AVPixelFormat pixfmt);

    // Drops one channel reference; tears down channel and player at zero.
    void unrefChannel(const std::string& id,
        int width,
        int height,
        AVPixelFormat pixfmt);

    // Creates and starts a stack from a JSON description. 0 on success, -1 on parse error.
    int startVideoStack(const Json::Value& json);

    // Re-lays-out an existing stack. 0 ok, -1 bad json, -2 unknown id.
    int resetVideoStack(const Json::Value& json);

    // Destroys the stack with the given id. 0 ok, -1 unknown id.
    int stopVideoStack(const std::string& id);

    // Loads the raw YUV placeholder image shown while a source is offline.
    bool loadBgImg(const std::string& path);

    mediakit::FFmpegFrame::Ptr getBgImg();

protected:
    // Parses id/size/layout from json; returns nullptr on error.
    Params parseParams(const Json::Value& json,
        std::string& id,
        int& width,
        int& height);

protected:
    Channel::Ptr createChannel(const std::string& id,
        int width,
        int height,
        AVPixelFormat pixfmt);

    StackPlayer::Ptr createPlayer(const std::string& id);

private:
    mediakit::FFmpegFrame::Ptr _bgImg;

private:
    std::recursive_mutex _mx;
    std::unordered_map<std::string, VideoStack::Ptr> _stackMap;
    std::unordered_map<std::string, RefWrapper<Channel::Ptr>::Ptr> _channelMap;
    std::unordered_map<std::string, RefWrapper<StackPlayer::Ptr>::Ptr> _playerMap;
};
#endif

View File

@ -59,7 +59,11 @@
#endif
#if defined(ENABLE_VERSION)
#include "version.h"
#include "ZLMVersion.h"
#endif
#if defined(ENABLE_VIDEOSTACK) && defined(ENABLE_X264) && defined (ENABLE_FFMPEG)
#include "VideoStack.h"
#endif
using namespace std;
@ -115,7 +119,7 @@ static HttpApi toApi(const function<void(API_ARGS_MAP_ASYNC)> &cb) {
//参数解析成map
auto args = getAllArgs(parser);
cb(sender, headerOut, HttpAllArgs<decltype(args)>(parser, args), val, invoker);
cb(sender, headerOut, ArgsMap(parser, args), val, invoker);
};
}
@ -143,7 +147,7 @@ static HttpApi toApi(const function<void(API_ARGS_JSON_ASYNC)> &cb) {
Json::Reader reader;
reader.parse(parser.content(), args);
cb(sender, headerOut, HttpAllArgs<decltype(args)>(parser, args), val, invoker);
cb(sender, headerOut, ArgsJson(parser, args), val, invoker);
};
}
@ -163,7 +167,7 @@ static HttpApi toApi(const function<void(API_ARGS_STRING_ASYNC)> &cb) {
Json::Value val;
val["code"] = API::Success;
cb(sender, headerOut, HttpAllArgs<string>(parser, (string &)parser.content()), val, invoker);
cb(sender, headerOut, ArgsString(parser, (string &)parser.content()), val, invoker);
};
}
@ -204,7 +208,7 @@ static ApiArgsType getAllArgs(const Parser &parser) {
if (parser["Content-Type"].find("application/x-www-form-urlencoded") == 0) {
auto contentArgs = parser.parseArgs(parser.content());
for (auto &pr : contentArgs) {
allArgs[pr.first] = HttpSession::urlDecodeComponent(pr.second);
allArgs[pr.first] = strCoding::UrlDecodeComponent(pr.second);
}
} else if (parser["Content-Type"].find("application/json") == 0) {
try {
@ -285,6 +289,8 @@ static inline void addHttpListener(){
it->second(parser, invoker, sender);
} catch (ApiRetException &ex) {
responseApi(ex.code(), ex.what(), invoker);
auto helper = static_cast<SocketHelper &>(sender).shared_from_this();
helper->getPoller()->async([helper, ex]() { helper->shutdown(SockException(Err_shutdown, ex.what())); }, false);
}
#ifdef ENABLE_MYSQL
catch(SqlException &ex){
@ -385,6 +391,7 @@ void dumpMediaTuple(const MediaTuple &tuple, Json::Value& item) {
item[VHOST_KEY] = tuple.vhost;
item["app"] = tuple.app;
item["stream"] = tuple.stream;
item["params"] = tuple.params;
}
Value makeMediaSourceJson(MediaSource &media){
@ -580,8 +587,10 @@ void addStreamProxy(const string &vhost, const string &app, const string &stream
//添加拉流代理
auto player = s_player_proxy.make(key, vhost, app, stream, option, retry_count);
// 先透传参数
player->mINI::operator=(args);
// 先透传拷贝参数
for (auto &pr : args) {
(*player)[pr.first] = pr.second;
}
//指定RTP over TCP(播放rtsp时有效)
(*player)[Client::kRtpType] = rtp_type;
@ -656,13 +665,6 @@ void addStreamPusherProxy(const string &schema,
pusher->publish(url);
}
template <typename Type>
static void getArgsValue(const HttpAllArgs<ApiArgsType> &allArgs, const string &key, Type &value) {
auto val = allArgs[key];
if (!val.empty()) {
value = (Type)val;
}
}
/**
* api接口
@ -729,7 +731,7 @@ void installWebApi() {
CHECK_SECRET();
auto &ini = mINI::Instance();
int changed = API::Success;
for (auto &pr : allArgs.getArgs()) {
for (auto &pr : allArgs.args) {
if (ini.find(pr.first) == ini.end()) {
#if 1
//没有这个key
@ -1087,7 +1089,7 @@ void installWebApi() {
CHECK_ARGS("vhost","app","stream","url");
mINI args;
for (auto &pr : allArgs.getArgs()) {
for (auto &pr : allArgs.args) {
args.emplace(pr.first, pr.second);
}
@ -1184,7 +1186,7 @@ void installWebApi() {
//测试url http://127.0.0.1/index/api/downloadBin
api_regist("/index/api/downloadBin",[](API_ARGS_MAP_ASYNC){
CHECK_SECRET();
invoker.responseFile(allArgs.getParser().getHeader(),StrCaseMap(),exePath());
invoker.responseFile(allArgs.parser.getHeader(), StrCaseMap(), exePath());
});
#if defined(ENABLE_RTPPROXY)
@ -1315,7 +1317,7 @@ void installWebApi() {
if (!src) {
throw ApiRetException("can not find the source stream", API::NotFound);
}
auto type = allArgs["type"].as<int>();
auto type = allArgs["type"].empty() ? (int)MediaSourceEvent::SendRtpArgs::kRtpPS : allArgs["type"].as<int>();
if (!allArgs["use_ps"].empty()) {
// 兼容之前的use_ps参数
type = allArgs["use_ps"].as<int>();
@ -1347,6 +1349,26 @@ void installWebApi() {
});
});
api_regist("/index/api/listRtpSender",[](API_ARGS_MAP_ASYNC){
CHECK_SECRET();
CHECK_ARGS("vhost", "app", "stream");
auto src = MediaSource::find(allArgs["vhost"], allArgs["app"], allArgs["stream"]);
if (!src) {
throw ApiRetException("can not find the source stream", API::NotFound);
}
auto muxer = src->getMuxer();
CHECK(muxer, "get muxer from media source failed");
src->getOwnerPoller()->async([=]() mutable {
muxer->forEachRtpSender([&](const std::string &ssrc) mutable {
val["data"].append(ssrc);
});
invoker(200, headerOut, val.toStyledString());
});
});
api_regist("/index/api/startSendRtpPassive",[](API_ARGS_MAP_ASYNC){
CHECK_SECRET();
CHECK_ARGS("vhost", "app", "stream", "ssrc");
@ -1355,7 +1377,7 @@ void installWebApi() {
if (!src) {
throw ApiRetException("can not find the source stream", API::NotFound);
}
auto type = allArgs["type"].as<int>();
auto type = allArgs["type"].empty() ? (int)MediaSourceEvent::SendRtpArgs::kRtpPS : allArgs["type"].as<int>();
if (!allArgs["use_ps"].empty()) {
// 兼容之前的use_ps参数
type = allArgs["use_ps"].as<int>();
@ -1564,16 +1586,18 @@ void installWebApi() {
api_regist("/index/api/deleteRecordDirectory", [](API_ARGS_MAP) {
CHECK_SECRET();
CHECK_ARGS("vhost", "app", "stream", "period");
auto tuple = MediaTuple{allArgs["vhost"], allArgs["app"], allArgs["stream"]};
auto tuple = MediaTuple{allArgs["vhost"], allArgs["app"], allArgs["stream"], ""};
auto record_path = Recorder::getRecordPath(Recorder::type_mp4, tuple, allArgs["customized_path"]);
auto period = allArgs["period"];
record_path = record_path + period + "/";
bool recording = false;
auto name = allArgs["name"];
if (!name.empty()) {
// 删除指定文件
record_path += name;
}
bool recording = false;
{
} else {
// 删除文件夹,先判断该流是否正在录制中
auto src = MediaSource::find(allArgs["vhost"], allArgs["app"], allArgs["stream"]);
if (src && src->isRecording(Recorder::type_mp4)) {
recording = true;
@ -1603,7 +1627,7 @@ void installWebApi() {
api_regist("/index/api/getMP4RecordFile", [](API_ARGS_MAP){
CHECK_SECRET();
CHECK_ARGS("vhost", "app", "stream");
auto tuple = MediaTuple{allArgs["vhost"], allArgs["app"], allArgs["stream"]};
auto tuple = MediaTuple{allArgs["vhost"], allArgs["app"], allArgs["stream"], ""};
auto record_path = Recorder::getRecordPath(Recorder::type_mp4, tuple, allArgs["customized_path"]);
auto period = allArgs["period"];
@ -1691,7 +1715,7 @@ void installWebApi() {
//截图存在,且未过期,那么返回之
res_old_snap = true;
responseSnap(path, allArgs.getParser().getHeader(), invoker);
responseSnap(path, allArgs.parser.getHeader(), invoker);
//中断遍历
return false;
});
@ -1722,7 +1746,7 @@ void installWebApi() {
File::delete_file(new_snap);
rename(new_snap_tmp.data(), new_snap.data());
}
responseSnap(new_snap, allArgs.getParser().getHeader(), invoker, err_msg);
responseSnap(new_snap, allArgs.parser.getHeader(), invoker, err_msg);
});
});
@ -1737,7 +1761,7 @@ void installWebApi() {
#ifdef ENABLE_WEBRTC
class WebRtcArgsImp : public WebRtcArgs {
public:
WebRtcArgsImp(const HttpAllArgs<string> &args, std::string session_id)
WebRtcArgsImp(const ArgsString &args, std::string session_id)
: _args(args)
, _session_id(std::move(session_id)) {}
~WebRtcArgsImp() override = default;
@ -1755,40 +1779,26 @@ void installWebApi() {
CHECK_ARGS("app", "stream");
return StrPrinter << "rtc://" << _args["Host"] << "/" << _args["app"] << "/"
<< _args["stream"] << "?" << _args.getParser().params() + "&session=" + _session_id;
<< _args["stream"] << "?" << _args.parser.params() + "&session=" + _session_id;
}
private:
HttpAllArgs<string> _args;
ArgsString _args;
std::string _session_id;
};
api_regist("/index/api/webrtc",[](API_ARGS_STRING_ASYNC){
CHECK_ARGS("type");
auto type = allArgs["type"];
auto offer = allArgs.getArgs();
auto offer = allArgs.args;
CHECK(!offer.empty(), "http body(webrtc offer sdp) is empty");
std::string host = allArgs.getParser()["Host"];
std::string localIp = host.substr(0, host.find(':'));
auto isVaildIP = [](std::string ip)-> bool {
int a,b,c,d;
return sscanf(ip.c_str(),"%d.%d.%d.%d", &a, &b, &c, &d) == 4;
};
if (!isVaildIP(localIp) || localIp=="127.0.0.1") {
localIp = "";
}
auto &session = static_cast<Session&>(sender);
auto args = std::make_shared<WebRtcArgsImp>(allArgs, sender.getIdentifier());
WebRtcPluginManager::Instance().getAnswerSdp(static_cast<Session&>(sender), type, *args, [invoker, val, offer, headerOut, localIp](const WebRtcInterface &exchanger) mutable {
//设置返回类型
headerOut["Content-Type"] = HttpFileManager::getContentType(".json");
//设置跨域
headerOut["Access-Control-Allow-Origin"] = "*";
WebRtcPluginManager::Instance().negotiateSdp(session, type, *args, [invoker, val, offer, headerOut](const WebRtcInterface &exchanger) mutable {
auto &handler = const_cast<WebRtcInterface &>(exchanger);
try {
setLocalIp(exchanger,localIp);
val["sdp"] = exchangeSdp(exchanger, offer);
val["sdp"] = handler.getAnswerSdp(offer);
val["id"] = exchanger.getIdentifier();
val["type"] = "answer";
invoker(200, headerOut, val.toStyledString());
@ -1802,26 +1812,24 @@ void installWebApi() {
static constexpr char delete_webrtc_url [] = "/index/api/delete_webrtc";
static auto whip_whep_func = [](const char *type, API_ARGS_STRING_ASYNC) {
auto offer = allArgs.getArgs();
auto offer = allArgs.args;
CHECK(!offer.empty(), "http body(webrtc offer sdp) is empty");
auto &session = static_cast<Session&>(sender);
auto location = std::string("http") + (session.overSsl() ? "s" : "") + "://" + allArgs["host"] + delete_webrtc_url;
auto location = std::string(session.overSsl() ? "https://" : "http://") + allArgs["host"] + delete_webrtc_url;
auto args = std::make_shared<WebRtcArgsImp>(allArgs, sender.getIdentifier());
WebRtcPluginManager::Instance().getAnswerSdp(session, type, *args,
[invoker, offer, headerOut, location](const WebRtcInterface &exchanger) mutable {
// 设置跨域
headerOut["Access-Control-Allow-Origin"] = "*";
try {
// 设置返回类型
headerOut["Content-Type"] = "application/sdp";
headerOut["Location"] = location + "?id=" + exchanger.getIdentifier() + "&token=" + exchanger.deleteRandStr();
invoker(201, headerOut, exchangeSdp(exchanger, offer));
} catch (std::exception &ex) {
headerOut["Content-Type"] = "text/plain";
invoker(406, headerOut, ex.what());
}
});
WebRtcPluginManager::Instance().negotiateSdp(session, type, *args, [invoker, offer, headerOut, location](const WebRtcInterface &exchanger) mutable {
auto &handler = const_cast<WebRtcInterface &>(exchanger);
try {
// 设置返回类型
headerOut["Content-Type"] = "application/sdp";
headerOut["Location"] = location + "?id=" + exchanger.getIdentifier() + "&token=" + exchanger.deleteRandStr();
invoker(201, headerOut, handler.getAnswerSdp(offer));
} catch (std::exception &ex) {
headerOut["Content-Type"] = "text/plain";
invoker(406, headerOut, ex.what());
}
});
};
api_regist("/index/api/whip", [](API_ARGS_STRING_ASYNC) { whip_whep_func("push", API_ARGS_VALUE, invoker); });
@ -1829,7 +1837,7 @@ void installWebApi() {
api_regist(delete_webrtc_url, [](API_ARGS_MAP_ASYNC) {
CHECK_ARGS("id", "token");
CHECK(allArgs.getParser().method() == "DELETE", "http method is not DELETE: " + allArgs.getParser().method());
CHECK(allArgs.parser.method() == "DELETE", "http method is not DELETE: " + allArgs.parser.method());
auto obj = WebRtcTransportManager::Instance().getItem(allArgs["id"]);
if (!obj) {
invoker(404, headerOut, "id not found");
@ -1879,7 +1887,7 @@ void installWebApi() {
std::set<std::string> ret;
auto vec = toolkit::split(str, ";");
for (auto &item : vec) {
auto root = File::absolutePath(item, "", true);
auto root = File::absolutePath("", item, true);
ret.emplace(std::move(root));
}
return ret;
@ -1915,16 +1923,41 @@ void installWebApi() {
if (!save_name.empty()) {
res_header.emplace("Content-Disposition", "attachment;filename=\"" + save_name + "\"");
}
invoker.responseFile(allArgs.getParser().getHeader(), res_header, allArgs["file_path"]);
invoker.responseFile(allArgs.parser.getHeader(), res_header, allArgs["file_path"]);
}
};
bool flag = NOTICE_EMIT(BroadcastHttpAccessArgs, Broadcast::kBroadcastHttpAccess, allArgs.getParser(), file_path, false, file_invoker, sender);
bool flag = NOTICE_EMIT(BroadcastHttpAccessArgs, Broadcast::kBroadcastHttpAccess, allArgs.parser, file_path, false, file_invoker, sender);
if (!flag) {
// 文件下载鉴权事件无人监听,不允许下载
invoker(401, StrCaseMap {}, "None http access event listener");
}
});
#if defined(ENABLE_VIDEOSTACK) && defined(ENABLE_X264) && defined(ENABLE_FFMPEG)
VideoStackManager::Instance().loadBgImg("novideo.yuv");
NoticeCenter::Instance().addListener(nullptr, Broadcast::kBroadcastStreamNoneReader, [](BroadcastStreamNoneReaderArgs) {
auto id = sender.getMediaTuple().stream;
VideoStackManager::Instance().stopVideoStack(id);
});
api_regist("/index/api/stack/start", [](API_ARGS_JSON_ASYNC) {
CHECK_SECRET();
auto ret = VideoStackManager::Instance().startVideoStack(allArgs.args);
val["code"] = ret;
val["msg"] = ret ? "failed" : "success";
invoker(200, headerOut, val.toStyledString());
});
api_regist("/index/api/stack/stop", [](API_ARGS_MAP_ASYNC) {
CHECK_SECRET();
CHECK_ARGS("id");
auto ret = VideoStackManager::Instance().stopVideoStack(allArgs["id"]);
val["code"] = ret;
val["msg"] = ret ? "failed" : "success";
invoker(200, headerOut, val.toStyledString());
});
#endif
}
void unInstallWebApi(){

View File

@ -115,72 +115,41 @@ std::string getValue(const mediakit::Parser &parser, Args &args, const First &fi
template<typename Args>
class HttpAllArgs {
mediakit::Parser* _parser = nullptr;
Args* _args = nullptr;
public:
HttpAllArgs(const mediakit::Parser &parser, Args &args) {
_get_args = [&args]() {
return (void *) &args;
};
_get_parser = [&parser]() -> const mediakit::Parser & {
return parser;
};
_get_value = [](HttpAllArgs &that, const std::string &key) {
return getValue(that.getParser(), that.getArgs(), key);
};
_clone = [&](HttpAllArgs &that) {
that._get_args = [args]() {
return (void *) &args;
};
that._get_parser = [parser]() -> const mediakit::Parser & {
return parser;
};
that._get_value = [](HttpAllArgs &that, const std::string &key) {
return getValue(that.getParser(), that.getArgs(), key);
};
that._cache_able = true;
};
}
const mediakit::Parser& parser;
Args& args;
HttpAllArgs(const HttpAllArgs &that) {
if (that._cache_able) {
_get_args = that._get_args;
_get_parser = that._get_parser;
_get_value = that._get_value;
_cache_able = true;
} else {
that._clone(*this);
HttpAllArgs(const mediakit::Parser &p, Args &a): parser(p), args(a) {}
HttpAllArgs(const HttpAllArgs &that): _parser(new mediakit::Parser(that.parser)),
_args(new Args(that.args)),
parser(*_parser), args(*_args) {}
~HttpAllArgs() {
if (_parser) {
delete _parser;
}
if (_args) {
delete _args;
}
}
template<typename Key>
toolkit::variant operator[](const Key &key) const {
return (toolkit::variant)_get_value(*(HttpAllArgs*)this, key);
return (toolkit::variant)getValue(parser, args, key);
}
const mediakit::Parser &getParser() const {
return _get_parser();
}
Args &getArgs() {
return *((Args *) _get_args());
}
const Args &getArgs() const {
return *((Args *) _get_args());
}
private:
bool _cache_able = false;
std::function<void *() > _get_args;
std::function<const mediakit::Parser &() > _get_parser;
std::function<std::string(HttpAllArgs &that, const std::string &key)> _get_value;
std::function<void(HttpAllArgs &that) > _clone;
};
#define API_ARGS_MAP toolkit::SockInfo &sender, mediakit::HttpSession::KeyValue &headerOut, const HttpAllArgs<ApiArgsType> &allArgs, Json::Value &val
using ArgsMap = HttpAllArgs<ApiArgsType>;
using ArgsJson = HttpAllArgs<Json::Value>;
using ArgsString = HttpAllArgs<std::string>;
#define API_ARGS_MAP toolkit::SockInfo &sender, mediakit::HttpSession::KeyValue &headerOut, const ArgsMap &allArgs, Json::Value &val
#define API_ARGS_MAP_ASYNC API_ARGS_MAP, const mediakit::HttpSession::HttpResponseInvoker &invoker
#define API_ARGS_JSON toolkit::SockInfo &sender, mediakit::HttpSession::KeyValue &headerOut, const HttpAllArgs<Json::Value> &allArgs, Json::Value &val
#define API_ARGS_JSON toolkit::SockInfo &sender, mediakit::HttpSession::KeyValue &headerOut, const ArgsJson &allArgs, Json::Value &val
#define API_ARGS_JSON_ASYNC API_ARGS_JSON, const mediakit::HttpSession::HttpResponseInvoker &invoker
#define API_ARGS_STRING toolkit::SockInfo &sender, mediakit::HttpSession::KeyValue &headerOut, const HttpAllArgs<std::string> &allArgs, Json::Value &val
#define API_ARGS_STRING toolkit::SockInfo &sender, mediakit::HttpSession::KeyValue &headerOut, const ArgsString &allArgs, Json::Value &val
#define API_ARGS_STRING_ASYNC API_ARGS_STRING, const mediakit::HttpSession::HttpResponseInvoker &invoker
#define API_ARGS_VALUE sender, headerOut, allArgs, val
@ -234,8 +203,6 @@ void unInstallWebApi();
#if defined(ENABLE_RTPPROXY)
uint16_t openRtpServer(uint16_t local_port, const std::string &stream_id, int tcp_mode, const std::string &local_ip, bool re_use_port, uint32_t ssrc, int only_track, bool multiplex=false);
void connectRtpServer(const std::string &stream_id, const std::string &dst_url, uint16_t dst_port, const std::function<void(const toolkit::SockException &ex)> &cb);
bool closeRtpServer(const std::string &stream_id);
#endif
Json::Value makeMediaSourceJson(mediakit::MediaSource &media);

View File

@ -225,7 +225,7 @@ static ArgsType make_json(const MediaInfo &args) {
ArgsType body;
body["schema"] = args.schema;
dumpMediaTuple(args, body);
body["params"] = args.param_strs;
body["params"] = args.params;
return body;
}
@ -286,7 +286,7 @@ static string getPullUrl(const string &origin_fmt, const MediaInfo &info) {
return "";
}
// 告知源站这是来自边沿站的拉流请求,如果未找到流请立即返回拉流失败
return string(url) + '?' + kEdgeServerParam + '&' + VHOST_KEY + '=' + info.vhost + '&' + info.param_strs;
return string(url) + '?' + kEdgeServerParam + '&' + VHOST_KEY + '=' + info.vhost + '&' + info.params;
}
static void pullStreamFromOrigin(const vector<string> &urls, size_t index, size_t failed_cnt, const MediaInfo &args, const function<void()> &closePlayer) {
@ -498,7 +498,7 @@ void installWebHook() {
return;
}
if (start_with(args.param_strs, kEdgeServerParam)) {
if (start_with(args.params, kEdgeServerParam)) {
// 源站收到来自边沿站的溯源请求,流不存在时立即返回拉流失败
closePlayer();
return;

View File

@ -38,7 +38,7 @@
#endif
#if defined(ENABLE_VERSION)
#include "version.h"
#include "ZLMVersion.h"
#endif
#if !defined(_WIN32)
@ -258,6 +258,15 @@ int start_main(int argc,char *argv[]) {
//加载配置文件,如果配置文件不存在就创建一个
loadIniConfig(g_ini_file.data());
auto &secret = mINI::Instance()[API::kSecret];
if (secret == "035c73f7-bb6b-4889-a715-d9eb2d1925cc" || secret.empty()) {
// 使用默认secret被禁止启动
secret = makeRandStr(32, true);
mINI::Instance().dumpFile(g_ini_file);
WarnL << "The " << API::kSecret << " is invalid, modified it to: " << secret
<< ", saved config file: " << g_ini_file;
}
if (!File::is_dir(ssl_file)) {
// 不是文件夹,加载证书,证书包含公钥和私钥
SSL_Initor::Instance().loadCertificate(ssl_file.data());
@ -352,14 +361,6 @@ int start_main(int argc,char *argv[]) {
InfoL << "已启动http hook 接口";
try {
auto &secret = mINI::Instance()[API::kSecret];
if (secret == "035c73f7-bb6b-4889-a715-d9eb2d1925cc" || secret.empty()) {
// 使用默认secret被禁止启动
secret = makeRandStr(32, true);
mINI::Instance().dumpFile(g_ini_file);
WarnL << "The " << API::kSecret << " is invalid, modified it to: " << secret
<< ", saved config file: " << g_ini_file;
}
//rtsp服务器端口默认554
if (rtspPort) { rtspSrv->start<RtspSession>(rtspPort); }
//rtsps服务器端口默认322

View File

@ -187,6 +187,8 @@ void MediaSink::emitAllTrackReady() {
pr.second.for_each([&](const Frame::Ptr &frame) { MediaSink::inputFrame(frame); });
}
_frame_unread.clear();
} else {
throw toolkit::SockException(toolkit::Err_shutdown, "no vaild track data");
}
}

View File

@ -113,7 +113,7 @@ ProtocolOption::ProtocolOption() {
//////////////////////////////////////////////////////////////////////////////////////////////////////////////
struct MediaSourceNull : public MediaSource {
MediaSourceNull() : MediaSource("schema", MediaTuple{"vhost", "app", "stream"}) {};
MediaSourceNull() : MediaSource("schema", MediaTuple{"vhost", "app", "stream", ""}) {};
int readerCount() override { return 0; }
};
@ -583,7 +583,7 @@ void MediaInfo::parse(const std::string &url_in){
auto url = url_in;
auto pos = url.find("?");
if (pos != string::npos) {
param_strs = url.substr(pos + 1);
params = url.substr(pos + 1);
url.erase(pos);
}
@ -616,9 +616,10 @@ void MediaInfo::parse(const std::string &url_in){
stream = stream_id;
}
auto params = Parser::parseArgs(param_strs);
if (params.find(VHOST_KEY) != params.end()) {
vhost = params[VHOST_KEY];
auto kv = Parser::parseArgs(params);
auto it = kv.find(VHOST_KEY);
if (it != kv.end()) {
vhost = it->second;
}
GET_CONFIG(bool, enableVhost, General::kEnableVhost);
@ -651,6 +652,10 @@ MediaSource::Ptr MediaSource::createFromMP4(const string &schema, const string &
/////////////////////////////////////MediaSourceEvent//////////////////////////////////////
void MediaSourceEvent::onReaderChanged(MediaSource &sender, int size){
GET_CONFIG(bool, enable, General::kBroadcastPlayerCountChanged);
if (enable) {
NOTICE_EMIT(BroadcastPlayerCountChangedArgs, Broadcast::kBroadcastPlayerCountChanged, sender.getMediaTuple(), sender.totalReaderCount());
}
if (size || sender.totalReaderCount()) {
//还有人观看该视频,不触发关闭事件
_async_close_timer = nullptr;

View File

@ -136,6 +136,15 @@ private:
toolkit::Timer::Ptr _async_close_timer;
};
template <typename MAP, typename KEY, typename TYPE>
static void getArgsValue(const MAP &allArgs, const KEY &key, TYPE &value) {
auto val = ((MAP &)allArgs)[key];
if (!val.empty()) {
value = (TYPE)val;
}
}
class ProtocolOption {
public:
ProtocolOption();
@ -243,15 +252,6 @@ public:
GET_OPT_VALUE(stream_replace);
GET_OPT_VALUE(max_track);
}
private:
template <typename MAP, typename KEY, typename TYPE>
static void getArgsValue(const MAP &allArgs, const KEY &key, TYPE &value) {
auto val = ((MAP &)allArgs)[key];
if (!val.empty()) {
value = (TYPE)val;
}
}
};
//该对象用于拦截感兴趣的MediaSourceEvent事件
@ -299,7 +299,6 @@ public:
std::string full_url;
std::string schema;
std::string host;
std::string param_strs;
};
bool equalMediaTuple(const MediaTuple& a, const MediaTuple& b);

View File

@ -44,6 +44,7 @@ public:
}
void resetTimer(const EventPoller::Ptr &poller) {
std::lock_guard<std::recursive_mutex> lck(_mtx);
std::weak_ptr<FramePacedSender> weak_self = shared_from_this();
_timer = std::make_shared<Timer>(_paced_sender_ms / 1000.0f, [weak_self]() {
if (auto strong_self = weak_self.lock()) {
@ -55,6 +56,7 @@ public:
}
bool inputFrame(const Frame::Ptr &frame) override {
std::lock_guard<std::recursive_mutex> lck(_mtx);
if (!_timer) {
setCurrentStamp(frame->dts());
resetTimer(EventPoller::getCurrentPoller());
@ -66,6 +68,7 @@ public:
private:
void onTick() {
std::lock_guard<std::recursive_mutex> lck(_mtx);
auto dst = _cache.empty() ? 0 : _cache.back().first;
while (!_cache.empty()) {
auto &front = _cache.front();
@ -110,6 +113,7 @@ private:
OnFrame _cb;
Ticker _ticker;
Timer::Ptr _timer;
std::recursive_mutex _mtx;
std::list<std::pair<uint64_t, Frame::Ptr>> _cache;
};
@ -169,6 +173,12 @@ std::string MultiMediaSourceMuxer::shortUrl() const {
return _tuple.shortUrl();
}
void MultiMediaSourceMuxer::forEachRtpSender(const std::function<void(const std::string &ssrc)> &cb) const {
for (auto &pr : _rtp_sender) {
cb(pr.first);
}
}
MultiMediaSourceMuxer::MultiMediaSourceMuxer(const MediaTuple& tuple, float dur_sec, const ProtocolOption &option): _tuple(tuple) {
if (!option.stream_replace.empty()) {
// 支持在on_publish hook中替换stream_id
@ -593,15 +603,17 @@ void MultiMediaSourceMuxer::resetTracks() {
}
}
bool MultiMediaSourceMuxer::onTrackFrame(const Frame::Ptr &frame) {
bool MultiMediaSourceMuxer::onTrackFrame(const Frame::Ptr &frame_in) {
auto frame = frame_in;
if (_option.modify_stamp != ProtocolOption::kModifyStampOff) {
// 时间戳不采用原始的绝对时间戳
const_cast<Frame::Ptr&>(frame) = std::make_shared<FrameStamp>(frame, _stamps[frame->getIndex()], _option.modify_stamp);
frame = std::make_shared<FrameStamp>(frame, _stamps[frame->getIndex()], _option.modify_stamp);
}
return _paced_sender ? _paced_sender->inputFrame(frame) : onTrackFrame_l(frame);
}
bool MultiMediaSourceMuxer::onTrackFrame_l(const Frame::Ptr &frame) {
bool MultiMediaSourceMuxer::onTrackFrame_l(const Frame::Ptr &frame_in) {
auto frame = frame_in;
bool ret = false;
if (_rtmp) {
ret = _rtmp->inputFrame(frame) ? true : ret;
@ -629,7 +641,7 @@ bool MultiMediaSourceMuxer::onTrackFrame_l(const Frame::Ptr &frame) {
}
if (_ring) {
// 此场景由于直接转发可能存在切换线程引起的数据被缓存在管道所以需要CacheAbleFrame
const_cast<Frame::Ptr &>(frame) = Frame::getCacheAbleFrame(frame);
frame = Frame::getCacheAbleFrame(frame);
if (frame->getTrackType() == TrackVideo) {
// 视频时遇到第一帧配置帧或关键帧则标记为gop开始处
auto video_key_pos = frame->keyFrame() || frame->configFrame();

View File

@ -133,6 +133,8 @@ public:
const MediaTuple &getMediaTuple() const;
std::string shortUrl() const;
void forEachRtpSender(const std::function<void(const std::string &ssrc)> &cb) const;
protected:
/////////////////////////////////MediaSink override/////////////////////////////////

View File

@ -294,8 +294,8 @@ void RtspUrl::setup(bool is_ssl, const string &url, const string &user, const st
splitUrl(ip, ip, port);
_url = std::move(url);
_user = strCoding::UrlDecodeComponent(std::move(user));
_passwd = strCoding::UrlDecodeComponent(std::move(passwd));
_user = strCoding::UrlDecodeUserOrPass(user);
_passwd = strCoding::UrlDecodeUserOrPass(passwd);
_host = std::move(ip);
_port = port;
_is_ssl = is_ssl;

View File

@ -30,7 +30,7 @@ struct StrCaseCompare {
class StrCaseMap : public std::multimap<std::string, std::string, StrCaseCompare> {
public:
using Super = multimap<std::string, std::string, StrCaseCompare>;
using Super = std::multimap<std::string, std::string, StrCaseCompare>;
std::string &operator[](const std::string &k) {
auto it = find(k);

View File

@ -64,6 +64,7 @@ const string kBroadcastRtcSctpFailed = "kBroadcastRtcSctpFailed";
const string kBroadcastRtcSctpClosed = "kBroadcastRtcSctpClosed";
const string kBroadcastRtcSctpSend = "kBroadcastRtcSctpSend";
const string kBroadcastRtcSctpReceived = "kBroadcastRtcSctpReceived";
const string kBroadcastPlayerCountChanged = "kBroadcastPlayerCountChanged";
} // namespace Broadcast
@ -82,6 +83,7 @@ const string kEnableFFmpegLog = GENERAL_FIELD "enable_ffmpeg_log";
const string kWaitTrackReadyMS = GENERAL_FIELD "wait_track_ready_ms";
const string kWaitAddTrackMS = GENERAL_FIELD "wait_add_track_ms";
const string kUnreadyFrameCache = GENERAL_FIELD "unready_frame_cache";
const string kBroadcastPlayerCountChanged = GENERAL_FIELD "broadcast_player_count_changed";
static onceToken token([]() {
mINI::Instance()[kFlowThreshold] = 1024;
@ -96,6 +98,7 @@ static onceToken token([]() {
mINI::Instance()[kWaitTrackReadyMS] = 10000;
mINI::Instance()[kWaitAddTrackMS] = 3000;
mINI::Instance()[kUnreadyFrameCache] = 100;
mINI::Instance()[kBroadcastPlayerCountChanged] = 0;
});
} // namespace General
@ -181,12 +184,7 @@ static onceToken token([]() {
mINI::Instance()[kKeepAliveSecond] = 15;
mINI::Instance()[kDirMenu] = true;
mINI::Instance()[kVirtualPath] = "";
#if defined(_WIN32)
mINI::Instance()[kCharSet] = "gb2312";
#else
mINI::Instance()[kCharSet] = "utf-8";
#endif
mINI::Instance()[kRootPath] = "./www";
mINI::Instance()[kNotFound] = StrPrinter << "<html>"
@ -366,6 +364,7 @@ static onceToken token([]() {
namespace Client {
const string kNetAdapter = "net_adapter";
const string kRtpType = "rtp_type";
const string kRtspBeatType = "rtsp_beat_type";
const string kRtspUser = "rtsp_user";
const string kRtspPwd = "rtsp_pwd";
const string kRtspPwdIsMD5 = "rtsp_pwd_md5";

View File

@ -124,6 +124,10 @@ extern const std::string kBroadcastRtcSctpSend;
extern const std::string kBroadcastRtcSctpReceived;
#define BroadcastRtcSctpReceivedArgs WebRtcTransport& sender, uint16_t &streamId, uint32_t &ppid, const uint8_t *&msg, size_t &len
// 观看人数变化广播
extern const std::string kBroadcastPlayerCountChanged;
#define BroadcastPlayerCountChangedArgs const MediaTuple& args, const int& count
#define ReloadConfigTag ((void *)(0xFF))
#define RELOAD_KEY(arg, key) \
do { \
@ -196,6 +200,8 @@ extern const std::string kWaitTrackReadyMS;
extern const std::string kWaitAddTrackMS;
// 如果track未就绪我们先缓存帧数据但是有最大个数限制(100帧时大约4秒),防止内存溢出
extern const std::string kUnreadyFrameCache;
// 是否启用观看人数变化事件广播置1则启用置0则关闭
extern const std::string kBroadcastPlayerCountChanged;
} // namespace General
namespace Protocol {
@ -417,6 +423,9 @@ extern const std::string kNetAdapter;
// 设置rtp传输类型可选项有0(tcp默认)、1(udp)、2(组播)
// 设置方法:player[PlayerBase::kRtpType] = 0/1/2;
extern const std::string kRtpType;
// rtsp播放器发送信令心跳还是rtcp心跳可选项有0(同时发)、1(rtcp心跳)、2(信令心跳)
// 设置方法:player[PlayerBase::kRtspBeatType] = 0/1/2;
extern const std::string kRtspBeatType;
// rtsp认证用户名
extern const std::string kRtspUser;
// rtsp认证用用户密码可以是明文也可以是md5,md5密码生成方式 md5(username:realm:password)

View File

@ -14,7 +14,7 @@
using namespace toolkit;
#if defined(ENABLE_VERSION)
#include "version.h"
#include "ZLMVersion.h"
#endif
extern "C" {
@ -44,4 +44,4 @@ const char kServerName[] = "ZLMediaKit-8.0(build in " __DATE__ " " __TIME__ ")"
const char kServerName[] = "ZLMediaKit(git hash:" COMMIT_HASH "/" COMMIT_TIME ",branch:" BRANCH_NAME ",build time:" BUILD_TIME ")";
#endif
}//namespace mediakit
}//namespace mediakit

View File

@ -36,6 +36,16 @@
#define CHECK(exp, ...) ::mediakit::Assert_ThrowCpp(!(exp), #exp, __FUNCTION__, __FILE__, __LINE__, ##__VA_ARGS__)
#endif // CHECK
#ifndef CHECK_RET
#define CHECK_RET(...) \
try { \
CHECK(__VA_ARGS__); \
} catch (AssertFailedException & ex) { \
WarnL << ex.what(); \
return; \
}
#endif
#ifndef MAX
#define MAX(a, b) ((a) > (b) ? (a) : (b))
#endif // MAX

View File

@ -52,25 +52,7 @@ char HexStrToBin(const char *str) {
}
return (high << 4) | low;
}
string strCoding::UrlEncode(const string &str) {
string out;
size_t len = str.size();
for (size_t i = 0; i < len; ++i) {
char ch = str[i];
if (isalnum((uint8_t) ch)) {
out.push_back(ch);
} else {
char buf[4];
sprintf(buf, "%%%X%X", (uint8_t) ch >> 4, (uint8_t) ch & 0x0F);
out.append(buf);
}
}
return out;
}
string strCoding::UrlEncodePath(const string &str) {
const char *dont_escape = "!#&'*+:=?@/._-$,;~()";
static string UrlEncodeCommon(const string &str,const char* dont_escape){
string out;
size_t len = str.size();
for (size_t i = 0; i < len; ++i) {
@ -85,52 +67,7 @@ string strCoding::UrlEncodePath(const string &str) {
}
return out;
}
string strCoding::UrlEncodeComponent(const string &str) {
const char *dont_escape = "!'()*-._~";
string out;
size_t len = str.size();
for (size_t i = 0; i < len; ++i) {
char ch = str[i];
if (isalnum((uint8_t) ch) || strchr(dont_escape, (uint8_t) ch) != NULL) {
out.push_back(ch);
} else {
char buf[4];
snprintf(buf, 4, "%%%X%X", (uint8_t) ch >> 4, (uint8_t) ch & 0x0F);
out.append(buf);
}
}
return out;
}
string strCoding::UrlDecode(const string &str) {
string output;
size_t i = 0, len = str.length();
while (i < len) {
if (str[i] == '%') {
if (i + 3 > len) {
// %后面必须还有两个字节才会反转义
output.append(str, i, len - i);
break;
}
char ch = HexStrToBin(&(str[i + 1]));
if (ch == -1) {
// %后面两个字节不是16进制字符串转义失败直接拼接3个原始字符
output.append(str, i, 3);
} else {
output += ch;
}
i += 3;
} else {
output += str[i];
++i;
}
}
return output;
}
string strCoding::UrlDecodePath(const string &str) {
const char *dont_unescape = "#$&+,/:;=?@";
static string UrlDecodeCommon(const string &str,const char* dont_unescape){
string output;
size_t i = 0, len = str.length();
while (i < len) {
@ -156,6 +93,36 @@ string strCoding::UrlDecodePath(const string &str) {
return output;
}
string strCoding::UrlEncodePath(const string &str) {
const char *dont_escape = "!#&'*+:=?@/._-$,;~()";
return UrlEncodeCommon(str,dont_escape);
}
string strCoding::UrlEncodeComponent(const string &str) {
const char *dont_escape = "!'()*-._~";
return UrlEncodeCommon(str,dont_escape);
}
std::string strCoding::UrlEncodeUserOrPass(const std::string &str) {
// from rfc https://datatracker.ietf.org/doc/html/rfc3986
// §2.3 Unreserved characters (mark)
//'-', '_', '.', '~'
// §2.2 Reserved characters (reserved)
// '$', '&', '+', ',', '/', ':', ';', '=', '?', '@',
// §3.2.1
// The RFC allows ';', ':', '&', '=', '+', '$', and ',' in
// userinfo, so we must escape only '@', '/', and '?'.
// The parsing of userinfo treats ':' as special so we must escape
// that too.
const char *dont_escape = "$&+,;=-._~";
return UrlEncodeCommon(str,dont_escape);
}
string strCoding::UrlDecodePath(const string &str) {
const char *dont_unescape = "#$&+,/:;=?@";
return UrlDecodeCommon(str,dont_unescape);
}
std::string strCoding::UrlDecodeComponent(const std::string &str) {
string output;
size_t i = 0, len = str.length();
@ -185,27 +152,11 @@ std::string strCoding::UrlDecodeComponent(const std::string &str) {
return output;
}
#if 0
#include "Util/onceToken.h"
static toolkit::onceToken token([]() {
auto str0 = strCoding::UrlDecode(
"rtsp%3A%2F%2Fadmin%3AJm13317934%25jm%40111.47.84.69%3A554%2FStreaming%2FChannels%2F101%3Ftransportmode%3Dunicast%26amp%3Bprofile%3DProfile_1");
auto str1 = strCoding::UrlDecode("%j1"); // 测试%后面两个字节不是16进制字符串
auto str2 = strCoding::UrlDecode("%a"); // 测试%后面字节数不够
auto str3 = strCoding::UrlDecode("%"); // 测试只有%
auto str4 = strCoding::UrlDecode("%%%"); // 测试多个%
auto str5 = strCoding::UrlDecode("%%%%40"); // 测试多个非法%后恢复正常解析
auto str6 = strCoding::UrlDecode("Jm13317934%jm"); // 测试多个非法%后恢复正常解析
cout << str0 << endl;
cout << str1 << endl;
cout << str2 << endl;
cout << str3 << endl;
cout << str4 << endl;
cout << str5 << endl;
cout << str6 << endl;
});
#endif
std::string strCoding::UrlDecodeUserOrPass(const std::string &str) {
const char *dont_unescape = "";
return UrlDecodeCommon(str,dont_unescape);
}
///////////////////////////////windows专用///////////////////////////////////
#if defined(_WIN32)
void UnicodeToGB2312(char* pOut, wchar_t uData)

View File

@ -18,12 +18,12 @@ namespace mediakit {
class strCoding {
public:
[[deprecated]] static std::string UrlEncode(const std::string &str); //url utf8编码, deprecated
static std::string UrlEncodePath(const std::string &str); //url路径 utf8编码
static std::string UrlEncodeComponent(const std::string &str); // url参数 utf8编码
[[deprecated]] static std::string UrlDecode(const std::string &str); //url utf8解码, deprecated
static std::string UrlDecodePath(const std::string &str); //url路径 utf8解码
static std::string UrlDecodeComponent(const std::string &str); // url参数 utf8解码
static std::string UrlEncodeUserOrPass(const std::string &str); // url中用户名与密码编码
static std::string UrlDecodeUserOrPass(const std::string &str); // url中用户名与密码解码
#if defined(_WIN32)
static std::string UTF8ToGB2312(const std::string &str);//utf_8转为gb2312
static std::string GB2312ToUTF8(const std::string &str); //gb2312 转utf_8

View File

@ -66,7 +66,8 @@ void HttpClient::sendRequest(const string &url) {
_http_persistent = true;
if (_body && _body->remainSize()) {
_header.emplace("Content-Length", to_string(_body->remainSize()));
_header.emplace("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8");
GET_CONFIG(string, charSet, Http::kCharSet);
_header.emplace("Content-Type", "application/x-www-form-urlencoded; charset=" + charSet);
}
bool host_changed = (_last_host != host + ":" + to_string(port)) || (_is_https != is_https);

View File

@ -65,18 +65,18 @@ void HttpRequestSplitter::input(const char *data,size_t len) {
_content_len = onRecvHeader(header_ptr, header_size);
}
if(_remain_data_size <= 0){
//没有剩余数据,清空缓存
_remain_data.clear();
return;
}
/*
*
* HttpRequestSplitter::reset()
*/
tail_ref = tail_tmp;
if(_remain_data_size <= 0){
//没有剩余数据,清空缓存
_remain_data.clear();
return;
}
if(_content_len == 0){
//尚未找到http头缓存定位到剩余数据部分
_remain_data.assign(ptr,_remain_data_size);

View File

@ -683,46 +683,10 @@ void HttpSession::sendResponse(int code,
AsyncSender::onSocketFlushed(data);
}
string HttpSession::urlDecode(const string &str) {
auto ret = strCoding::UrlDecode(str);
#ifdef _WIN32
GET_CONFIG(string, charSet, Http::kCharSet);
bool isGb2312 = !strcasecmp(charSet.data(), "gb2312");
if (isGb2312) {
ret = strCoding::UTF8ToGB2312(ret);
}
#endif // _WIN32
return ret;
}
string HttpSession::urlDecodePath(const string &str) {
auto ret = strCoding::UrlDecodePath(str);
#ifdef _WIN32
GET_CONFIG(string, charSet, Http::kCharSet);
bool isGb2312 = !strcasecmp(charSet.data(), "gb2312");
if (isGb2312) {
ret = strCoding::UTF8ToGB2312(ret);
}
#endif // _WIN32
return ret;
}
string HttpSession::urlDecodeComponent(const string &str) {
auto ret = strCoding::UrlDecodeComponent(str);
#ifdef _WIN32
GET_CONFIG(string, charSet, Http::kCharSet);
bool isGb2312 = !strcasecmp(charSet.data(), "gb2312");
if (isGb2312) {
ret = strCoding::UTF8ToGB2312(ret);
}
#endif // _WIN32
return ret;
}
void HttpSession::urlDecode(Parser &parser) {
parser.setUrl(urlDecodePath(parser.url()));
parser.setUrl(strCoding::UrlDecodePath(parser.url()));
for (auto &pr : _parser.getUrlArgs()) {
const_cast<string &>(pr.second) = urlDecodeComponent(pr.second);
const_cast<string &>(pr.second) = strCoding::UrlDecodeComponent(pr.second);
}
}

View File

@ -44,9 +44,6 @@ public:
void onRecv(const toolkit::Buffer::Ptr &) override;
void onError(const toolkit::SockException &err) override;
void onManager() override;
[[deprecated]] static std::string urlDecode(const std::string &str);
static std::string urlDecodePath(const std::string &str);
static std::string urlDecodeComponent(const std::string &str);
void setTimeoutSec(size_t second);
void setMaxReqSize(size_t max_req_size);

View File

@ -28,7 +28,7 @@ void HlsMaker::makeIndexFile(bool include_delay, bool eof) {
GET_CONFIG(uint32_t, segDelay, Hls::kSegmentDelay);
GET_CONFIG(uint32_t, segRetain, Hls::kSegmentRetain);
std::deque<std::tuple<int, std::string>> temp(_seg_dur_list);
if (!include_delay) {
if (!include_delay && _seg_number) {
while (temp.size() > _seg_number) {
temp.pop_front();
}

View File

@ -195,10 +195,8 @@ std::shared_ptr<FILE> HlsMakerImp::makeFile(const string &file, bool setbuf) {
return ret;
}
void HlsMakerImp::setMediaSource(const string &vhost, const string &app, const string &stream_id) {
_info.app = app;
_info.stream = stream_id;
_info.vhost = vhost;
void HlsMakerImp::setMediaSource(const MediaTuple& tuple) {
static_cast<MediaTuple &>(_info) = tuple;
_media_src = std::make_shared<HlsMediaSource>(isFmp4() ? HLS_FMP4_SCHEMA : HLS_SCHEMA, _info);
}

View File

@ -27,11 +27,8 @@ public:
/**
*
* @param vhost
* @param app
* @param stream_id id
*/
void setMediaSource(const std::string &vhost, const std::string &app, const std::string &stream_id);
void setMediaSource(const MediaTuple& tuple);
/**
* MediaSource

View File

@ -34,7 +34,7 @@ public:
}
void setMediaSource(const MediaTuple& tuple) {
_hls->setMediaSource(tuple.vhost, tuple.app, tuple.stream);
_hls->setMediaSource(tuple);
}
void setListener(const std::weak_ptr<MediaSourceEvent> &listener) {

View File

@ -38,7 +38,7 @@ MP4Reader::MP4Reader(const std::string &vhost, const std::string &app, const std
void MP4Reader::setup(const std::string &vhost, const std::string &app, const std::string &stream_id, const std::string &file_path, const ProtocolOption &option, toolkit::EventPoller::Ptr poller) {
//读写文件建议放在后台线程
auto tuple = MediaTuple{vhost, app, stream_id};
auto tuple = MediaTuple{vhost, app, stream_id, ""};
_poller = poller ? std::move(poller) : WorkThreadPool::Instance().getPoller();
_file_path = file_path;
if (_file_path.empty()) {

View File

@ -22,12 +22,10 @@ using namespace toolkit;
namespace mediakit {
MP4Recorder::MP4Recorder(const string &path, const string &vhost, const string &app, const string &stream_id, size_t max_second) {
MP4Recorder::MP4Recorder(const MediaTuple &tuple, const string &path, size_t max_second) {
_folder_path = path;
/////record 业务逻辑//////
_info.app = app;
_info.stream = stream_id;
_info.vhost = vhost;
static_cast<MediaTuple &>(_info) = tuple;
_info.folder = path;
GET_CONFIG(uint32_t, s_max_second, Protocol::kMP4MaxSecond);
_max_second = max_second ? max_second : s_max_second;
@ -117,11 +115,13 @@ bool MP4Recorder::inputFrame(const Frame::Ptr &frame) {
if (!(_have_video && frame->getTrackType() == TrackAudio)) {
//如果有视频且输入的是音频,那么应该忽略切片逻辑
if (_last_dts == 0 || _last_dts > frame->dts()) {
//极少情况下dts时间戳可能回退
_last_dts = frame->dts();
//b帧情况下dts时间戳可能回退
_last_dts = MAX(frame->dts(), _last_dts);
}
auto duration = 5u; // 默认至少一帧5ms
if (frame->dts() > 0 && frame->dts() > _last_dts) {
duration = MAX(duration, frame->dts() - _last_dts);
}
auto duration = frame->dts() - _last_dts;
if (!_muxer || ((duration > _max_second * 1000) && (!_have_video || (_have_video && frame->keyFrame())))) {
//成立条件
// 1、_muxer为空

View File

@ -26,7 +26,7 @@ class MP4Recorder final : public MediaSinkInterface {
public:
using Ptr = std::shared_ptr<MP4Recorder>;
MP4Recorder(const std::string &path, const std::string &vhost, const std::string &app, const std::string &stream_id, size_t max_second);
MP4Recorder(const MediaTuple &tuple, const std::string &path, size_t max_second);
~MP4Recorder() override;
/**

View File

@ -68,8 +68,7 @@ string Recorder::getRecordPath(Recorder::type type, const MediaTuple& tuple, con
}
return File::absolutePath(m3u8FilePath, hlsPath);
}
default:
return "";
default: return "";
}
}
@ -85,13 +84,12 @@ std::shared_ptr<MediaSinkInterface> Recorder::createRecorder(type type, const Me
#else
throw std::invalid_argument("hls相关功能未打开请开启ENABLE_HLS宏后编译再测试");
#endif
}
case Recorder::type_mp4: {
#if defined(ENABLE_MP4)
auto path = Recorder::getRecordPath(type, tuple, option.mp4_save_path);
return std::make_shared<MP4Recorder>(path, tuple.vhost, tuple.app, tuple.stream, option.mp4_max_second);
return std::make_shared<MP4Recorder>(tuple, path, option.mp4_max_second);
#else
throw std::invalid_argument("mp4相关功能未打开请开启ENABLE_MP4宏后编译再测试");
#endif

View File

@ -22,6 +22,7 @@ struct MediaTuple {
std::string vhost;
std::string app;
std::string stream;
std::string params;
std::string shortUrl() const {
return vhost + '/' + app + '/' + stream;
}
@ -31,7 +32,7 @@ class RecordInfo: public MediaTuple {
public:
time_t start_time; // GMT 标准时间,单位秒
float time_len; // 录像长度,单位秒
off_t file_size; // 文件大小,单位 BYTE
uint64_t file_size; // 文件大小,单位 BYTE
std::string file_path; // 文件路径
std::string file_name; // 文件名称
std::string folder; // 文件夹路径

View File

@ -29,7 +29,13 @@ public:
getRtmpRing()->setDelegate(_media_src);
}
~RtmpMediaSourceMuxer() override { RtmpMuxer::flush(); }
~RtmpMediaSourceMuxer() override {
try {
RtmpMuxer::flush();
} catch (std::exception &ex) {
WarnL << ex.what();
}
}
void setListener(const std::weak_ptr<MediaSourceEvent> &listener){
setDelegate(listener);

View File

@ -352,12 +352,20 @@ public:
}
void makeSockPair(std::pair<Socket::Ptr, Socket::Ptr> &pair, const string &local_ip, bool re_use_port, bool is_udp) {
auto &sock0 = pair.first;
auto &sock1 = pair.second;
auto sock_pair = getPortPair();
if (!sock_pair) {
throw runtime_error("none reserved port in pool");
}
makeSockPair_l(sock_pair, pair, local_ip, re_use_port, is_udp);
// 确保udp和tcp模式都能打开
auto new_pair = std::make_pair(Socket::createSocket(), Socket::createSocket());
makeSockPair_l(sock_pair, new_pair, local_ip, re_use_port, !is_udp);
}
void makeSockPair_l(const std::shared_ptr<uint16_t> &sock_pair, std::pair<Socket::Ptr, Socket::Ptr> &pair, const string &local_ip, bool re_use_port, bool is_udp) {
auto &sock0 = pair.first;
auto &sock1 = pair.second;
if (is_udp) {
if (!sock0->bindUdpSock(2 * *sock_pair, local_ip.data(), re_use_port)) {
// 分配端口失败

View File

@ -28,6 +28,7 @@ using namespace std;
namespace mediakit {
enum PlayType { type_play = 0, type_pause, type_seek, type_speed };
enum class BeatType : uint32_t { both = 0, rtcp, cmd };
RtspPlayer::RtspPlayer(const EventPoller::Ptr &poller)
: TcpClient(poller) {}
@ -85,6 +86,8 @@ void RtspPlayer::play(const string &strUrl) {
_play_url = url._url;
_rtp_type = (Rtsp::eRtpType)(int)(*this)[Client::kRtpType];
_beat_type = (*this)[Client::kRtspBeatType].as<int>();
_beat_interval_ms = (*this)[Client::kBeatIntervalMS].as<int>();
DebugL << url._url << " " << (url._user.size() ? url._user : "null") << " " << (url._passwd.size() ? url._passwd : "null") << " " << _rtp_type;
weak_ptr<RtspPlayer> weakSelf = static_pointer_cast<RtspPlayer>(shared_from_this());
@ -210,7 +213,8 @@ void RtspPlayer::handleResDESCRIBE(const Parser &parser) {
if (play_track != TrackInvalid) {
auto track = sdpParser.getTrack(play_track);
_sdp_track.emplace_back(track);
sdp = track->toString();
auto title_track = sdpParser.getTrack(TrackTitle);
sdp = (title_track ? title_track->toString() : "") + track->toString();
} else {
_sdp_track = sdpParser.getAvailableTrack();
sdp = sdpParser.toString();
@ -641,23 +645,28 @@ void RtspPlayer::onBeforeRtpSorted(const RtpPacket::Ptr &rtp, int track_idx) {
rtcp_ctx->onRtp(rtp->getSeq(), rtp->getStamp(), rtp->ntp_stamp, rtp->sample_rate, rtp->size() - RtpPacket::kRtpTcpHeaderSize);
auto &ticker = _rtcp_send_ticker[track_idx];
if (ticker.elapsedTime() < 3 * 1000) {
// 时间未到
if (ticker.elapsedTime() < _beat_interval_ms) {
// 心跳时间未到
return;
}
auto &rtcp_flag = _send_rtcp[track_idx];
// 每3秒发送一次心跳rtcp与rtsp信令轮流心跳该特性用于兼容issue:642
// 有些rtsp服务器需要rtcp保活有些需要发送信令保活
// 有些rtsp服务器需要rtcp保活有些需要发送信令保活; rtcp与rtsp信令轮流心跳该特性用于兼容issue:#642
auto &rtcp_flag = _send_rtcp[track_idx];
ticker.resetTime();
switch ((BeatType)_beat_type) {
case BeatType::cmd: rtcp_flag = false; break;
case BeatType::rtcp: rtcp_flag = true; break;
case BeatType::both:
default: rtcp_flag = !rtcp_flag; break;
}
// 发送信令保活
if (!rtcp_flag) {
if (track_idx == 0) {
// 两个track无需同时触发发送信令保活
sendKeepAlive();
}
ticker.resetTime();
// 下次发送rtcp保活
rtcp_flag = true;
return;
}
@ -679,9 +688,6 @@ void RtspPlayer::onBeforeRtpSorted(const RtpPacket::Ptr &rtp, int track_idx) {
rtcp_sdes->chunks.ssrc = htonl(ssrc);
send_rtcp(this, track_idx, std::move(rtcp));
send_rtcp(this, track_idx, RtcpHeader::toBuffer(rtcp_sdes));
ticker.resetTime();
// 下次发送信令保活
rtcp_flag = false;
}
void RtspPlayer::onPlayResult_l(const SockException &ex, bool handshake_done) {

View File

@ -114,6 +114,11 @@ private:
//轮流发送rtcp与GET_PARAMETER保活
bool _send_rtcp[2] = {true, true};
// 心跳类型
uint32_t _beat_type = 0;
// 心跳保护间隔
uint32_t _beat_interval_ms = 0;
std::string _play_url;
std::vector<SdpTrack::Ptr> _sdp_track;
std::function<void(const Parser&)> _on_response;

View File

@ -10,8 +10,9 @@
#include <cstdlib>
#include "RtspSplitter.h"
#include "Util/logger.h"
#include "Util/util.h"
#include "Util/logger.h"
#include "Common/macros.h"
using namespace std;
using namespace toolkit;
@ -64,7 +65,18 @@ ssize_t RtspSplitter::onRecvHeader(const char *data, size_t len) {
if (len == 4 && !memcmp(data, "\r\n\r\n", 4)) {
return 0;
}
_parser.parse(data, len);
try {
_parser.parse(data, len);
} catch (mediakit::AssertFailedException &ex){
if (!_enableRecvRtp) {
// 还在握手中,直接中断握手
throw;
}
// 握手已经结束如果rtsp server存在发送缓存溢出的bug那么rtsp信令可能跟rtp混在一起
// 这种情况下rtsp信令解析异常不中断链接只丢弃这个包
WarnL << ex.what();
return 0;
}
auto ret = getContentLength(_parser);
if (ret == 0) {
onWholeRtspPacket(_parser);

View File

@ -63,8 +63,8 @@ void SrtTransportImp::onHandShakeFinished(std::string &streamid, struct sockaddr
return;
}
auto params = Parser::parseArgs(_media_info.param_strs);
if (params["m"] == "publish") {
auto kv = Parser::parseArgs(_media_info.params);
if (kv["m"] == "publish") {
_is_pusher = true;
_decoder = DecoderImp::createDecoder(DecoderImp::decoder_ts, this);
emitOnPublish();
@ -98,10 +98,10 @@ bool SrtTransportImp::parseStreamid(std::string &streamid) {
app = tmps[0];
stream_name = tmps[1];
} else {
if (_media_info.param_strs.empty()) {
_media_info.param_strs = it.first + "=" + it.second;
if (_media_info.params.empty()) {
_media_info.params = it.first + "=" + it.second;
} else {
_media_info.param_strs += "&" + it.first + "=" + it.second;
_media_info.params += "&" + it.first + "=" + it.second;
}
}
}
@ -118,7 +118,7 @@ bool SrtTransportImp::parseStreamid(std::string &streamid) {
_media_info.app = app;
_media_info.stream = stream_name;
TraceL << " mediainfo=" << _media_info.shortUrl() << " params=" << _media_info.param_strs;
TraceL << " mediainfo=" << _media_info.shortUrl() << " params=" << _media_info.params;
return true;
}

View File

@ -27,7 +27,7 @@ using namespace mediakit;
class FlvSplitterImp : public FlvSplitter {
public:
FlvSplitterImp() {
_src = std::make_shared<RtmpMediaSourceImp>(MediaTuple{DEFAULT_VHOST, "live", "test"});
_src = std::make_shared<RtmpMediaSourceImp>(MediaTuple{DEFAULT_VHOST, "live", "test", ""});
}
~FlvSplitterImp() override = default;

View File

@ -90,7 +90,7 @@ void initEventListener() {
static onceToken s_token([]() {
//监听kBroadcastOnGetRtspRealm事件决定rtsp链接是否需要鉴权(传统的rtsp鉴权方案)才能访问
NoticeCenter::Instance().addListener(nullptr, Broadcast::kBroadcastOnGetRtspRealm, [](BroadcastOnGetRtspRealmArgs) {
DebugL << "RTSP是否需要鉴权事件" << args.getUrl() << " " << args.param_strs;
DebugL << "RTSP是否需要鉴权事件" << args.getUrl() << " " << args.params;
if (string("1") == args.stream) {
// live/1需要认证
//该流需要认证并且设置realm
@ -104,7 +104,7 @@ void initEventListener() {
//监听kBroadcastOnRtspAuth事件返回正确的rtsp鉴权用户密码
NoticeCenter::Instance().addListener(nullptr, Broadcast::kBroadcastOnRtspAuth, [](BroadcastOnRtspAuthArgs) {
DebugL << "RTSP播放鉴权:" << args.getUrl() << " " << args.param_strs;
DebugL << "RTSP播放鉴权:" << args.getUrl() << " " << args.params;
DebugL << "RTSP用户" << user_name << (must_no_encrypt ? " Base64" : " MD5") << " 方式登录";
string user = user_name;
//假设我们异步读取数据库
@ -134,14 +134,14 @@ void initEventListener() {
//监听rtsp/rtmp推流事件返回结果告知是否有推流权限
NoticeCenter::Instance().addListener(nullptr, Broadcast::kBroadcastMediaPublish, [](BroadcastMediaPublishArgs) {
DebugL << "推流鉴权:" << args.getUrl() << " " << args.param_strs;
DebugL << "推流鉴权:" << args.getUrl() << " " << args.params;
invoker("", ProtocolOption());//鉴权成功
//invoker("this is auth failed message");//鉴权失败
});
//监听rtsp/rtsps/rtmp/http-flv播放事件返回结果告知是否有播放权限(rtsp通过kBroadcastOnRtspAuth或此事件都可以实现鉴权)
NoticeCenter::Instance().addListener(nullptr, Broadcast::kBroadcastMediaPlayed, [](BroadcastMediaPlayedArgs) {
DebugL << "播放鉴权:" << args.getUrl() << " " << args.param_strs;
DebugL << "播放鉴权:" << args.getUrl() << " " << args.params;
invoker("");//鉴权成功
//invoker("this is auth failed message");//鉴权失败
});
@ -183,13 +183,13 @@ void initEventListener() {
*
* ZLMediaKit会把其立即转发给播放器(55)
*/
DebugL << "未找到流事件:" << args.getUrl() << " " << args.param_strs;
DebugL << "未找到流事件:" << args.getUrl() << " " << args.params;
});
//监听播放或推流结束时消耗流量事件
NoticeCenter::Instance().addListener(nullptr, Broadcast::kBroadcastFlowReport, [](BroadcastFlowReportArgs) {
DebugL << "播放器(推流器)断开连接事件:" << args.getUrl() << " " << args.param_strs << "\r\n使用流量:" << totalBytes << " bytes,连接时长:" << totalDuration << "";
DebugL << "播放器(推流器)断开连接事件:" << args.getUrl() << " " << args.params << "\r\n使用流量:" << totalBytes << " bytes,连接时长:" << totalDuration << "";
});

View File

@ -741,8 +741,7 @@ namespace RTC
if (!IsRunning())
{
MS_ERROR("cannot process data while not running");
MS_WARN_TAG(nullptr,"cannot process data while not running");
return;
}

View File

@ -9,24 +9,56 @@
*/
#include "Nack.h"
#include "Common/config.h"
using namespace std;
using namespace toolkit;
namespace mediakit {
static constexpr uint32_t kMaxNackMS = 5 * 1000;
static constexpr uint32_t kRtpCacheCheckInterval = 100;
// RTC配置项目
namespace Rtc {
#define RTC_FIELD "rtc."
//~ nack接收端
// Nack缓存包最早时间间隔
const string kMaxNackMS = RTC_FIELD "maxNackMS";
// Nack包检查间隔(包数量)
const string kRtpCacheCheckInterval = RTC_FIELD "rtpCacheCheckInterval";
//~ nack发送端
//最大保留的rtp丢包状态个数
const string kNackMaxSize = RTC_FIELD "nackMaxSize";
// rtp丢包状态最长保留时间
const string kNackMaxMS = RTC_FIELD "nackMaxMS";
// nack最多请求重传次数
const string kNackMaxCount = RTC_FIELD "nackMaxCount";
// nack重传频率rtt的倍数
const string kNackIntervalRatio = RTC_FIELD "nackIntervalRatio";
// nack包中rtp个数减小此值可以让nack包响应更灵敏
const string kNackRtpSize = RTC_FIELD "nackRtpSize";
static onceToken token([]() {
mINI::Instance()[kMaxNackMS] = 5 * 1000;
mINI::Instance()[kRtpCacheCheckInterval] = 100;
mINI::Instance()[kNackMaxSize] = 2048;
mINI::Instance()[kNackMaxMS] = 3 * 1000;
mINI::Instance()[kNackMaxCount] = 15;
mINI::Instance()[kNackIntervalRatio] = 1.0f;
mINI::Instance()[kNackRtpSize] = 8;
});
} // namespace Rtc
void NackList::pushBack(RtpPacket::Ptr rtp) {
auto seq = rtp->getSeq();
_nack_cache_seq.emplace_back(seq);
_nack_cache_pkt.emplace(seq, std::move(rtp));
if (++_cache_ms_check < kRtpCacheCheckInterval) {
GET_CONFIG(uint32_t, rtpcache_checkinterval, Rtc::kRtpCacheCheckInterval);
if (++_cache_ms_check < rtpcache_checkinterval) {
return;
}
_cache_ms_check = 0;
while (getCacheMS() >= kMaxNackMS) {
GET_CONFIG(uint32_t, maxnackms, Rtc::kMaxNackMS);
while (getCacheMS() >= maxnackms) {
// 需要清除部分nack缓存
popFront();
}
@ -148,10 +180,13 @@ void NackContext::makeNack(uint16_t max_seq, bool flush) {
eraseFrontSeq();
// 最多生成5个nack包防止seq大幅跳跃导致一直循环
auto max_nack = 5u;
GET_CONFIG(uint32_t, nack_rtpsize, Rtc::kNackRtpSize);
// kNackRtpSize must between 0 and 16
nack_rtpsize = std::min<uint32_t>(nack_rtpsize, FCI_NACK::kBitSize);
while (_nack_seq != max_seq && max_nack--) {
// 一次不能发送超过16+1个rtp的状态
uint16_t nack_rtp_count = std::min<uint16_t>(FCI_NACK::kBitSize, max_seq - (uint16_t)(_nack_seq + 1));
if (!flush && nack_rtp_count < kNackRtpSize) {
if (!flush && nack_rtp_count < nack_rtpsize) {
// 非flush状态下seq个数不足以发送一次nack
break;
}
@ -206,7 +241,9 @@ void NackContext::clearNackStatus(uint16_t seq) {
_nack_send_status.erase(it);
// 限定rtt在合理有效范围内
_rtt = max<int>(10, min<int>(rtt, kNackMaxMS / kNackMaxCount));
GET_CONFIG(uint32_t, nack_maxms, Rtc::kNackMaxMS);
GET_CONFIG(uint32_t, nack_maxcount, Rtc::kNackMaxCount);
_rtt = max<int>(10, min<int>(rtt, nack_maxms / nack_maxcount));
}
void NackContext::recordNack(const FCI_NACK &nack) {
@ -222,7 +259,8 @@ void NackContext::recordNack(const FCI_NACK &nack) {
++i;
}
// 记录太多了,移除一部分早期的记录
while (_nack_send_status.size() > kNackMaxSize) {
GET_CONFIG(uint32_t, nack_maxsize, Rtc::kNackMaxSize);
while (_nack_send_status.size() > nack_maxsize) {
_nack_send_status.erase(_nack_send_status.begin());
}
}
@ -230,13 +268,16 @@ void NackContext::recordNack(const FCI_NACK &nack) {
uint64_t NackContext::reSendNack() {
set<uint16_t> nack_rtp;
auto now = getCurrentMillisecond();
GET_CONFIG(uint32_t, nack_maxms, Rtc::kNackMaxMS);
GET_CONFIG(uint32_t, nack_maxcount, Rtc::kNackMaxCount);
GET_CONFIG(float, nack_intervalratio, Rtc::kNackIntervalRatio);
for (auto it = _nack_send_status.begin(); it != _nack_send_status.end();) {
if (now - it->second.first_stamp > kNackMaxMS) {
if (now - it->second.first_stamp > nack_maxms) {
// 该rtp丢失太久了不再要求重传
it = _nack_send_status.erase(it);
continue;
}
if (now - it->second.update_stamp < kNackIntervalRatio * _rtt) {
if (now - it->second.update_stamp < nack_intervalratio * _rtt) {
// 距离上次nack不足2倍的rtt不用再发送nack
++it;
continue;
@ -245,7 +286,7 @@ uint64_t NackContext::reSendNack() {
nack_rtp.emplace(it->first);
// 更新nack发送时间戳
it->second.update_stamp = now;
if (++(it->second.nack_count) == kNackMaxCount) {
if (++(it->second.nack_count) == nack_maxcount) {
// nack次数太多移除之
it = _nack_send_status.erase(it);
continue;

View File

@ -41,18 +41,6 @@ class NackContext {
public:
using Ptr = std::shared_ptr<NackContext>;
using onNack = std::function<void(const FCI_NACK &nack)>;
//最大保留的rtp丢包状态个数
static constexpr auto kNackMaxSize = 2048;
// rtp丢包状态最长保留时间
static constexpr auto kNackMaxMS = 3 * 1000;
// nack最多请求重传10次
static constexpr auto kNackMaxCount = 15;
// nack重传频率rtt的倍数
static constexpr auto kNackIntervalRatio = 1.0f;
// nack包中rtp个数减小此值可以让nack包响应更灵敏
static constexpr auto kNackRtpSize = 8;
static_assert(kNackRtpSize >=0 && kNackRtpSize <= FCI_NACK::kBitSize, "NackContext::kNackRtpSize must between 0 and 16");
NackContext();

View File

@ -204,7 +204,8 @@ static unordered_map<string/*ext*/, RtpExtType/*id*/> s_url_to_type = {RTP_EXT_M
RtpExtType RtpExt::getExtType(const string &url) {
auto it = s_url_to_type.find(url);
if (it == s_url_to_type.end()) {
throw std::invalid_argument(string("未识别的rtp ext url类型:") + url);
WarnL << "unknown rtp ext url type: " << url;
return RtpExtType::padding;
}
return it->second;
}

File diff suppressed because it is too large Load Diff

View File

@ -22,97 +22,87 @@
namespace mediakit {
//https://datatracker.ietf.org/doc/rfc4566/?include_text=1
//https://blog.csdn.net/aggresss/article/details/109850434
//https://aggresss.blog.csdn.net/article/details/106436703
//Session description
// v= (protocol version)
// o= (originator and session identifier)
// s= (session name)
// i=* (session information)
// u=* (URI of description)
// e=* (email address)
// p=* (phone number)
// c=* (connection information -- not required if included in
// all media)
// b=* (zero or more bandwidth information lines)
// One or more time descriptions ("t=" and "r=" lines; see below)
// z=* (time zone adjustments)
// k=* (encryption key)
// a=* (zero or more session attribute lines)
// Zero or more media descriptions
// https://datatracker.ietf.org/doc/rfc4566/?include_text=1
// https://blog.csdn.net/aggresss/article/details/109850434
// https://aggresss.blog.csdn.net/article/details/106436703
// Session description
// v= (protocol version)
// o= (originator and session identifier)
// s= (session name)
// i=* (session information)
// u=* (URI of description)
// e=* (email address)
// p=* (phone number)
// c=* (connection information -- not required if included in
// all media)
// b=* (zero or more bandwidth information lines)
// One or more time descriptions ("t=" and "r=" lines; see below)
// z=* (time zone adjustments)
// k=* (encryption key)
// a=* (zero or more session attribute lines)
// Zero or more media descriptions
//
// Time description
// t= (time the session is active)
// r=* (zero or more repeat times)
// Time description
// t= (time the session is active)
// r=* (zero or more repeat times)
//
// Media description, if present
// m= (media name and transport address)
// i=* (media title)
// c=* (connection information -- optional if included at
// session level)
// b=* (zero or more bandwidth information lines)
// k=* (encryption key)
// a=* (zero or more media attribute lines)
// Media description, if present
// m= (media name and transport address)
// i=* (media title)
// c=* (connection information -- optional if included at
// session level)
// b=* (zero or more bandwidth information lines)
// k=* (encryption key)
// a=* (zero or more media attribute lines)
enum class RtpDirection {
invalid = -1,
//只发送
// 只发送
sendonly,
//只接收
// 只接收
recvonly,
//同时发送接收
// 同时发送接收
sendrecv,
//禁止发送数据
// 禁止发送数据
inactive
};
enum class DtlsRole {
invalid = -1,
//客户端
// 客户端
active,
//服务端
// 服务端
passive,
//既可作做客户端也可以做服务端
// 既可作做客户端也可以做服务端
actpass,
};
enum class SdpType {
invalid = -1,
offer,
answer
};
enum class SdpType { invalid = -1, offer, answer };
DtlsRole getDtlsRole(const std::string &str);
const char* getDtlsRoleString(DtlsRole role);
const char *getDtlsRoleString(DtlsRole role);
RtpDirection getRtpDirection(const std::string &str);
const char* getRtpDirectionString(RtpDirection val);
const char *getRtpDirectionString(RtpDirection val);
class SdpItem {
public:
using Ptr = std::shared_ptr<SdpItem>;
virtual ~SdpItem() = default;
virtual void parse(const std::string &str) {
value = str;
}
virtual std::string toString() const {
return value;
}
virtual const char* getKey() const = 0;
virtual void parse(const std::string &str) { value = str; }
virtual std::string toString() const { return value; }
virtual const char *getKey() const = 0;
void reset() {
value.clear();
}
void reset() { value.clear(); }
protected:
mutable std::string value;
};
template <char KEY>
class SdpString : public SdpItem{
class SdpString : public SdpItem {
public:
SdpString() = default;
SdpString(std::string val) {value = std::move(val);}
SdpString(std::string val) { value = std::move(val); }
// *=*
const char* getKey() const override { static std::string key(1, KEY); return key.data();}
};
@ -126,34 +116,34 @@ public:
this->value = std::move(val);
}
const char* getKey() const override { return key.data();}
const char *getKey() const override { return key.data(); }
};
class SdpTime : public SdpItem{
class SdpTime : public SdpItem {
public:
//5.9. Timing ("t=")
// t=<start-time> <stop-time>
uint64_t start {0};
uint64_t stop {0};
// 5.9. Timing ("t=")
// t=<start-time> <stop-time>
uint64_t start { 0 };
uint64_t stop { 0 };
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "t";}
const char *getKey() const override { return "t"; }
};
class SdpOrigin : public SdpItem{
class SdpOrigin : public SdpItem {
public:
// 5.2. Origin ("o=")
// o=jdoe 2890844526 2890842807 IN IP4 10.47.16.5
// o=<username> <sess-id> <sess-version> <nettype> <addrtype> <unicast-address>
std::string username {"-"};
std::string username { "-" };
std::string session_id;
std::string session_version;
std::string nettype {"IN"};
std::string addrtype {"IP4"};
std::string address {"0.0.0.0"};
std::string nettype { "IN" };
std::string addrtype { "IP4" };
std::string address { "0.0.0.0" };
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "o";}
const char *getKey() const override { return "o"; }
bool empty() const {
return username.empty() || session_id.empty() || session_version.empty()
|| nettype.empty() || addrtype.empty() || address.empty();
@ -165,28 +155,28 @@ public:
// 5.7. Connection Data ("c=")
// c=IN IP4 224.2.17.12/127
// c=<nettype> <addrtype> <connection-address>
std::string nettype {"IN"};
std::string addrtype {"IP4"};
std::string address {"0.0.0.0"};
std::string nettype { "IN" };
std::string addrtype { "IP4" };
std::string address { "0.0.0.0" };
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "c";}
bool empty() const {return address.empty();}
const char *getKey() const override { return "c"; }
bool empty() const { return address.empty(); }
};
class SdpBandwidth : public SdpItem {
public:
//5.8. Bandwidth ("b=")
//b=<bwtype>:<bandwidth>
// 5.8. Bandwidth ("b=")
// b=<bwtype>:<bandwidth>
//AS、CT
std::string bwtype {"AS"};
uint32_t bandwidth {0};
// AS、CT
std::string bwtype { "AS" };
uint32_t bandwidth { 0 };
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "b";}
bool empty() const {return bandwidth == 0;}
const char *getKey() const override { return "b"; }
bool empty() const { return bandwidth == 0; }
};
class SdpMedia : public SdpItem {
@ -195,287 +185,284 @@ public:
// m=<media> <port> <proto> <fmt> ...
TrackType type;
uint16_t port;
//RTP/AVP应用场景为视频/音频的 RTP 协议。参考 RFC 3551
//RTP/SAVP应用场景为视频/音频的 SRTP 协议。参考 RFC 3711
//RTP/AVPF: 应用场景为视频/音频的 RTP 协议,支持 RTCP-based Feedback。参考 RFC 4585
//RTP/SAVPF: 应用场景为视频/音频的 SRTP 协议,支持 RTCP-based Feedback。参考 RFC 5124
// RTP/AVP应用场景为视频/音频的 RTP 协议。参考 RFC 3551
// RTP/SAVP应用场景为视频/音频的 SRTP 协议。参考 RFC 3711
// RTP/AVPF: 应用场景为视频/音频的 RTP 协议,支持 RTCP-based Feedback。参考 RFC 4585
// RTP/SAVPF: 应用场景为视频/音频的 SRTP 协议,支持 RTCP-based Feedback。参考 RFC 5124
std::string proto;
std::vector<std::string> fmts;
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "m";}
const char *getKey() const override { return "m"; }
};
class SdpAttr : public SdpItem{
class SdpAttr : public SdpItem {
public:
using Ptr = std::shared_ptr<SdpAttr>;
//5.13. Attributes ("a=")
//a=<attribute>
//a=<attribute>:<value>
// 5.13. Attributes ("a=")
// a=<attribute>
// a=<attribute>:<value>
SdpItem::Ptr detail;
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "a";}
const char *getKey() const override { return "a"; }
};
class SdpAttrGroup : public SdpItem{
class SdpAttrGroup : public SdpItem {
public:
//a=group:BUNDLE line with all the 'mid' identifiers part of the
// BUNDLE group is included at the session-level.
//a=group:LS session level attribute MUST be included wth the 'mid'
// identifiers that are part of the same lip sync group.
std::string type {"BUNDLE"};
// a=group:BUNDLE line with all the 'mid' identifiers part of the
// BUNDLE group is included at the session-level.
// a=group:LS session level attribute MUST be included wth the 'mid'
// identifiers that are part of the same lip sync group.
std::string type { "BUNDLE" };
std::vector<std::string> mids;
void parse(const std::string &str) override ;
std::string toString() const override ;
const char* getKey() const override { return "group";}
void parse(const std::string &str) override;
std::string toString() const override;
const char *getKey() const override { return "group"; }
};
class SdpAttrMsidSemantic : public SdpItem {
public:
//https://tools.ietf.org/html/draft-alvestrand-rtcweb-msid-02#section-3
//3. The Msid-Semantic Attribute
// https://tools.ietf.org/html/draft-alvestrand-rtcweb-msid-02#section-3
// 3. The Msid-Semantic Attribute
//
// In order to fully reproduce the semantics of the SDP and SSRC
// grouping frameworks, a session-level attribute is defined for
// signalling the semantics associated with an msid grouping.
// In order to fully reproduce the semantics of the SDP and SSRC
// grouping frameworks, a session-level attribute is defined for
// signalling the semantics associated with an msid grouping.
//
// This OPTIONAL attribute gives the message ID and its group semantic.
// a=msid-semantic: examplefoo LS
// This OPTIONAL attribute gives the message ID and its group semantic.
// a=msid-semantic: examplefoo LS
//
//
// The ABNF of msid-semantic is:
// The ABNF of msid-semantic is:
//
// msid-semantic-attr = "msid-semantic:" " " msid token
// token = <as defined in RFC 4566>
// msid-semantic-attr = "msid-semantic:" " " msid token
// token = <as defined in RFC 4566>
//
// The semantic field may hold values from the IANA registries
// "Semantics for the "ssrc-group" SDP Attribute" and "Semantics for the
// "group" SDP Attribute".
//a=msid-semantic: WMS 616cfbb1-33a3-4d8c-8275-a199d6005549
std::string msid{"WMS"};
// The semantic field may hold values from the IANA registries
// "Semantics for the "ssrc-group" SDP Attribute" and "Semantics for the
// "group" SDP Attribute".
// a=msid-semantic: WMS 616cfbb1-33a3-4d8c-8275-a199d6005549
std::string msid { "WMS" };
std::string token;
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "msid-semantic";}
bool empty() const {
return msid.empty();
}
const char *getKey() const override { return "msid-semantic"; }
bool empty() const { return msid.empty(); }
};
class SdpAttrRtcp : public SdpItem {
public:
// a=rtcp:9 IN IP4 0.0.0.0
uint16_t port{0};
std::string nettype {"IN"};
std::string addrtype {"IP4"};
std::string address {"0.0.0.0"};
void parse(const std::string &str) override;;
uint16_t port { 0 };
std::string nettype { "IN" };
std::string addrtype { "IP4" };
std::string address { "0.0.0.0" };
void parse(const std::string &str) override;
;
std::string toString() const override;
const char* getKey() const override { return "rtcp";}
bool empty() const {
return address.empty() || !port;
}
const char *getKey() const override { return "rtcp"; }
bool empty() const { return address.empty() || !port; }
};
class SdpAttrIceUfrag : public SdpItem {
public:
SdpAttrIceUfrag() = default;
SdpAttrIceUfrag(std::string str) {value = std::move(str);}
//a=ice-ufrag:sXJ3
const char* getKey() const override { return "ice-ufrag";}
SdpAttrIceUfrag(std::string str) { value = std::move(str); }
// a=ice-ufrag:sXJ3
const char *getKey() const override { return "ice-ufrag"; }
};
class SdpAttrIcePwd : public SdpItem {
public:
SdpAttrIcePwd() = default;
SdpAttrIcePwd(std::string str) {value = std::move(str);}
//a=ice-pwd:yEclOTrLg1gEubBFefOqtmyV
const char* getKey() const override { return "ice-pwd";}
SdpAttrIcePwd(std::string str) { value = std::move(str); }
// a=ice-pwd:yEclOTrLg1gEubBFefOqtmyV
const char *getKey() const override { return "ice-pwd"; }
};
class SdpAttrIceOption : public SdpItem {
public:
//a=ice-options:trickle
bool trickle{false};
bool renomination{false};
// a=ice-options:trickle
bool trickle { false };
bool renomination { false };
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "ice-options";}
const char *getKey() const override { return "ice-options"; }
};
class SdpAttrFingerprint : public SdpItem {
public:
//a=fingerprint:sha-256 22:14:B5:AF:66:12:C7:C7:8D:EF:4B:DE:40:25:ED:5D:8F:17:54:DD:88:33:C0:13:2E:FD:1A:FA:7E:7A:1B:79
// a=fingerprint:sha-256 22:14:B5:AF:66:12:C7:C7:8D:EF:4B:DE:40:25:ED:5D:8F:17:54:DD:88:33:C0:13:2E:FD:1A:FA:7E:7A:1B:79
std::string algorithm;
std::string hash;
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "fingerprint";}
const char *getKey() const override { return "fingerprint"; }
bool empty() const { return algorithm.empty() || hash.empty(); }
};
class SdpAttrSetup : public SdpItem {
public:
//a=setup:actpass
// a=setup:actpass
SdpAttrSetup() = default;
SdpAttrSetup(DtlsRole r) { role = r; }
DtlsRole role{DtlsRole::actpass};
DtlsRole role { DtlsRole::actpass };
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "setup";}
const char *getKey() const override { return "setup"; }
};
class SdpAttrMid : public SdpItem {
public:
SdpAttrMid() = default;
SdpAttrMid(std::string val) { value = std::move(val); }
//a=mid:audio
const char* getKey() const override { return "mid";}
// a=mid:audio
const char *getKey() const override { return "mid"; }
};
class SdpAttrExtmap : public SdpItem {
public:
//https://aggresss.blog.csdn.net/article/details/106436703
//a=extmap:1[/sendonly] urn:ietf:params:rtp-hdrext:ssrc-audio-level
// https://aggresss.blog.csdn.net/article/details/106436703
// a=extmap:1[/sendonly] urn:ietf:params:rtp-hdrext:ssrc-audio-level
uint8_t id;
RtpDirection direction{RtpDirection::invalid};
RtpDirection direction { RtpDirection::invalid };
std::string ext;
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "extmap";}
const char *getKey() const override { return "extmap"; }
};
class SdpAttrRtpMap : public SdpItem {
public:
//a=rtpmap:111 opus/48000/2
// a=rtpmap:111 opus/48000/2
uint8_t pt;
std::string codec;
uint32_t sample_rate;
uint32_t channel {0};
uint32_t channel { 0 };
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "rtpmap";}
const char *getKey() const override { return "rtpmap"; }
};
class SdpAttrRtcpFb : public SdpItem {
public:
//a=rtcp-fb:98 nack pli
//a=rtcp-fb:120 nack 支持 nack 重传nack (Negative-Acknowledgment) 。
//a=rtcp-fb:120 nack pli 支持 nack 关键帧重传PLI (Picture Loss Indication) 。
//a=rtcp-fb:120 ccm fir 支持编码层关键帧请求CCM (Codec Control Message)FIR (Full Intra Request ),通常与 nack pli 有同样的效果,但是 nack pli 是用于重传时的关键帧请求。
//a=rtcp-fb:120 goog-remb 支持 REMB (Receiver Estimated Maximum Bitrate) 。
//a=rtcp-fb:120 transport-cc 支持 TCC (Transport Congest Control) 。
// a=rtcp-fb:98 nack pli
// a=rtcp-fb:120 nack 支持 nack 重传nack (Negative-Acknowledgment) 。
// a=rtcp-fb:120 nack pli 支持 nack 关键帧重传PLI (Picture Loss Indication) 。
// a=rtcp-fb:120 ccm fir 支持编码层关键帧请求CCM (Codec Control Message)FIR (Full Intra Request ),通常与 nack pli 有同样的效果,但是 nack pli
// 是用于重传时的关键帧请求。 a=rtcp-fb:120 goog-remb 支持 REMB (Receiver Estimated Maximum Bitrate) 。 a=rtcp-fb:120 transport-cc 支持 TCC (Transport
// Congest Control) 。
uint8_t pt;
std::string rtcp_type;
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "rtcp-fb";}
const char *getKey() const override { return "rtcp-fb"; }
};
class SdpAttrFmtp : public SdpItem {
public:
//fmtp:96 level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=42e01f
// fmtp:96 level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=42e01f
uint8_t pt;
std::map<std::string/*key*/, std::string/*value*/, StrCaseCompare> fmtp;
std::map<std::string /*key*/, std::string /*value*/, StrCaseCompare> fmtp;
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "fmtp";}
const char *getKey() const override { return "fmtp"; }
};
class SdpAttrSSRC : public SdpItem {
public:
//a=ssrc:3245185839 cname:Cx4i/VTR51etgjT7
//a=ssrc:3245185839 msid:cb373bff-0fea-4edb-bc39-e49bb8e8e3b9 0cf7e597-36a2-4480-9796-69bf0955eef5
//a=ssrc:3245185839 mslabel:cb373bff-0fea-4edb-bc39-e49bb8e8e3b9
//a=ssrc:3245185839 label:0cf7e597-36a2-4480-9796-69bf0955eef5
//a=ssrc:<ssrc-id> <attribute>
//a=ssrc:<ssrc-id> <attribute>:<value>
//cname 是必须的msid/mslabel/label 这三个属性都是 WebRTC 自创的,或者说 Google 自创的,可以参考 https://tools.ietf.org/html/draft-ietf-mmusic-msid-17
// 理解它们三者的关系需要先了解三个概念RTP stream / MediaStreamTrack / MediaStream
//一个 a=ssrc 代表一个 RTP stream
//一个 MediaStreamTrack 通常包含一个或多个 RTP stream例如一个视频 MediaStreamTrack 中通常包含两个 RTP stream一个用于常规传输一个用于 nack 重传;
//一个 MediaStream 通常包含一个或多个 MediaStreamTrack ,例如 simulcast 场景下,一个 MediaStream 通常会包含三个不同编码质量的 MediaStreamTrack
//这种标记方式并不被 Firefox 认可,在 Firefox 生成的 SDP 中一个 a=ssrc 通常只有一行,例如:
//a=ssrc:3245185839 cname:Cx4i/VTR51etgjT7
// a=ssrc:3245185839 cname:Cx4i/VTR51etgjT7
// a=ssrc:3245185839 msid:cb373bff-0fea-4edb-bc39-e49bb8e8e3b9 0cf7e597-36a2-4480-9796-69bf0955eef5
// a=ssrc:3245185839 mslabel:cb373bff-0fea-4edb-bc39-e49bb8e8e3b9
// a=ssrc:3245185839 label:0cf7e597-36a2-4480-9796-69bf0955eef5
// a=ssrc:<ssrc-id> <attribute>
// a=ssrc:<ssrc-id> <attribute>:<value>
// cname 是必须的msid/mslabel/label 这三个属性都是 WebRTC 自创的,或者说 Google 自创的,可以参考 https://tools.ietf.org/html/draft-ietf-mmusic-msid-17
// 理解它们三者的关系需要先了解三个概念RTP stream / MediaStreamTrack / MediaStream
// 一个 a=ssrc 代表一个 RTP stream
// 一个 MediaStreamTrack 通常包含一个或多个 RTP stream例如一个视频 MediaStreamTrack 中通常包含两个 RTP stream一个用于常规传输一个用于 nack 重传;
// 一个 MediaStream 通常包含一个或多个 MediaStreamTrack ,例如 simulcast 场景下,一个 MediaStream 通常会包含三个不同编码质量的 MediaStreamTrack
// 这种标记方式并不被 Firefox 认可,在 Firefox 生成的 SDP 中一个 a=ssrc 通常只有一行,例如:
// a=ssrc:3245185839 cname:Cx4i/VTR51etgjT7
uint32_t ssrc;
std::string attribute;
std::string attribute_value;
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "ssrc";}
const char *getKey() const override { return "ssrc"; }
};
class SdpAttrSSRCGroup : public SdpItem {
public:
//a=ssrc-group 定义参考 RFC 5576(https://tools.ietf.org/html/rfc5576) ,用于描述多个 ssrc 之间的关联,常见的有两种:
//a=ssrc-group:FID 2430709021 3715850271
// FID (Flow Identification) 最初用在 FEC 的关联中WebRTC 中通常用于关联一组常规 RTP stream 和 重传 RTP stream 。
//a=ssrc-group:SIM 360918977 360918978 360918980
// 在 Chrome 独有的 SDP munging 风格的 simulcast 中使用,将三组编码质量由低到高的 MediaStreamTrack 关联在一起。
std::string type{"FID"};
// a=ssrc-group 定义参考 RFC 5576(https://tools.ietf.org/html/rfc5576) ,用于描述多个 ssrc 之间的关联,常见的有两种:
// a=ssrc-group:FID 2430709021 3715850271
// FID (Flow Identification) 最初用在 FEC 的关联中WebRTC 中通常用于关联一组常规 RTP stream 和 重传 RTP stream 。
// a=ssrc-group:SIM 360918977 360918978 360918980
// 在 Chrome 独有的 SDP munging 风格的 simulcast 中使用,将三组编码质量由低到高的 MediaStreamTrack 关联在一起。
std::string type { "FID" };
std::vector<uint32_t> ssrcs;
bool isFID() const { return type == "FID"; }
bool isSIM() const { return type == "SIM"; }
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "ssrc-group";}
const char *getKey() const override { return "ssrc-group"; }
};
class SdpAttrSctpMap : public SdpItem {
public:
//https://tools.ietf.org/html/draft-ietf-mmusic-sctp-sdp-05
//a=sctpmap:5000 webrtc-datachannel 1024
//a=sctpmap: sctpmap-number media-subtypes [streams]
// https://tools.ietf.org/html/draft-ietf-mmusic-sctp-sdp-05
// a=sctpmap:5000 webrtc-datachannel 1024
// a=sctpmap: sctpmap-number media-subtypes [streams]
uint16_t port = 0;
std::string subtypes;
uint32_t streams = 0;
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "sctpmap";}
const char *getKey() const override { return "sctpmap"; }
bool empty() const { return port == 0 && subtypes.empty() && streams == 0; }
};
class SdpAttrCandidate : public SdpItem {
public:
using Ptr = std::shared_ptr<SdpAttrCandidate>;
//https://tools.ietf.org/html/rfc5245
//15.1. "candidate" Attribute
//a=candidate:4 1 udp 2 192.168.1.7 58107 typ host
//a=candidate:<foundation> <component-id> <transport> <priority> <address> <port> typ <cand-type>
// https://tools.ietf.org/html/rfc5245
// 15.1. "candidate" Attribute
// a=candidate:4 1 udp 2 192.168.1.7 58107 typ host
// a=candidate:<foundation> <component-id> <transport> <priority> <address> <port> typ <cand-type>
std::string foundation;
//传输媒体的类型,1代表RTP;2代表 RTCP。
// 传输媒体的类型,1代表RTP;2代表 RTCP。
uint32_t component;
std::string transport {"udp"};
std::string transport { "udp" };
uint32_t priority;
std::string address;
uint16_t port;
std::string type;
std::vector<std::pair<std::string, std::string> > arr;
std::vector<std::pair<std::string, std::string>> arr;
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "candidate";}
const char *getKey() const override { return "candidate"; }
};
class SdpAttrMsid : public SdpItem{
class SdpAttrMsid : public SdpItem {
public:
const char* getKey() const override { return "msid";}
const char *getKey() const override { return "msid"; }
};
class SdpAttrExtmapAllowMixed : public SdpItem{
class SdpAttrExtmapAllowMixed : public SdpItem {
public:
const char* getKey() const override { return "extmap-allow-mixed";}
const char *getKey() const override { return "extmap-allow-mixed"; }
};
class SdpAttrSimulcast : public SdpItem{
class SdpAttrSimulcast : public SdpItem {
public:
//https://www.meetecho.com/blog/simulcast-janus-ssrc/
//https://tools.ietf.org/html/draft-ietf-mmusic-sdp-simulcast-14
const char* getKey() const override { return "simulcast";}
// https://www.meetecho.com/blog/simulcast-janus-ssrc/
// https://tools.ietf.org/html/draft-ietf-mmusic-sdp-simulcast-14
const char *getKey() const override { return "simulcast"; }
void parse(const std::string &str) override;
std::string toString() const override;
bool empty() const { return rids.empty(); }
@ -483,11 +470,11 @@ public:
std::vector<std::string> rids;
};
class SdpAttrRid : public SdpItem{
class SdpAttrRid : public SdpItem {
public:
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "rid";}
const char *getKey() const override { return "rid"; }
std::string direction;
std::string rid;
};
@ -507,8 +494,8 @@ public:
RtpDirection getDirection() const;
template<typename cls>
cls getItemClass(char key, const char *attr_key = nullptr) const{
template <typename cls>
cls getItemClass(char key, const char *attr_key = nullptr) const {
auto item = std::dynamic_pointer_cast<cls>(getItem(key, attr_key));
if (!item) {
return cls();
@ -516,7 +503,7 @@ public:
return *item;
}
std::string getStringItem(char key, const char *attr_key = nullptr) const{
std::string getStringItem(char key, const char *attr_key = nullptr) const {
auto item = getItem(key, attr_key);
if (!item) {
return "";
@ -526,7 +513,7 @@ public:
SdpItem::Ptr getItem(char key, const char *attr_key = nullptr) const;
template<typename cls>
template <typename cls>
std::vector<cls> getAllItem(char key_c, const char *attr_key = nullptr) const {
std::vector<cls> ret;
std::string key(1, key_c);
@ -555,7 +542,7 @@ private:
std::vector<SdpItem::Ptr> items;
};
class RtcSessionSdp : public RtcSdpBase{
class RtcSessionSdp : public RtcSdpBase {
public:
using Ptr = std::shared_ptr<RtcSessionSdp>;
int getVersion() const;
@ -572,7 +559,7 @@ public:
std::string getTimeZone() const;
std::string getEncryptKey() const;
std::string getRepeatTimes() const;
std::vector<RtcSdpBase> medias;
void parse(const std::string &str);
std::string toString() const override;
@ -580,45 +567,45 @@ public:
//////////////////////////////////////////////////////////////////
//ssrc相关信息
class RtcSSRC{
// ssrc相关信息
class RtcSSRC {
public:
uint32_t ssrc {0};
uint32_t rtx_ssrc {0};
uint32_t ssrc { 0 };
uint32_t rtx_ssrc { 0 };
std::string cname;
std::string msid;
std::string mslabel;
std::string label;
bool empty() const {return ssrc == 0 && cname.empty();}
bool empty() const { return ssrc == 0 && cname.empty(); }
};
//rtc传输编码方案
class RtcCodecPlan{
// rtc传输编码方案
class RtcCodecPlan {
public:
using Ptr = std::shared_ptr<RtcCodecPlan>;
uint8_t pt;
std::string codec;
uint32_t sample_rate;
//音频时有效
// 音频时有效
uint32_t channel = 0;
//rtcp反馈
// rtcp反馈
std::set<std::string> rtcp_fb;
std::map<std::string/*key*/, std::string/*value*/, StrCaseCompare> fmtp;
std::map<std::string /*key*/, std::string /*value*/, StrCaseCompare> fmtp;
std::string getFmtp(const char *key) const;
};
//rtc 媒体描述
class RtcMedia{
// rtc 媒体描述
class RtcMedia {
public:
TrackType type{TrackType::TrackInvalid};
TrackType type { TrackType::TrackInvalid };
std::string mid;
uint16_t port{0};
uint16_t port { 0 };
SdpConnection addr;
SdpBandwidth bandwidth;
std::string proto;
RtpDirection direction{RtpDirection::invalid};
RtpDirection direction { RtpDirection::invalid };
std::vector<RtcCodecPlan> plan;
//////// rtp ////////
@ -629,20 +616,20 @@ public:
std::vector<std::string> rtp_rids;
//////// rtcp ////////
bool rtcp_mux{false};
bool rtcp_rsize{false};
bool rtcp_mux { false };
bool rtcp_rsize { false };
SdpAttrRtcp rtcp_addr;
//////// ice ////////
bool ice_trickle{false};
bool ice_lite{false};
bool ice_renomination{false};
bool ice_trickle { false };
bool ice_lite { false };
bool ice_renomination { false };
std::string ice_ufrag;
std::string ice_pwd;
std::vector<SdpAttrCandidate> candidate;
//////// dtls ////////
DtlsRole role{DtlsRole::invalid};
DtlsRole role { DtlsRole::invalid };
SdpAttrFingerprint fingerprint;
//////// extmap ////////
@ -650,7 +637,7 @@ public:
//////// sctp ////////////
SdpAttrSctpMap sctpmap;
uint32_t sctp_port{0};
uint32_t sctp_port { 0 };
void checkValid() const;
const RtcCodecPlan *getPlan(uint8_t pt) const;
@ -679,7 +666,7 @@ public:
void checkValid() const;
std::string toString() const;
std::string toRtspSdp() const;
const RtcMedia *getMedia(TrackType type) const;
const RtcMedia *getMedia(TrackType type) const;
bool supportRtcpFb(const std::string &name, TrackType type = TrackType::TrackVideo) const;
bool supportSimulcast() const;
bool isOnlyDatachannel() const;
@ -705,7 +692,7 @@ public:
std::string ice_ufrag;
std::string ice_pwd;
RtpDirection direction{RtpDirection::invalid};
RtpDirection direction { RtpDirection::invalid };
SdpAttrFingerprint fingerprint;
std::set<std::string> rtcp_fb;
@ -752,6 +739,6 @@ private:
~SdpConst() = delete;
};
}// namespace mediakit
} // namespace mediakit
#endif //ZLMEDIAKIT_SDP_H
#endif // ZLMEDIAKIT_SDP_H

View File

@ -45,7 +45,7 @@ void WebRtcEchoTest::onCheckSdp(SdpType type, RtcSession &sdp) {
for (auto &m : sdp.media) {
for (auto &ssrc : m.rtp_rtx_ssrc) {
if (!ssrc.msid.empty()) {
ssrc.msid = "zlmediakit msid";
ssrc.msid = "zlmediakit-mslabel zlmediakit-label-" + m.mid;
}
}
}

View File

@ -27,7 +27,6 @@ protected:
void onRtp(const char *buf, size_t len, uint64_t stamp_ms) override;
void onRtcp(const char *buf, size_t len) override;
void onRecvRtp(MediaTrack &track, const std::string &rid, RtpPacket::Ptr rtp) override {};
void onBeforeEncryptRtp(const char *buf, int &len, void *ctx) override {};
void onBeforeEncryptRtcp(const char *buf, int &len, void *ctx) override {};

View File

@ -17,9 +17,8 @@ namespace mediakit {
WebRtcPlayer::Ptr WebRtcPlayer::create(const EventPoller::Ptr &poller,
const RtspMediaSource::Ptr &src,
const MediaInfo &info,
bool preferred_tcp) {
WebRtcPlayer::Ptr ret(new WebRtcPlayer(poller, src, info, preferred_tcp), [](WebRtcPlayer *ptr) {
const MediaInfo &info) {
WebRtcPlayer::Ptr ret(new WebRtcPlayer(poller, src, info), [](WebRtcPlayer *ptr) {
ptr->onDestory();
delete ptr;
});
@ -29,8 +28,7 @@ WebRtcPlayer::Ptr WebRtcPlayer::create(const EventPoller::Ptr &poller,
WebRtcPlayer::WebRtcPlayer(const EventPoller::Ptr &poller,
const RtspMediaSource::Ptr &src,
const MediaInfo &info,
bool preferred_tcp) : WebRtcTransportImp(poller,preferred_tcp) {
const MediaInfo &info) : WebRtcTransportImp(poller) {
_media_info = info;
_play_src = src;
CHECK(src);

View File

@ -19,7 +19,7 @@ namespace mediakit {
class WebRtcPlayer : public WebRtcTransportImp {
public:
using Ptr = std::shared_ptr<WebRtcPlayer>;
static Ptr create(const EventPoller::Ptr &poller, const RtspMediaSource::Ptr &src, const MediaInfo &info, bool preferred_tcp = false);
static Ptr create(const EventPoller::Ptr &poller, const RtspMediaSource::Ptr &src, const MediaInfo &info);
MediaInfo getMediaInfo() { return _media_info; }
protected:
@ -27,10 +27,9 @@ protected:
void onStartWebRTC() override;
void onDestory() override;
void onRtcConfigure(RtcConfigure &configure) const override;
void onRecvRtp(MediaTrack &track, const std::string &rid, RtpPacket::Ptr rtp) override {};
private:
WebRtcPlayer(const EventPoller::Ptr &poller, const RtspMediaSource::Ptr &src, const MediaInfo &info, bool preferred_tcp);
WebRtcPlayer(const EventPoller::Ptr &poller, const RtspMediaSource::Ptr &src, const MediaInfo &info);
private:
//媒体相关元数据

View File

@ -20,9 +20,8 @@ WebRtcPusher::Ptr WebRtcPusher::create(const EventPoller::Ptr &poller,
const RtspMediaSource::Ptr &src,
const std::shared_ptr<void> &ownership,
const MediaInfo &info,
const ProtocolOption &option,
bool preferred_tcp) {
WebRtcPusher::Ptr ret(new WebRtcPusher(poller, src, ownership, info, option,preferred_tcp), [](WebRtcPusher *ptr) {
const ProtocolOption &option) {
WebRtcPusher::Ptr ret(new WebRtcPusher(poller, src, ownership, info, option), [](WebRtcPusher *ptr) {
ptr->onDestory();
delete ptr;
});
@ -34,8 +33,7 @@ WebRtcPusher::WebRtcPusher(const EventPoller::Ptr &poller,
const RtspMediaSource::Ptr &src,
const std::shared_ptr<void> &ownership,
const MediaInfo &info,
const ProtocolOption &option,
bool preferred_tcp) : WebRtcTransportImp(poller,preferred_tcp) {
const ProtocolOption &option) : WebRtcTransportImp(poller) {
_media_info = info;
_push_src = src;
_push_src_ownership = ownership;

View File

@ -20,8 +20,7 @@ class WebRtcPusher : public WebRtcTransportImp, public MediaSourceEvent {
public:
using Ptr = std::shared_ptr<WebRtcPusher>;
static Ptr create(const EventPoller::Ptr &poller, const RtspMediaSource::Ptr &src,
const std::shared_ptr<void> &ownership, const MediaInfo &info, const ProtocolOption &option, bool preferred_tcp = false);
const std::shared_ptr<void> &ownership, const MediaInfo &info, const ProtocolOption &option);
protected:
///////WebRtcTransportImp override///////
@ -53,7 +52,7 @@ protected:
private:
WebRtcPusher(const EventPoller::Ptr &poller, const RtspMediaSource::Ptr &src,
const std::shared_ptr<void> &ownership, const MediaInfo &info, const ProtocolOption &option, bool preferred_tcp);
const std::shared_ptr<void> &ownership, const MediaInfo &info, const ProtocolOption &option);
private:
bool _simulcast = false;

View File

@ -57,6 +57,9 @@ const string kMinBitrate = RTC_FIELD "min_bitrate";
// 数据通道设置
const string kDataChannelEcho = RTC_FIELD "datachannel_echo";
// rtp丢包状态最长保留时间
const string kNackMaxMS = RTC_FIELD "nackMaxMS";
static onceToken token([]() {
mINI::Instance()[kTimeOutSec] = 15;
mINI::Instance()[kExternIP] = "";
@ -69,6 +72,8 @@ static onceToken token([]() {
mINI::Instance()[kMinBitrate] = 0;
mINI::Instance()[kDataChannelEcho] = true;
mINI::Instance()[kNackMaxMS] = 3 * 1000;
});
} // namespace RTC
@ -378,6 +383,12 @@ void WebRtcTransport::setRemoteDtlsFingerprint(const RtcSession &remote) {
}
void WebRtcTransport::onRtcConfigure(RtcConfigure &configure) const {
SdpAttrFingerprint fingerprint;
fingerprint.algorithm = _offer_sdp->media[0].fingerprint.algorithm;
fingerprint.hash = getFingerprint(fingerprint.algorithm, _dtls_transport);
configure.setDefaultSetting(
_ice_server->GetUsernameFragment(), _ice_server->GetPassword(), RtpDirection::sendrecv, fingerprint);
// 开启remb后关闭twcc因为开启twcc后remb无效
GET_CONFIG(size_t, remb_bit_rate, Rtc::kRembBitRate);
configure.enableTWCC(!remb_bit_rate);
@ -407,12 +418,7 @@ std::string WebRtcTransport::getAnswerSdp(const string &offer) {
setRemoteDtlsFingerprint(*_offer_sdp);
//// sdp 配置 ////
SdpAttrFingerprint fingerprint;
fingerprint.algorithm = _offer_sdp->media[0].fingerprint.algorithm;
fingerprint.hash = getFingerprint(fingerprint.algorithm, _dtls_transport);
RtcConfigure configure;
configure.setDefaultSetting(
_ice_server->GetUsernameFragment(), _ice_server->GetPassword(), RtpDirection::sendrecv, fingerprint);
onRtcConfigure(configure);
//// 生成answer sdp ////
@ -431,10 +437,6 @@ static bool isDtls(char *buf) {
return ((*buf > 19) && (*buf < 64));
}
static string getPeerAddress(RTC::TransportTuple *tuple) {
return tuple->get_peer_ip();
}
void WebRtcTransport::inputSockData(char *buf, int len, RTC::TransportTuple *tuple) {
if (RTC::StunPacket::IsStun((const uint8_t *)buf, len)) {
std::unique_ptr<RTC::StunPacket> packet(RTC::StunPacket::Parse((const uint8_t *)buf, len));
@ -451,7 +453,7 @@ void WebRtcTransport::inputSockData(char *buf, int len, RTC::TransportTuple *tup
}
if (isRtp(buf, len)) {
if (!_srtp_session_recv) {
WarnL << "received rtp packet when dtls not completed from:" << getPeerAddress(tuple);
WarnL << "received rtp packet when dtls not completed from:" << tuple->get_peer_ip();
return;
}
if (_srtp_session_recv->DecryptSrtp((uint8_t *)buf, &len)) {
@ -461,7 +463,7 @@ void WebRtcTransport::inputSockData(char *buf, int len, RTC::TransportTuple *tup
}
if (isRtcp(buf, len)) {
if (!_srtp_session_recv) {
WarnL << "received rtcp packet when dtls not completed from:" << getPeerAddress(tuple);
WarnL << "received rtcp packet when dtls not completed from:" << tuple->get_peer_ip();
return;
}
if (_srtp_session_recv->DecryptSrtcp((uint8_t *)buf, &len)) {
@ -533,8 +535,7 @@ void WebRtcTransportImp::OnDtlsTransportApplicationDataReceived(const RTC::DtlsT
#endif
}
WebRtcTransportImp::WebRtcTransportImp(const EventPoller::Ptr &poller,bool preferred_tcp)
: WebRtcTransport(poller), _preferred_tcp(preferred_tcp) {
WebRtcTransportImp::WebRtcTransportImp(const EventPoller::Ptr &poller) : WebRtcTransport(poller) {
InfoL << getIdentifier();
}
@ -674,7 +675,7 @@ void WebRtcTransportImp::onCheckAnswer(RtcSession &sdp) {
});
for (auto &m : sdp.media) {
m.addr.reset();
m.addr.address = extern_ips.empty() ? _localIp.empty() ? SockUtil::get_local_ip() : _localIp : extern_ips[0];
m.addr.address = extern_ips.empty() ? _local_ip.empty() ? SockUtil::get_local_ip() : _local_ip : extern_ips[0];
m.rtcp_addr.reset();
m.rtcp_addr.address = m.addr.address;
@ -769,7 +770,7 @@ void WebRtcTransportImp::onRtcConfigure(RtcConfigure &configure) const {
return ret;
});
if (extern_ips.empty()) {
std::string local_ip = _localIp.empty() ? SockUtil::get_local_ip() : _localIp;
std::string local_ip = _local_ip.empty() ? SockUtil::get_local_ip() : _local_ip;
if (local_udp_port) { configure.addCandidate(*makeIceCandidate(local_ip, local_udp_port, 120, "udp")); }
if (local_tcp_port) { configure.addCandidate(*makeIceCandidate(local_ip, local_tcp_port, _preferred_tcp ? 125 : 115, "tcp")); }
} else {
@ -783,12 +784,16 @@ void WebRtcTransportImp::onRtcConfigure(RtcConfigure &configure) const {
}
}
void WebRtcTransportImp::setIceCandidate(vector<SdpAttrCandidate> cands) {
_cands = std::move(cands);
void WebRtcTransportImp::setPreferredTcp(bool flag) {
_preferred_tcp = flag;
}
void WebRtcTransportImp::setLocalIp(const std::string &localIp) {
_localIp = localIp;
void WebRtcTransportImp::setLocalIp(std::string local_ip) {
_local_ip = std::move(local_ip);
}
void WebRtcTransportImp::setIceCandidate(vector<SdpAttrCandidate> cands) {
_cands = std::move(cands);
}
///////////////////////////////////////////////////////////////////
@ -800,7 +805,8 @@ public:
_on_nack = std::move(on_nack);
setOnSorted(std::move(cb));
//设置jitter buffer参数
RtpTrackImp::setParams(1024, NackContext::kNackMaxMS, 512);
GET_CONFIG(uint32_t, nack_maxms, Rtc::kNackMaxMS);
RtpTrackImp::setParams(1024, nack_maxms, 512);
_nack_ctx.setOnNack([this](const FCI_NACK &nack) { onNack(nack); });
}
@ -1278,21 +1284,14 @@ void WebRtcPluginManager::registerPlugin(const string &type, Plugin cb) {
_map_creator[type] = std::move(cb);
}
std::string exchangeSdp(const WebRtcInterface &exchanger, const std::string& offer) {
return const_cast<WebRtcInterface &>(exchanger).getAnswerSdp(offer);
}
void setLocalIp(const WebRtcInterface& exchanger, const std::string& localIp) {
return const_cast<WebRtcInterface &>(exchanger).setLocalIp(localIp);
}
void WebRtcPluginManager::setListener(Listener cb) {
lock_guard<mutex> lck(_mtx_creator);
_listener = std::move(cb);
}
void WebRtcPluginManager::getAnswerSdp(Session &sender, const string &type, const WebRtcArgs &args, const onCreateRtc &cb_in) {
onCreateRtc cb;
void WebRtcPluginManager::negotiateSdp(Session &sender, const string &type, const WebRtcArgs &args, const onCreateWebRtc &cb_in) {
onCreateWebRtc cb;
lock_guard<mutex> lck(_mtx_creator);
if (_listener) {
auto listener = _listener;
@ -1308,21 +1307,19 @@ void WebRtcPluginManager::getAnswerSdp(Session &sender, const string &type, cons
auto it = _map_creator.find(type);
if (it == _map_creator.end()) {
cb(WebRtcException(SockException(Err_other, "the type can not supported")));
cb_in(WebRtcException(SockException(Err_other, "the type can not supported")));
return;
}
it->second(sender, args, cb);
}
void echo_plugin(Session &sender, const WebRtcArgs &args, const WebRtcPluginManager::onCreateRtc &cb) {
void echo_plugin(Session &sender, const WebRtcArgs &args, const onCreateWebRtc &cb) {
cb(*WebRtcEchoTest::create(EventPollerPool::Instance().getPoller()));
}
void push_plugin(Session &sender, const WebRtcArgs &args, const WebRtcPluginManager::onCreateRtc &cb) {
void push_plugin(Session &sender, const WebRtcArgs &args, const onCreateWebRtc &cb) {
MediaInfo info(args["url"]);
bool preferred_tcp = args["preferred_tcp"];
Broadcast::PublishAuthInvoker invoker = [cb, info, preferred_tcp](const string &err, const ProtocolOption &option) mutable {
Broadcast::PublishAuthInvoker invoker = [cb, info](const string &err, const ProtocolOption &option) mutable {
if (!err.empty()) {
cb(WebRtcException(SockException(Err_other, err)));
return;
@ -1361,7 +1358,7 @@ void push_plugin(Session &sender, const WebRtcArgs &args, const WebRtcPluginMana
push_src_ownership = push_src->getOwnership();
push_src->setProtocolOption(option);
}
auto rtc = WebRtcPusher::create(EventPollerPool::Instance().getPoller(), push_src, push_src_ownership, info, option, preferred_tcp);
auto rtc = WebRtcPusher::create(EventPollerPool::Instance().getPoller(), push_src, push_src_ownership, info, option);
push_src->setListener(rtc);
cb(*rtc);
};
@ -1374,12 +1371,10 @@ void push_plugin(Session &sender, const WebRtcArgs &args, const WebRtcPluginMana
}
}
void play_plugin(Session &sender, const WebRtcArgs &args, const WebRtcPluginManager::onCreateRtc &cb) {
void play_plugin(Session &sender, const WebRtcArgs &args, const onCreateWebRtc &cb) {
MediaInfo info(args["url"]);
bool preferred_tcp = args["preferred_tcp"];
auto session_ptr = static_pointer_cast<Session>(sender.shared_from_this());
Broadcast::AuthInvoker invoker = [cb, info, session_ptr, preferred_tcp](const string &err) mutable {
Broadcast::AuthInvoker invoker = [cb, info, session_ptr](const string &err) mutable {
if (!err.empty()) {
cb(WebRtcException(SockException(Err_other, err)));
return;
@ -1395,7 +1390,7 @@ void play_plugin(Session &sender, const WebRtcArgs &args, const WebRtcPluginMana
}
// 还原成rtc目的是为了hook时识别哪种播放协议
info.schema = "rtc";
auto rtc = WebRtcPlayer::create(EventPollerPool::Instance().getPoller(), src, info, preferred_tcp);
auto rtc = WebRtcPlayer::create(EventPollerPool::Instance().getPoller(), src, info);
cb(*rtc);
});
};
@ -1408,39 +1403,63 @@ void play_plugin(Session &sender, const WebRtcArgs &args, const WebRtcPluginMana
}
}
static void set_webrtc_cands(const WebRtcArgs &args, const WebRtcInterface &rtc) {
vector<SdpAttrCandidate> cands;
static void setWebRtcArgs(const WebRtcArgs &args, WebRtcInterface &rtc) {
{
auto cand_str = trim(args["cand_udp"]);
auto ip_port = toolkit::split(cand_str, ":");
if (ip_port.size() == 2) {
static auto is_vaild_ip = [](const std::string &ip) -> bool {
int a, b, c, d;
return sscanf(ip.c_str(), "%d.%d.%d.%d", &a, &b, &c, &d) == 4;
};
std::string host = args["Host"];
if (!host.empty()) {
auto local_ip = host.substr(0, host.find(':'));
if (!is_vaild_ip(local_ip) || local_ip == "127.0.0.1") {
local_ip = "";
}
rtc.setLocalIp(std::move(local_ip));
}
}
bool preferred_tcp = args["preferred_tcp"];
{
rtc.setPreferredTcp(preferred_tcp);
}
{
vector<SdpAttrCandidate> cands;
{
auto cand_str = trim(args["cand_udp"]);
auto ip_port = toolkit::split(cand_str, ":");
if (ip_port.size() == 2) {
// udp优先
auto ice_cand = makeIceCandidate(ip_port[0], atoi(ip_port[1].data()), preferred_tcp ? 100 : 120, "udp");
cands.emplace_back(std::move(*ice_cand));
}
}
{
auto cand_str = trim(args["cand_tcp"]);
auto ip_port = toolkit::split(cand_str, ":");
if (ip_port.size() == 2) {
// tcp模式
auto ice_cand = makeIceCandidate(ip_port[0], atoi(ip_port[1].data()), preferred_tcp ? 120 : 100, "tcp");
cands.emplace_back(std::move(*ice_cand));
}
}
if (!cands.empty()) {
// udp优先
auto ice_cand = makeIceCandidate(ip_port[0], atoi(ip_port[1].data()), 120, "udp");
cands.emplace_back(std::move(*ice_cand));
rtc.setIceCandidate(std::move(cands));
}
}
{
auto cand_str = trim(args["cand_tcp"]);
auto ip_port = toolkit::split(cand_str, ":");
if (ip_port.size() == 2) {
// tcp模式
auto ice_cand = makeIceCandidate(ip_port[0], atoi(ip_port[1].data()), 100, "tcp");
cands.emplace_back(std::move(*ice_cand));
}
}
if (!cands.empty()) {
// udp优先
const_cast<WebRtcInterface &>(rtc).setIceCandidate(std::move(cands));
}
}
static onceToken s_rtc_auto_register([]() {
#if !defined (NDEBUG)
// debug模式才开启echo插件
WebRtcPluginManager::Instance().registerPlugin("echo", echo_plugin);
#endif
WebRtcPluginManager::Instance().registerPlugin("push", push_plugin);
WebRtcPluginManager::Instance().registerPlugin("play", play_plugin);
WebRtcPluginManager::Instance().setListener([](Session &sender, const std::string &type, const WebRtcArgs &args, const WebRtcInterface &rtc) {
set_webrtc_cands(args, rtc);
setWebRtcArgs(args, const_cast<WebRtcInterface&>(rtc));
});
});

View File

@ -42,13 +42,10 @@ public:
virtual const std::string& getIdentifier() const = 0;
virtual const std::string& deleteRandStr() const { static std::string s_null; return s_null; }
virtual void setIceCandidate(std::vector<SdpAttrCandidate> cands) {}
virtual void setLocalIp(const std::string &localIp) {}
virtual void setLocalIp(std::string localIp) {}
virtual void setPreferredTcp(bool flag) {}
};
std::string exchangeSdp(const WebRtcInterface &exchanger, const std::string& offer);
void setLocalIp(const WebRtcInterface &exchanger, const std::string &localIp);
class WebRtcException : public WebRtcInterface {
public:
WebRtcException(const SockException &ex) : _ex(ex) {};
@ -88,7 +85,7 @@ public:
* @param offer offer sdp
* @return answer sdp
*/
std::string getAnswerSdp(const std::string &offer) override;
std::string getAnswerSdp(const std::string &offer) override final;
/**
* id
@ -252,14 +249,16 @@ public:
void onSendRtp(const RtpPacket::Ptr &rtp, bool flush, bool rtx = false);
void createRtpChannel(const std::string &rid, uint32_t ssrc, MediaTrack &track);
void setIceCandidate(std::vector<SdpAttrCandidate> cands) override;
void removeTuple(RTC::TransportTuple* tuple);
void safeShutdown(const SockException &ex);
void setLocalIp(const std::string &localIp) override;
void setPreferredTcp(bool flag) override;
void setLocalIp(std::string local_ip) override;
void setIceCandidate(std::vector<SdpAttrCandidate> cands) override;
protected:
void OnIceServerSelectedTuple(const RTC::IceServer *iceServer, RTC::TransportTuple *tuple) override;
WebRtcTransportImp(const EventPoller::Ptr &poller,bool preferred_tcp = false);
WebRtcTransportImp(const EventPoller::Ptr &poller);
void OnDtlsTransportApplicationDataReceived(const RTC::DtlsTransport *dtlsTransport, const uint8_t *data, size_t len) override;
void onStartWebRTC() override;
void onSendSockData(Buffer::Ptr buf, bool flush = true, RTC::TransportTuple *tuple = nullptr) override;
@ -273,7 +272,7 @@ protected:
void onCreate() override;
void onDestory() override;
void onShutdown(const SockException &ex) override;
virtual void onRecvRtp(MediaTrack &track, const std::string &rid, RtpPacket::Ptr rtp) = 0;
virtual void onRecvRtp(MediaTrack &track, const std::string &rid, RtpPacket::Ptr rtp) {}
void updateTicker();
float getLossRate(TrackType type);
void onRtcpBye() override;
@ -289,7 +288,7 @@ private:
void onCheckAnswer(RtcSession &sdp);
private:
bool _preferred_tcp;
bool _preferred_tcp = false;
uint16_t _rtx_seq[2] = {0, 0};
//用掉的总流量
uint64_t _bytes_usage = 0;
@ -310,8 +309,8 @@ private:
//根据接收rtp的pt获取相关信息
std::unordered_map<uint8_t/*pt*/, std::unique_ptr<WrappedMediaTrack>> _pt_to_track;
std::vector<SdpAttrCandidate> _cands;
//源访问的hostip
std::string _localIp;
//http访问时的host ip
std::string _local_ip;
};
class WebRtcTransportManager {
@ -333,21 +332,20 @@ private:
class WebRtcArgs : public std::enable_shared_from_this<WebRtcArgs> {
public:
virtual ~WebRtcArgs() = default;
virtual variant operator[](const std::string &key) const = 0;
};
using onCreateWebRtc = std::function<void(const WebRtcInterface &rtc)>;
class WebRtcPluginManager {
public:
using onCreateRtc = std::function<void(const WebRtcInterface &rtc)>;
using Plugin = std::function<void(Session &sender, const WebRtcArgs &args, const onCreateRtc &cb)>;
using Plugin = std::function<void(Session &sender, const WebRtcArgs &args, const onCreateWebRtc &cb)>;
using Listener = std::function<void(Session &sender, const std::string &type, const WebRtcArgs &args, const WebRtcInterface &rtc)>;
static WebRtcPluginManager &Instance();
void registerPlugin(const std::string &type, Plugin cb);
void getAnswerSdp(Session &sender, const std::string &type, const WebRtcArgs &args, const onCreateRtc &cb);
void setListener(Listener cb);
void negotiateSdp(Session &sender, const std::string &type, const WebRtcArgs &args, const onCreateWebRtc &cb);
private:
WebRtcPluginManager() = default;

View File

@ -115,17 +115,10 @@
document.getElementsByName("method").forEach((el,idx) => {
el.checked = el.value === type;
el.onclick = function(e) {
let url = new URL(document.getElementById('streamUrl').value);
const url = new URL(document.getElementById('streamUrl').value);
url.searchParams.set("type",el.value);
document.getElementById('streamUrl').value = url.toString();
if(el.value == "play"){
recvOnly = true;
}else if(el.value == "echo"){
recvOnly = false;
}else{
recvOnly = false;
}
recvOnly = 'play' === el.value;
};
});
@ -145,6 +138,25 @@
let h = parseInt(res.pop());
let w = parseInt(res.pop());
const url = new URL(document.getElementById('streamUrl').value);
const newUrl = new URL(window.location.href);
let count = 0;
if (url.searchParams.has('app')) {
newUrl.searchParams.set('app', url.searchParams.get('app'));
count++;
}
if (url.searchParams.has('stream')) {
newUrl.searchParams.set('stream', url.searchParams.get('stream'));
count++;
}
if (url.searchParams.has('type')) {
newUrl.searchParams.set('type', url.searchParams.get('type'));
count++;
}
if (count > 0) {
window.history.pushState(null, null, newUrl);
}
player = new ZLMRTCClient.Endpoint(
{
element: document.getElementById('video'),// video 标签