Merge branch 'ZLMediaKit:master' into master

This commit is contained in:
li 2024-05-06 17:02:32 +08:00 committed by GitHub
commit aa13b29470
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
114 changed files with 3071 additions and 1436 deletions

@ -1 +1 @@
Subproject commit 9a545d7d09fc4b570c1d29a798622c7923111735
Subproject commit 26d54bbc7b1860a450434dce49bbc8fcbcbae88b

@ -1 +1 @@
Subproject commit a8a80e0738b052aa5671ef82a295ef388bd28e13
Subproject commit 527c0f5117b489fda78fcd123d446370ddd9ec9a

12
AUTHORS
View File

@ -44,7 +44,6 @@ Xinghua Zhao <(holychaossword@hotmail.com>
[Dw9](https://github.com/Dw9)
明月惊鹊 <mingyuejingque@gmail.com>
cgm <2958580318@qq.com>
hejilin <1724010622@qq.com>
alexliyu7352 <liyu7352@gmail.com>
cgm <2958580318@qq.com>
[haorui wang](https://github.com/HaoruiWang)
@ -98,3 +97,14 @@ WuPeng <wp@zafu.edu.cn>
[zjx94](https://github.com/zjx94)
[LeiZhi.Mai ](https://github.com/blueskiner)
[JiaHao](https://github.com/nashiracn)
[chdahuzi](https://github.com/chdahuzi)
[snysmtx](https://github.com/snysmtx)
[SetoKaiba](https://github.com/SetoKaiba)
[sandro-qiang](https://github.com/sandro-qiang)
[Paul Philippov](https://github.com/themactep)
[张传峰](https://github.com/zhang-chuanfeng)
[lidaofu-hub](https://github.com/lidaofu-hub)
[huangcaichun](https://github.com/huangcaichun)
[jamesZHANG500](https://github.com/jamesZHANG500)
[weidelong](https://github.com/wdl1697454803)
[小强先生](https://github.com/linshangqiang)

View File

@ -57,7 +57,8 @@ option(ENABLE_SCTP "Enable SCTP" ON)
option(ENABLE_WEBRTC "Enable WebRTC" ON)
option(ENABLE_X264 "Enable x264" OFF)
option(ENABLE_WEPOLL "Enable wepoll" ON)
option(DISABLE_REPORT "Disable report to report.zlmediakit.com" off)
option(ENABLE_VIDEOSTACK "Enable video stack" OFF)
option(DISABLE_REPORT "Disable report to report.zlmediakit.com" OFF)
option(USE_SOLUTION_FOLDERS "Enable solution dir supported" ON)
##############################################################################
# socket256k.0socket,使用系统内核默认值(设置为0仅对linux有效)
@ -141,8 +142,8 @@ if(GIT_FOUND)
endif()
configure_file(
${CMAKE_CURRENT_SOURCE_DIR}/version.h.ini
${CMAKE_CURRENT_BINARY_DIR}/version.h
${CMAKE_CURRENT_SOURCE_DIR}/ZLMVersion.h.ini
${CMAKE_CURRENT_BINARY_DIR}/ZLMVersion.h
@ONLY)
message(STATUS "Git version is ${BRANCH_NAME} ${COMMIT_HASH}/${COMMIT_TIME} ${BUILD_TIME}")
@ -191,13 +192,19 @@ if(UNIX)
set(COMPILE_OPTIONS_DEFAULT
"-fPIC"
"-Wall;-Wextra"
"-Wno-unused-function;-Wno-unused-parameter;-Wno-unused-variable"
"-Wno-unused-function;-Wno-unused-parameter;-Wno-unused-variable;-Wno-deprecated-declarations"
"-Wno-error=extra;-Wno-error=missing-field-initializers;-Wno-error=type-limits")
if("${CMAKE_BUILD_TYPE}" STREQUAL "Debug")
set(COMPILE_OPTIONS_DEFAULT ${COMPILE_OPTIONS_DEFAULT} "-g3")
else()
set(COMPILE_OPTIONS_DEFAULT ${COMPILE_OPTIONS_DEFAULT} "-g0")
endif()
elseif(WIN32)
if (MSVC)
set(COMPILE_OPTIONS_DEFAULT
# TODO: /wd4819
"/wd4566;/wd4819"
"/wd4566;/wd4819;/utf-8"
# warning C4530: C++ exception handler used, but unwind semantics are not enabled.
"/EHsc")
# disable Windows logo
@ -364,7 +371,7 @@ if(ENABLE_JEMALLOC_STATIC)
set(ENABLE_JEMALLOC_STAT OFF)
endif ()
include(Jemalloc)
include_directories(SYSTEM ${DEP_ROOT_DIR}/${JEMALLOC_NAME}/include/jemalloc)
include_directories(SYSTEM ${DEP_ROOT_DIR}/${JEMALLOC_NAME}/include)
link_directories(${DEP_ROOT_DIR}/${JEMALLOC_NAME}/lib)
#
# Used to affect subsequent lookup process
@ -526,3 +533,9 @@ endif ()
file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/www" DESTINATION ${EXECUTABLE_OUTPUT_PATH})
file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/conf/config.ini" DESTINATION ${EXECUTABLE_OUTPUT_PATH})
file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/default.pem" DESTINATION ${EXECUTABLE_OUTPUT_PATH})
# VideoStack
# Copy the default background image used by VideoStack when there is no video stream
if (ENABLE_VIDEOSTACK AND ENABLE_FFMPEG AND ENABLE_X264)
file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/conf/novideo.yuv" DESTINATION ${EXECUTABLE_OUTPUT_PATH})
endif ()

View File

@ -222,7 +222,7 @@ bash build_docker_images.sh
- 请关注微信公众号获取最新消息推送:
<img src=https://user-images.githubusercontent.com/11495632/232451702-4c50bc72-84d8-4c94-af2b-57290088ba7a.png width=15% />
- 也可以自愿有偿加入知识星球咨询和获取资料
- 也可以自愿有偿加入知识星球咨询、获取资料以及加入微信技术群
<img src= https://user-images.githubusercontent.com/11495632/231946329-aa8517b0-3cf5-49cf-8c75-a93ed58cb9d2.png width=30% />
@ -352,6 +352,17 @@ bash build_docker_images.sh
[zjx94](https://github.com/zjx94)
[LeiZhi.Mai ](https://github.com/blueskiner)
[JiaHao](https://github.com/nashiracn)
[chdahuzi](https://github.com/chdahuzi)
[snysmtx](https://github.com/snysmtx)
[SetoKaiba](https://github.com/SetoKaiba)
[sandro-qiang](https://github.com/sandro-qiang)
[Paul Philippov](https://github.com/themactep)
[张传峰](https://github.com/zhang-chuanfeng)
[lidaofu-hub](https://github.com/lidaofu-hub)
[huangcaichun](https://github.com/huangcaichun)
[jamesZHANG500](https://github.com/jamesZHANG500)
[weidelong](https://github.com/wdl1697454803)
[小强先生](https://github.com/linshangqiang)
同时感谢JetBrains对开源项目的支持本项目使用CLion开发与调试

View File

@ -279,12 +279,12 @@ git submodule update --init
return;
}
auto viedoTrack = strongPlayer->getTrack(TrackVideo);
if (!viedoTrack) {
auto videoTrack = strongPlayer->getTrack(TrackVideo);
if (!videoTrack) {
WarnL << "No video Track!";
return;
}
viedoTrack->addDelegate([](const Frame::Ptr &frame) {
videoTrack->addDelegate([](const Frame::Ptr &frame) {
//please decode video here
});
});
@ -510,6 +510,17 @@ Thanks to all those who have supported this project in various ways, including b
[zjx94](https://github.com/zjx94)
[LeiZhi.Mai ](https://github.com/blueskiner)
[JiaHao](https://github.com/nashiracn)
[chdahuzi](https://github.com/chdahuzi)
[snysmtx](https://github.com/snysmtx)
[SetoKaiba](https://github.com/SetoKaiba)
[sandro-qiang](https://github.com/sandro-qiang)
[Paul Philippov](https://github.com/themactep)
[张传峰](https://github.com/zhang-chuanfeng)
[lidaofu-hub](https://github.com/lidaofu-hub)
[huangcaichun](https://github.com/huangcaichun)
[jamesZHANG500](https://github.com/jamesZHANG500)
[weidelong](https://github.com/wdl1697454803)
[小强先生](https://github.com/linshangqiang)
Also thank to JetBrains for their support for open source project, we developed and debugged zlmediakit with CLion:

View File

@ -24,6 +24,10 @@
# define API_CALL
#endif
#ifndef _WIN32
#define _strdup strdup
#endif
#if defined(_WIN32) && defined(_MSC_VER)
# if !defined(GENERATE_EXPORT)
# if defined(MediaKitApi_EXPORTS)

View File

@ -177,6 +177,33 @@ typedef struct {
*/
void(API_CALL *on_mk_media_send_rtp_stop)(const char *vhost, const char *app, const char *stream, const char *ssrc, int err, const char *msg);
/**
* rtc sctp连接中///
* @param rtc_transport
*/
void(API_CALL *on_mk_rtc_sctp_connecting)(mk_rtc_transport rtc_transport);
void(API_CALL *on_mk_rtc_sctp_connected)(mk_rtc_transport rtc_transport);
void(API_CALL *on_mk_rtc_sctp_failed)(mk_rtc_transport rtc_transport);
void(API_CALL *on_mk_rtc_sctp_closed)(mk_rtc_transport rtc_transport);
/**
* rtc数据通道发送数据回调
* @param rtc_transport
* @param msg
* @param len
*/
void(API_CALL *on_mk_rtc_sctp_send)(mk_rtc_transport rtc_transport, const uint8_t *msg, size_t len);
/**
* rtc数据通道接收数据回调
* @param rtc_transport
* @param streamId id
* @param ppid id
* @param msg
* @param len
*/
void(API_CALL *on_mk_rtc_sctp_received)(mk_rtc_transport rtc_transport, uint16_t streamId, uint32_t ppid, const uint8_t *msg, size_t len);
} mk_events;

View File

@ -103,6 +103,16 @@ API_EXPORT int API_CALL mk_media_source_get_track_count(const mk_media_source ct
API_EXPORT mk_track API_CALL mk_media_source_get_track(const mk_media_source ctx, int index);
// MediaSource::broadcastMessage
API_EXPORT int API_CALL mk_media_source_broadcast_msg(const mk_media_source ctx, const char *msg, size_t len);
// MediaSource::getOriginUrl()
API_EXPORT const char* API_CALL mk_media_source_get_origin_url(const mk_media_source ctx);
// MediaSource::getOriginType()
API_EXPORT int API_CALL mk_media_source_get_origin_type(const mk_media_source ctx);
// MediaSource::getCreateStamp()
API_EXPORT uint64_t API_CALL mk_media_source_get_create_stamp(const mk_media_source ctx);
// MediaSource::isRecording() 0:hls,1:MP4
API_EXPORT int API_CALL mk_media_source_is_recording(const mk_media_source ctx, int type);
/**
* ZLMediaKit中被称作为MediaSource
@ -142,11 +152,11 @@ API_EXPORT void API_CALL mk_media_source_find(const char *schema,
void *user_data,
on_mk_media_source_find_cb cb);
API_EXPORT const mk_media_source API_CALL mk_media_source_find2(const char *schema,
const char *vhost,
const char *app,
const char *stream,
int from_mp4);
API_EXPORT mk_media_source API_CALL mk_media_source_find2(const char *schema,
const char *vhost,
const char *app,
const char *stream,
int from_mp4);
//MediaSource::for_each_media()
API_EXPORT void API_CALL mk_media_source_for_each(void *user_data, on_mk_media_source_find_cb cb, const char *schema,
const char *vhost, const char *app, const char *stream);
@ -352,6 +362,20 @@ API_EXPORT mk_auth_invoker API_CALL mk_auth_invoker_clone(const mk_auth_invoker
*/
API_EXPORT void API_CALL mk_auth_invoker_clone_release(const mk_auth_invoker ctx);
///////////////////////////////////////////WebRtcTransport/////////////////////////////////////////////
//WebRtcTransport对象的C映射
typedef struct mk_rtc_transport_t *mk_rtc_transport;
/**
* rtc数据通道
* @param ctx
* @param streamId id
* @param ppid id
* @param msg
* @param len
*/
API_EXPORT void API_CALL mk_rtc_send_datachannel(const mk_rtc_transport ctx, uint16_t streamId, uint32_t ppid, const char* msg, size_t len);
#ifdef __cplusplus
}
#endif

View File

@ -12,6 +12,7 @@
#define MK_PROXY_PLAYER_H_
#include "mk_common.h"
#include "mk_util.h"
#ifdef __cplusplus
extern "C" {
@ -31,6 +32,17 @@ typedef struct mk_proxy_player_t *mk_proxy_player;
*/
API_EXPORT mk_proxy_player API_CALL mk_proxy_player_create(const char *vhost, const char *app, const char *stream, int hls_enabled, int mp4_enabled);
/**
*
* @param vhost __defaultVhost__
* @param app
* @param stream
* @param option ProtocolOption相关配置
* @return
*/
API_EXPORT mk_proxy_player API_CALL mk_proxy_player_create2(const char *vhost, const char *app, const char *stream, mk_ini option);
/**
*
* @param ctx

View File

@ -33,12 +33,11 @@ static TcpServer::Ptr shell_server;
#ifdef ENABLE_RTPPROXY
#include "Rtp/RtpServer.h"
static std::shared_ptr<RtpServer> rtpServer;
static RtpServer::Ptr rtpServer;
#endif
#ifdef ENABLE_WEBRTC
#include "../webrtc/WebRtcSession.h"
#include "../webrtc/WebRtcTransport.h"
static UdpServer::Ptr rtcServer_udp;
static TcpServer::Ptr rtcServer_tcp;
#endif
@ -305,10 +304,10 @@ API_EXPORT void API_CALL mk_webrtc_get_answer_sdp2(void *user_data, on_user_data
std::string offer_str = offer;
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
auto args = std::make_shared<WebRtcArgsUrl>(url);
WebRtcPluginManager::Instance().getAnswerSdp(*session, type, *args,
[offer_str, session, ptr, cb](const WebRtcInterface &exchanger) mutable {
WebRtcPluginManager::Instance().negotiateSdp(*session, type, *args, [offer_str, session, ptr, cb](const WebRtcInterface &exchanger) mutable {
auto &handler = const_cast<WebRtcInterface &>(exchanger);
try {
auto sdp_answer = exchangeSdp(exchanger, offer_str);
auto sdp_answer = handler.getAnswerSdp(offer_str);
cb(ptr.get(), sdp_answer.data(), nullptr);
} catch (std::exception &ex) {
cb(ptr.get(), nullptr, ex.what());

View File

@ -15,6 +15,10 @@
#include "Rtsp/RtspSession.h"
#include "Record/MP4Recorder.h"
#ifdef ENABLE_WEBRTC
#include "webrtc/WebRtcTransport.h"
#endif
using namespace toolkit;
using namespace mediakit;
@ -167,6 +171,43 @@ API_EXPORT void API_CALL mk_events_listen(const mk_events *events){
sender.getMediaTuple().stream.c_str(), ssrc.c_str(), ex.getErrCode(), ex.what());
}
});
#ifdef ENABLE_WEBRTC
NoticeCenter::Instance().addListener(&s_tag, Broadcast::kBroadcastRtcSctpConnecting,[](BroadcastRtcSctpConnectArgs){
if (s_events.on_mk_rtc_sctp_connecting) {
s_events.on_mk_rtc_sctp_connecting((mk_rtc_transport)&sender);
}
});
NoticeCenter::Instance().addListener(&s_tag, Broadcast::kBroadcastRtcSctpConnected,[](BroadcastRtcSctpConnectArgs){
if (s_events.on_mk_rtc_sctp_connected) {
s_events.on_mk_rtc_sctp_connected((mk_rtc_transport)&sender);
}
});
NoticeCenter::Instance().addListener(&s_tag, Broadcast::kBroadcastRtcSctpFailed,[](BroadcastRtcSctpConnectArgs){
if (s_events.on_mk_rtc_sctp_failed) {
s_events.on_mk_rtc_sctp_failed((mk_rtc_transport)&sender);
}
});
NoticeCenter::Instance().addListener(&s_tag, Broadcast::kBroadcastRtcSctpClosed,[](BroadcastRtcSctpConnectArgs){
if (s_events.on_mk_rtc_sctp_closed) {
s_events.on_mk_rtc_sctp_closed((mk_rtc_transport)&sender);
}
});
NoticeCenter::Instance().addListener(&s_tag, Broadcast::kBroadcastRtcSctpSend,[](BroadcastRtcSctpSendArgs){
if (s_events.on_mk_rtc_sctp_send) {
s_events.on_mk_rtc_sctp_send((mk_rtc_transport)&sender, data, len);
}
});
NoticeCenter::Instance().addListener(&s_tag, Broadcast::kBroadcastRtcSctpReceived,[](BroadcastRtcSctpReceivedArgs){
if (s_events.on_mk_rtc_sctp_received) {
s_events.on_mk_rtc_sctp_received((mk_rtc_transport)&sender, streamId, ppid, msg, len);
}
});
#endif
});
}

View File

@ -18,6 +18,10 @@
#include "Http/HttpClient.h"
#include "Rtsp/RtspSession.h"
#ifdef ENABLE_WEBRTC
#include "webrtc/WebRtcTransport.h"
#endif
using namespace toolkit;
using namespace mediakit;
@ -126,7 +130,7 @@ API_EXPORT const char* API_CALL mk_parser_get_content(const mk_parser ctx, size_
API_EXPORT const char* API_CALL mk_media_info_get_params(const mk_media_info ctx){
assert(ctx);
MediaInfo *info = (MediaInfo *)ctx;
return info->param_strs.c_str();
return info->params.c_str();
}
API_EXPORT const char* API_CALL mk_media_info_get_schema(const mk_media_info ctx){
@ -224,6 +228,30 @@ API_EXPORT int API_CALL mk_media_source_broadcast_msg(const mk_media_source ctx,
return src->broadcastMessage(any);
}
API_EXPORT const char* API_CALL mk_media_source_get_origin_url(const mk_media_source ctx) {
assert(ctx);
MediaSource *src = (MediaSource *)ctx;
return _strdup(src->getOriginUrl().c_str());
}
API_EXPORT int API_CALL mk_media_source_get_origin_type(const mk_media_source ctx) {
assert(ctx);
MediaSource *src = (MediaSource *)ctx;
return static_cast<int>(src->getOriginType());
}
API_EXPORT uint64_t API_CALL mk_media_source_get_create_stamp(const mk_media_source ctx) {
assert(ctx);
MediaSource *src = (MediaSource *)ctx;
return src->getCreateStamp();
}
API_EXPORT int API_CALL mk_media_source_is_recording(const mk_media_source ctx,int type) {
assert(ctx);
MediaSource *src = (MediaSource *)ctx;
return src->isRecording((Recorder::type)type);
}
API_EXPORT int API_CALL mk_media_source_close(const mk_media_source ctx,int force){
assert(ctx);
MediaSource *src = (MediaSource *)ctx;
@ -274,11 +302,11 @@ API_EXPORT void API_CALL mk_media_source_find(const char *schema,
cb(user_data, (mk_media_source)src.get());
}
API_EXPORT const mk_media_source API_CALL mk_media_source_find2(const char *schema,
const char *vhost,
const char *app,
const char *stream,
int from_mp4) {
API_EXPORT mk_media_source API_CALL mk_media_source_find2(const char *schema,
const char *vhost,
const char *app,
const char *stream,
int from_mp4) {
assert(schema && vhost && app && stream);
auto src = MediaSource::find(schema, vhost, app, stream, from_mp4);
return (mk_media_source)src.get();
@ -497,4 +525,22 @@ API_EXPORT void API_CALL mk_auth_invoker_clone_release(const mk_auth_invoker ctx
assert(ctx);
Broadcast::AuthInvoker *invoker = (Broadcast::AuthInvoker *)ctx;
delete invoker;
}
}
///////////////////////////////////////////WebRtcTransport/////////////////////////////////////////////
API_EXPORT void API_CALL mk_rtc_send_datachannel(const mk_rtc_transport ctx, uint16_t streamId, uint32_t ppid, const char *msg, size_t len) {
#ifdef ENABLE_WEBRTC
assert(ctx && msg);
WebRtcTransport *transport = (WebRtcTransport *)ctx;
std::string msg_str(msg, len);
std::weak_ptr<WebRtcTransport> weak_trans = transport->shared_from_this();
transport->getPoller()->async([streamId, ppid, msg_str, weak_trans]() {
// 切换线程后再操作
if (auto trans = weak_trans.lock()) {
trans->sendDatachannel(streamId, ppid, msg_str.c_str(), msg_str.size());
}
});
#else
WarnL << "未启用webrtc功能, 编译时请开启ENABLE_WEBRTC";
#endif
}

View File

@ -10,6 +10,7 @@
#include "mk_proxyplayer.h"
#include "Player/PlayerProxy.h"
#include "mk_util.h"
using namespace toolkit;
using namespace mediakit;
@ -23,6 +24,14 @@ API_EXPORT mk_proxy_player API_CALL mk_proxy_player_create(const char *vhost, co
return (mk_proxy_player) obj;
}
API_EXPORT mk_proxy_player API_CALL mk_proxy_player_create2(const char *vhost, const char *app, const char *stream, mk_ini ini) {
assert(vhost && app && stream);
ProtocolOption option(*((mINI *)ini));
PlayerProxy::Ptr *obj(new PlayerProxy::Ptr(new PlayerProxy(vhost, app, stream, option)));
return (mk_proxy_player)obj;
}
API_EXPORT void API_CALL mk_proxy_player_release(mk_proxy_player ctx) {
assert(ctx);
PlayerProxy::Ptr *obj = (PlayerProxy::Ptr *) ctx;

View File

@ -21,10 +21,6 @@ using namespace std;
using namespace toolkit;
using namespace mediakit;
#ifndef _WIN32
#define _strdup strdup
#endif
API_EXPORT void API_CALL mk_free(void *ptr) {
free(ptr);
}

152
api/tests/h264_pusher.c Normal file
View File

@ -0,0 +1,152 @@
/*
* Copyright (c) 2016-present The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/ZLMediaKit/ZLMediaKit).
*
* Use of this source code is governed by MIT-like license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#include <signal.h>
#include <stdio.h>
#include <string.h>
#ifdef _WIN32
#include "windows.h"
#else
#include "unistd.h"
#endif
#include "mk_mediakit.h"
static int exit_flag = 0;
static void s_on_exit(int sig) {
exit_flag = 1;
}
static void on_h264_frame(void *user_data, mk_h264_splitter splitter, const char *data, int size) {
#ifdef _WIN32
Sleep(40);
#else
usleep(40 * 1000);
#endif
static int dts = 0;
mk_frame frame = mk_frame_create(MKCodecH264, dts, dts, data, size, NULL, NULL);
dts += 40;
mk_media_input_frame((mk_media)user_data, frame);
mk_frame_unref(frame);
}
typedef struct {
mk_pusher pusher;
char *url;
} Context;
void release_context(void *user_data) {
Context *ptr = (Context *)user_data;
if (ptr->pusher) {
mk_pusher_release(ptr->pusher);
}
free(ptr->url);
free(ptr);
log_info("停止推流");
}
void on_push_result(void *user_data, int err_code, const char *err_msg) {
Context *ptr = (Context *)user_data;
if (err_code == 0) {
log_info("推流成功: %s", ptr->url);
} else {
log_warn("推流%s失败: %d(%s)", ptr->url, err_code, err_msg);
}
}
void on_push_shutdown(void *user_data, int err_code, const char *err_msg) {
Context *ptr = (Context *)user_data;
log_warn("推流%s中断: %d(%s)", ptr->url, err_code, err_msg);
}
void API_CALL on_regist(void *user_data, mk_media_source sender, int regist) {
Context *ptr = (Context *)user_data;
const char *schema = mk_media_source_get_schema(sender);
if (strstr(ptr->url, schema) != ptr->url) {
// 协议匹配失败
return;
}
if (!regist) {
// 注销
if (ptr->pusher) {
mk_pusher_release(ptr->pusher);
ptr->pusher = NULL;
}
} else {
// 注册
if (!ptr->pusher) {
ptr->pusher = mk_pusher_create_src(sender);
mk_pusher_set_on_result2(ptr->pusher, on_push_result, ptr, NULL);
mk_pusher_set_on_shutdown2(ptr->pusher, on_push_shutdown, ptr, NULL);
// 开始推流
mk_pusher_publish(ptr->pusher, ptr->url);
}
}
}
int main(int argc, char *argv[]) {
if (argc < 3) {
log_error("Usage: /path/to/h264/file rtsp_or_rtmp_url");
return -1;
}
mk_config config = { .ini = NULL,
.ini_is_path = 1,
.log_level = 0,
.log_mask = LOG_CONSOLE,
.log_file_path = NULL,
.log_file_days = 0,
.ssl = NULL,
.ssl_is_path = 1,
.ssl_pwd = NULL,
.thread_num = 0 };
mk_env_init(&config);
FILE *fp = fopen(argv[1], "rb");
if (!fp) {
log_error("打开文件失败!");
return -1;
}
mk_media media = mk_media_create("__defaultVhost__", "live", "test", 0, 0, 0);
// h264的codec
codec_args v_args = { 0 };
mk_track v_track = mk_track_create(MKCodecH264, &v_args);
mk_media_init_track(media, v_track);
mk_media_init_complete(media);
mk_track_unref(v_track);
Context *ctx = (Context *)malloc(sizeof(Context));
memset(ctx, 0, sizeof(Context));
ctx->url = strdup(argv[2]);
mk_media_set_on_regist2(media, on_regist, ctx, release_context);
// 创建h264分帧器
mk_h264_splitter splitter = mk_h264_splitter_create(on_h264_frame, media, 0);
signal(SIGINT, s_on_exit); // 设置退出信号
signal(SIGTERM, s_on_exit); // 设置退出信号
char buf[1024];
while (!exit_flag) {
int size = fread(buf, 1, sizeof(buf) - 1, fp);
if (size > 0) {
mk_h264_splitter_input_data(splitter, buf, size);
} else {
// 文件读完了,重新开始
fseek(fp, 0, SEEK_SET);
}
}
log_info("文件读取完毕");
mk_h264_splitter_release(splitter);
mk_media_release(media);
fclose(fp);
return 0;
}

View File

@ -24,7 +24,7 @@ bin=/usr/bin/ffmpeg
#FFmpeg拉流再推流的命令模板通过该模板可以设置再编码的一些参数
cmd=%s -re -i %s -c:a aac -strict -2 -ar 44100 -ab 48k -c:v libx264 -f flv %s
#FFmpeg生成截图的命令可以通过修改该配置改变截图分辨率或质量
snap=%s -i %s -y -f mjpeg -frames:v 1 %s
snap=%s -i %s -y -f mjpeg -frames:v 1 -an %s
#FFmpeg日志的路径如果置空则不生成FFmpeg日志
#可以为相对(相对于本可执行程序目录)或绝对路径
log=./ffmpeg/ffmpeg.log
@ -135,6 +135,8 @@ segDur=2
#m3u8索引中,hls保留切片个数(实际保留切片个数大2~3个)
#如果设置为0则不删除切片而是保存为点播
segNum=3
#HLS切片延迟个数大于0将生成hls_delay.m3u8文件0则不生成
segDelay=0
#HLS切片从m3u8文件中移除后继续保留在磁盘上的个数
segRetain=5
#是否广播 hls切片(ts/fmp4)完成通知(on_record_ts)
@ -217,7 +219,7 @@ timeout_sec=15
retry_count=3
[http]
#http服务器字符编码windows上默认gb2312
#http服务器字符编码
charSet=utf-8
#http链接超时时间
keepAliveSecond=30
@ -275,6 +277,8 @@ sampleMS=500
fastStart=0
#MP4点播(rtsp/rtmp/http-flv/ws-flv)是否循环播放文件
fileRepeat=0
#MP4录制写文件格式是否采用fmp4启用的话断电未完成录制的文件也能正常打开
enableFmp4=0
[rtmp]
#rtmp必须在此时间内完成握手否则服务器会断开链接单位秒
@ -327,6 +331,13 @@ opus_pt=100
#如果不调用startSendRtp相关接口可以置0节省内存
gop_cache=1
#国标发送g711 rtp 打包时每个包的语音时长是多少默认是100 ms范围为20~180ms (gb28181-2016c.2.4规定)
#最好为20 的倍数程序自动向20的倍数取整
rtp_g711_dur_ms = 100
#udp接收数据socket buffer大小配置
#4*1024*1024=4196304
udp_recv_socket_buffer=4194304
[rtc]
#rtc播放推流、播放超时时间
timeoutSec=15
@ -346,7 +357,7 @@ tcpPort = 8000
rembBitRate=0
#rtc支持的音频codec类型,在前面的优先级更高
#以下范例为所有支持的音频codec
preferredCodecA=PCMU,PCMA,opus,mpeg4-generic
preferredCodecA=PCMA,PCMU,opus,mpeg4-generic
#rtc支持的视频codec类型,在前面的优先级更高
#以下范例为所有支持的视频codec
preferredCodecV=H264,H265,AV1,VP9,VP8

1
conf/novideo.yuv Normal file

File diff suppressed because one or more lines are too long

View File

@ -17,7 +17,7 @@ using namespace toolkit;
namespace mediakit {
void AACRtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt) {
CHECK(pkt->size() > 2);
CHECK_RET(pkt->size() > 2);
if (pkt->isConfigFrame()) {
getTrack()->setExtraData((uint8_t *)pkt->data() + 2, pkt->size() - 2);
return;

View File

@ -8,13 +8,27 @@ G711RtpEncoder::G711RtpEncoder(CodecId codec, uint32_t channels){
_channels = channels;
}
void G711RtpEncoder::setOpt(int opt, const toolkit::Any &param) {
if (opt == RTP_ENCODER_PKT_DUR_MS) {
if (param.is<uint32_t>()) {
auto dur = param.get<uint32_t>();
if (dur < 20 || dur > 180) {
WarnL << "set g711 rtp encoder duration ms failed for " << dur;
return;
}
// 向上 20ms 取整
_pkt_dur_ms = (dur + 19) / 20 * 20;
}
}
}
bool G711RtpEncoder::inputFrame(const Frame::Ptr &frame) {
auto dur = (_cache_frame->size() - _cache_frame->prefixSize()) / (8 * _channels);
auto next_pts = _cache_frame->pts() + dur;
if (next_pts == 0) {
_cache_frame->_pts = frame->pts();
} else {
if ((next_pts + 20) < frame->pts()) { // 有丢包超过20ms
if ((next_pts + _pkt_dur_ms) < frame->pts()) { // 有丢包超过20ms
_cache_frame->_pts = frame->pts() - dur;
}
}
@ -24,24 +38,20 @@ bool G711RtpEncoder::inputFrame(const Frame::Ptr &frame) {
auto ptr = _cache_frame->data() + _cache_frame->prefixSize();
auto len = _cache_frame->size() - _cache_frame->prefixSize();
auto remain_size = len;
auto max_size = 160 * _channels; // 20 ms per rtp
int n = 0;
bool mark = false;
size_t max_size = 160 * _channels * _pkt_dur_ms / 20; // 20 ms per 160 byte
size_t n = 0;
bool mark = true;
while (remain_size >= max_size) {
size_t rtp_size;
if (remain_size >= max_size) {
rtp_size = max_size;
} else {
break;
}
assert(remain_size >= max_size);
const size_t rtp_size = max_size;
n++;
stamp += 20;
stamp += _pkt_dur_ms;
RtpCodec::inputRtp(getRtpInfo().makeRtp(TrackAudio, ptr, rtp_size, mark, stamp), false);
ptr += rtp_size;
remain_size -= rtp_size;
}
_cache_frame->_buffer.erase(0, n * max_size);
_cache_frame->_pts += 20 * n;
_cache_frame->_pts += (uint64_t)_pkt_dur_ms * n;
return len > 0;
}

View File

@ -36,8 +36,11 @@ public:
*/
bool inputFrame(const Frame::Ptr &frame) override;
void setOpt(int opt, const toolkit::Any &param) override;
private:
uint32_t _channels = 1;
uint32_t _pkt_dur_ms = 20;
FrameImp::Ptr _cache_frame;
};

View File

@ -18,12 +18,12 @@ namespace mediakit {
void H264RtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt) {
if (pkt->isConfigFrame()) {
CHECK(pkt->size() > 5);
CHECK_RET(pkt->size() > 5);
getTrack()->setExtraData((uint8_t *)pkt->data() + 5, pkt->size() - 5);
return;
}
CHECK(pkt->size() > 9);
CHECK_RET(pkt->size() > 9);
uint8_t *cts_ptr = (uint8_t *)(pkt->buffer.data() + 2);
int32_t cts = (((cts_ptr[0] << 16) | (cts_ptr[1] << 8) | (cts_ptr[2])) + 0xff800000) ^ 0xff800000;
auto pts = pkt->time_stamp + cts;

View File

@ -44,7 +44,7 @@ void H265RtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt) {
auto data = (uint8_t *)pkt->data() + RtmpPacketInfo::kEnhancedRtmpHeaderSize;
auto size = pkt->size() - RtmpPacketInfo::kEnhancedRtmpHeaderSize;
auto pts = pkt->time_stamp;
CHECK(size > 3);
CHECK_RET(size > 3);
if (RtmpPacketType::PacketTypeCodedFrames == _info.video.pkt_type) {
// SI24 = [CompositionTime Offset]
int32_t cts = (((data[0] << 16) | (data[1] << 8) | (data[2])) + 0xff800000) ^ 0xff800000;
@ -52,7 +52,7 @@ void H265RtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt) {
data += 3;
size -= 3;
}
CHECK(size > 4);
CHECK_RET(size > 4);
splitFrame(data, size, pkt->time_stamp, pts);
break;
}
@ -63,12 +63,12 @@ void H265RtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt) {
// 国内扩展(12) H265 rtmp
if (pkt->isConfigFrame()) {
CHECK(pkt->size() > 5);
CHECK_RET(pkt->size() > 5);
getTrack()->setExtraData((uint8_t *)pkt->data() + 5, pkt->size() - 5);
return;
}
CHECK(pkt->size() > 9);
CHECK_RET(pkt->size() > 9);
uint8_t *cts_ptr = (uint8_t *)(pkt->buffer.data() + 2);
int32_t cts = (((cts_ptr[0] << 16) | (cts_ptr[1] << 8) | (cts_ptr[2])) + 0xff800000) ^ 0xff800000;
auto pts = pkt->time_stamp + cts;

View File

@ -605,6 +605,7 @@ void JPEGRtpEncoder::rtpSendJpeg(const uint8_t *buf, int size, uint64_t pts, uin
int i;
int default_huffman_tables = 0;
uint8_t *out = nullptr;
uint16_t restart_interval = 0;
/* preparse the header for getting some info */
for (i = 0; i < size; i++) {
@ -714,6 +715,9 @@ void JPEGRtpEncoder::rtpSendJpeg(const uint8_t *buf, int size, uint64_t pts, uin
return;
}
break;
} else if (buf[i + 1] == DRI) {
type |= 0x40;
restart_interval = AV_RB16(&buf[i + 4]);
}
}
if (default_huffman_tables && default_huffman_tables != 31) {
@ -744,6 +748,9 @@ void JPEGRtpEncoder::rtpSendJpeg(const uint8_t *buf, int size, uint64_t pts, uin
if (off == 0 && nb_qtables)
hdr_size += 4 + 64 * nb_qtables;
if (type & 0x40)
hdr_size += 4;
/* payload max in one packet */
len = MIN(size, (int)getRtpInfo().getMaxSize() - hdr_size);
@ -759,6 +766,13 @@ void JPEGRtpEncoder::rtpSendJpeg(const uint8_t *buf, int size, uint64_t pts, uin
bytestream_put_byte(&p, w);
bytestream_put_byte(&p, h);
/* set dri */
if (type & 0x40) {
bytestream_put_be16(&p, restart_interval);
bytestream_put_byte(&p, 0xff);
bytestream_put_byte(&p, 0xff);
}
if (off == 0 && nb_qtables) {
/* set quantization tables header */
bytestream_put_byte(&p, 0);

View File

@ -18,7 +18,7 @@
%bcond_with cxx_api
Name: ZLMediaKit
Version: 5.0.0
Version: 8.0.0
Release: 1%{?dist}
Summary: A lightweight, high performance and stable stream server and client framework based on C++11.

View File

@ -31,7 +31,9 @@ if(PKG_CONFIG_FOUND)
list(APPEND LINK_LIBRARIES PkgConfig::SDL2)
message(STATUS "found library: ${SDL2_LIBRARIES}")
endif()
else()
endif()
if(NOT SDL2_FOUND)
find_package(SDL2 QUIET)
if(SDL2_FOUND)
include_directories(SYSTEM ${SDL2_INCLUDE_DIR})

View File

@ -10,6 +10,7 @@
#include <signal.h>
#include "Util/logger.h"
#include "Util/util.h"
#include <iostream>
#include "Common/config.h"
#include "Rtsp/UDPServer.h"
@ -48,80 +49,76 @@ int WINAPI WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstanc, LPSTR lpCmdLine,
int main(int argc, char *argv[]) {
#endif
static char *url = argv[1];
//设置退出信号处理函数
signal(SIGINT, [](int) { SDLDisplayerHelper::Instance().shutdown(); });
//设置日志
Logger::Instance().add(std::make_shared<ConsoleChannel>());
Logger::Instance().setWriter(std::make_shared<AsyncLogWriter>());
{
// 设置退出信号处理函数
signal(SIGINT, [](int) { SDLDisplayerHelper::Instance().shutdown(); });
// 设置日志
Logger::Instance().add(std::make_shared<ConsoleChannel>());
Logger::Instance().setWriter(std::make_shared<AsyncLogWriter>());
if (argc < 3) {
ErrorL << "\r\n测试方法:./test_player rtxp_url rtp_type\r\n"
<< "例如:./test_player rtsp://admin:123456@127.0.0.1/live/0 0\r\n";
return 0;
}
auto player = std::make_shared<MediaPlayer>();
//sdl要求在main线程初始化
auto displayer = std::make_shared<YuvDisplayer>(nullptr, url);
weak_ptr<MediaPlayer> weakPlayer = player;
player->setOnPlayResult([weakPlayer, displayer](const SockException &ex) {
InfoL << "OnPlayResult:" << ex.what();
auto strongPlayer = weakPlayer.lock();
if (ex || !strongPlayer) {
return;
if (argc < 3) {
ErrorL << "\r\n测试方法:./test_player rtxp_url rtp_type\r\n"
<< "例如:./test_player rtsp://admin:123456@127.0.0.1/live/0 0\r\n";
return 0;
}
auto videoTrack = dynamic_pointer_cast<VideoTrack>(strongPlayer->getTrack(TrackVideo, false));
auto audioTrack = dynamic_pointer_cast<AudioTrack>(strongPlayer->getTrack(TrackAudio,false));
auto player = std::make_shared<MediaPlayer>();
// sdl要求在main线程初始化
auto displayer = std::make_shared<YuvDisplayer>(nullptr, url);
weak_ptr<MediaPlayer> weakPlayer = player;
player->setOnPlayResult([weakPlayer, displayer](const SockException &ex) {
InfoL << "OnPlayResult:" << ex.what();
auto strongPlayer = weakPlayer.lock();
if (ex || !strongPlayer) {
return;
}
if (videoTrack) {
auto decoder = std::make_shared<FFmpegDecoder>(videoTrack);
decoder->setOnDecode([displayer](const FFmpegFrame::Ptr &yuv) {
SDLDisplayerHelper::Instance().doTask([yuv, displayer]() {
//sdl要求在main线程渲染
displayer->displayYUV(yuv->get());
return true;
auto videoTrack = dynamic_pointer_cast<VideoTrack>(strongPlayer->getTrack(TrackVideo, false));
auto audioTrack = dynamic_pointer_cast<AudioTrack>(strongPlayer->getTrack(TrackAudio, false));
if (videoTrack) {
auto decoder = std::make_shared<FFmpegDecoder>(videoTrack);
decoder->setOnDecode([displayer](const FFmpegFrame::Ptr &yuv) {
SDLDisplayerHelper::Instance().doTask([yuv, displayer]() {
// sdl要求在main线程渲染
displayer->displayYUV(yuv->get());
return true;
});
});
});
videoTrack->addDelegate([decoder](const Frame::Ptr &frame) {
return decoder->inputFrame(frame, false, true);
});
videoTrack->addDelegate([decoder](const Frame::Ptr &frame) { return decoder->inputFrame(frame, false, true); });
}
if (audioTrack) {
auto decoder = std::make_shared<FFmpegDecoder>(audioTrack);
auto audio_player = std::make_shared<AudioPlayer>();
// FFmpeg解码时已经统一转换为16位整型pcm
audio_player->setup(audioTrack->getAudioSampleRate(), audioTrack->getAudioChannel(), AUDIO_S16);
FFmpegSwr::Ptr swr;
decoder->setOnDecode([audio_player, swr](const FFmpegFrame::Ptr &frame) mutable {
if (!swr) {
swr = std::make_shared<FFmpegSwr>(AV_SAMPLE_FMT_S16, frame->get()->channels, frame->get()->channel_layout, frame->get()->sample_rate);
}
auto pcm = swr->inputFrame(frame);
auto len = pcm->get()->nb_samples * pcm->get()->channels * av_get_bytes_per_sample((enum AVSampleFormat)pcm->get()->format);
audio_player->playPCM((const char *)(pcm->get()->data[0]), MIN(len, frame->get()->linesize[0]));
});
audioTrack->addDelegate([decoder](const Frame::Ptr &frame) { return decoder->inputFrame(frame, false, true); });
}
});
player->setOnShutdown([](const SockException &ex) { WarnL << "play shutdown: " << ex.what(); });
(*player)[Client::kRtpType] = atoi(argv[2]);
// 不等待track ready再回调播放成功事件这样可以加快秒开速度
(*player)[Client::kWaitTrackReady] = false;
if (argc > 3) {
(*player)[Client::kPlayTrack] = atoi(argv[3]);
}
if (audioTrack) {
auto decoder = std::make_shared<FFmpegDecoder>(audioTrack);
auto audio_player = std::make_shared<AudioPlayer>();
//FFmpeg解码时已经统一转换为16位整型pcm
audio_player->setup(audioTrack->getAudioSampleRate(), audioTrack->getAudioChannel(), AUDIO_S16);
FFmpegSwr::Ptr swr;
decoder->setOnDecode([audio_player, swr](const FFmpegFrame::Ptr &frame) mutable{
if (!swr) {
swr = std::make_shared<FFmpegSwr>(AV_SAMPLE_FMT_S16, frame->get()->channels,
frame->get()->channel_layout, frame->get()->sample_rate);
}
auto pcm = swr->inputFrame(frame);
auto len = pcm->get()->nb_samples * pcm->get()->channels * av_get_bytes_per_sample((enum AVSampleFormat)pcm->get()->format);
audio_player->playPCM((const char *) (pcm->get()->data[0]), MIN(len, frame->get()->linesize[0]));
});
audioTrack->addDelegate([decoder](const Frame::Ptr &frame) {
return decoder->inputFrame(frame, false, true);
});
}
});
player->setOnShutdown([](const SockException &ex){
WarnL << "play shutdown: " << ex.what();
});
(*player)[Client::kRtpType] = atoi(argv[2]);
//不等待track ready再回调播放成功事件这样可以加快秒开速度
(*player)[Client::kWaitTrackReady] = false;
if (argc > 3) {
(*player)[Client::kPlayTrack] = atoi(argv[3]);
player->play(argv[1]);
SDLDisplayerHelper::Instance().runLoop();
}
player->play(argv[1]);
SDLDisplayerHelper::Instance().runLoop();
sleep(1);
return 0;
}

View File

@ -1,10 +1,10 @@
{
"info": {
"_postman_id": "509e5f6b-728c-4d5f-b3e8-521d76b2cc7a",
"_postman_id": "8b3cdc62-3e18-4700-9ddd-dc9f58ebce83",
"name": "ZLMediaKit",
"description": "媒体服务器",
"schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json",
"_exporter_id": "29185956"
"_exporter_id": "26338564"
},
"item": [
{
@ -33,6 +33,72 @@
},
"response": []
},
{
"name": "关闭多屏拼接(stack/stop)",
"request": {
"method": "GET",
"header": [],
"url": {
"raw": "{{ZLMediaKit_URL}}/index/api/getApiList?secret={{ZLMediaKit_secret}}&id=stack_test",
"host": [
"{{ZLMediaKit_URL}}"
],
"path": [
"index",
"api",
"getApiList"
],
"query": [
{
"key": "secret",
"value": "{{ZLMediaKit_secret}}",
"description": "api操作密钥(配置文件配置)"
},
{
"key": "id",
"value": "stack_test"
}
]
}
},
"response": []
},
{
"name": "添加多屏拼接(stack/start)",
"request": {
"method": "POST",
"header": [],
"body": {
"mode": "raw",
"raw": "{\r\n \"gapv\": 0.002,\r\n \"gaph\": 0.001,\r\n \"width\": 1920,\r\n \"url\": [\r\n [\r\n \"rtsp://kkem.me/live/test3\",\r\n \"rtsp://kkem.me/live/cy1\",\r\n \"rtsp://kkem.me/live/cy1\",\r\n \"rtsp://kkem.me/live/cy2\"\r\n ],\r\n [\r\n \"rtsp://kkem.me/live/cy1\",\r\n \"rtsp://kkem.me/live/cy5\",\r\n \"rtsp://kkem.me/live/cy3\",\r\n \"rtsp://kkem.me/live/cy4\"\r\n ],\r\n [\r\n \"rtsp://kkem.me/live/cy5\",\r\n \"rtsp://kkem.me/live/cy6\",\r\n \"rtsp://kkem.me/live/cy7\",\r\n \"rtsp://kkem.me/live/cy8\"\r\n ],\r\n [\r\n \"rtsp://kkem.me/live/cy9\",\r\n \"rtsp://kkem.me/live/cy10\",\r\n \"rtsp://kkem.me/live/cy11\",\r\n \"rtsp://kkem.me/live/cy12\"\r\n ]\r\n ],\r\n \"id\": \"89\",\r\n \"row\": 4,\r\n \"col\": 4,\r\n \"height\": 1080,\r\n \"span\": [\r\n [\r\n [\r\n 0,\r\n 0\r\n ],\r\n [\r\n 1,\r\n 1\r\n ]\r\n ],\r\n [\r\n [\r\n 3,\r\n 0\r\n ],\r\n [\r\n 3,\r\n 1\r\n ]\r\n ],\r\n [\r\n [\r\n 2,\r\n 3\r\n ],\r\n [\r\n 3,\r\n 3\r\n ]\r\n ]\r\n ]\r\n}",
"options": {
"raw": {
"language": "json"
}
}
},
"url": {
"raw": "{{ZLMediaKit_URL}}/index/api/stack/start?secret={{ZLMediaKit_secret}}",
"host": [
"{{ZLMediaKit_URL}}"
],
"path": [
"index",
"api",
"stack",
"start"
],
"query": [
{
"key": "secret",
"value": "{{ZLMediaKit_secret}}",
"description": "api操作密钥(配置文件配置)"
}
]
}
},
"response": []
},
{
"name": "获取网络线程负载(getThreadsLoad)",
"request": {
@ -1216,7 +1282,7 @@
"method": "GET",
"header": [],
"url": {
"raw": "{{ZLMediaKit_URL}}/index/api/seekRecordStamp?secret={{ZLMediaKit_secret}}&vhost={{defaultVhost}}&app=live&stream=obs&stamp",
"raw": "{{ZLMediaKit_URL}}/index/api/seekRecordStamp?secret={{ZLMediaKit_secret}}&vhost={{defaultVhost}}&app=live&stream=obs&stamp=1000",
"host": [
"{{ZLMediaKit_URL}}"
],
@ -1469,9 +1535,15 @@
"disabled": true
},
{
"key": "only_audio",
"key": "only_track",
"value": "1",
"description": "是否为单音频track用于语音对讲",
"description": "是否为单音频/单视频track0不设置1单音频2单视频",
"disabled": true
},
{
"key": "local_ip",
"value": "::",
"description": "指定创建RTP的本地ipipv4可填”0.0.0.0“ipv6可填”::“,一般保持默认",
"disabled": true
}
]
@ -1485,14 +1557,14 @@
"method": "GET",
"header": [],
"url": {
"raw": "{{ZLMediaKit_URL}}/index/api/openRtpServer?secret={{ZLMediaKit_secret}}&port=0&tcp_mode=1&stream_id=test",
"raw": "{{ZLMediaKit_URL}}/index/api/openRtpServerMultiplex?secret={{ZLMediaKit_secret}}&port=0&tcp_mode=1&stream_id=test",
"host": [
"{{ZLMediaKit_URL}}"
],
"path": [
"index",
"api",
"openRtpServer"
"openRtpServerMultiplex"
],
"query": [
{
@ -1516,9 +1588,15 @@
"description": "该端口绑定的流id\n"
},
{
"key": "only_audio",
"key": "only_track",
"value": "0",
"description": "是否为单音频track用于语音对讲",
"description": "是否为单音频/单视频track0不设置1单音频2单视频",
"disabled": true
},
{
"key": "local_ip",
"value": "::",
"description": "指定创建RTP的本地ipipv4可填”0.0.0.0“ipv6可填”::“,一般保持默认",
"disabled": true
}
]
@ -1966,6 +2044,47 @@
},
"response": []
},
{
"name": "获取rtp发送列表(listRtpSender)",
"request": {
"method": "GET",
"header": [],
"url": {
"raw": "{{ZLMediaKit_URL}}/index/api/listRtpSender?secret={{ZLMediaKit_secret}}&vhost={{defaultVhost}}&app=live&stream=test",
"host": [
"{{ZLMediaKit_URL}}"
],
"path": [
"index",
"api",
"listRtpSender"
],
"query": [
{
"key": "secret",
"value": "{{ZLMediaKit_secret}}",
"description": "api操作密钥(配置文件配置)"
},
{
"key": "vhost",
"value": "{{defaultVhost}}",
"description": "虚拟主机例如__defaultVhost__"
},
{
"key": "app",
"value": "live",
"description": "应用名,例如 live"
},
{
"key": "stream",
"value": "test",
"description": "流id例如 obs"
}
]
}
},
"response": []
},
{
"name": "获取版本信息(version)",
"request": {
@ -2252,4 +2371,4 @@
"value": "__defaultVhost__"
}
]
}
}

View File

@ -40,7 +40,7 @@ onceToken token([]() {
//ffmpeg日志保存路径
mINI::Instance()[kLog] = "./ffmpeg/ffmpeg.log";
mINI::Instance()[kCmd] = "%s -re -i %s -c:a aac -strict -2 -ar 44100 -ab 48k -c:v libx264 -f flv %s";
mINI::Instance()[kSnap] = "%s -i %s -y -f mjpeg -frames:v 1 %s";
mINI::Instance()[kSnap] = "%s -i %s -y -f mjpeg -frames:v 1 -an %s";
mINI::Instance()[kRestartSec] = 0;
});
}

592
server/VideoStack.cpp Normal file
View File

@ -0,0 +1,592 @@
#if defined(ENABLE_VIDEOSTACK) && defined(ENABLE_X264) && defined(ENABLE_FFMPEG)
#include "VideoStack.h"
#include "Codec/Transcode.h"
#include "Common/Device.h"
#include "Util/logger.h"
#include "Util/util.h"
#include "json/value.h"
#include <Thread/WorkThreadPool.h>
#include <chrono>
#include <fstream>
#include <libavutil/pixfmt.h>
#include <memory>
#include <mutex>
#include <thread>
// ITU-R BT.601
// #define RGB_TO_Y(R, G, B) ((( 66 * (R) + 129 * (G) + 25 * (B)+128) >> 8)+16)
// #define RGB_TO_U(R, G, B) (((-38 * (R) - 74 * (G) + 112 * (B)+128) >> 8)+128)
// #define RGB_TO_V(R, G, B) (((112 * (R) - 94 * (G) - 18 * (B)+128) >> 8)+128)
// ITU-R BT.709
#define RGB_TO_Y(R, G, B) (((47 * (R) + 157 * (G) + 16 * (B) + 128) >> 8) + 16)
#define RGB_TO_U(R, G, B) (((-26 * (R)-87 * (G) + 112 * (B) + 128) >> 8) + 128)
#define RGB_TO_V(R, G, B) (((112 * (R)-102 * (G)-10 * (B) + 128) >> 8) + 128)
INSTANCE_IMP(VideoStackManager)
// Release the channel reference held for this layout cell; the manager tears
// down the shared decoder/player chain once the last reference is dropped.
Param::~Param()
{
    VideoStackManager::Instance().unrefChannel(id, width, height, pixfmt);
}
// Construct a channel that renders source `id` at a fixed (width, height, pixfmt).
// The scratch frame `_tmp` starts out black and is replaced by the configured
// background image (if one was loaded) until real frames arrive via onFrame().
Channel::Channel(const std::string& id, int width, int height, AVPixelFormat pixfmt)
    : _id(id)
    , _width(width)
    , _height(height)
    , _pixfmt(pixfmt)
{
    _tmp = std::make_shared<mediakit::FFmpegFrame>();
    _tmp->get()->width = _width;
    _tmp->get()->height = _height;
    _tmp->get()->format = _pixfmt;
    av_frame_get_buffer(_tmp->get(), 32);
    // Zero the planes so the cell shows black before any picture is available.
    // Guard each plane: only 3-plane planar formats (e.g. YUV420P) have data[1]/[2].
    memset(_tmp->get()->data[0], 0, _tmp->get()->linesize[0] * _height);
    if (_tmp->get()->data[1]) {
        memset(_tmp->get()->data[1], 0, _tmp->get()->linesize[1] * _height / 2);
    }
    if (_tmp->get()->data[2]) {
        memset(_tmp->get()->data[2], 0, _tmp->get()->linesize[2] * _height / 2);
    }

    _sws = std::make_shared<mediakit::FFmpegSws>(_pixfmt, _width, _height);

    // Fix: the original unconditionally converted the background image, crashing
    // when loadBgImg() was never called; keep the zeroed frame as fallback.
    if (auto frame = VideoStackManager::Instance().getBgImg()) {
        _tmp = _sws->inputFrame(frame);
    }
}
// Register a layout cell that this channel must keep painted.
void Channel::addParam(const std::weak_ptr<Param>& p)
{
    std::lock_guard<std::recursive_mutex> guard(_mx);
    _params.emplace_back(p);
}
// Accept a freshly decoded frame: rescale it on this channel's worker poller
// and repaint every layout cell that references the channel.
void Channel::onFrame(const mediakit::FFmpegFrame::Ptr& frame)
{
    if (!_poller) {
        // Lazily bind a worker thread on first use.
        _poller = toolkit::WorkThreadPool::Instance().getPoller();
    }
    std::weak_ptr<Channel> weak_self = shared_from_this();
    _poller->async([weak_self, frame]() {
        if (auto self = weak_self.lock()) {
            self->_tmp = self->_sws->inputFrame(frame);
            self->forEachParam([self](const Param::Ptr& p) { self->fillBuffer(p); });
        }
    });
}
// Invoke `func` on every still-alive Param registered via addParam().
// Fix: the original iterated `_params` without holding `_mx`, racing with
// addParam() which mutates the vector under the lock. `_mx` is recursive,
// so re-entry through fillBuffer()/copyData() stays safe.
void Channel::forEachParam(const std::function<void(const Param::Ptr&)>& func)
{
    std::lock_guard<std::recursive_mutex> lock(_mx);
    for (auto& wp : _params) {
        if (auto sp = wp.lock()) {
            func(sp);
        }
    }
}
// Copy this channel's current picture into the stack's output buffer for
// cell `p`, provided that output buffer is still alive.
void Channel::fillBuffer(const Param::Ptr& p)
{
    auto buf = p->weak_buf.lock();
    if (buf) {
        copyData(buf, p);
    }
}
// Blit the channel's scaled picture (_tmp) into the composite frame `buf` at
// the rectangle described by cell `p` (posX/posY offset, row-by-row copy,
// honoring each frame's own linesize padding). Only YUV420P is implemented.
void Channel::copyData(const mediakit::FFmpegFrame::Ptr& buf, const Param::Ptr& p)
{
    switch (p->pixfmt) {
    case AV_PIX_FMT_YUV420P: {
        // Y plane: full resolution, one row per output line.
        for (int i = 0; i < p->height; i++) {
            memcpy(buf->get()->data[0] + buf->get()->linesize[0] * (i + p->posY) + p->posX,
                _tmp->get()->data[0] + _tmp->get()->linesize[0] * i,
                _tmp->get()->width);
        }
        // Round up so the last row of UV data is also copied when height is odd.
        for (int i = 0; i < (p->height + 1) / 2; i++) {
            // U plane (half resolution in both dimensions)
            memcpy(buf->get()->data[1] + buf->get()->linesize[1] * (i + p->posY / 2) + p->posX / 2,
                _tmp->get()->data[1] + _tmp->get()->linesize[1] * i,
                _tmp->get()->width / 2);
            // V plane
            memcpy(buf->get()->data[2] + buf->get()->linesize[2] * (i + p->posY / 2) + p->posX / 2,
                _tmp->get()->data[2] + _tmp->get()->linesize[2] * i,
                _tmp->get()->width / 2);
        }
        break;
    }
    case AV_PIX_FMT_NV12: {
        // TODO: not implemented yet
        break;
    }
    default:
        WarnL << "No support pixformat: " << av_get_pix_fmt_name(p->pixfmt);
        break;
    }
}
// Attach a channel that should receive this player's decoded frames.
void StackPlayer::addChannel(const std::weak_ptr<Channel>& chn)
{
    std::lock_guard<std::recursive_mutex> guard(_mx);
    _channels.emplace_back(chn);
}
// Start pulling _url over RTSP/TCP. On success the video track is decoded in
// software and every decoded frame is fanned out to the attached channels; on
// failure or shutdown the channels are repainted with the background image and
// a back-off reconnect is scheduled (see rePlay()).
void StackPlayer::play()
{
    auto url = _url;
    // Create the pull/decode pipeline.
    _player = std::make_shared<mediakit::MediaPlayer>();
    std::weak_ptr<mediakit::MediaPlayer> weakPlayer = _player;
    std::weak_ptr<StackPlayer> weakSelf = shared_from_this();

    // Fire the play callback without waiting for tracks to be ready (faster start).
    (*_player)[mediakit::Client::kWaitTrackReady] = false;
    (*_player)[mediakit::Client::kRtpType] = mediakit::Rtsp::RTP_TCP;

    _player->setOnPlayResult([weakPlayer, weakSelf, url](const toolkit::SockException& ex) mutable {
        TraceL << "StackPlayer: " << url << " OnPlayResult: " << ex.what();
        auto strongPlayer = weakPlayer.lock();
        if (!strongPlayer) {
            return;
        }
        auto self = weakSelf.lock();
        if (!self) {
            return;
        }

        if (!ex) {
            // Connected: cancel any pending reconnect timer and reset the back-off.
            self->_timer.reset();
            self->_failedCount = 0;
        } else {
            self->onDisconnect();
            self->rePlay(url);
        }

        auto videoTrack = std::dynamic_pointer_cast<mediakit::VideoTrack>(strongPlayer->getTrack(mediakit::TrackVideo, false));
        //auto audioTrack = std::dynamic_pointer_cast<mediakit::AudioTrack>(strongPlayer->getTrack(mediakit::TrackAudio, false));

        if (videoTrack) {
            // TODO: decide between GPU and CPU decoding here.
            //auto decoder = std::make_shared<FFmpegDecoder>(videoTrack, 1, std::vector<std::string>{ "hevc_cuvid", "h264_cuvid"});
            auto decoder = std::make_shared<mediakit::FFmpegDecoder>(videoTrack, 0, std::vector<std::string> { "h264", "hevc" });

            // Fan each decoded frame out to the attached channels.
            decoder->setOnDecode([weakSelf](const mediakit::FFmpegFrame::Ptr& frame) mutable {
                auto self = weakSelf.lock();
                if (!self) {
                    return;
                }
                self->onFrame(frame);
            });

            // Feed raw track frames into the decoder (async, keep key-frame logic).
            videoTrack->addDelegate([decoder](const mediakit::Frame::Ptr& frame) {
                return decoder->inputFrame(frame, false, true);
            });
        }
    });

    _player->setOnShutdown([weakPlayer, url, weakSelf](const toolkit::SockException& ex) {
        TraceL << "StackPlayer: " << url << " OnShutdown: " << ex.what();
        auto strongPlayer = weakPlayer.lock();
        if (!strongPlayer) {
            return;
        }
        auto self = weakSelf.lock();
        if (!self) {
            return;
        }
        // Show the background image and schedule a reconnect.
        self->onDisconnect();
        self->rePlay(url);
    });

    _player->play(url);
}
// Deliver a decoded frame to every attached channel that is still alive.
void StackPlayer::onFrame(const mediakit::FFmpegFrame::Ptr& frame)
{
    std::lock_guard<std::recursive_mutex> guard(_mx);
    for (auto& weak_chn : _channels) {
        auto chn = weak_chn.lock();
        if (chn) {
            chn->onFrame(frame);
        }
    }
}
// Stream lost: push the background image to all attached channels so the
// composite shows "offline" instead of freezing on the last frame.
void StackPlayer::onDisconnect()
{
    std::lock_guard<std::recursive_mutex> guard(_mx);
    auto bg = VideoStackManager::Instance().getBgImg();
    for (auto& weak_chn : _channels) {
        if (auto chn = weak_chn.lock()) {
            chn->onFrame(bg);
        }
    }
}
// Schedule a reconnect to `url` with a stepped back-off:
// 3s per consecutive failure, clamped to [2s, 60s].
void StackPlayer::rePlay(const std::string& url)
{
    _failedCount++;
    auto delay = MAX(2 * 1000, MIN(_failedCount * 3 * 1000, 60 * 1000)); // retry delay in ms
    std::weak_ptr<StackPlayer> weakSelf = shared_from_this();
    _timer = std::make_shared<toolkit::Timer>(
        delay / 1000.0f, [weakSelf, url]() {
            auto self = weakSelf.lock();
            if (!self) {
                // Fix: the original fell through here and dereferenced a null
                // `self`; cancel the timer when the player is already gone.
                return false;
            }
            WarnL << "replay [" << self->_failedCount << "]:" << url;
            self->_player->play(url);
            return false; // one-shot: a failed attempt re-arms via setOnPlayResult
        },
        nullptr);
}
// Build a composition stack: allocate the shared output frame at the stack's
// full resolution and publish it as a local H.264 stream (DEFAULT_VHOST /
// "live" / id) through a DevChannel fed by the encode loop in start().
VideoStack::VideoStack(const std::string& id, int width, int height, AVPixelFormat pixfmt, float fps, int bitRate)
    : _id(id)
    , _width(width)
    , _height(height)
    , _pixfmt(pixfmt)
    , _fps(fps)
    , _bitRate(bitRate)
{
    // Composite output frame that every Channel paints its cell into.
    _buffer = std::make_shared<mediakit::FFmpegFrame>();
    _buffer->get()->width = _width;
    _buffer->get()->height = _height;
    _buffer->get()->format = _pixfmt;
    av_frame_get_buffer(_buffer->get(), 32);

    _dev = std::make_shared<mediakit::DevChannel>(mediakit::MediaTuple { DEFAULT_VHOST, "live", _id });

    mediakit::VideoInfo info;
    info.codecId = mediakit::CodecH264;
    info.iWidth = _width;
    info.iHeight = _height;
    info.iFrameRate = _fps;
    info.iBitRate = _bitRate;

    _dev->initVideo(info);
    //dev->initAudio(); // TODO: audio support
    _dev->addTrackCompleted();

    _isExit = false;
}
// Signal the encode loop (see start()) to stop and wait for the worker thread.
VideoStack::~VideoStack()
{
    _isExit = true;
    if (_thread.joinable()) {
        _thread.join();
    }
}
// Swap in a new cell layout. First detaches the output buffer from every cell
// of the previous layout (so stale channels stop painting into it), repaints
// the background color, then wires each new cell to its channel and performs
// an initial fill with the channel's current picture.
void VideoStack::setParam(const Params& params)
{
    if (_params) {
        // Invalidate old cells: their weak buffer refs no longer reach _buffer.
        for (auto& p : (*_params)) {
            if (!p)
                continue;
            p->weak_buf.reset();
        }
    }
    initBgColor();
    for (auto& p : (*params)) {
        if (!p)
            continue; // nullptr = cell consumed by a span merge
        p->weak_buf = _buffer;
        if (auto chn = p->weak_chn.lock()) {
            chn->addParam(p);
            chn->fillBuffer(p); // paint immediately, don't wait for next frame
        }
    }
    _params = params;
}
void VideoStack::start()
{
_thread = std::thread([&]() {
uint64_t pts = 0;
int frameInterval = 1000 / _fps;
auto lastEncTP = std::chrono::steady_clock::now();
while (!_isExit) {
if (std::chrono::steady_clock::now() - lastEncTP > std::chrono::milliseconds(frameInterval)) {
lastEncTP = std::chrono::steady_clock::now();
_dev->inputYUV((char**)_buffer->get()->data, _buffer->get()->linesize, pts);
pts += frameInterval;
}
}
});
}
// Pre-fill the composite buffer with a uniform dark-grey background
// (RGB 20,20,20 converted to YUV via the BT.709 macros above).
void VideoStack::initBgColor()
{
    constexpr int R = 20, G = 20, B = 20;
    const int Y = RGB_TO_Y(R, G, B);
    const int U = RGB_TO_U(R, G, B);
    const int V = RGB_TO_V(R, G, B);

    auto* frame = _buffer->get();
    memset(frame->data[0], Y, frame->linesize[0] * _height);
    memset(frame->data[1], U, frame->linesize[1] * _height / 2);
    memset(frame->data[2], V, frame->linesize[2] * _height / 2);
}
// Get (or lazily create) the shared channel for (id, width, height, pixfmt),
// taking one reference that must be balanced by unrefChannel().
Channel::Ptr VideoStackManager::getChannel(const std::string& id,
    int width,
    int height,
    AVPixelFormat pixfmt)
{
    std::lock_guard<std::recursive_mutex> guard(_mx);
    // NOTE(review): the key is a plain concatenation without separators, so
    // distinct (id,width,height) tuples could in theory collide; kept as-is
    // because unrefChannel()/createChannel() build the same key.
    auto key = id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt);
    auto it = _channelMap.find(key);
    return it != _channelMap.end() ? it->second->acquire()
                                   : createChannel(id, width, height, pixfmt);
}
// Drop one reference on the (id, width, height, pixfmt) channel. When the
// channel's refcount reaches zero it is removed, and one reference on the
// underlying pull player (keyed by id alone) is released too — the player is
// destroyed together with its last channel.
void VideoStackManager::unrefChannel(const std::string& id,
    int width,
    int height,
    AVPixelFormat pixfmt)
{
    std::lock_guard<std::recursive_mutex> lock(_mx);
    // Must match the key format used by getChannel()/createChannel().
    auto key = id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt);
    auto chn_it = _channelMap.find(key);
    if (chn_it != _channelMap.end() && chn_it->second->dispose()) {
        _channelMap.erase(chn_it);

        auto player_it = _playerMap.find(id);
        if (player_it != _playerMap.end() && player_it->second->dispose()) {
            _playerMap.erase(player_it);
        }
    }
}
// Create and start a new composition stack from the stack/start JSON body.
// Returns 0 on success, -1 when the parameters cannot be parsed.
int VideoStackManager::startVideoStack(const Json::Value& json)
{
    std::string id;
    int width, height;
    auto params = parseParams(json, id, width, height);
    if (!params) {
        ErrorL << "Videostack parse params failed!";
        return -1;
    }

    auto stack = std::make_shared<VideoStack>(id, width, height);

    // Acquire a (possibly shared) channel for every non-merged cell.
    for (auto& p : (*params)) {
        if (p) {
            p->weak_chn = getChannel(p->id, p->width, p->height, p->pixfmt);
        }
    }

    stack->setParam(params);
    stack->start();

    std::lock_guard<std::recursive_mutex> guard(_mx);
    _stackMap[id] = stack;
    return 0;
}
// Replace the layout of an already-running stack (stack/reset).
// Returns 0 on success, -1 on parse failure, -2 when the stack is unknown.
int VideoStackManager::resetVideoStack(const Json::Value& json)
{
    std::string id;
    int width, height;
    auto params = parseParams(json, id, width, height);
    if (!params) {
        return -1;
    }

    VideoStack::Ptr stack;
    {
        std::lock_guard<std::recursive_mutex> guard(_mx);
        auto it = _stackMap.find(id);
        if (it == _stackMap.end()) {
            return -2;
        }
        stack = it->second;
    }

    // Acquire channels for the new layout, then swap it in.
    for (auto& p : (*params)) {
        if (p) {
            p->weak_chn = getChannel(p->id, p->width, p->height, p->pixfmt);
        }
    }

    stack->setParam(params);
    return 0;
}
// Stop and destroy the stack with the given id (stack/stop).
// Returns 0 when a stack was removed, -1 when no such stack exists.
int VideoStackManager::stopVideoStack(const std::string& id)
{
    std::lock_guard<std::recursive_mutex> guard(_mx);
    if (_stackMap.erase(id)) {
        InfoL << "VideoStack stop: " << id;
        return 0;
    }
    return -1;
}
// Returns the background frame set up by loadBgImg(), or nullptr when
// loadBgImg() was never called.
mediakit::FFmpegFrame::Ptr VideoStackManager::getBgImg()
{
    return _bgImg;
}
// Parse the stack/start|reset JSON body into a row-major cell list, writing the
// stack id and output resolution into the reference parameters.
//
// Recognized fields: id, width, height, row, col, gapv/gaph (gaps as fractions
// of height/width), url (row-major 2D array of stream urls), and an optional
// "span" array whose entries [[r0,c0],[r1,c1]] merge a rectangle of cells into
// its top-left cell (the remaining cells become nullptr).
//
// Returns nullptr on malformed input (errors are logged).
Params VideoStackManager::parseParams(const Json::Value& json,
    std::string& id,
    int& width,
    int& height)
{
    try {
        id = json["id"].asString();
        width = json["width"].asInt();
        height = json["height"].asInt();
        int rows = json["row"].asInt(); // grid rows
        int cols = json["col"].asInt(); // grid columns
        float gapv = json["gapv"].asFloat(); // vertical gap, fraction of height
        float gaph = json["gaph"].asFloat(); // horizontal gap, fraction of width

        // Gap sizes in pixels.
        int gaphPix = static_cast<int>(round(width * gaph));
        int gapvPix = static_cast<int>(round(height * gapv));

        // Cell size once the inter-cell gaps are subtracted.
        int gridWidth = cols > 1 ? (width - gaphPix * (cols - 1)) / cols : width;
        int gridHeight = rows > 1 ? (height - gapvPix * (rows - 1)) / rows : height;

        auto params = std::make_shared<std::vector<Param::Ptr>>(rows * cols);
        for (int row = 0; row < rows; row++) {
            for (int col = 0; col < cols; col++) {
                std::string url = json["url"][row][col].asString();

                auto param = std::make_shared<Param>();
                param->posX = gridWidth * col + col * gaphPix;
                param->posY = gridHeight * row + row * gapvPix;
                param->width = gridWidth;
                param->height = gridHeight;
                param->id = url;

                (*params)[row * cols + col] = param;
            }
        }

        // Merge cells for "focus" screens spanning several grid positions.
        if (json.isMember("span") && !json["span"].empty()) {
            for (const auto& subArray : json["span"]) {
                if (!subArray.isArray() || subArray.size() != 2) {
                    throw Json::LogicError("Incorrect 'span' sub-array format in JSON");
                }
                std::array<int, 4> mergePos; // [r0, c0, r1, c1]
                int index = 0;

                for (const auto& innerArray : subArray) {
                    if (!innerArray.isArray() || innerArray.size() != 2) {
                        throw Json::LogicError("Incorrect 'span' inner-array format in JSON");
                    }
                    for (const auto& number : innerArray) {
                        if (index < static_cast<int>(mergePos.size())) {
                            mergePos[index++] = number.asInt();
                        }
                    }
                }

                for (int i = mergePos[0]; i <= mergePos[2]; i++) {
                    for (int j = mergePos[1]; j <= mergePos[3]; j++) {
                        if (i == mergePos[0] && j == mergePos[1]) {
                            // Fix: the horizontal/vertical gap pixels were swapped
                            // here (width added gapvPix, height added gaphPix),
                            // producing wrong merged-cell sizes whenever gaph != gapv.
                            (*params)[i * cols + j]->width = (mergePos[3] - mergePos[1] + 1) * gridWidth + (mergePos[3] - mergePos[1]) * gaphPix;
                            (*params)[i * cols + j]->height = (mergePos[2] - mergePos[0] + 1) * gridHeight + (mergePos[2] - mergePos[0]) * gapvPix;
                        } else {
                            (*params)[i * cols + j] = nullptr; // consumed by the merge
                        }
                    }
                }
            }
        }
        return params;
    } catch (const std::exception& e) {
        ErrorL << "Videostack parse params failed! " << e.what();
        return nullptr;
    }
}
// Load the raw 1280x720 YUV420P background picture shown while a source is
// offline. Returns false when the frame buffer cannot be allocated, the file
// cannot be opened, or the file is shorter than one full picture.
bool VideoStackManager::loadBgImg(const std::string& path)
{
    _bgImg = std::make_shared<mediakit::FFmpegFrame>();
    auto frame = _bgImg->get();
    frame->width = 1280;
    frame->height = 720;
    frame->format = AV_PIX_FMT_YUV420P;

    // Fix: the original ignored allocation failure and short reads.
    if (av_frame_get_buffer(frame, 32) < 0) {
        return false;
    }

    std::ifstream file(path, std::ios::binary);
    if (!file.is_open()) {
        return false;
    }

    file.read((char*)frame->data[0], frame->linesize[0] * frame->height); // Y
    file.read((char*)frame->data[1], frame->linesize[1] * frame->height / 2); // U
    file.read((char*)frame->data[2], frame->linesize[2] * frame->height / 2); // V

    // Reject truncated files instead of rendering garbage planes.
    return file.good();
}
// Create a new channel for (id, width, height, pixfmt), attaching it to the
// shared pull player for `id` (created on demand, its refcount incremented).
// Returns the channel with one reference already taken.
Channel::Ptr VideoStackManager::createChannel(const std::string& id,
    int width,
    int height,
    AVPixelFormat pixfmt)
{
    std::lock_guard<std::recursive_mutex> guard(_mx);

    StackPlayer::Ptr player;
    auto it = _playerMap.find(id);
    player = (it != _playerMap.end()) ? it->second->acquire() : createPlayer(id);

    auto wrapper = std::make_shared<RefWrapper<Channel::Ptr>>(std::make_shared<Channel>(id, width, height, pixfmt));
    auto chn = wrapper->acquire();
    player->addChannel(chn);

    // Key format must stay in sync with getChannel()/unrefChannel().
    _channelMap[id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt)] = wrapper;
    return chn;
}
// Create and register a new pull player for `id` (the source url) and start
// it immediately, unless the id is empty (a blank/placeholder cell).
StackPlayer::Ptr VideoStackManager::createPlayer(const std::string& id)
{
    std::lock_guard<std::recursive_mutex> guard(_mx);
    auto wrapper = std::make_shared<RefWrapper<StackPlayer::Ptr>>(std::make_shared<StackPlayer>(id));
    _playerMap[id] = wrapper;

    auto player = wrapper->acquire();
    if (!id.empty()) {
        player->play();
    }
    return player;
}
#endif

208
server/VideoStack.h Normal file
View File

@ -0,0 +1,208 @@
#pragma once
#if defined(ENABLE_VIDEOSTACK) && defined(ENABLE_X264) && defined(ENABLE_FFMPEG)
#include "Codec/Transcode.h"
#include "Common/Device.h"
#include "Player/MediaPlayer.h"
#include "json/json.h"
#include <mutex>
// Intrusive reference counter pairing an entity (e.g. a shared_ptr stored in a
// map) with the number of acquire() calls, so the owner can drop the map entry
// once dispose() has balanced every acquire().
template <typename T>
class RefWrapper {
public:
    using Ptr = std::shared_ptr<RefWrapper<T>>;

    // Constructs the wrapped entity in place; the count starts at 0 and is
    // incremented by each acquire().
    // Fix: the initializer list is now in declaration order (_entity before
    // _rc); the original listed _rc first, triggering -Wreorder.
    template <typename... Args>
    explicit RefWrapper(Args&&... args)
        : _entity(std::forward<Args>(args)...)
        , _rc(0)
    {
    }

    // Returns the entity and takes one reference.
    T acquire()
    {
        ++_rc;
        return _entity;
    }

    // Drops one reference; true when no references remain and the wrapper can
    // be discarded.
    bool dispose() { return --_rc <= 0; }

private:
    T _entity;
    std::atomic<int> _rc;
};
class Channel;
// Describes one cell of the composite layout: where a channel's picture is
// placed inside a stack's output frame.
struct Param {
    using Ptr = std::shared_ptr<Param>;

    int posX = 0; // left edge inside the output frame (pixels)
    int posY = 0; // top edge inside the output frame (pixels)
    int width = 0; // cell width (pixels)
    int height = 0; // cell height (pixels)
    AVPixelFormat pixfmt = AV_PIX_FMT_YUV420P;
    std::string id {}; // source stream url; also used as the channel/player id

    // runtime wiring
    std::weak_ptr<Channel> weak_chn; // channel that paints this cell
    std::weak_ptr<mediakit::FFmpegFrame> weak_buf; // owning stack's output frame

    // Releases the channel reference held for this cell
    // (see VideoStackManager::unrefChannel).
    ~Param();
};

// One entry per grid cell, row-major; cells consumed by a span merge are nullptr.
using Params = std::shared_ptr<std::vector<Param::Ptr>>;
// One decoded video source scaled to a fixed (width, height, pixfmt). A Channel
// receives frames from a StackPlayer, converts them with its own FFmpegSws on a
// worker poller, and copies the result into every registered layout cell.
class Channel : public std::enable_shared_from_this<Channel> {
public:
    using Ptr = std::shared_ptr<Channel>;

    Channel(const std::string& id, int width, int height, AVPixelFormat pixfmt);

    // Register a layout cell this channel must keep painted.
    void addParam(const std::weak_ptr<Param>& p);

    // Entry point for freshly decoded frames; work is rescheduled onto _poller.
    void onFrame(const mediakit::FFmpegFrame::Ptr& frame);

    // Copy the current picture into the cell's output buffer, if still alive.
    void fillBuffer(const Param::Ptr& p);

protected:
    void forEachParam(const std::function<void(const Param::Ptr&)>& func);
    void copyData(const mediakit::FFmpegFrame::Ptr& buf, const Param::Ptr& p);

private:
    std::string _id;
    int _width;
    int _height;
    AVPixelFormat _pixfmt;

    mediakit::FFmpegFrame::Ptr _tmp; // latest frame, scaled to this channel's size

    std::recursive_mutex _mx; // guards _params
    std::vector<std::weak_ptr<Param>> _params; // cells fed by this channel

    mediakit::FFmpegSws::Ptr _sws;
    toolkit::EventPoller::Ptr _poller; // lazily-bound worker thread
};
// Pulls one stream url, decodes its video track and fans the decoded frames
// out to every attached Channel. Reconnects automatically with a stepped
// back-off when playing fails or the stream shuts down.
class StackPlayer : public std::enable_shared_from_this<StackPlayer> {
public:
    using Ptr = std::shared_ptr<StackPlayer>;

    StackPlayer(const std::string& url)
        : _url(url)
    {
    }

    // Attach a channel that should receive this player's decoded frames.
    void addChannel(const std::weak_ptr<Channel>& chn);

    // Create the MediaPlayer and start pulling _url.
    void play();

    // Fan a decoded frame out to all attached channels.
    void onFrame(const mediakit::FFmpegFrame::Ptr& frame);

    // Push the background image to all channels when the stream is lost.
    void onDisconnect();

protected:
    void rePlay(const std::string& url);

private:
    std::string _url;
    mediakit::MediaPlayer::Ptr _player;

    // reconnect support
    toolkit::Timer::Ptr _timer; // pending reconnect timer
    int _failedCount = 0; // consecutive failures; drives the back-off

    std::recursive_mutex _mx; // guards _channels
    std::vector<std::weak_ptr<Channel>> _channels;
};
// Composites several Channels into a single output frame and publishes it as a
// local H.264 stream through a DevChannel, encoding at a fixed frame rate.
class VideoStack {
public:
    using Ptr = std::shared_ptr<VideoStack>;

    // Fix: the first parameter is the stack/stream id (the definition in
    // VideoStack.cpp names it `id`); it was misleadingly declared as `url`.
    VideoStack(const std::string& id,
        int width = 1920,
        int height = 1080,
        AVPixelFormat pixfmt = AV_PIX_FMT_YUV420P,
        float fps = 25.0,
        int bitRate = 2 * 1024 * 1024);

    ~VideoStack();

    // Install a new cell layout; cells paint into _buffer from then on.
    void setParam(const Params& params);

    // Spawn the encode loop that pushes _buffer into the DevChannel at _fps.
    void start();

protected:
    // Fill _buffer with the uniform background color.
    void initBgColor();

public:
    Params _params; // current layout
    mediakit::FFmpegFrame::Ptr _buffer; // shared composite output frame

private:
    std::string _id;
    int _width;
    int _height;
    AVPixelFormat _pixfmt;
    float _fps;
    int _bitRate;

    mediakit::DevChannel::Ptr _dev;

    bool _isExit; // stop flag observed by the encode loop
    std::thread _thread;
};
// Process-wide singleton that owns all VideoStacks and deduplicates pull
// players/channels via reference counting: the same source url decoded at the
// same size is shared between stacks.
class VideoStackManager {
public:
    static VideoStackManager& Instance();

    // Get (or lazily create) the channel for the given source/size, taking one
    // reference that must be balanced by unrefChannel().
    Channel::Ptr getChannel(const std::string& id,
        int width,
        int height,
        AVPixelFormat pixfmt);

    // Release one channel reference; channel and player are destroyed when
    // their refcounts reach zero.
    void unrefChannel(const std::string& id,
        int width,
        int height,
        AVPixelFormat pixfmt);

    // Create / reset / stop stacks from the stack/* HTTP API JSON bodies.
    // Return 0 on success, negative values on error.
    int startVideoStack(const Json::Value& json);
    int resetVideoStack(const Json::Value& json);
    int stopVideoStack(const std::string& id);

    // Load the raw 1280x720 YUV420P background picture shown while a source
    // is offline.
    bool loadBgImg(const std::string& path);
    mediakit::FFmpegFrame::Ptr getBgImg();

protected:
    Params parseParams(const Json::Value& json,
        std::string& id,
        int& width,
        int& height);

protected:
    Channel::Ptr createChannel(const std::string& id,
        int width,
        int height,
        AVPixelFormat pixfmt);

    StackPlayer::Ptr createPlayer(const std::string& id);

private:
    mediakit::FFmpegFrame::Ptr _bgImg;

private:
    std::recursive_mutex _mx;
    std::unordered_map<std::string, VideoStack::Ptr> _stackMap; // key: stack id
    std::unordered_map<std::string, RefWrapper<Channel::Ptr>::Ptr> _channelMap; // key: id+w+h+pixfmt
    std::unordered_map<std::string, RefWrapper<StackPlayer::Ptr>::Ptr> _playerMap; // key: id (url)
};
#endif

View File

@ -20,6 +20,7 @@
#include <functional>
#include <unordered_map>
#include <regex>
#include "Util/MD5.h"
#include "Util/util.h"
#include "Util/File.h"
@ -58,7 +59,11 @@
#endif
#if defined(ENABLE_VERSION)
#include "version.h"
#include "ZLMVersion.h"
#endif
#if defined(ENABLE_VIDEOSTACK) && defined(ENABLE_X264) && defined (ENABLE_FFMPEG)
#include "VideoStack.h"
#endif
using namespace std;
@ -114,7 +119,7 @@ static HttpApi toApi(const function<void(API_ARGS_MAP_ASYNC)> &cb) {
//参数解析成map
auto args = getAllArgs(parser);
cb(sender, headerOut, HttpAllArgs<decltype(args)>(parser, args), val, invoker);
cb(sender, headerOut, ArgsMap(parser, args), val, invoker);
};
}
@ -142,7 +147,7 @@ static HttpApi toApi(const function<void(API_ARGS_JSON_ASYNC)> &cb) {
Json::Reader reader;
reader.parse(parser.content(), args);
cb(sender, headerOut, HttpAllArgs<decltype(args)>(parser, args), val, invoker);
cb(sender, headerOut, ArgsJson(parser, args), val, invoker);
};
}
@ -162,7 +167,7 @@ static HttpApi toApi(const function<void(API_ARGS_STRING_ASYNC)> &cb) {
Json::Value val;
val["code"] = API::Success;
cb(sender, headerOut, HttpAllArgs<string>(parser, (string &)parser.content()), val, invoker);
cb(sender, headerOut, ArgsString(parser, (string &)parser.content()), val, invoker);
};
}
@ -203,7 +208,7 @@ static ApiArgsType getAllArgs(const Parser &parser) {
if (parser["Content-Type"].find("application/x-www-form-urlencoded") == 0) {
auto contentArgs = parser.parseArgs(parser.content());
for (auto &pr : contentArgs) {
allArgs[pr.first] = HttpSession::urlDecode(pr.second);
allArgs[pr.first] = strCoding::UrlDecodeComponent(pr.second);
}
} else if (parser["Content-Type"].find("application/json") == 0) {
try {
@ -296,22 +301,71 @@ static inline void addHttpListener(){
});
}
// Thread-safe registry of shared services (pull/push proxies, rtp servers, ...)
// keyed by a string id. Every operation synchronizes on an internal recursive
// mutex.
template <typename Type>
class ServiceController {
public:
    using Pointer = std::shared_ptr<Type>;

    std::unordered_map<std::string, Pointer> _map;
    mutable std::recursive_mutex _mtx;

    // Remove every entry. The services are destroyed after the lock is
    // released, so their destructors cannot deadlock on _mtx.
    void clear() {
        decltype(_map) doomed;
        {
            std::lock_guard<std::recursive_mutex> lock(_mtx);
            doomed.swap(_map);
        }
    }

    // Erase one entry; returns the number of elements removed (0 or 1).
    size_t erase(const std::string &key) {
        std::lock_guard<std::recursive_mutex> lock(_mtx);
        return _map.erase(key);
    }

    // Look up a service; nullptr when absent.
    Pointer find(const std::string &key) const {
        std::lock_guard<std::recursive_mutex> lock(_mtx);
        auto it = _map.find(key);
        return it == _map.end() ? nullptr : it->second;
    }

    // Construct a service in place and register it under `key`.
    template <class... Args>
    Pointer make(const std::string &key, Args &&...args) {
        // assert(!find(key));
        auto service = std::make_shared<Type>(std::forward<Args>(args)...);
        std::lock_guard<std::recursive_mutex> lock(_mtx);
        auto ret = _map.emplace(key, service);
        assert(ret.second);
        return service;
    }

    // Like make(), but runs `action` on the new service before it becomes
    // visible in the registry.
    template <class... Args>
    Pointer makeWithAction(const std::string &key, std::function<void(Pointer)> action, Args &&...args) {
        // assert(!find(key));
        auto service = std::make_shared<Type>(std::forward<Args>(args)...);
        action(service);
        std::lock_guard<std::recursive_mutex> lock(_mtx);
        auto ret = _map.emplace(key, service);
        assert(ret.second);
        return service;
    }
};
//拉流代理器列表
static unordered_map<string, PlayerProxy::Ptr> s_proxyMap;
static recursive_mutex s_proxyMapMtx;
static ServiceController<PlayerProxy> s_player_proxy;
//推流代理器列表
static unordered_map<string, PusherProxy::Ptr> s_proxyPusherMap;
static recursive_mutex s_proxyPusherMapMtx;
static ServiceController<PusherProxy> s_pusher_proxy;
//FFmpeg拉流代理器列表
static unordered_map<string, FFmpegSource::Ptr> s_ffmpegMap;
static recursive_mutex s_ffmpegMapMtx;
static ServiceController<FFmpegSource> s_ffmpeg_src;
#if defined(ENABLE_RTPPROXY)
//rtp服务器列表
static unordered_map<string, RtpServer::Ptr> s_rtpServerMap;
static recursive_mutex s_rtpServerMapMtx;
static ServiceController<RtpServer> s_rtp_server;
#endif
static inline string getProxyKey(const string &vhost, const string &app, const string &stream) {
@ -335,6 +389,7 @@ void dumpMediaTuple(const MediaTuple &tuple, Json::Value& item) {
item[VHOST_KEY] = tuple.vhost;
item["app"] = tuple.app;
item["stream"] = tuple.stream;
item["params"] = tuple.params;
}
Value makeMediaSourceJson(MediaSource &media){
@ -414,47 +469,24 @@ Value makeMediaSourceJson(MediaSource &media){
}
#if defined(ENABLE_RTPPROXY)
uint16_t openRtpServer(uint16_t local_port, const string &stream_id, int tcp_mode, const string &local_ip, bool re_use_port, uint32_t ssrc, bool only_audio, bool multiplex) {
lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
if (s_rtpServerMap.find(stream_id) != s_rtpServerMap.end()) {
uint16_t openRtpServer(uint16_t local_port, const string &stream_id, int tcp_mode, const string &local_ip, bool re_use_port, uint32_t ssrc, int only_track, bool multiplex) {
if (s_rtp_server.find(stream_id)) {
//为了防止RtpProcess所有权限混乱的问题不允许重复添加相同的stream_id
return 0;
}
RtpServer::Ptr server = std::make_shared<RtpServer>();
server->start(local_port, stream_id, (RtpServer::TcpMode)tcp_mode, local_ip.c_str(), re_use_port, ssrc, only_audio, multiplex);
auto server = s_rtp_server.makeWithAction(stream_id, [&](RtpServer::Ptr server) {
server->start(local_port, stream_id, (RtpServer::TcpMode)tcp_mode, local_ip.c_str(), re_use_port, ssrc, only_track, multiplex);
});
server->setOnDetach([stream_id]() {
//设置rtp超时移除事件
lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
s_rtpServerMap.erase(stream_id);
s_rtp_server.erase(stream_id);
});
//保存对象
s_rtpServerMap.emplace(stream_id, server);
//回复json
return server->getPort();
}
void connectRtpServer(const string &stream_id, const string &dst_url, uint16_t dst_port, const function<void(const SockException &ex)> &cb) {
lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
auto it = s_rtpServerMap.find(stream_id);
if (it == s_rtpServerMap.end()) {
cb(SockException(Err_other, "未找到rtp服务"));
return;
}
it->second->connectToServer(dst_url, dst_port, cb);
}
bool closeRtpServer(const string &stream_id) {
lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
auto it = s_rtpServerMap.find(stream_id);
if (it == s_rtpServerMap.end()) {
return false;
}
auto server = it->second;
s_rtpServerMap.erase(it);
return true;
}
#endif
void getStatisticJson(const function<void(Value &val)> &cb) {
@ -545,23 +577,23 @@ void addStreamProxy(const string &vhost, const string &app, const string &stream
const ProtocolOption &option, int rtp_type, float timeout_sec, const mINI &args,
const function<void(const SockException &ex, const string &key)> &cb) {
auto key = getProxyKey(vhost, app, stream);
lock_guard<recursive_mutex> lck(s_proxyMapMtx);
if (s_proxyMap.find(key) != s_proxyMap.end()) {
if (s_player_proxy.find(key)) {
//已经在拉流了
cb(SockException(Err_other, "This stream already exists"), key);
return;
}
//添加拉流代理
auto player = std::make_shared<PlayerProxy>(vhost, app, stream, option, retry_count);
s_proxyMap[key] = player;
auto player = s_player_proxy.make(key, vhost, app, stream, option, retry_count);
// 先透传参数
player->mINI::operator=(args);
// 先透传拷贝参数
for (auto &pr : args) {
(*player)[pr.first] = pr.second;
}
//指定RTP over TCP(播放rtsp时有效)
(*player)[Client::kRtpType] = rtp_type;
if (timeout_sec > 0.1) {
if (timeout_sec > 0.1f) {
//播放握手超时时间
(*player)[Client::kTimeoutMS] = timeout_sec * 1000;
}
@ -569,28 +601,69 @@ void addStreamProxy(const string &vhost, const string &app, const string &stream
//开始播放,如果播放失败或者播放中止,将会自动重试若干次,默认一直重试
player->setPlayCallbackOnce([cb, key](const SockException &ex) {
if (ex) {
lock_guard<recursive_mutex> lck(s_proxyMapMtx);
s_proxyMap.erase(key);
s_player_proxy.erase(key);
}
cb(ex, key);
});
//被主动关闭拉流
player->setOnClose([key](const SockException &ex) {
lock_guard<recursive_mutex> lck(s_proxyMapMtx);
s_proxyMap.erase(key);
s_player_proxy.erase(key);
});
player->play(url);
};
template <typename Type>
static void getArgsValue(const HttpAllArgs<ApiArgsType> &allArgs, const string &key, Type &value) {
auto val = allArgs[key];
if (!val.empty()) {
value = (Type)val;
void addStreamPusherProxy(const string &schema,
const string &vhost,
const string &app,
const string &stream,
const string &url,
int retry_count,
int rtp_type,
float timeout_sec,
const function<void(const SockException &ex, const string &key)> &cb) {
auto key = getPusherKey(schema, vhost, app, stream, url);
auto src = MediaSource::find(schema, vhost, app, stream);
if (!src) {
cb(SockException(Err_other, "can not find the source stream"), key);
return;
}
if (s_pusher_proxy.find(key)) {
//已经在推流了
cb(SockException(Err_success), key);
return;
}
//添加推流代理
auto pusher = s_pusher_proxy.make(key, src, retry_count);
//指定RTP over TCP(播放rtsp时有效)
pusher->emplace(Client::kRtpType, rtp_type);
if (timeout_sec > 0.1f) {
//推流握手超时时间
pusher->emplace(Client::kTimeoutMS, timeout_sec * 1000);
}
//开始推流,如果推流失败或者推流中止,将会自动重试若干次,默认一直重试
pusher->setPushCallbackOnce([cb, key, url](const SockException &ex) {
if (ex) {
WarnL << "Push " << url << " failed, key: " << key << ", err: " << ex;
s_pusher_proxy.erase(key);
}
cb(ex, key);
});
//被主动关闭推流
pusher->setOnClose([key, url](const SockException &ex) {
WarnL << "Push " << url << " failed, key: " << key << ", err: " << ex;
s_pusher_proxy.erase(key);
});
pusher->publish(url);
}
/**
* api接口
* api都支持GET和POST两种方式
@ -656,7 +729,7 @@ void installWebApi() {
CHECK_SECRET();
auto &ini = mINI::Instance();
int changed = API::Success;
for (auto &pr : allArgs.getArgs()) {
for (auto &pr : allArgs.args) {
if (ini.find(pr.first) == ini.end()) {
#if 1
//没有这个key
@ -972,59 +1045,6 @@ void installWebApi() {
val["count_hit"] = (Json::UInt64)count_hit;
});
static auto addStreamPusherProxy = [](const string &schema,
const string &vhost,
const string &app,
const string &stream,
const string &url,
int retry_count,
int rtp_type,
float timeout_sec,
const function<void(const SockException &ex, const string &key)> &cb) {
auto key = getPusherKey(schema, vhost, app, stream, url);
auto src = MediaSource::find(schema, vhost, app, stream);
if (!src) {
cb(SockException(Err_other, "can not find the source stream"), key);
return;
}
lock_guard<recursive_mutex> lck(s_proxyPusherMapMtx);
if (s_proxyPusherMap.find(key) != s_proxyPusherMap.end()) {
//已经在推流了
cb(SockException(Err_success), key);
return;
}
//添加推流代理
auto pusher = std::make_shared<PusherProxy>(src, retry_count);
s_proxyPusherMap[key] = pusher;
//指定RTP over TCP(播放rtsp时有效)
(*pusher)[Client::kRtpType] = rtp_type;
if (timeout_sec > 0.1) {
//推流握手超时时间
(*pusher)[Client::kTimeoutMS] = timeout_sec * 1000;
}
//开始推流,如果推流失败或者推流中止,将会自动重试若干次,默认一直重试
pusher->setPushCallbackOnce([cb, key, url](const SockException &ex) {
if (ex) {
WarnL << "Push " << url << " failed, key: " << key << ", err: " << ex;
lock_guard<recursive_mutex> lck(s_proxyPusherMapMtx);
s_proxyPusherMap.erase(key);
}
cb(ex, key);
});
//被主动关闭推流
pusher->setOnClose([key, url](const SockException &ex) {
WarnL << "Push " << url << " failed, key: " << key << ", err: " << ex;
lock_guard<recursive_mutex> lck(s_proxyPusherMapMtx);
s_proxyPusherMap.erase(key);
});
pusher->publish(url);
};
//动态添加rtsp/rtmp推流代理
//测试url http://127.0.0.1/index/api/addStreamPusherProxy?schema=rtmp&vhost=__defaultVhost__&app=proxy&stream=0&dst_url=rtmp://127.0.0.1/live/obs
api_regist("/index/api/addStreamPusherProxy", [](API_ARGS_MAP_ASYNC) {
@ -1057,8 +1077,7 @@ void installWebApi() {
api_regist("/index/api/delStreamPusherProxy", [](API_ARGS_MAP) {
CHECK_SECRET();
CHECK_ARGS("key");
lock_guard<recursive_mutex> lck(s_proxyPusherMapMtx);
val["data"]["flag"] = s_proxyPusherMap.erase(allArgs["key"]) == 1;
val["data"]["flag"] = s_pusher_proxy.erase(allArgs["key"]) == 1;
});
//动态添加rtsp/rtmp拉流代理
@ -1068,7 +1087,7 @@ void installWebApi() {
CHECK_ARGS("vhost","app","stream","url");
mINI args;
for (auto &pr : allArgs.getArgs()) {
for (auto &pr : allArgs.args) {
args.emplace(pr.first, pr.second);
}
@ -1099,8 +1118,7 @@ void installWebApi() {
api_regist("/index/api/delStreamProxy",[](API_ARGS_MAP){
CHECK_SECRET();
CHECK_ARGS("key");
lock_guard<recursive_mutex> lck(s_proxyMapMtx);
val["data"]["flag"] = s_proxyMap.erase(allArgs["key"]) == 1;
val["data"]["flag"] = s_player_proxy.erase(allArgs["key"]) == 1;
});
static auto addFFmpegSource = [](const string &ffmpeg_cmd_key,
@ -1111,25 +1129,21 @@ void installWebApi() {
bool enable_mp4,
const function<void(const SockException &ex, const string &key)> &cb) {
auto key = MD5(dst_url).hexdigest();
lock_guard<decltype(s_ffmpegMapMtx)> lck(s_ffmpegMapMtx);
if (s_ffmpegMap.find(key) != s_ffmpegMap.end()) {
if (s_ffmpeg_src.find(key)) {
//已经在拉流了
cb(SockException(Err_success), key);
return;
}
FFmpegSource::Ptr ffmpeg = std::make_shared<FFmpegSource>();
s_ffmpegMap[key] = ffmpeg;
auto ffmpeg = s_ffmpeg_src.make(key);
ffmpeg->setOnClose([key]() {
lock_guard<decltype(s_ffmpegMapMtx)> lck(s_ffmpegMapMtx);
s_ffmpegMap.erase(key);
s_ffmpeg_src.erase(key);
});
ffmpeg->setupRecordFlag(enable_hls, enable_mp4);
ffmpeg->play(ffmpeg_cmd_key, src_url, dst_url, timeout_ms, [cb, key](const SockException &ex) {
if (ex) {
lock_guard<decltype(s_ffmpegMapMtx)> lck(s_ffmpegMapMtx);
s_ffmpegMap.erase(key);
s_ffmpeg_src.erase(key);
}
cb(ex, key);
});
@ -1163,15 +1177,14 @@ void installWebApi() {
api_regist("/index/api/delFFmpegSource",[](API_ARGS_MAP){
CHECK_SECRET();
CHECK_ARGS("key");
lock_guard<decltype(s_ffmpegMapMtx)> lck(s_ffmpegMapMtx);
val["data"]["flag"] = s_ffmpegMap.erase(allArgs["key"]) == 1;
val["data"]["flag"] = s_ffmpeg_src.erase(allArgs["key"]) == 1;
});
//新增http api下载可执行程序文件接口
//测试url http://127.0.0.1/index/api/downloadBin
api_regist("/index/api/downloadBin",[](API_ARGS_MAP_ASYNC){
CHECK_SECRET();
invoker.responseFile(allArgs.getParser().getHeader(),StrCaseMap(),exePath());
invoker.responseFile(allArgs.parser.getHeader(), StrCaseMap(), exePath());
});
#if defined(ENABLE_RTPPROXY)
@ -1197,8 +1210,17 @@ void installWebApi() {
//兼容老版本请求新版本去除enable_tcp参数并新增tcp_mode参数
tcp_mode = 1;
}
auto port = openRtpServer(allArgs["port"], stream_id, tcp_mode, "::", allArgs["re_use_port"].as<bool>(),
allArgs["ssrc"].as<uint32_t>(), allArgs["only_audio"].as<bool>());
auto only_track = allArgs["only_track"].as<int>();
if (allArgs["only_audio"].as<bool>()) {
// 兼容老版本请求新版本去除only_audio参数并新增only_track参数
only_track = 1;
}
std::string local_ip = "::";
if (!allArgs["local_ip"].empty()) {
local_ip = allArgs["local_ip"];
}
auto port = openRtpServer(allArgs["port"], stream_id, tcp_mode, local_ip, allArgs["re_use_port"].as<bool>(),
allArgs["ssrc"].as<uint32_t>(), only_track);
if (port == 0) {
throw InvalidArgsException("该stream_id已存在");
}
@ -1215,9 +1237,16 @@ void installWebApi() {
// 兼容老版本请求新版本去除enable_tcp参数并新增tcp_mode参数
tcp_mode = 1;
}
auto port = openRtpServer(
allArgs["port"], stream_id, tcp_mode, "::", true, 0, allArgs["only_audio"].as<bool>(),true);
auto only_track = allArgs["only_track"].as<int>();
if (allArgs["only_audio"].as<bool>()) {
// 兼容老版本请求新版本去除only_audio参数并新增only_track参数
only_track = 1;
}
std::string local_ip = "::";
if (!allArgs["local_ip"].empty()) {
local_ip = allArgs["local_ip"];
}
auto port = openRtpServer(allArgs["port"], stream_id, tcp_mode, local_ip, true, 0, only_track,true);
if (port == 0) {
throw InvalidArgsException("该stream_id已存在");
}
@ -1228,22 +1257,27 @@ void installWebApi() {
api_regist("/index/api/connectRtpServer", [](API_ARGS_MAP_ASYNC) {
CHECK_SECRET();
CHECK_ARGS("stream_id", "dst_url", "dst_port");
connectRtpServer(
allArgs["stream_id"], allArgs["dst_url"], allArgs["dst_port"],
[val, headerOut, invoker](const SockException &ex) mutable {
if (ex) {
val["code"] = API::OtherFailed;
val["msg"] = ex.what();
}
invoker(200, headerOut, val.toStyledString());
});
auto cb = [val, headerOut, invoker](const SockException &ex) mutable {
if (ex) {
val["code"] = API::OtherFailed;
val["msg"] = ex.what();
}
invoker(200, headerOut, val.toStyledString());
};
auto server = s_rtp_server.find(allArgs["stream_id"]);
if (!server) {
cb(SockException(Err_other, "未找到rtp服务"));
return;
}
server->connectToServer(allArgs["dst_url"], allArgs["dst_port"], cb);
});
api_regist("/index/api/closeRtpServer",[](API_ARGS_MAP){
CHECK_SECRET();
CHECK_ARGS("stream_id");
if(!closeRtpServer(allArgs["stream_id"])){
if(s_rtp_server.erase(allArgs["stream_id"]) == 0){
val["hit"] = 0;
return;
}
@ -1254,19 +1288,18 @@ void installWebApi() {
CHECK_SECRET();
CHECK_ARGS("stream_id", "ssrc");
lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
auto it = s_rtpServerMap.find(allArgs["stream_id"]);
if (it == s_rtpServerMap.end()) {
auto server = s_rtp_server.find(allArgs["stream_id"]);
if (!server) {
throw ApiRetException("RtpServer not found by stream_id", API::NotFound);
}
it->second->updateSSRC(allArgs["ssrc"]);
server->updateSSRC(allArgs["ssrc"]);
});
api_regist("/index/api/listRtpServer",[](API_ARGS_MAP){
CHECK_SECRET();
lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
for (auto &pr : s_rtpServerMap) {
std::lock_guard<std::recursive_mutex> lck(s_rtp_server._mtx);
for (auto &pr : s_rtp_server._map) {
Value obj;
obj["stream_id"] = pr.first;
obj["port"] = pr.second->getPort();
@ -1282,7 +1315,11 @@ void installWebApi() {
if (!src) {
throw ApiRetException("can not find the source stream", API::NotFound);
}
auto type = allArgs["type"].empty() ? (int)MediaSourceEvent::SendRtpArgs::kRtpPS : allArgs["type"].as<int>();
if (!allArgs["use_ps"].empty()) {
// 兼容之前的use_ps参数
type = allArgs["use_ps"].as<int>();
}
MediaSourceEvent::SendRtpArgs args;
args.passive = false;
args.dst_url = allArgs["dst_url"];
@ -1292,11 +1329,11 @@ void installWebApi() {
args.is_udp = allArgs["is_udp"];
args.src_port = allArgs["src_port"];
args.pt = allArgs["pt"].empty() ? 96 : allArgs["pt"].as<int>();
args.use_ps = allArgs["use_ps"].empty() ? true : allArgs["use_ps"].as<bool>();
args.type = (MediaSourceEvent::SendRtpArgs::Type)type;
args.only_audio = allArgs["only_audio"].as<bool>();
args.udp_rtcp_timeout = allArgs["udp_rtcp_timeout"];
args.recv_stream_id = allArgs["recv_stream_id"];
TraceL << "startSendRtp, pt " << int(args.pt) << " ps " << args.use_ps << " audio " << args.only_audio;
TraceL << "startSendRtp, pt " << int(args.pt) << " rtp type " << type << " audio " << args.only_audio;
src->getOwnerPoller()->async([=]() mutable {
src->startSendRtp(args, [val, headerOut, invoker](uint16_t local_port, const SockException &ex) mutable {
@ -1310,6 +1347,26 @@ void installWebApi() {
});
});
api_regist("/index/api/listRtpSender",[](API_ARGS_MAP_ASYNC){
CHECK_SECRET();
CHECK_ARGS("vhost", "app", "stream");
auto src = MediaSource::find(allArgs["vhost"], allArgs["app"], allArgs["stream"]);
if (!src) {
throw ApiRetException("can not find the source stream", API::NotFound);
}
auto muxer = src->getMuxer();
CHECK(muxer, "get muxer from media source failed");
src->getOwnerPoller()->async([=]() mutable {
muxer->forEachRtpSender([&](const std::string &ssrc) mutable {
val["data"].append(ssrc);
});
invoker(200, headerOut, val.toStyledString());
});
});
api_regist("/index/api/startSendRtpPassive",[](API_ARGS_MAP_ASYNC){
CHECK_SECRET();
CHECK_ARGS("vhost", "app", "stream", "ssrc");
@ -1318,6 +1375,11 @@ void installWebApi() {
if (!src) {
throw ApiRetException("can not find the source stream", API::NotFound);
}
auto type = allArgs["type"].empty() ? (int)MediaSourceEvent::SendRtpArgs::kRtpPS : allArgs["type"].as<int>();
if (!allArgs["use_ps"].empty()) {
// 兼容之前的use_ps参数
type = allArgs["use_ps"].as<int>();
}
MediaSourceEvent::SendRtpArgs args;
args.passive = true;
@ -1325,12 +1387,12 @@ void installWebApi() {
args.is_udp = false;
args.src_port = allArgs["src_port"];
args.pt = allArgs["pt"].empty() ? 96 : allArgs["pt"].as<int>();
args.use_ps = allArgs["use_ps"].empty() ? true : allArgs["use_ps"].as<bool>();
args.type = (MediaSourceEvent::SendRtpArgs::Type)type;
args.only_audio = allArgs["only_audio"].as<bool>();
args.recv_stream_id = allArgs["recv_stream_id"];
//tcp被动服务器等待链接超时时间
args.tcp_passive_close_delay_ms = allArgs["close_delay_ms"];
TraceL << "startSendRtpPassive, pt " << int(args.pt) << " ps " << args.use_ps << " audio " << args.only_audio;
TraceL << "startSendRtpPassive, pt " << int(args.pt) << " rtp type " << type << " audio " << args.only_audio;
src->getOwnerPoller()->async([=]() mutable {
src->startSendRtp(args, [val, headerOut, invoker](uint16_t local_port, const SockException &ex) mutable {
@ -1492,18 +1554,11 @@ void installWebApi() {
api_regist("/index/api/getProxyPusherInfo", [](API_ARGS_MAP_ASYNC) {
CHECK_SECRET();
CHECK_ARGS("key");
decltype(s_proxyPusherMap.end()) it;
{
lock_guard<recursive_mutex> lck(s_proxyPusherMapMtx);
it = s_proxyPusherMap.find(allArgs["key"]);
}
if (it == s_proxyPusherMap.end()) {
auto pusher = s_pusher_proxy.find(allArgs["key"]);
if (!pusher) {
throw ApiRetException("can not find pusher", API::NotFound);
}
auto pusher = it->second;
val["data"]["status"] = pusher->getStatus();
val["data"]["liveSecs"] = pusher->getLiveSecs();
val["data"]["rePublishCount"] = pusher->getRePublishCount();
@ -1513,18 +1568,11 @@ void installWebApi() {
api_regist("/index/api/getProxyInfo", [](API_ARGS_MAP_ASYNC) {
CHECK_SECRET();
CHECK_ARGS("key");
decltype(s_proxyMap.end()) it;
{
lock_guard<recursive_mutex> lck(s_proxyMapMtx);
it = s_proxyMap.find(allArgs["key"]);
}
if (it == s_proxyMap.end()) {
auto proxy = s_player_proxy.find(allArgs["key"]);
if (!proxy) {
throw ApiRetException("can not find the proxy", API::NotFound);
}
auto proxy = it->second;
val["data"]["status"] = proxy->getStatus();
val["data"]["liveSecs"] = proxy->getLiveSecs();
val["data"]["rePullCount"] = proxy->getRePullCount();
@ -1536,7 +1584,7 @@ void installWebApi() {
api_regist("/index/api/deleteRecordDirectory", [](API_ARGS_MAP) {
CHECK_SECRET();
CHECK_ARGS("vhost", "app", "stream", "period");
auto tuple = MediaTuple{allArgs["vhost"], allArgs["app"], allArgs["stream"]};
auto tuple = MediaTuple{allArgs["vhost"], allArgs["app"], allArgs["stream"], ""};
auto record_path = Recorder::getRecordPath(Recorder::type_mp4, tuple, allArgs["customized_path"]);
auto period = allArgs["period"];
record_path = record_path + period + "/";
@ -1575,7 +1623,7 @@ void installWebApi() {
api_regist("/index/api/getMP4RecordFile", [](API_ARGS_MAP){
CHECK_SECRET();
CHECK_ARGS("vhost", "app", "stream");
auto tuple = MediaTuple{allArgs["vhost"], allArgs["app"], allArgs["stream"]};
auto tuple = MediaTuple{allArgs["vhost"], allArgs["app"], allArgs["stream"], ""};
auto record_path = Recorder::getRecordPath(Recorder::type_mp4, tuple, allArgs["customized_path"]);
auto period = allArgs["period"];
@ -1663,7 +1711,7 @@ void installWebApi() {
//截图存在,且未过期,那么返回之
res_old_snap = true;
responseSnap(path, allArgs.getParser().getHeader(), invoker);
responseSnap(path, allArgs.parser.getHeader(), invoker);
//中断遍历
return false;
});
@ -1694,7 +1742,7 @@ void installWebApi() {
File::delete_file(new_snap);
rename(new_snap_tmp.data(), new_snap.data());
}
responseSnap(new_snap, allArgs.getParser().getHeader(), invoker, err_msg);
responseSnap(new_snap, allArgs.parser.getHeader(), invoker, err_msg);
});
});
@ -1709,7 +1757,7 @@ void installWebApi() {
#ifdef ENABLE_WEBRTC
class WebRtcArgsImp : public WebRtcArgs {
public:
WebRtcArgsImp(const HttpAllArgs<string> &args, std::string session_id)
WebRtcArgsImp(const ArgsString &args, std::string session_id)
: _args(args)
, _session_id(std::move(session_id)) {}
~WebRtcArgsImp() override = default;
@ -1727,30 +1775,26 @@ void installWebApi() {
CHECK_ARGS("app", "stream");
return StrPrinter << "rtc://" << _args["Host"] << "/" << _args["app"] << "/"
<< _args["stream"] << "?" << _args.getParser().params() + "&session=" + _session_id;
<< _args["stream"] << "?" << _args.parser.params() + "&session=" + _session_id;
}
private:
HttpAllArgs<string> _args;
ArgsString _args;
std::string _session_id;
};
api_regist("/index/api/webrtc",[](API_ARGS_STRING_ASYNC){
CHECK_ARGS("type");
auto type = allArgs["type"];
auto offer = allArgs.getArgs();
auto offer = allArgs.args;
CHECK(!offer.empty(), "http body(webrtc offer sdp) is empty");
auto &session = static_cast<Session&>(sender);
auto args = std::make_shared<WebRtcArgsImp>(allArgs, sender.getIdentifier());
WebRtcPluginManager::Instance().getAnswerSdp(static_cast<Session&>(sender), type, *args,
[invoker, val, offer, headerOut](const WebRtcInterface &exchanger) mutable {
//设置返回类型
headerOut["Content-Type"] = HttpFileManager::getContentType(".json");
//设置跨域
headerOut["Access-Control-Allow-Origin"] = "*";
WebRtcPluginManager::Instance().negotiateSdp(session, type, *args, [invoker, val, offer, headerOut](const WebRtcInterface &exchanger) mutable {
auto &handler = const_cast<WebRtcInterface &>(exchanger);
try {
val["sdp"] = exchangeSdp(exchanger, offer);
val["sdp"] = handler.getAnswerSdp(offer);
val["id"] = exchanger.getIdentifier();
val["type"] = "answer";
invoker(200, headerOut, val.toStyledString());
@ -1764,26 +1808,24 @@ void installWebApi() {
static constexpr char delete_webrtc_url [] = "/index/api/delete_webrtc";
static auto whip_whep_func = [](const char *type, API_ARGS_STRING_ASYNC) {
auto offer = allArgs.getArgs();
auto offer = allArgs.args;
CHECK(!offer.empty(), "http body(webrtc offer sdp) is empty");
auto &session = static_cast<Session&>(sender);
auto location = std::string("http") + (session.overSsl() ? "s" : "") + "://" + allArgs["host"] + delete_webrtc_url;
auto location = std::string(session.overSsl() ? "https://" : "http://") + allArgs["host"] + delete_webrtc_url;
auto args = std::make_shared<WebRtcArgsImp>(allArgs, sender.getIdentifier());
WebRtcPluginManager::Instance().getAnswerSdp(session, type, *args,
[invoker, offer, headerOut, location](const WebRtcInterface &exchanger) mutable {
// 设置跨域
headerOut["Access-Control-Allow-Origin"] = "*";
try {
// 设置返回类型
headerOut["Content-Type"] = "application/sdp";
headerOut["Location"] = location + "?id=" + exchanger.getIdentifier() + "&token=" + exchanger.deleteRandStr();
invoker(201, headerOut, exchangeSdp(exchanger, offer));
} catch (std::exception &ex) {
headerOut["Content-Type"] = "text/plain";
invoker(406, headerOut, ex.what());
}
});
WebRtcPluginManager::Instance().negotiateSdp(session, type, *args, [invoker, offer, headerOut, location](const WebRtcInterface &exchanger) mutable {
auto &handler = const_cast<WebRtcInterface &>(exchanger);
try {
// 设置返回类型
headerOut["Content-Type"] = "application/sdp";
headerOut["Location"] = location + "?id=" + exchanger.getIdentifier() + "&token=" + exchanger.deleteRandStr();
invoker(201, headerOut, handler.getAnswerSdp(offer));
} catch (std::exception &ex) {
headerOut["Content-Type"] = "text/plain";
invoker(406, headerOut, ex.what());
}
});
};
api_regist("/index/api/whip", [](API_ARGS_STRING_ASYNC) { whip_whep_func("push", API_ARGS_VALUE, invoker); });
@ -1791,7 +1833,7 @@ void installWebApi() {
api_regist(delete_webrtc_url, [](API_ARGS_MAP_ASYNC) {
CHECK_ARGS("id", "token");
CHECK(allArgs.getParser().method() == "DELETE", "http method is not DELETE: " + allArgs.getParser().method());
CHECK(allArgs.parser.method() == "DELETE", "http method is not DELETE: " + allArgs.parser.method());
auto obj = WebRtcTransportManager::Instance().getItem(allArgs["id"]);
if (!obj) {
invoker(404, headerOut, "id not found");
@ -1841,7 +1883,7 @@ void installWebApi() {
std::set<std::string> ret;
auto vec = toolkit::split(str, ";");
for (auto &item : vec) {
auto root = File::absolutePath(item, "", true);
auto root = File::absolutePath("", item, true);
ret.emplace(std::move(root));
}
return ret;
@ -1877,44 +1919,50 @@ void installWebApi() {
if (!save_name.empty()) {
res_header.emplace("Content-Disposition", "attachment;filename=\"" + save_name + "\"");
}
invoker.responseFile(allArgs.getParser().getHeader(), res_header, allArgs["file_path"]);
invoker.responseFile(allArgs.parser.getHeader(), res_header, allArgs["file_path"]);
}
};
bool flag = NOTICE_EMIT(BroadcastHttpAccessArgs, Broadcast::kBroadcastHttpAccess, allArgs.getParser(), file_path, false, file_invoker, sender);
bool flag = NOTICE_EMIT(BroadcastHttpAccessArgs, Broadcast::kBroadcastHttpAccess, allArgs.parser, file_path, false, file_invoker, sender);
if (!flag) {
// 文件下载鉴权事件无人监听,不允许下载
invoker(401, StrCaseMap {}, "None http access event listener");
}
});
#if defined(ENABLE_VIDEOSTACK) && defined(ENABLE_X264) && defined(ENABLE_FFMPEG)
VideoStackManager::Instance().loadBgImg("novideo.yuv");
NoticeCenter::Instance().addListener(nullptr, Broadcast::kBroadcastStreamNoneReader, [](BroadcastStreamNoneReaderArgs) {
auto id = sender.getMediaTuple().stream;
VideoStackManager::Instance().stopVideoStack(id);
});
api_regist("/index/api/stack/start", [](API_ARGS_JSON_ASYNC) {
CHECK_SECRET();
auto ret = VideoStackManager::Instance().startVideoStack(allArgs.args);
val["code"] = ret;
val["msg"] = ret ? "failed" : "success";
invoker(200, headerOut, val.toStyledString());
});
api_regist("/index/api/stack/stop", [](API_ARGS_MAP_ASYNC) {
CHECK_SECRET();
CHECK_ARGS("id");
auto ret = VideoStackManager::Instance().stopVideoStack(allArgs["id"]);
val["code"] = ret;
val["msg"] = ret ? "failed" : "success";
invoker(200, headerOut, val.toStyledString());
});
#endif
}
void unInstallWebApi(){
{
lock_guard<recursive_mutex> lck(s_proxyMapMtx);
auto proxyMap(std::move(s_proxyMap));
proxyMap.clear();
}
{
lock_guard<recursive_mutex> lck(s_ffmpegMapMtx);
auto ffmpegMap(std::move(s_ffmpegMap));
ffmpegMap.clear();
}
{
lock_guard<recursive_mutex> lck(s_proxyPusherMapMtx);
auto proxyPusherMap(std::move(s_proxyPusherMap));
proxyPusherMap.clear();
}
{
s_player_proxy.clear();
s_ffmpeg_src.clear();
s_pusher_proxy.clear();
#if defined(ENABLE_RTPPROXY)
RtpSelector::Instance().clear();
lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
auto rtpServerMap(std::move(s_rtpServerMap));
rtpServerMap.clear();
s_rtp_server.clear();
#endif
}
NoticeCenter::Instance().delListener(&web_api_tag);
}

View File

@ -115,72 +115,41 @@ std::string getValue(const mediakit::Parser &parser, Args &args, const First &fi
template<typename Args>
class HttpAllArgs {
mediakit::Parser* _parser = nullptr;
Args* _args = nullptr;
public:
HttpAllArgs(const mediakit::Parser &parser, Args &args) {
_get_args = [&args]() {
return (void *) &args;
};
_get_parser = [&parser]() -> const mediakit::Parser & {
return parser;
};
_get_value = [](HttpAllArgs &that, const std::string &key) {
return getValue(that.getParser(), that.getArgs(), key);
};
_clone = [&](HttpAllArgs &that) {
that._get_args = [args]() {
return (void *) &args;
};
that._get_parser = [parser]() -> const mediakit::Parser & {
return parser;
};
that._get_value = [](HttpAllArgs &that, const std::string &key) {
return getValue(that.getParser(), that.getArgs(), key);
};
that._cache_able = true;
};
}
const mediakit::Parser& parser;
Args& args;
HttpAllArgs(const HttpAllArgs &that) {
if (that._cache_able) {
_get_args = that._get_args;
_get_parser = that._get_parser;
_get_value = that._get_value;
_cache_able = true;
} else {
that._clone(*this);
HttpAllArgs(const mediakit::Parser &p, Args &a): parser(p), args(a) {}
HttpAllArgs(const HttpAllArgs &that): _parser(new mediakit::Parser(that.parser)),
_args(new Args(that.args)),
parser(*_parser), args(*_args) {}
~HttpAllArgs() {
if (_parser) {
delete _parser;
}
if (_args) {
delete _args;
}
}
template<typename Key>
toolkit::variant operator[](const Key &key) const {
return (toolkit::variant)_get_value(*(HttpAllArgs*)this, key);
return (toolkit::variant)getValue(parser, args, key);
}
const mediakit::Parser &getParser() const {
return _get_parser();
}
Args &getArgs() {
return *((Args *) _get_args());
}
const Args &getArgs() const {
return *((Args *) _get_args());
}
private:
bool _cache_able = false;
std::function<void *() > _get_args;
std::function<const mediakit::Parser &() > _get_parser;
std::function<std::string(HttpAllArgs &that, const std::string &key)> _get_value;
std::function<void(HttpAllArgs &that) > _clone;
};
#define API_ARGS_MAP toolkit::SockInfo &sender, mediakit::HttpSession::KeyValue &headerOut, const HttpAllArgs<ApiArgsType> &allArgs, Json::Value &val
using ArgsMap = HttpAllArgs<ApiArgsType>;
using ArgsJson = HttpAllArgs<Json::Value>;
using ArgsString = HttpAllArgs<std::string>;
#define API_ARGS_MAP toolkit::SockInfo &sender, mediakit::HttpSession::KeyValue &headerOut, const ArgsMap &allArgs, Json::Value &val
#define API_ARGS_MAP_ASYNC API_ARGS_MAP, const mediakit::HttpSession::HttpResponseInvoker &invoker
#define API_ARGS_JSON toolkit::SockInfo &sender, mediakit::HttpSession::KeyValue &headerOut, const HttpAllArgs<Json::Value> &allArgs, Json::Value &val
#define API_ARGS_JSON toolkit::SockInfo &sender, mediakit::HttpSession::KeyValue &headerOut, const ArgsJson &allArgs, Json::Value &val
#define API_ARGS_JSON_ASYNC API_ARGS_JSON, const mediakit::HttpSession::HttpResponseInvoker &invoker
#define API_ARGS_STRING toolkit::SockInfo &sender, mediakit::HttpSession::KeyValue &headerOut, const HttpAllArgs<std::string> &allArgs, Json::Value &val
#define API_ARGS_STRING toolkit::SockInfo &sender, mediakit::HttpSession::KeyValue &headerOut, const ArgsString &allArgs, Json::Value &val
#define API_ARGS_STRING_ASYNC API_ARGS_STRING, const mediakit::HttpSession::HttpResponseInvoker &invoker
#define API_ARGS_VALUE sender, headerOut, allArgs, val
@ -233,9 +202,7 @@ void installWebApi();
void unInstallWebApi();
#if defined(ENABLE_RTPPROXY)
uint16_t openRtpServer(uint16_t local_port, const std::string &stream_id, int tcp_mode, const std::string &local_ip, bool re_use_port, uint32_t ssrc, bool only_audio, bool multiplex=false);
void connectRtpServer(const std::string &stream_id, const std::string &dst_url, uint16_t dst_port, const std::function<void(const toolkit::SockException &ex)> &cb);
bool closeRtpServer(const std::string &stream_id);
uint16_t openRtpServer(uint16_t local_port, const std::string &stream_id, int tcp_mode, const std::string &local_ip, bool re_use_port, uint32_t ssrc, int only_track, bool multiplex=false);
#endif
Json::Value makeMediaSourceJson(mediakit::MediaSource &media);

View File

@ -225,7 +225,7 @@ static ArgsType make_json(const MediaInfo &args) {
ArgsType body;
body["schema"] = args.schema;
dumpMediaTuple(args, body);
body["params"] = args.param_strs;
body["params"] = args.params;
return body;
}
@ -286,7 +286,7 @@ static string getPullUrl(const string &origin_fmt, const MediaInfo &info) {
return "";
}
// 告知源站这是来自边沿站的拉流请求,如果未找到流请立即返回拉流失败
return string(url) + '?' + kEdgeServerParam + '&' + VHOST_KEY + '=' + info.vhost + '&' + info.param_strs;
return string(url) + '?' + kEdgeServerParam + '&' + VHOST_KEY + '=' + info.vhost + '&' + info.params;
}
static void pullStreamFromOrigin(const vector<string> &urls, size_t index, size_t failed_cnt, const MediaInfo &args, const function<void()> &closePlayer) {
@ -498,7 +498,7 @@ void installWebHook() {
return;
}
if (start_with(args.param_strs, kEdgeServerParam)) {
if (start_with(args.params, kEdgeServerParam)) {
// 源站收到来自边沿站的溯源请求,流不存在时立即返回拉流失败
closePlayer();
return;

View File

@ -38,7 +38,7 @@
#endif
#if defined(ENABLE_VERSION)
#include "version.h"
#include "ZLMVersion.h"
#endif
#if !defined(_WIN32)
@ -258,6 +258,15 @@ int start_main(int argc,char *argv[]) {
//加载配置文件,如果配置文件不存在就创建一个
loadIniConfig(g_ini_file.data());
auto &secret = mINI::Instance()[API::kSecret];
if (secret == "035c73f7-bb6b-4889-a715-d9eb2d1925cc" || secret.empty()) {
// 使用默认secret被禁止启动
secret = makeRandStr(32, true);
mINI::Instance().dumpFile(g_ini_file);
WarnL << "The " << API::kSecret << " is invalid, modified it to: " << secret
<< ", saved config file: " << g_ini_file;
}
if (!File::is_dir(ssl_file)) {
// 不是文件夹,加载证书,证书包含公钥和私钥
SSL_Initor::Instance().loadCertificate(ssl_file.data());
@ -353,14 +362,6 @@ int start_main(int argc,char *argv[]) {
InfoL << "已启动http hook 接口";
try {
auto &secret = mINI::Instance()[API::kSecret];
if (secret == "035c73f7-bb6b-4889-a715-d9eb2d1925cc" || secret.empty()) {
// 使用默认secret被禁止启动
secret = makeRandStr(32, true);
mINI::Instance().dumpFile(g_ini_file);
WarnL << "The " << API::kSecret << " is invalid, modified it to: " << secret
<< ", saved config file: " << g_ini_file;
}
//rtsp服务器端口默认554
if (rtspPort) { rtspSrv->start<RtspSession>(rtspPort); }
//rtsps服务器端口默认322
@ -393,8 +394,8 @@ int start_main(int argc,char *argv[]) {
#endif//defined(ENABLE_WEBRTC)
#if defined(ENABLE_SRT)
// srt udp服务器
if(srtPort) { srtSrv->start<SRT::SrtSession>(srtPort); }
// srt udp服务器
if (srtPort) { srtSrv->start<SRT::SrtSession>(srtPort); }
#endif//defined(ENABLE_SRT)
} catch (std::exception &ex) {

View File

@ -37,6 +37,7 @@ bool MediaSink::addTrack(const Track::Ptr &track_in) {
}
// 克隆Track只拷贝其数据不拷贝其数据转发关系
auto track = track_in->clone();
CHECK(track, "Clone track failed: ", track_in->getCodecName());
auto index = track->getIndex();
if (!_track_map.emplace(index, std::make_pair(track, false)).second) {
WarnL << "Already add a same track: " << track->getIndex() << ", codec: " << track->getCodecName();
@ -132,7 +133,7 @@ void MediaSink::checkTrackIfReady() {
}
GET_CONFIG(uint32_t, kMaxAddTrackMS, General::kWaitAddTrackMS);
if (_track_map.size() == 1 && _ticker.elapsedTime() > kMaxAddTrackMS) {
if (_track_map.size() == 1 && (_ticker.elapsedTime() > kMaxAddTrackMS || !_enable_audio)) {
// 如果只有一个Track那么在该Track添加后我们最多还等待若干时间(可能后面还会添加Track)
emitAllTrackReady();
return;
@ -186,6 +187,8 @@ void MediaSink::emitAllTrackReady() {
pr.second.for_each([&](const Frame::Ptr &frame) { MediaSink::inputFrame(frame); });
}
_frame_unread.clear();
} else {
throw toolkit::SockException(toolkit::Err_shutdown, "no vaild track data");
}
}

View File

@ -113,7 +113,7 @@ ProtocolOption::ProtocolOption() {
//////////////////////////////////////////////////////////////////////////////////////////////////////////////
struct MediaSourceNull : public MediaSource {
MediaSourceNull() : MediaSource("schema", MediaTuple{"vhost", "app", "stream"}) {};
MediaSourceNull() : MediaSource("schema", MediaTuple{"vhost", "app", "stream", ""}) {};
int readerCount() override { return 0; }
};
@ -583,7 +583,7 @@ void MediaInfo::parse(const std::string &url_in){
auto url = url_in;
auto pos = url.find("?");
if (pos != string::npos) {
param_strs = url.substr(pos + 1);
params = url.substr(pos + 1);
url.erase(pos);
}
@ -616,9 +616,10 @@ void MediaInfo::parse(const std::string &url_in){
stream = stream_id;
}
auto params = Parser::parseArgs(param_strs);
if (params.find(VHOST_KEY) != params.end()) {
vhost = params[VHOST_KEY];
auto kv = Parser::parseArgs(params);
auto it = kv.find(VHOST_KEY);
if (it != kv.end()) {
vhost = it->second;
}
GET_CONFIG(bool, enableVhost, General::kEnableVhost);

View File

@ -92,10 +92,11 @@ public:
class SendRtpArgs {
public:
enum Type { kRtpRAW = 0, kRtpPS = 1, kRtpTS = 2 };
// 是否采用udp方式发送rtp
bool is_udp = true;
// rtp采用ps还是es方式
bool use_ps = true;
// rtp类型
Type type = kRtpPS;
//发送es流时指定是否只发送纯音频流
bool only_audio = false;
//tcp被动方式
@ -135,6 +136,15 @@ private:
toolkit::Timer::Ptr _async_close_timer;
};
template <typename MAP, typename KEY, typename TYPE>
static void getArgsValue(const MAP &allArgs, const KEY &key, TYPE &value) {
auto val = ((MAP &)allArgs)[key];
if (!val.empty()) {
value = (TYPE)val;
}
}
class ProtocolOption {
public:
ProtocolOption();
@ -242,15 +252,6 @@ public:
GET_OPT_VALUE(stream_replace);
GET_OPT_VALUE(max_track);
}
private:
template <typename MAP, typename KEY, typename TYPE>
static void getArgsValue(const MAP &allArgs, const KEY &key, TYPE &value) {
auto val = ((MAP &)allArgs)[key];
if (!val.empty()) {
value = (TYPE)val;
}
}
};
//该对象用于拦截感兴趣的MediaSourceEvent事件
@ -298,7 +299,6 @@ public:
std::string full_url;
std::string schema;
std::string host;
std::string param_strs;
};
bool equalMediaTuple(const MediaTuple& a, const MediaTuple& b);

View File

@ -44,6 +44,7 @@ public:
}
void resetTimer(const EventPoller::Ptr &poller) {
std::lock_guard<std::recursive_mutex> lck(_mtx);
std::weak_ptr<FramePacedSender> weak_self = shared_from_this();
_timer = std::make_shared<Timer>(_paced_sender_ms / 1000.0f, [weak_self]() {
if (auto strong_self = weak_self.lock()) {
@ -55,6 +56,7 @@ public:
}
bool inputFrame(const Frame::Ptr &frame) override {
std::lock_guard<std::recursive_mutex> lck(_mtx);
if (!_timer) {
setCurrentStamp(frame->dts());
resetTimer(EventPoller::getCurrentPoller());
@ -66,6 +68,7 @@ public:
private:
void onTick() {
std::lock_guard<std::recursive_mutex> lck(_mtx);
auto dst = _cache.empty() ? 0 : _cache.back().first;
while (!_cache.empty()) {
auto &front = _cache.front();
@ -110,6 +113,7 @@ private:
OnFrame _cb;
Ticker _ticker;
Timer::Ptr _timer;
std::recursive_mutex _mtx;
std::list<std::pair<uint64_t, Frame::Ptr>> _cache;
};
@ -169,6 +173,12 @@ std::string MultiMediaSourceMuxer::shortUrl() const {
return _tuple.shortUrl();
}
// Invoke `cb` once for every active rtp sender, passing the map key of each entry.
// NOTE(review): judging by the callback signature, _rtp_sender appears to be keyed
// by ssrc (cb receives pr.first) — confirm against the member declaration, which is
// outside this view. Not thread-safe by itself; caller must hold whatever lock
// guards _rtp_sender.
void MultiMediaSourceMuxer::forEachRtpSender(const std::function<void(const std::string &ssrc)> &cb) const {
    for (auto &pr : _rtp_sender) {
        cb(pr.first);
    }
}
MultiMediaSourceMuxer::MultiMediaSourceMuxer(const MediaTuple& tuple, float dur_sec, const ProtocolOption &option): _tuple(tuple) {
if (!option.stream_replace.empty()) {
// 支持在on_publish hook中替换stream_id
@ -593,15 +603,17 @@ void MultiMediaSourceMuxer::resetTracks() {
}
}
bool MultiMediaSourceMuxer::onTrackFrame(const Frame::Ptr &frame) {
bool MultiMediaSourceMuxer::onTrackFrame(const Frame::Ptr &frame_in) {
auto frame = frame_in;
if (_option.modify_stamp != ProtocolOption::kModifyStampOff) {
// 时间戳不采用原始的绝对时间戳
const_cast<Frame::Ptr&>(frame) = std::make_shared<FrameStamp>(frame, _stamps[frame->getIndex()], _option.modify_stamp);
frame = std::make_shared<FrameStamp>(frame, _stamps[frame->getIndex()], _option.modify_stamp);
}
return _paced_sender ? _paced_sender->inputFrame(frame) : onTrackFrame_l(frame);
}
bool MultiMediaSourceMuxer::onTrackFrame_l(const Frame::Ptr &frame) {
bool MultiMediaSourceMuxer::onTrackFrame_l(const Frame::Ptr &frame_in) {
auto frame = frame_in;
bool ret = false;
if (_rtmp) {
ret = _rtmp->inputFrame(frame) ? true : ret;
@ -629,7 +641,7 @@ bool MultiMediaSourceMuxer::onTrackFrame_l(const Frame::Ptr &frame) {
}
if (_ring) {
// 此场景由于直接转发可能存在切换线程引起的数据被缓存在管道所以需要CacheAbleFrame
const_cast<Frame::Ptr &>(frame) = Frame::getCacheAbleFrame(frame);
frame = Frame::getCacheAbleFrame(frame);
if (frame->getTrackType() == TrackVideo) {
// 视频时遇到第一帧配置帧或关键帧则标记为gop开始处
auto video_key_pos = frame->keyFrame() || frame->configFrame();

View File

@ -133,6 +133,8 @@ public:
const MediaTuple &getMediaTuple() const;
std::string shortUrl() const;
void forEachRtpSender(const std::function<void(const std::string &ssrc)> &cb) const;
protected:
/////////////////////////////////MediaSink override/////////////////////////////////

View File

@ -294,8 +294,8 @@ void RtspUrl::setup(bool is_ssl, const string &url, const string &user, const st
splitUrl(ip, ip, port);
_url = std::move(url);
_user = strCoding::UrlDecode(std::move(user));
_passwd = strCoding::UrlDecode(std::move(passwd));
_user = strCoding::UrlDecodeUserOrPass(user);
_passwd = strCoding::UrlDecodeUserOrPass(passwd);
_host = std::move(ip);
_port = port;
_is_ssl = is_ssl;

View File

@ -30,7 +30,7 @@ struct StrCaseCompare {
class StrCaseMap : public std::multimap<std::string, std::string, StrCaseCompare> {
public:
using Super = multimap<std::string, std::string, StrCaseCompare>;
using Super = std::multimap<std::string, std::string, StrCaseCompare>;
std::string &operator[](const std::string &k) {
auto it = find(k);

View File

@ -58,6 +58,12 @@ const string kBroadcastStreamNoneReader = "kBroadcastStreamNoneReader";
const string kBroadcastHttpBeforeAccess = "kBroadcastHttpBeforeAccess";
const string kBroadcastSendRtpStopped = "kBroadcastSendRtpStopped";
const string kBroadcastRtpServerTimeout = "kBroadcastRtpServerTimeout";
const string kBroadcastRtcSctpConnecting = "kBroadcastRtcSctpConnecting";
const string kBroadcastRtcSctpConnected = "kBroadcastRtcSctpConnected";
const string kBroadcastRtcSctpFailed = "kBroadcastRtcSctpFailed";
const string kBroadcastRtcSctpClosed = "kBroadcastRtcSctpClosed";
const string kBroadcastRtcSctpSend = "kBroadcastRtcSctpSend";
const string kBroadcastRtcSctpReceived = "kBroadcastRtcSctpReceived";
} // namespace Broadcast
@ -175,12 +181,7 @@ static onceToken token([]() {
mINI::Instance()[kKeepAliveSecond] = 15;
mINI::Instance()[kDirMenu] = true;
mINI::Instance()[kVirtualPath] = "";
#if defined(_WIN32)
mINI::Instance()[kCharSet] = "gb2312";
#else
mINI::Instance()[kCharSet] = "utf-8";
#endif
mINI::Instance()[kRootPath] = "./www";
mINI::Instance()[kNotFound] = StrPrinter << "<html>"
@ -291,6 +292,7 @@ const string kSampleMS = RECORD_FIELD "sampleMS";
const string kFileBufSize = RECORD_FIELD "fileBufSize";
const string kFastStart = RECORD_FIELD "fastStart";
const string kFileRepeat = RECORD_FIELD "fileRepeat";
const string kEnableFmp4 = RECORD_FIELD "enableFmp4";
static onceToken token([]() {
mINI::Instance()[kAppName] = "record";
@ -298,6 +300,7 @@ static onceToken token([]() {
mINI::Instance()[kFileBufSize] = 64 * 1024;
mINI::Instance()[kFastStart] = false;
mINI::Instance()[kFileRepeat] = false;
mINI::Instance()[kEnableFmp4] = false;
});
} // namespace Record
@ -307,6 +310,7 @@ namespace Hls {
const string kSegmentDuration = HLS_FIELD "segDur";
const string kSegmentNum = HLS_FIELD "segNum";
const string kSegmentKeep = HLS_FIELD "segKeep";
const string kSegmentDelay = HLS_FIELD "segDelay";
const string kSegmentRetain = HLS_FIELD "segRetain";
const string kFileBufSize = HLS_FIELD "fileBufSize";
const string kBroadcastRecordTs = HLS_FIELD "broadcastRecordTs";
@ -317,6 +321,7 @@ static onceToken token([]() {
mINI::Instance()[kSegmentDuration] = 2;
mINI::Instance()[kSegmentNum] = 3;
mINI::Instance()[kSegmentKeep] = false;
mINI::Instance()[kSegmentDelay] = 0;
mINI::Instance()[kSegmentRetain] = 5;
mINI::Instance()[kFileBufSize] = 64 * 1024;
mINI::Instance()[kBroadcastRecordTs] = false;
@ -336,6 +341,8 @@ const string kH265PT = RTP_PROXY_FIELD "h265_pt";
const string kPSPT = RTP_PROXY_FIELD "ps_pt";
const string kOpusPT = RTP_PROXY_FIELD "opus_pt";
const string kGopCache = RTP_PROXY_FIELD "gop_cache";
const string kRtpG711DurMs = RTP_PROXY_FIELD "rtp_g711_dur_ms";
const string kUdpRecvSocketBuffer = RTP_PROXY_FIELD "udp_recv_socket_buffer";
static onceToken token([]() {
mINI::Instance()[kDumpDir] = "";
@ -346,6 +353,8 @@ static onceToken token([]() {
mINI::Instance()[kPSPT] = 96;
mINI::Instance()[kOpusPT] = 100;
mINI::Instance()[kGopCache] = 1;
mINI::Instance()[kRtpG711DurMs] = 100;
mINI::Instance()[kUdpRecvSocketBuffer] = 4 * 1024 * 1024;
});
} // namespace RtpProxy

View File

@ -109,6 +109,21 @@ extern const std::string kBroadcastReloadConfig;
extern const std::string kBroadcastRtpServerTimeout;
#define BroadcastRtpServerTimeoutArgs uint16_t &local_port, const string &stream_id,int &tcp_mode, bool &re_use_port, uint32_t &ssrc
// rtc transport sctp 连接状态
extern const std::string kBroadcastRtcSctpConnecting;
extern const std::string kBroadcastRtcSctpConnected;
extern const std::string kBroadcastRtcSctpFailed;
extern const std::string kBroadcastRtcSctpClosed;
#define BroadcastRtcSctpConnectArgs WebRtcTransport& sender
// rtc transport sctp 发送数据
extern const std::string kBroadcastRtcSctpSend;
#define BroadcastRtcSctpSendArgs WebRtcTransport& sender, const uint8_t *&data, size_t& len
// rtc transport sctp 接收数据
extern const std::string kBroadcastRtcSctpReceived;
#define BroadcastRtcSctpReceivedArgs WebRtcTransport& sender, uint16_t &streamId, uint32_t &ppid, const uint8_t *&msg, size_t &len
#define ReloadConfigTag ((void *)(0xFF))
#define RELOAD_KEY(arg, key) \
do { \
@ -339,6 +354,8 @@ extern const std::string kFileBufSize;
extern const std::string kFastStart;
// mp4文件是否重头循环读取
extern const std::string kFileRepeat;
// mp4录制文件是否采用fmp4格式
extern const std::string kEnableFmp4;
} // namespace Record
////////////HLS相关配置///////////
@ -349,6 +366,8 @@ extern const std::string kSegmentDuration;
extern const std::string kSegmentNum;
// 如果设置为0则不保留切片设置为1则一直保留切片
extern const std::string kSegmentKeep;
// HLS切片延迟个数大于0将生成hls_delay.m3u8文件0则不生成
extern const std::string kSegmentDelay;
// HLS切片从m3u8文件中移除后继续保留在磁盘上的个数
extern const std::string kSegmentRetain;
// HLS文件写缓存大小
@ -380,6 +399,11 @@ extern const std::string kPSPT;
extern const std::string kOpusPT;
// RtpSender相关功能是否提前开启gop缓存优化级联秒开体验默认开启
extern const std::string kGopCache;
//国标发送g711 rtp 打包时每个包的语音时长是多少默认是100 ms范围为20~180ms (gb28181-2016c.2.4规定)
//最好为20 的倍数程序自动向20的倍数取整
extern const std::string kRtpG711DurMs;
// udp recv socket buffer size
extern const std::string kUdpRecvSocketBuffer;
} // namespace RtpProxy
/**

View File

@ -14,7 +14,7 @@
using namespace toolkit;
#if defined(ENABLE_VERSION)
#include "version.h"
#include "ZLMVersion.h"
#endif
extern "C" {
@ -44,4 +44,4 @@ const char kServerName[] = "ZLMediaKit-8.0(build in " __DATE__ " " __TIME__ ")"
const char kServerName[] = "ZLMediaKit(git hash:" COMMIT_HASH "/" COMMIT_TIME ",branch:" BRANCH_NAME ",build time:" BUILD_TIME ")";
#endif
}//namespace mediakit
}//namespace mediakit

View File

@ -36,6 +36,16 @@
#define CHECK(exp, ...) ::mediakit::Assert_ThrowCpp(!(exp), #exp, __FUNCTION__, __FILE__, __LINE__, ##__VA_ARGS__)
#endif // CHECK
#ifndef CHECK_RET
#define CHECK_RET(...) \
try { \
CHECK(__VA_ARGS__); \
} catch (AssertFailedException & ex) { \
WarnL << ex.what(); \
return; \
}
#endif
#ifndef MAX
#define MAX(a, b) ((a) > (b) ? (a) : (b))
#endif // MAX

View File

@ -52,24 +52,78 @@ char HexStrToBin(const char *str) {
}
return (high << 4) | low;
}
string strCoding::UrlEncode(const string &str) {
static string UrlEncodeCommon(const string &str,const char* dont_escape){
string out;
size_t len = str.size();
for (size_t i = 0; i < len; ++i) {
char ch = str[i];
if (isalnum((uint8_t) ch)) {
if (isalnum((uint8_t) ch) || strchr(dont_escape, (uint8_t) ch) != NULL) {
out.push_back(ch);
} else {
char buf[4];
sprintf(buf, "%%%X%X", (uint8_t) ch >> 4, (uint8_t) ch & 0x0F);
snprintf(buf, 4, "%%%X%X", (uint8_t) ch >> 4, (uint8_t) ch & 0x0F);
out.append(buf);
}
}
return out;
}
// Percent-decode `str`. Characters listed in `dont_unescape`, and any malformed
// "%xx" sequence, are left in their original (escaped) form so that an illegally
// concatenated url cannot gain structural characters (e.g. '#', '?') by decoding.
// Returns the decoded string; never throws.
static string UrlDecodeCommon(const string &str,const char* dont_unescape){
    string output;
    size_t i = 0, len = str.length();
    while (i < len) {
        if (str[i] == '%') {
            if (i + 3 > len) {
                // A '%' must be followed by two more bytes to be unescaped;
                // otherwise copy the remaining tail through verbatim.
                output.append(str, i, len - i);
                break;
            }
            char ch = HexStrToBin(&(str[i + 1]));
            if (ch == -1 || strchr(dont_unescape, (unsigned char)ch) != NULL) {
                // Either the two bytes after '%' are not hex digits (decode failure),
                // or the decoded byte is one we must keep escaped because it would
                // alter the url structure: emit the 3 original bytes unchanged and
                // resume normal decoding afterwards.
                // NOTE(review): `ch == -1` assumes plain char is signed and that
                // HexStrToBin signals failure with (char)-1. On targets where char
                // is unsigned (e.g. ARM Linux) this comparison never matches, and
                // a legitimate "%FF" is indistinguishable from the sentinel on
                // signed-char targets — confirm against HexStrToBin's contract.
                output.append(str, i, 3);
            } else {
                output += ch;
            }
            i += 3;
        } else {
            output += str[i];
            ++i;
        }
    }
    return output;
}
string strCoding::UrlDecode(const string &str) {
// Percent-encode the path portion of a url (utf-8 bytes).
// Delegates to UrlEncodeCommon with the set of characters that are legal
// inside a path and therefore must stay unescaped.
string strCoding::UrlEncodePath(const string &str) {
    static const char kPathSafeChars[] = "!#&'*+:=?@/._-$,;~()";
    return UrlEncodeCommon(str, kPathSafeChars);
}
// Percent-encode a single url component (e.g. one query-string value, utf-8 bytes).
// Only the unreserved marks below are passed through unescaped; everything
// else that is not alphanumeric is escaped by UrlEncodeCommon.
string strCoding::UrlEncodeComponent(const string &str) {
    static const char kComponentSafeChars[] = "!'()*-._~";
    return UrlEncodeCommon(str, kComponentSafeChars);
}
// Percent-encode the userinfo (user name or password) portion of a url.
// Per RFC 3986 (https://datatracker.ietf.org/doc/html/rfc3986):
//  - §2.3: the unreserved marks '-', '_', '.', '~' never need escaping;
//  - §2.2/§3.2.1: the userinfo component may also carry ';', '&', '=',
//    '+', '$' and ',' unescaped.
// '@', '/' and '?' would change how the url parses, and ':' separates user
// from password, so they — and every other byte — are escaped.
std::string strCoding::UrlEncodeUserOrPass(const std::string &str) {
    static const char kUserInfoSafeChars[] = "$&+,;=-._~";
    return UrlEncodeCommon(str, kUserInfoSafeChars);
}
// Percent-decode the path portion of a url (utf-8 bytes).
// Structural/reserved characters are deliberately kept in escaped form:
// decoding them could move a byte from the path into the query or fragment
// part of the url, so UrlDecodeCommon leaves those triplets untouched.
string strCoding::UrlDecodePath(const string &str) {
    static const char kKeepEscapedChars[] = "#$&+,/:;=?@";
    return UrlDecodeCommon(str, kKeepEscapedChars);
}
std::string strCoding::UrlDecodeComponent(const std::string &str) {
string output;
size_t i = 0, len = str.length();
while (i < len) {
@ -87,6 +141,9 @@ string strCoding::UrlDecode(const string &str) {
output += ch;
}
i += 3;
} else if (str[i] == '+') {
output += ' ';
++i;
} else {
output += str[i];
++i;
@ -95,27 +152,11 @@ string strCoding::UrlDecode(const string &str) {
return output;
}
#if 0
#include "Util/onceToken.h"
static toolkit::onceToken token([]() {
auto str0 = strCoding::UrlDecode(
"rtsp%3A%2F%2Fadmin%3AJm13317934%25jm%40111.47.84.69%3A554%2FStreaming%2FChannels%2F101%3Ftransportmode%3Dunicast%26amp%3Bprofile%3DProfile_1");
auto str1 = strCoding::UrlDecode("%j1"); // 测试%后面两个字节不是16进制字符串
auto str2 = strCoding::UrlDecode("%a"); // 测试%后面字节数不够
auto str3 = strCoding::UrlDecode("%"); // 测试只有%
auto str4 = strCoding::UrlDecode("%%%"); // 测试多个%
auto str5 = strCoding::UrlDecode("%%%%40"); // 测试多个非法%后恢复正常解析
auto str6 = strCoding::UrlDecode("Jm13317934%jm"); // 测试多个非法%后恢复正常解析
cout << str0 << endl;
cout << str1 << endl;
cout << str2 << endl;
cout << str3 << endl;
cout << str4 << endl;
cout << str5 << endl;
cout << str6 << endl;
});
#endif
// Percent-decode the userinfo (user name or password) portion of a url.
// Unlike path decoding, every escape sequence is reversed here: an empty
// keep-escaped set is passed to UrlDecodeCommon, so only malformed "%xx"
// triplets survive in escaped form.
std::string strCoding::UrlDecodeUserOrPass(const std::string &str) {
    return UrlDecodeCommon(str, "");
}
///////////////////////////////windows专用///////////////////////////////////
#if defined(_WIN32)
void UnicodeToGB2312(char* pOut, wchar_t uData)

View File

@ -18,8 +18,12 @@ namespace mediakit {
class strCoding {
public:
static std::string UrlEncode(const std::string &str); //urlutf8 编码
static std::string UrlDecode(const std::string &str); //urlutf8解码
static std::string UrlEncodePath(const std::string &str); //url路径 utf8编码
static std::string UrlEncodeComponent(const std::string &str); // url参数 utf8编码
static std::string UrlDecodePath(const std::string &str); //url路径 utf8解码
static std::string UrlDecodeComponent(const std::string &str); // url参数 utf8解码
static std::string UrlEncodeUserOrPass(const std::string &str); // url中用户名与密码编码
static std::string UrlDecodeUserOrPass(const std::string &str); // url中用户名与密码解码
#if defined(_WIN32)
static std::string UTF8ToGB2312(const std::string &str);//utf_8转为gb2312
static std::string GB2312ToUTF8(const std::string &str); //gb2312 转utf_8

View File

@ -69,12 +69,16 @@ CodecId getCodecByMovId(int object_id) {
if (object_id == MOV_OBJECT_NONE) {
return CodecInvalid;
}
switch (object_id) {
#define XX(name, type, value, str, mpeg_id, mp4_id) case mp4_id : return name;
CODEC_MAP(XX)
#define XX(name, type, value, str, mpeg_id, mp4_id) { mp4_id, name },
static map<int, CodecId> s_map = { CODEC_MAP(XX) };
#undef XX
default : WarnL << "Unsupported mov: " << object_id; return CodecInvalid;
auto it = s_map.find(object_id);
if (it == s_map.end()) {
WarnL << "Unsupported mov: " << object_id;
return CodecInvalid;
}
return it->second;
}
#endif
@ -89,17 +93,20 @@ int getMpegIdByCodec(CodecId codec) {
}
CodecId getCodecByMpegId(int mpeg_id) {
if (mpeg_id == PSI_STREAM_RESERVED) {
if (mpeg_id == PSI_STREAM_RESERVED || mpeg_id == 0xBD) {
// 海康的 PS 流中会有0xBD 的包
return CodecInvalid;
}
switch (mpeg_id) {
#define XX(name, type, value, str, mpeg_id, mp4_id) case mpeg_id : return name;
CODEC_MAP(XX)
#define XX(name, type, value, str, mpeg_id, mp4_id) { mpeg_id, name },
static map<int, CodecId> s_map = { CODEC_MAP(XX) };
#undef XX
// 海康的 PS 流中会有0xBD 的包
case 0xBD: return CodecInvalid;
default : WarnL << "Unsupported mpeg: " << mpeg_id; return CodecInvalid;
auto it = s_map.find(mpeg_id);
if (it == s_map.end()) {
WarnL << "Unsupported mpeg: " << mpeg_id;
return CodecInvalid;
}
return it->second;
}
#endif

View File

@ -129,8 +129,8 @@ public:
_fps = fps;
}
int getVideoHeight() const override { return _width; }
int getVideoWidth() const override { return _height; }
int getVideoWidth() const override { return _width; }
int getVideoHeight() const override { return _height; }
float getVideoFps() const override { return _fps; }
bool ready() const override { return true; }

View File

@ -28,10 +28,6 @@
#include "HttpClient.h"
#include "Common/macros.h"
#ifndef _WIN32
#define ENABLE_MMAP
#endif
using namespace std;
using namespace toolkit;
@ -57,12 +53,25 @@ Buffer::Ptr HttpStringBody::readData(size_t size) {
}
//////////////////////////////////////////////////////////////////
#ifdef ENABLE_MMAP
static mutex s_mtx;
static unordered_map<string /*file_path*/, std::tuple<char */*ptr*/, int64_t /*size*/, weak_ptr<char> /*mmap*/ > > s_shared_mmap;
#if defined(_WIN32)
// Release the win32 file-mapping triad in reverse order of acquisition:
// mapped view first, then the mapping handle, then the file handle.
// Each argument may be null / INVALID_HANDLE_VALUE (cleanup after a partial
// construction), in which case that step is skipped.
static void mmap_close(HANDLE hfile, HANDLE hmapping, void *addr) {
    if (addr != nullptr) {
        ::UnmapViewOfFile(addr);
    }
    if (hmapping != nullptr) {
        ::CloseHandle(hmapping);
    }
    if (hfile != INVALID_HANDLE_VALUE) {
        ::CloseHandle(hfile);
    }
}
#endif
//删除mmap记录
static void delSharedMmap(const string &file_path, char *ptr) {
lock_guard<mutex> lck(s_mtx);
@ -97,24 +106,67 @@ static std::shared_ptr<char> getSharedMmap(const string &file_path, int64_t &fil
file_size = -1;
return nullptr;
}
#if defined(_WIN32)
auto fd = _fileno(fp.get());
#else
//获取文件大小
file_size = File::fileSize(fp.get());
auto fd = fileno(fp.get());
#endif
int fd = fileno(fp.get());
if (fd < 0) {
WarnL << "fileno failed:" << get_uv_errmsg(false);
return nullptr;
}
#ifndef _WIN32
auto ptr = (char *)mmap(NULL, file_size, PROT_READ, MAP_SHARED, fd, 0);
if (ptr == MAP_FAILED) {
WarnL << "mmap " << file_path << " failed:" << get_uv_errmsg(false);
return nullptr;
}
std::shared_ptr<char> ret(ptr, [file_size, fp, file_path](char *ptr) {
munmap(ptr, file_size);
delSharedMmap(file_path, ptr);
});
#else
auto hfile = ::CreateFileA(file_path.data(), GENERIC_READ, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL);
if (hfile == INVALID_HANDLE_VALUE) {
WarnL << "CreateFileA() " << file_path << " failed:";
return nullptr;
}
file_size = ::GetFileSize(hfile, NULL);
auto hmapping = ::CreateFileMapping(hfile, NULL, PAGE_READONLY, 0, 0, NULL);
if (hmapping == NULL) {
mmap_close(hfile, NULL, NULL);
WarnL << "CreateFileMapping() " << file_path << " failed:";
return nullptr;
}
auto addr_ = ::MapViewOfFile(hmapping, FILE_MAP_READ, 0, 0, 0);
if (addr_ == nullptr) {
mmap_close(hfile, hmapping, addr_);
WarnL << "MapViewOfFile() " << file_path << " failed:";
return nullptr;
}
std::shared_ptr<char> ret((char *)(addr_), [hfile, hmapping, file_path](char *addr_) {
mmap_close(hfile, hmapping, addr_);
delSharedMmap(file_path, addr_);
});
#endif
#if 0
if (file_size < 10 * 1024 * 1024 && file_path.rfind(".ts") != string::npos) {
//如果是小ts文件那么尝试先加载到内存
@ -131,14 +183,12 @@ static std::shared_ptr<char> getSharedMmap(const string &file_path, int64_t &fil
}
return ret;
}
#endif
HttpFileBody::HttpFileBody(const string &file_path, bool use_mmap) {
#ifdef ENABLE_MMAP
if (use_mmap ) {
_map_addr = getSharedMmap(file_path, _read_to);
_map_addr = getSharedMmap(file_path, _read_to);
}
#endif
if (!_map_addr && _read_to != -1) {
//mmap失败(且不是由于文件不存在导致的)或未执行mmap时才进入fread逻辑分支
_fp.reset(fopen(file_path.data(), "rb"), [](FILE *fp) {

View File

@ -66,7 +66,8 @@ void HttpClient::sendRequest(const string &url) {
_http_persistent = true;
if (_body && _body->remainSize()) {
_header.emplace("Content-Length", to_string(_body->remainSize()));
_header.emplace("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8");
GET_CONFIG(string, charSet, Http::kCharSet);
_header.emplace("Content-Type", "application/x-www-form-urlencoded; charset=" + charSet);
}
bool host_changed = (_last_host != host + ":" + to_string(port)) || (_is_https != is_https);

View File

@ -34,7 +34,7 @@ public:
for (auto &pr : *this) {
ret.append(pr.first);
ret.append("=");
ret.append(strCoding::UrlEncode(pr.second));
ret.append(strCoding::UrlEncodeComponent(pr.second));
ret.append("&");
}
if (ret.size()) {

View File

@ -228,7 +228,7 @@ static bool makeFolderMenu(const string &httpPath, const string &strFullPath, st
multimap<string/*url name*/, std::pair<string/*note name*/, string/*file path*/> > file_map;
File::scanDir(strPathPrefix, [&](const std::string &path, bool isDir) {
auto name = fileName(strPathPrefix, path);
file_map.emplace(strCoding::UrlEncode(name), std::make_pair(name, path));
file_map.emplace(strCoding::UrlEncodePath(name), std::make_pair(name, path));
return true;
});
//如果是root目录添加虚拟目录

View File

@ -65,18 +65,18 @@ void HttpRequestSplitter::input(const char *data,size_t len) {
_content_len = onRecvHeader(header_ptr, header_size);
}
if(_remain_data_size <= 0){
//没有剩余数据,清空缓存
_remain_data.clear();
return;
}
/*
*
* HttpRequestSplitter::reset()
*/
tail_ref = tail_tmp;
if(_remain_data_size <= 0){
//没有剩余数据,清空缓存
_remain_data.clear();
return;
}
if(_content_len == 0){
//尚未找到http头缓存定位到剩余数据部分
_remain_data.assign(ptr,_remain_data_size);

View File

@ -65,6 +65,7 @@ ssize_t HttpSession::onRecvHeader(const char *header, size_t len) {
_parser.parse(header, len);
CHECK(_parser.url()[0] == '/');
_origin = _parser["Origin"];
urlDecode(_parser);
auto &cmd = _parser.method();
@ -124,29 +125,18 @@ ssize_t HttpSession::onRecvHeader(const char *header, size_t len) {
}
//// body size明确指定且小于最大值的情况 ////
auto body = std::make_shared<std::string>();
// 预留一定的内存buffer防止频繁的内存拷贝
body->reserve(content_len);
_on_recv_body = [this, body, content_len, it](const char *data, size_t len) mutable {
body->append(data, len);
if (body->size() < content_len) {
// 未收满数据
return true;
}
_on_recv_body = [this, it](const char *data, size_t len) mutable {
// 收集body完毕
_parser.setContent(std::move(*body));
_parser.setContent(std::string(data, len));
(this->*(it->second))();
_parser.clear();
// 后续是header
setContentLen(0);
// _on_recv_body置空
return false;
};
// 声明后续都是bodyHttp body在本对象缓冲不通过HttpRequestSplitter保存
return -1;
// 声明body长度通过HttpRequestSplitter缓存然后一次性回调到_on_recv_body
return content_len;
}
void HttpSession::onRecvContent(const char *data, size_t len) {
@ -617,8 +607,8 @@ void HttpSession::sendResponse(int code,
headerOut.emplace("Connection", bClose ? "close" : "keep-alive");
GET_CONFIG(bool, allow_cross_domains, Http::kAllowCrossDomains);
if (allow_cross_domains) {
headerOut.emplace("Access-Control-Allow-Origin", "*");
if (allow_cross_domains && !_origin.empty()) {
headerOut.emplace("Access-Control-Allow-Origin", _origin);
headerOut.emplace("Access-Control-Allow-Credentials", "true");
}
@ -693,22 +683,10 @@ void HttpSession::sendResponse(int code,
AsyncSender::onSocketFlushed(data);
}
string HttpSession::urlDecode(const string &str) {
auto ret = strCoding::UrlDecode(str);
#ifdef _WIN32
GET_CONFIG(string, charSet, Http::kCharSet);
bool isGb2312 = !strcasecmp(charSet.data(), "gb2312");
if (isGb2312) {
ret = strCoding::UTF8ToGB2312(ret);
}
#endif // _WIN32
return ret;
}
void HttpSession::urlDecode(Parser &parser) {
parser.setUrl(urlDecode(parser.url()));
parser.setUrl(strCoding::UrlDecodePath(parser.url()));
for (auto &pr : _parser.getUrlArgs()) {
const_cast<string &>(pr.second) = urlDecode(pr.second);
const_cast<string &>(pr.second) = strCoding::UrlDecodeComponent(pr.second);
}
}

View File

@ -44,7 +44,6 @@ public:
void onRecv(const toolkit::Buffer::Ptr &) override;
void onError(const toolkit::SockException &err) override;
void onManager() override;
static std::string urlDecode(const std::string &str);
void setTimeoutSec(size_t second);
void setMaxReqSize(size_t max_req_size);
@ -136,6 +135,8 @@ private:
size_t _max_req_size = 0;
//消耗的总流量
uint64_t _total_bytes_usage = 0;
// http请求中的 Origin字段
std::string _origin;
Parser _parser;
toolkit::Ticker _ticker;
TSMediaSource::RingType::RingReader::Ptr _ts_reader;

View File

@ -23,11 +23,16 @@ namespace mediakit {
PlayerBase::Ptr PlayerBase::createPlayer(const EventPoller::Ptr &in_poller, const string &url_in) {
auto poller = in_poller ? in_poller : EventPollerPool::Instance().getPoller();
static auto releasePlayer = [poller](PlayerBase *ptr) {
poller->async([ptr]() {
onceToken token(nullptr, [&]() { delete ptr; });
ptr->teardown();
});
std::weak_ptr<EventPoller> weak_poller = poller;
static auto release_func = [weak_poller](PlayerBase *ptr) {
if (auto poller = weak_poller.lock()) {
poller->async([ptr]() {
onceToken token(nullptr, [&]() { delete ptr; });
ptr->teardown();
});
} else {
delete ptr;
}
};
string url = url_in;
string prefix = findSubString(url.data(), NULL, "://");
@ -38,29 +43,29 @@ PlayerBase::Ptr PlayerBase::createPlayer(const EventPoller::Ptr &in_poller, cons
}
if (strcasecmp("rtsps", prefix.data()) == 0) {
return PlayerBase::Ptr(new TcpClientWithSSL<RtspPlayerImp>(poller), releasePlayer);
return PlayerBase::Ptr(new TcpClientWithSSL<RtspPlayerImp>(poller), release_func);
}
if (strcasecmp("rtsp", prefix.data()) == 0) {
return PlayerBase::Ptr(new RtspPlayerImp(poller), releasePlayer);
return PlayerBase::Ptr(new RtspPlayerImp(poller), release_func);
}
if (strcasecmp("rtmps", prefix.data()) == 0) {
return PlayerBase::Ptr(new TcpClientWithSSL<RtmpPlayerImp>(poller), releasePlayer);
return PlayerBase::Ptr(new TcpClientWithSSL<RtmpPlayerImp>(poller), release_func);
}
if (strcasecmp("rtmp", prefix.data()) == 0) {
return PlayerBase::Ptr(new RtmpPlayerImp(poller), releasePlayer);
return PlayerBase::Ptr(new RtmpPlayerImp(poller), release_func);
}
if ((strcasecmp("http", prefix.data()) == 0 || strcasecmp("https", prefix.data()) == 0)) {
if (end_with(url, ".m3u8") || end_with(url_in, ".m3u8")) {
return PlayerBase::Ptr(new HlsPlayerImp(poller), releasePlayer);
return PlayerBase::Ptr(new HlsPlayerImp(poller), release_func);
}
if (end_with(url, ".ts") || end_with(url_in, ".ts")) {
return PlayerBase::Ptr(new TsPlayerImp(poller), releasePlayer);
return PlayerBase::Ptr(new TsPlayerImp(poller), release_func);
}
if (end_with(url, ".flv") || end_with(url_in, ".flv")) {
return PlayerBase::Ptr(new FlvPlayerImp(poller), releasePlayer);
return PlayerBase::Ptr(new FlvPlayerImp(poller), release_func);
}
}

View File

@ -17,31 +17,37 @@ using namespace toolkit;
namespace mediakit {
PusherBase::Ptr PusherBase::createPusher(const EventPoller::Ptr &poller,
PusherBase::Ptr PusherBase::createPusher(const EventPoller::Ptr &in_poller,
const MediaSource::Ptr &src,
const std::string & url) {
static auto releasePusher = [](PusherBase *ptr){
onceToken token(nullptr,[&](){
delete ptr;
});
ptr->teardown();
auto poller = in_poller ? in_poller : EventPollerPool::Instance().getPoller();
std::weak_ptr<EventPoller> weak_poller = poller;
static auto release_func = [weak_poller](PusherBase *ptr) {
if (auto poller = weak_poller.lock()) {
poller->async([ptr]() {
onceToken token(nullptr, [&]() { delete ptr; });
ptr->teardown();
});
} else {
delete ptr;
}
};
std::string prefix = findSubString(url.data(), NULL, "://");
if (strcasecmp("rtsps",prefix.data()) == 0) {
return PusherBase::Ptr(new TcpClientWithSSL<RtspPusherImp>(poller, std::dynamic_pointer_cast<RtspMediaSource>(src)), releasePusher);
return PusherBase::Ptr(new TcpClientWithSSL<RtspPusherImp>(poller, std::dynamic_pointer_cast<RtspMediaSource>(src)), release_func);
}
if (strcasecmp("rtsp",prefix.data()) == 0) {
return PusherBase::Ptr(new RtspPusherImp(poller, std::dynamic_pointer_cast<RtspMediaSource>(src)), releasePusher);
return PusherBase::Ptr(new RtspPusherImp(poller, std::dynamic_pointer_cast<RtspMediaSource>(src)), release_func);
}
if (strcasecmp("rtmps",prefix.data()) == 0) {
return PusherBase::Ptr(new TcpClientWithSSL<RtmpPusherImp>(poller, std::dynamic_pointer_cast<RtmpMediaSource>(src)), releasePusher);
return PusherBase::Ptr(new TcpClientWithSSL<RtmpPusherImp>(poller, std::dynamic_pointer_cast<RtmpMediaSource>(src)), release_func);
}
if (strcasecmp("rtmp",prefix.data()) == 0) {
return PusherBase::Ptr(new RtmpPusherImp(poller, std::dynamic_pointer_cast<RtmpMediaSource>(src)), releasePusher);
return PusherBase::Ptr(new RtmpPusherImp(poller, std::dynamic_pointer_cast<RtmpMediaSource>(src)), release_func);
}
throw std::invalid_argument("not supported push schema:" + url);

View File

@ -24,15 +24,40 @@ HlsMaker::HlsMaker(bool is_fmp4, float seg_duration, uint32_t seg_number, bool s
_seg_keep = seg_keep;
}
void HlsMaker::makeIndexFile(bool eof) {
void HlsMaker::makeIndexFile(bool include_delay, bool eof) {
GET_CONFIG(uint32_t, segDelay, Hls::kSegmentDelay);
GET_CONFIG(uint32_t, segRetain, Hls::kSegmentRetain);
std::deque<std::tuple<int, std::string>> temp(_seg_dur_list);
if (!include_delay && _seg_number) {
while (temp.size() > _seg_number) {
temp.pop_front();
}
}
int maxSegmentDuration = 0;
for (auto &tp : _seg_dur_list) {
for (auto &tp : temp) {
int dur = std::get<0>(tp);
if (dur > maxSegmentDuration) {
maxSegmentDuration = dur;
}
}
auto index_seq = _seg_number ? (_file_index > _seg_number ? _file_index - _seg_number : 0LL) : 0LL;
uint64_t index_seq;
if (_seg_number) {
if (include_delay) {
if (_file_index > _seg_number + segDelay) {
index_seq = _file_index - _seg_number - segDelay;
} else {
index_seq = 0LL;
}
} else {
if (_file_index > _seg_number) {
index_seq = _file_index - _seg_number;
} else {
index_seq = 0LL;
}
}
} else {
index_seq = 0LL;
}
string index_str;
index_str.reserve(2048);
@ -50,7 +75,7 @@ void HlsMaker::makeIndexFile(bool eof) {
}
stringstream ss;
for (auto &tp : _seg_dur_list) {
for (auto &tp : temp) {
ss << "#EXTINF:" << std::setprecision(3) << std::get<0>(tp) / 1000.0 << ",\n" << std::get<1>(tp) << "\n";
}
index_str += ss.str();
@ -58,7 +83,7 @@ void HlsMaker::makeIndexFile(bool eof) {
if (eof) {
index_str += "#EXT-X-ENDLIST\n";
}
onWriteHls(index_str);
onWriteHls(index_str, include_delay);
}
void HlsMaker::inputInitSegment(const char *data, size_t len) {
@ -91,12 +116,13 @@ void HlsMaker::inputData(const char *data, size_t len, uint64_t timestamp, bool
}
void HlsMaker::delOldSegment() {
GET_CONFIG(uint32_t, segDelay, Hls::kSegmentDelay);
if (_seg_number == 0) {
//如果设置为保留0个切片则认为是保存为点播
return;
}
//在hls m3u8索引文件中,我们保存的切片个数跟_seg_number相关设置一致
if (_file_index > _seg_number) {
if (_file_index > _seg_number + segDelay) {
_seg_dur_list.pop_front();
}
//如果设置为一直保存,就不删除
@ -105,8 +131,8 @@ void HlsMaker::delOldSegment() {
}
GET_CONFIG(uint32_t, segRetain, Hls::kSegmentRetain);
//但是实际保存的切片个数比m3u8所述多若干个,这样做的目的是防止播放器在切片删除前能下载完毕
if (_file_index > _seg_number + segRetain) {
onDelSegment(_file_index - _seg_number - segRetain - 1);
if (_file_index > _seg_number + segDelay + segRetain) {
onDelSegment(_file_index - _seg_number - segDelay - segRetain - 1);
}
}
@ -125,6 +151,7 @@ void HlsMaker::addNewSegment(uint64_t stamp) {
}
void HlsMaker::flushLastSegment(bool eof){
GET_CONFIG(uint32_t, segDelay, Hls::kSegmentDelay);
if (_last_file_name.empty()) {
//不存在上个切片
return;
@ -139,7 +166,11 @@ void HlsMaker::flushLastSegment(bool eof){
//先flush ts切片否则可能存在ts文件未写入完毕就被访问的情况
onFlushLastSegment(seg_dur);
//然后写m3u8文件
makeIndexFile(eof);
makeIndexFile(false, eof);
//写入切片延迟的m3u8文件
if (segDelay) {
makeIndexFile(true, eof);
}
}
bool HlsMaker::isLive() const {

View File

@ -96,7 +96,7 @@ protected:
/**
* m3u8文件回调
*/
virtual void onWriteHls(const std::string &data) = 0;
virtual void onWriteHls(const std::string &data, bool include_delay) = 0;
/**
* ts ,
@ -115,7 +115,7 @@ private:
* m3u8文件
* @param eof true代表点播
*/
void makeIndexFile(bool eof = false);
void makeIndexFile(bool include_delay, bool eof = false);
/**
* ts切片

View File

@ -21,11 +21,20 @@ using namespace toolkit;
namespace mediakit {
// Derive the "delayed" playlist path from an m3u8 file path by inserting
// "_delay" before the ".m3u8" extension,
// e.g. "live/stream.m3u8" -> "live/stream_delay.m3u8".
// A path that does not contain ".m3u8" is returned unchanged.
std::string getDelayPath(const std::string& originalPath) {
    // Use rfind so an earlier ".m3u8" occurrence (e.g. inside a directory
    // name such as "/data/.m3u8dir/") cannot truncate the real file name;
    // only the last occurrence is treated as the extension.
    std::size_t pos = originalPath.rfind(".m3u8");
    if (pos != std::string::npos) {
        return originalPath.substr(0, pos) + "_delay.m3u8";
    }
    return originalPath;
}
HlsMakerImp::HlsMakerImp(bool is_fmp4, const string &m3u8_file, const string &params, uint32_t bufSize, float seg_duration,
uint32_t seg_number, bool seg_keep) : HlsMaker(is_fmp4, seg_duration, seg_number, seg_keep) {
_poller = EventPollerPool::Instance().getPoller();
_path_prefix = m3u8_file.substr(0, m3u8_file.rfind('/'));
_path_hls = m3u8_file;
_path_hls_delay = getDelayPath(m3u8_file);
_params = params;
_buf_size = bufSize;
_file_buf.reset(new char[bufSize], [](char *ptr) { delete[] ptr; });
@ -62,7 +71,8 @@ void HlsMakerImp::clearCache(bool immediately, bool eof) {
{
std::list<std::string> lst;
lst.emplace_back(_path_hls);
if (!_path_init.empty()) {
lst.emplace_back(_path_hls_delay);
if (!_path_init.empty() && eof) {
lst.emplace_back(_path_init);
}
for (auto &pr : _segment_file_paths) {
@ -146,16 +156,17 @@ void HlsMakerImp::onWriteSegment(const char *data, size_t len) {
}
}
void HlsMakerImp::onWriteHls(const std::string &data) {
auto hls = makeFile(_path_hls);
void HlsMakerImp::onWriteHls(const std::string &data, bool include_delay) {
auto path = include_delay ? _path_hls_delay : _path_hls;
auto hls = makeFile(path);
if (hls) {
fwrite(data.data(), data.size(), 1, hls.get());
hls.reset();
if (_media_src) {
if (_media_src && !include_delay) {
_media_src->setIndexFile(data);
}
} else {
WarnL << "Create hls file failed," << _path_hls << " " << get_uv_errmsg();
WarnL << "Create hls file failed," << path << " " << get_uv_errmsg();
}
}
@ -184,10 +195,8 @@ std::shared_ptr<FILE> HlsMakerImp::makeFile(const string &file, bool setbuf) {
return ret;
}
void HlsMakerImp::setMediaSource(const string &vhost, const string &app, const string &stream_id) {
_info.app = app;
_info.stream = stream_id;
_info.vhost = vhost;
void HlsMakerImp::setMediaSource(const MediaTuple& tuple) {
static_cast<MediaTuple &>(_info) = tuple;
_media_src = std::make_shared<HlsMediaSource>(isFmp4() ? HLS_FMP4_SCHEMA : HLS_SCHEMA, _info);
}

View File

@ -27,11 +27,8 @@ public:
/**
*
* @param vhost
* @param app
* @param stream_id id
*/
void setMediaSource(const std::string &vhost, const std::string &app, const std::string &stream_id);
void setMediaSource(const MediaTuple& tuple);
/**
* MediaSource
@ -49,7 +46,7 @@ protected:
void onDelSegment(uint64_t index) override;
void onWriteInitSegment(const char *data, size_t len) override;
void onWriteSegment(const char *data, size_t len) override;
void onWriteHls(const std::string &data) override;
void onWriteHls(const std::string &data, bool include_delay) override;
void onFlushLastSegment(uint64_t duration_ms) override;
private:
@ -60,6 +57,7 @@ private:
int _buf_size;
std::string _params;
std::string _path_hls;
std::string _path_hls_delay;
std::string _path_init;
std::string _path_prefix;
RecordInfo _info;

View File

@ -34,7 +34,7 @@ public:
}
void setMediaSource(const MediaTuple& tuple) {
_hls->setMediaSource(tuple.vhost, tuple.app, tuple.stream);
_hls->setMediaSource(tuple);
}
void setListener(const std::weak_ptr<MediaSourceEvent> &listener) {

View File

@ -31,7 +31,8 @@ void MP4Muxer::openMP4(const string &file) {
MP4FileIO::Writer MP4Muxer::createWriter() {
GET_CONFIG(bool, mp4FastStart, Record::kFastStart);
return _mp4_file->createWriter(mp4FastStart ? MOV_FLAG_FASTSTART : 0, false);
GET_CONFIG(bool, recordEnableFmp4, Record::kEnableFmp4);
return _mp4_file->createWriter(mp4FastStart ? MOV_FLAG_FASTSTART : 0, recordEnableFmp4);
}
void MP4Muxer::closeMP4() {

View File

@ -38,7 +38,7 @@ MP4Reader::MP4Reader(const std::string &vhost, const std::string &app, const std
void MP4Reader::setup(const std::string &vhost, const std::string &app, const std::string &stream_id, const std::string &file_path, const ProtocolOption &option, toolkit::EventPoller::Ptr poller) {
//读写文件建议放在后台线程
auto tuple = MediaTuple{vhost, app, stream_id};
auto tuple = MediaTuple{vhost, app, stream_id, ""};
_poller = poller ? std::move(poller) : WorkThreadPool::Instance().getPoller();
_file_path = file_path;
if (_file_path.empty()) {

View File

@ -22,12 +22,10 @@ using namespace toolkit;
namespace mediakit {
MP4Recorder::MP4Recorder(const string &path, const string &vhost, const string &app, const string &stream_id, size_t max_second) {
MP4Recorder::MP4Recorder(const MediaTuple &tuple, const string &path, size_t max_second) {
_folder_path = path;
/////record 业务逻辑//////
_info.app = app;
_info.stream = stream_id;
_info.vhost = vhost;
static_cast<MediaTuple &>(_info) = tuple;
_info.folder = path;
GET_CONFIG(uint32_t, s_max_second, Protocol::kMP4MaxSecond);
_max_second = max_second ? max_second : s_max_second;
@ -117,11 +115,13 @@ bool MP4Recorder::inputFrame(const Frame::Ptr &frame) {
if (!(_have_video && frame->getTrackType() == TrackAudio)) {
//如果有视频且输入的是音频,那么应该忽略切片逻辑
if (_last_dts == 0 || _last_dts > frame->dts()) {
//极少情况下dts时间戳可能回退
_last_dts = frame->dts();
//b帧情况下dts时间戳可能回退
_last_dts = MAX(frame->dts(), _last_dts);
}
auto duration = 5u; // 默认至少一帧5ms
if (frame->dts() > 0 && frame->dts() > _last_dts) {
duration = MAX(duration, frame->dts() - _last_dts);
}
auto duration = frame->dts() - _last_dts;
if (!_muxer || ((duration > _max_second * 1000) && (!_have_video || (_have_video && frame->keyFrame())))) {
//成立条件
// 1、_muxer为空

View File

@ -26,7 +26,7 @@ class MP4Recorder final : public MediaSinkInterface {
public:
using Ptr = std::shared_ptr<MP4Recorder>;
MP4Recorder(const std::string &path, const std::string &vhost, const std::string &app, const std::string &stream_id, size_t max_second);
MP4Recorder(const MediaTuple &tuple, const std::string &path, size_t max_second);
~MP4Recorder() override;
/**

View File

@ -68,8 +68,7 @@ string Recorder::getRecordPath(Recorder::type type, const MediaTuple& tuple, con
}
return File::absolutePath(m3u8FilePath, hlsPath);
}
default:
return "";
default: return "";
}
}
@ -85,13 +84,12 @@ std::shared_ptr<MediaSinkInterface> Recorder::createRecorder(type type, const Me
#else
throw std::invalid_argument("hls相关功能未打开请开启ENABLE_HLS宏后编译再测试");
#endif
}
case Recorder::type_mp4: {
#if defined(ENABLE_MP4)
auto path = Recorder::getRecordPath(type, tuple, option.mp4_save_path);
return std::make_shared<MP4Recorder>(path, tuple.vhost, tuple.app, tuple.stream, option.mp4_max_second);
return std::make_shared<MP4Recorder>(tuple, path, option.mp4_max_second);
#else
throw std::invalid_argument("mp4相关功能未打开请开启ENABLE_MP4宏后编译再测试");
#endif

View File

@ -22,6 +22,7 @@ struct MediaTuple {
std::string vhost;
std::string app;
std::string stream;
std::string params;
std::string shortUrl() const {
return vhost + '/' + app + '/' + stream;
}

View File

@ -210,7 +210,7 @@ string FCI_NACK::dumpString() const {
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
#pragma pack(push, 1)
class RunLengthChunk {
public:
static size_t constexpr kSize = 2;
@ -241,6 +241,7 @@ public:
// 打印本对象
string dumpString() const;
};
#pragma pack(pop)
RunLengthChunk::RunLengthChunk(SymbolStatus status, uint16_t run_length) {
type = 0;
@ -261,7 +262,7 @@ string RunLengthChunk::dumpString() const {
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
#pragma pack(push, 1)
class StatusVecChunk {
public:
static size_t constexpr kSize = 2;
@ -292,6 +293,7 @@ public:
// 打印本对象
string dumpString() const;
};
#pragma pack(pop)
StatusVecChunk::StatusVecChunk(bool symbol_bit, const vector<SymbolStatus> &status) {
CHECK(status.size() << symbol_bit <= 14);

View File

@ -14,6 +14,7 @@
#include "Rtcp.h"
namespace mediakit {
#pragma pack(push, 1)
/////////////////////////////////////////// PSFB ////////////////////////////////////////////////////
@ -375,6 +376,6 @@ private:
// feedback packet count,反馈包号,本包是第几个transport-cc包每次加1 |
uint8_t fb_pkt_count;
};
#pragma pack(pop)
} // namespace mediakit
#endif // ZLMEDIAKIT_RTCPFCI_H

View File

@ -29,7 +29,13 @@ public:
getRtmpRing()->setDelegate(_media_src);
}
~RtmpMediaSourceMuxer() override { RtmpMuxer::flush(); }
~RtmpMediaSourceMuxer() override {
try {
RtmpMuxer::flush();
} catch (std::exception &ex) {
WarnL << ex.what();
}
}
void setListener(const std::weak_ptr<MediaSourceEvent> &listener){
setDelegate(listener);

View File

@ -19,9 +19,17 @@ using namespace toolkit;
namespace mediakit{
PSEncoderImp::PSEncoderImp(uint32_t ssrc, uint8_t payload_type) : MpegMuxer(true) {
GET_CONFIG(uint32_t,video_mtu,Rtp::kVideoMtuSize);
PSEncoderImp::PSEncoderImp(uint32_t ssrc, uint8_t payload_type, bool ps_or_ts) : MpegMuxer(ps_or_ts) {
GET_CONFIG(uint32_t, s_video_mtu, Rtp::kVideoMtuSize);
_rtp_encoder = std::make_shared<CommonRtpEncoder>();
auto video_mtu = s_video_mtu;
if (!ps_or_ts) {
// 确保ts rtp负载部分长度是188的倍数
video_mtu = RtpPacket::kRtpHeaderSize + (s_video_mtu - (s_video_mtu % 188));
if (video_mtu > s_video_mtu) {
video_mtu -= 188;
}
}
_rtp_encoder->setRtpInfo(ssrc, video_mtu, 90000, payload_type);
auto ring = std::make_shared<RtpRing::RingType>();
ring->setDelegate(std::make_shared<RingDelegateHelper>([this](RtpPacket::Ptr rtp, bool is_key) { onRTP(std::move(rtp), is_key); }));

View File

@ -16,11 +16,19 @@
#include "Record/MPEG.h"
#include "Common/MediaSink.h"
namespace mediakit{
namespace mediakit {
class CommonRtpEncoder;
class PSEncoderImp : public MpegMuxer{
class PSEncoderImp : public MpegMuxer {
public:
PSEncoderImp(uint32_t ssrc, uint8_t payload_type = 96);
/**
* psh或ts rtp编码器
* @param ssrc rtp的ssrc
* @param payload_type rtp的pt
* @param ps_or_ts true: ps, false: ts
*/
PSEncoderImp(uint32_t ssrc, uint8_t payload_type = 96, bool ps_or_ts = true);
~PSEncoderImp() override;
protected:

View File

@ -34,6 +34,12 @@ bool RawEncoderImp::addTrack(const Track::Ptr &track) {
auto ring = std::make_shared<RtpRing::RingType>();
ring->setDelegate(std::make_shared<RingDelegateHelper>([this](RtpPacket::Ptr rtp, bool is_key) { onRTP(std::move(rtp), true); }));
_rtp_encoder->setRtpRing(std::move(ring));
if (track->getCodecId() == CodecG711A || track->getCodecId() == CodecG711U) {
GET_CONFIG(uint32_t, dur_ms, RtpProxy::kRtpG711DurMs);
Any param;
param.set<uint32_t>(dur_ms);
_rtp_encoder->setOpt(RtpCodec::RTP_ENCODER_PKT_DUR_MS, param);
}
return true;
}

View File

@ -40,7 +40,9 @@ private:
class RtpCachePS : public RtpCache, public PSEncoderImp {
public:
RtpCachePS(onFlushed cb, uint32_t ssrc, uint8_t payload_type = 96) : RtpCache(std::move(cb)), PSEncoderImp(ssrc, payload_type) {};
RtpCachePS(onFlushed cb, uint32_t ssrc, uint8_t payload_type = 96, bool ps_or_ts = true) :
RtpCache(std::move(cb)), PSEncoderImp(ssrc, ps_or_ts ? payload_type : Rtsp::PT_MP2T, ps_or_ts) {};
void flush() override;
protected:
@ -56,6 +58,7 @@ protected:
void onRTP(toolkit::Buffer::Ptr rtp, bool is_key = false) override;
};
}//namespace mediakit
} //namespace mediakit
#endif//ENABLE_RTPPROXY
#endif //ZLMEDIAKIT_RTPCACHE_H

View File

@ -11,6 +11,7 @@
#if defined(ENABLE_RTPPROXY)
#include "GB28181Process.h"
#include "RtpProcess.h"
#include "RtpSelector.h"
#include "Http/HttpTSPlayer.h"
#include "Util/File.h"
#include "Common/config.h"
@ -198,8 +199,8 @@ void RtpProcess::setStopCheckRtp(bool is_check){
}
}
void RtpProcess::setOnlyAudio(bool only_audio){
_only_audio = only_audio;
void RtpProcess::setOnlyTrack(OnlyTrack only_track) {
_only_track = only_track;
}
void RtpProcess::onDetach() {
@ -255,8 +256,13 @@ void RtpProcess::emitOnPublish() {
}
if (err.empty()) {
strong_self->_muxer = std::make_shared<MultiMediaSourceMuxer>(strong_self->_media_info, 0.0f, option);
if (strong_self->_only_audio) {
strong_self->_muxer->setOnlyAudio();
if (!option.stream_replace.empty()) {
RtpSelector::Instance().addStreamReplace(strong_self->_media_info.stream, option.stream_replace);
}
switch (strong_self->_only_track) {
case kOnlyAudio: strong_self->_muxer->setOnlyAudio(); break;
case kOnlyVideo: strong_self->_muxer->enableAudio(false); break;
default: break;
}
strong_self->_muxer->setMediaListener(strong_self);
strong_self->doCachedFunc();

View File

@ -24,6 +24,7 @@ public:
friend class RtpProcessHelper;
RtpProcess(const std::string &stream_id);
~RtpProcess();
enum OnlyTrack { kAll = 0, kOnlyAudio = 1, kOnlyVideo = 2 };
/**
* rtp
@ -58,10 +59,10 @@ public:
void setStopCheckRtp(bool is_check=false);
/**
* track
* track/
* inputRtp前调用此方法
*/
void setOnlyAudio(bool only_audio);
void setOnlyTrack(OnlyTrack only_track);
/**
* flush输出缓存
@ -93,7 +94,7 @@ private:
void doCachedFunc();
private:
bool _only_audio = false;
OnlyTrack _only_track = kAll;
std::string _auth_err;
uint64_t _dts = 0;
uint64_t _total_bytes = 0;

View File

@ -23,6 +23,7 @@ INSTANCE_IMP(RtpSelector);
void RtpSelector::clear(){
lock_guard<decltype(_mtx_map)> lck(_mtx_map);
_map_rtp_process.clear();
_map_stream_replace.clear();
}
bool RtpSelector::getSSRC(const char *data, size_t data_len, uint32_t &ssrc){
@ -36,17 +37,23 @@ bool RtpSelector::getSSRC(const char *data, size_t data_len, uint32_t &ssrc){
RtpProcess::Ptr RtpSelector::getProcess(const string &stream_id,bool makeNew) {
lock_guard<decltype(_mtx_map)> lck(_mtx_map);
auto it = _map_rtp_process.find(stream_id);
string stream_id_origin = stream_id;
auto it_replace = _map_stream_replace.find(stream_id);
if (it_replace != _map_stream_replace.end()) {
stream_id_origin = it_replace->second;
}
auto it = _map_rtp_process.find(stream_id_origin);
if (it == _map_rtp_process.end() && !makeNew) {
return nullptr;
}
if (it != _map_rtp_process.end() && makeNew) {
//已经被其他线程持有了,不得再被持有,否则会存在线程安全的问题
throw ProcessExisted(StrPrinter << "RtpProcess(" << stream_id << ") already existed");
throw ProcessExisted(StrPrinter << "RtpProcess(" << stream_id_origin << ") already existed");
}
RtpProcessHelper::Ptr &ref = _map_rtp_process[stream_id];
RtpProcessHelper::Ptr &ref = _map_rtp_process[stream_id_origin];
if (!ref) {
ref = std::make_shared<RtpProcessHelper>(stream_id, shared_from_this());
ref = std::make_shared<RtpProcessHelper>(stream_id_origin, shared_from_this());
ref->attachEvent();
createTimer();
}
@ -81,10 +88,25 @@ void RtpSelector::delProcess(const string &stream_id,const RtpProcess *ptr) {
}
process = it->second->getProcess();
_map_rtp_process.erase(it);
delStreamReplace(stream_id);
}
process->onDetach();
}
void RtpSelector::addStreamReplace(const string &stream_id, const std::string &stream_replace) {
lock_guard<decltype(_mtx_map)> lck(_mtx_map);
_map_stream_replace[stream_replace] = stream_id;
}
void RtpSelector::delStreamReplace(const string &stream_id) {
for (auto it = _map_stream_replace.begin(); it != _map_stream_replace.end(); ++it) {
if (it->second == stream_id) {
_map_stream_replace.erase(it);
break;
}
}
}
void RtpSelector::onManager() {
List<RtpProcess::Ptr> clear_list;
{
@ -96,6 +118,7 @@ void RtpSelector::onManager() {
}
WarnL << "RtpProcess timeout:" << it->first;
clear_list.emplace_back(it->second->getProcess());
delStreamReplace(it->first);
it = _map_rtp_process.erase(it);
}
}

View File

@ -70,14 +70,18 @@ public:
*/
void delProcess(const std::string &stream_id, const RtpProcess *ptr);
void addStreamReplace(const std::string &stream_id, const std::string &stream_replace);
private:
void onManager();
void createTimer();
void delStreamReplace(const std::string &stream_id);
private:
toolkit::Timer::Ptr _timer;
std::recursive_mutex _mtx_map;
std::unordered_map<std::string,RtpProcessHelper::Ptr> _map_rtp_process;
std::unordered_map<std::string,std::string> _map_stream_replace;
};
}//namespace mediakit

View File

@ -40,10 +40,11 @@ void RtpSender::startSend(const MediaSourceEvent::SendRtpArgs &args, const funct
if (!_interface) {
//重连时不重新创建对象
auto lam = [this](std::shared_ptr<List<Buffer::Ptr>> list) { onFlushRtpList(std::move(list)); };
if (args.use_ps) {
_interface = std::make_shared<RtpCachePS>(lam, atoi(args.ssrc.data()), args.pt);
} else {
_interface = std::make_shared<RtpCacheRaw>(lam, atoi(args.ssrc.data()), args.pt, args.only_audio);
switch (args.type) {
case MediaSourceEvent::SendRtpArgs::kRtpPS: _interface = std::make_shared<RtpCachePS>(lam, atoi(args.ssrc.data()), args.pt, true); break;
case MediaSourceEvent::SendRtpArgs::kRtpTS: _interface = std::make_shared<RtpCachePS>(lam, atoi(args.ssrc.data()), args.pt, false); break;
case MediaSourceEvent::SendRtpArgs::kRtpRAW: _interface = std::make_shared<RtpCacheRaw>(lam, atoi(args.ssrc.data()), args.pt, args.only_audio); break;
default: CHECK(0, "invalid rtp type:" + to_string(args.type)); break;
}
}

View File

@ -42,12 +42,12 @@ public:
}
}
void setRtpServerInfo(uint16_t local_port,RtpServer::TcpMode mode,bool re_use_port,uint32_t ssrc, bool only_audio) {
void setRtpServerInfo(uint16_t local_port, RtpServer::TcpMode mode, bool re_use_port, uint32_t ssrc, int only_track) {
_local_port = local_port;
_tcp_mode = mode;
_re_use_port = re_use_port;
_ssrc = ssrc;
_only_audio = only_audio;
_only_track = only_track;
}
void setOnDetach(function<void()> cb) {
@ -61,7 +61,7 @@ public:
void onRecvRtp(const Socket::Ptr &sock, const Buffer::Ptr &buf, struct sockaddr *addr) {
if (!_process) {
_process = RtpSelector::Instance().getProcess(_stream_id, true);
_process->setOnlyAudio(_only_audio);
_process->setOnlyTrack((RtpProcess::OnlyTrack)_only_track);
_process->setOnDetach(std::move(_on_detach));
cancelDelayTask();
}
@ -142,7 +142,7 @@ private:
private:
bool _re_use_port = false;
bool _only_audio = false;
int _only_track = 0;
uint16_t _local_port = 0;
uint32_t _ssrc = 0;
RtpServer::TcpMode _tcp_mode = RtpServer::NONE;
@ -156,7 +156,7 @@ private:
EventPoller::DelayTask::Ptr _delay_task;
};
void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_mode, const char *local_ip, bool re_use_port, uint32_t ssrc, bool only_audio, bool multiplex) {
void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_mode, const char *local_ip, bool re_use_port, uint32_t ssrc, int only_track, bool multiplex) {
//创建udp服务器
Socket::Ptr rtp_socket = Socket::createSocket(nullptr, true);
Socket::Ptr rtcp_socket = Socket::createSocket(nullptr, true);
@ -174,7 +174,8 @@ void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_
}
//设置udp socket读缓存
SockUtil::setRecvBuf(rtp_socket->rawFD(), 4 * 1024 * 1024);
GET_CONFIG(int, udpRecvSocketBuffer, RtpProxy::kUdpRecvSocketBuffer);
SockUtil::setRecvBuf(rtp_socket->rawFD(), udpRecvSocketBuffer);
TcpServer::Ptr tcp_server;
_tcp_mode = tcp_mode;
@ -183,7 +184,7 @@ void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_
tcp_server = std::make_shared<TcpServer>(rtp_socket->getPoller());
(*tcp_server)[RtpSession::kStreamID] = stream_id;
(*tcp_server)[RtpSession::kSSRC] = ssrc;
(*tcp_server)[RtpSession::kOnlyAudio] = only_audio;
(*tcp_server)[RtpSession::kOnlyTrack] = only_track;
if (tcp_mode == PASSIVE) {
tcp_server->start<RtpSession>(local_port, local_ip);
} else if (stream_id.empty()) {
@ -200,7 +201,7 @@ void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_
//指定了流id那么一个端口一个流(不管是否包含多个ssrc的多个流绑定rtp源后会筛选掉ip端口不匹配的流)
helper = std::make_shared<RtcpHelper>(std::move(rtcp_socket), stream_id);
helper->startRtcp();
helper->setRtpServerInfo(local_port, tcp_mode, re_use_port, ssrc, only_audio);
helper->setRtpServerInfo(local_port, tcp_mode, re_use_port, ssrc, only_track);
bool bind_peer_addr = false;
auto ssrc_ptr = std::make_shared<uint32_t>(ssrc);
_ssrc = ssrc_ptr;
@ -222,7 +223,8 @@ void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_
} else {
//单端口多线程接收多个流根据ssrc区分流
udp_server = std::make_shared<UdpServer>(rtp_socket->getPoller());
(*udp_server)[RtpSession::kOnlyAudio] = only_audio;
(*udp_server)[RtpSession::kOnlyTrack] = only_track;
(*udp_server)[RtpSession::kUdpRecvBuffer] = udpRecvSocketBuffer;
udp_server->start<RtpSession>(local_port, local_ip);
rtp_socket = nullptr;
}

View File

@ -44,7 +44,7 @@ public:
* @param multiplex
*/
void start(uint16_t local_port, const std::string &stream_id = "", TcpMode tcp_mode = PASSIVE,
const char *local_ip = "::", bool re_use_port = true, uint32_t ssrc = 0, bool only_audio = false, bool multiplex = false);
const char *local_ip = "::", bool re_use_port = true, uint32_t ssrc = 0, int only_track = 0, bool multiplex = false);
/**
* tcp服务(tcp主动模式)
@ -81,7 +81,7 @@ protected:
std::shared_ptr<RtcpHelper> _rtcp_helper;
std::function<void()> _on_cleanup;
bool _only_audio = false;
int _only_track = 0;
//用于tcp主动模式
TcpMode _tcp_mode = NONE;
};

View File

@ -23,7 +23,8 @@ namespace mediakit{
const string RtpSession::kStreamID = "stream_id";
const string RtpSession::kSSRC = "ssrc";
const string RtpSession::kOnlyAudio = "only_audio";
const string RtpSession::kOnlyTrack = "only_track";
const string RtpSession::kUdpRecvBuffer = "udp_recv_socket_buffer";
void RtpSession::attachServer(const Server &server) {
setParams(const_cast<Server &>(server));
@ -32,7 +33,13 @@ void RtpSession::attachServer(const Server &server) {
void RtpSession::setParams(mINI &ini) {
_stream_id = ini[kStreamID];
_ssrc = ini[kSSRC];
_only_audio = ini[kOnlyAudio];
_only_track = ini[kOnlyTrack];
int udp_socket_buffer = ini[kUdpRecvBuffer];
if (_is_udp) {
// 设置udp socket读缓存
SockUtil::setRecvBuf(getSock()->rawFD(),
(udp_socket_buffer > 0) ? udp_socket_buffer : (4 * 1024 * 1024));
}
}
RtpSession::RtpSession(const Socket::Ptr &sock)
@ -40,10 +47,6 @@ RtpSession::RtpSession(const Socket::Ptr &sock)
socklen_t addr_len = sizeof(_addr);
getpeername(sock->rawFD(), (struct sockaddr *)&_addr, &addr_len);
_is_udp = sock->sockType() == SockNum::Sock_UDP;
if (_is_udp) {
// 设置udp socket读缓存
SockUtil::setRecvBuf(getSock()->rawFD(), 4 * 1024 * 1024);
}
}
RtpSession::~RtpSession() = default;
@ -60,6 +63,7 @@ void RtpSession::onError(const SockException &err) {
WarnP(this) << _stream_id << " " << err;
if (_process) {
RtpSelector::Instance().delProcess(_stream_id, _process.get());
_process = nullptr;
}
}
@ -121,7 +125,7 @@ void RtpSession::onRtpPacket(const char *data, size_t len) {
_delay_close = true;
return;
}
_process->setOnlyAudio(_only_audio);
_process->setOnlyTrack((RtpProcess::OnlyTrack)_only_track);
_process->setDelegate(static_pointer_cast<RtpSession>(shared_from_this()));
}
try {
@ -272,4 +276,4 @@ const char *RtpSession::searchByPsHeaderFlag(const char *data, size_t len) {
}
}//namespace mediakit
#endif//defined(ENABLE_RTPPROXY)
#endif//defined(ENABLE_RTPPROXY)

View File

@ -24,7 +24,8 @@ class RtpSession : public toolkit::Session, public RtpSplitter, public MediaSour
public:
static const std::string kStreamID;
static const std::string kSSRC;
static const std::string kOnlyAudio;
static const std::string kOnlyTrack;
static const std::string kUdpRecvBuffer;
RtpSession(const toolkit::Socket::Ptr &sock);
~RtpSession() override;
@ -51,7 +52,7 @@ private:
bool _is_udp = false;
bool _search_rtp = false;
bool _search_rtp_finished = false;
bool _only_audio = false;
int _only_track = 0;
uint32_t _ssrc = 0;
toolkit::Ticker _ticker;
std::string _stream_id;

View File

@ -93,6 +93,17 @@ public:
RtpInfo &getRtpInfo() { return *_rtp_info; }
enum {
RTP_ENCODER_PKT_DUR_MS = 1 // 主要应用于g711 rtp 打包器每个包的时间长度option_value 为int*, option_len 为4
};
/**
* @brief rtp打包器与解包器的相关参数g711 rtp 使setsockopt
*
* @param opt
* @param param
*/
virtual void setOpt(int opt, const toolkit::Any &param) {};
private:
std::unique_ptr<RtpInfo> _rtp_info;
};

View File

@ -352,12 +352,20 @@ public:
}
void makeSockPair(std::pair<Socket::Ptr, Socket::Ptr> &pair, const string &local_ip, bool re_use_port, bool is_udp) {
auto &sock0 = pair.first;
auto &sock1 = pair.second;
auto sock_pair = getPortPair();
if (!sock_pair) {
throw runtime_error("none reserved port in pool");
}
makeSockPair_l(sock_pair, pair, local_ip, re_use_port, is_udp);
// 确保udp和tcp模式都能打开
auto new_pair = std::make_pair(Socket::createSocket(), Socket::createSocket());
makeSockPair_l(sock_pair, new_pair, local_ip, re_use_port, !is_udp);
}
void makeSockPair_l(const std::shared_ptr<uint16_t> &sock_pair, std::pair<Socket::Ptr, Socket::Ptr> &pair, const string &local_ip, bool re_use_port, bool is_udp) {
auto &sock0 = pair.first;
auto &sock1 = pair.second;
if (is_udp) {
if (!sock0->bindUdpSock(2 * *sock_pair, local_ip.data(), re_use_port)) {
// 分配端口失败

View File

@ -210,7 +210,8 @@ void RtspPlayer::handleResDESCRIBE(const Parser &parser) {
if (play_track != TrackInvalid) {
auto track = sdpParser.getTrack(play_track);
_sdp_track.emplace_back(track);
sdp = track->toString();
auto title_track = sdpParser.getTrack(TrackTitle);
sdp = (title_track ? title_track->toString() : "") + track->toString();
} else {
_sdp_track = sdpParser.getAvailableTrack();
sdp = sdpParser.toString();

View File

@ -16,41 +16,17 @@ SrtSession::SrtSession(const Socket::Ptr &sock)
// TraceL<<"after addr len "<<addr_len<<" family "<<_peer_addr.ss_family;
}
EventPoller::Ptr SrtSession::queryPoller(const Buffer::Ptr &buffer) {
uint8_t *data = (uint8_t *)buffer->data();
size_t size = buffer->size();
if (DataPacket::isDataPacket(data, size)) {
uint32_t socket_id = DataPacket::getSocketID(data, size);
auto trans = SrtTransportManager::Instance().getItem(std::to_string(socket_id));
return trans ? trans->getPoller() : nullptr;
}
if (HandshakePacket::isHandshakePacket(data, size)) {
auto type = HandshakePacket::getHandshakeType(data, size);
if (type == HandshakePacket::HS_TYPE_INDUCTION) {
// 握手第一阶段
return nullptr;
} else if (type == HandshakePacket::HS_TYPE_CONCLUSION) {
// 握手第二阶段
uint32_t sync_cookie = HandshakePacket::getSynCookie(data, size);
auto trans = SrtTransportManager::Instance().getHandshakeItem(std::to_string(sync_cookie));
return trans ? trans->getPoller() : nullptr;
} else {
WarnL << " not reach there";
}
} else {
uint32_t socket_id = ControlPacket::getSocketID(data, size);
auto trans = SrtTransportManager::Instance().getItem(std::to_string(socket_id));
return trans ? trans->getPoller() : nullptr;
}
return nullptr;
}
void SrtSession::attachServer(const toolkit::Server &server) {
SockUtil::setRecvBuf(getSock()->rawFD(), 1024 * 1024);
}
extern SrtTransport::Ptr querySrtTransport(uint8_t *data, size_t size, const EventPoller::Ptr& poller);
EventPoller::Ptr SrtSession::queryPoller(const Buffer::Ptr &buffer) {
auto transport = querySrtTransport((uint8_t *)buffer->data(), buffer->size(), nullptr);
return transport ? transport->getPoller() : nullptr;
}
void SrtSession::onRecv(const Buffer::Ptr &buffer) {
uint8_t *data = (uint8_t *)buffer->data();
size_t size = buffer->size();
@ -58,45 +34,7 @@ void SrtSession::onRecv(const Buffer::Ptr &buffer) {
if (_find_transport) {
//只允许寻找一次transport
_find_transport = false;
if (DataPacket::isDataPacket(data, size)) {
uint32_t socket_id = DataPacket::getSocketID(data, size);
auto trans = SrtTransportManager::Instance().getItem(std::to_string(socket_id));
if (trans) {
_transport = std::move(trans);
} else {
WarnL << " data packet not find transport ";
}
}
if (HandshakePacket::isHandshakePacket(data, size)) {
auto type = HandshakePacket::getHandshakeType(data, size);
if (type == HandshakePacket::HS_TYPE_INDUCTION) {
// 握手第一阶段
_transport = std::make_shared<SrtTransportImp>(getPoller());
} else if (type == HandshakePacket::HS_TYPE_CONCLUSION) {
// 握手第二阶段
uint32_t sync_cookie = HandshakePacket::getSynCookie(data, size);
auto trans = SrtTransportManager::Instance().getHandshakeItem(std::to_string(sync_cookie));
if (trans) {
_transport = std::move(trans);
} else {
WarnL << " hanshake packet not find transport ";
}
} else {
WarnL << " not reach there";
}
} else {
uint32_t socket_id = ControlPacket::getSocketID(data, size);
auto trans = SrtTransportManager::Instance().getItem(std::to_string(socket_id));
if (trans) {
_transport = std::move(trans);
} else {
WarnL << " not find transport";
}
}
_transport = querySrtTransport(data, size, getPoller());
if (_transport) {
_transport->setSession(static_pointer_cast<Session>(shared_from_this()));
}

View File

@ -61,7 +61,7 @@ void SrtTransport::switchToOtherTransport(uint8_t *buf, int len, uint32_t socket
BufferRaw::Ptr tmp = BufferRaw::create();
struct sockaddr_storage tmp_addr = *addr;
tmp->assign((char *)buf, len);
auto trans = SrtTransportManager::Instance().getItem(std::to_string(socketid));
auto trans = SrtTransportManager::Instance().getItem(socketid);
if (trans) {
trans->getPoller()->async([tmp, tmp_addr, trans] {
trans->inputSockData((uint8_t *)tmp->data(), tmp->size(), (struct sockaddr_storage *)&tmp_addr);
@ -700,30 +700,30 @@ void SrtTransport::sendPacket(Buffer::Ptr pkt, bool flush) {
}
}
std::string SrtTransport::getIdentifier() {
std::string SrtTransport::getIdentifier() const {
return _selected_session ? _selected_session->getIdentifier() : "";
}
void SrtTransport::registerSelfHandshake() {
SrtTransportManager::Instance().addHandshakeItem(std::to_string(_sync_cookie), shared_from_this());
SrtTransportManager::Instance().addHandshakeItem(_sync_cookie, shared_from_this());
}
void SrtTransport::unregisterSelfHandshake() {
if (_sync_cookie == 0) {
return;
}
SrtTransportManager::Instance().removeHandshakeItem(std::to_string(_sync_cookie));
SrtTransportManager::Instance().removeHandshakeItem(_sync_cookie);
}
void SrtTransport::registerSelf() {
if (_socket_id == 0) {
return;
}
SrtTransportManager::Instance().addItem(std::to_string(_socket_id), shared_from_this());
SrtTransportManager::Instance().addItem(_socket_id, shared_from_this());
}
void SrtTransport::unregisterSelf() {
SrtTransportManager::Instance().removeItem(std::to_string(_socket_id));
SrtTransportManager::Instance().removeItem(_socket_id);
}
void SrtTransport::onShutdown(const SockException &ex) {
@ -739,7 +739,7 @@ void SrtTransport::onShutdown(const SockException &ex) {
}
}
size_t SrtTransport::getPayloadSize() {
size_t SrtTransport::getPayloadSize() const {
size_t ret = (_mtu - 28 - 16) / 188 * 188;
return ret;
}
@ -792,15 +792,13 @@ SrtTransportManager &SrtTransportManager::Instance() {
return s_instance;
}
void SrtTransportManager::addItem(const std::string &key, const SrtTransport::Ptr &ptr) {
void SrtTransportManager::addItem(const uint32_t key, const SrtTransport::Ptr &ptr) {
std::lock_guard<std::mutex> lck(_mtx);
_map[key] = ptr;
}
SrtTransport::Ptr SrtTransportManager::getItem(const std::string &key) {
if (key.empty()) {
return nullptr;
}
SrtTransport::Ptr SrtTransportManager::getItem(const uint32_t key) {
assert(key > 0);
std::lock_guard<std::mutex> lck(_mtx);
auto it = _map.find(key);
if (it == _map.end()) {
@ -809,25 +807,23 @@ SrtTransport::Ptr SrtTransportManager::getItem(const std::string &key) {
return it->second.lock();
}
void SrtTransportManager::removeItem(const std::string &key) {
void SrtTransportManager::removeItem(const uint32_t key) {
std::lock_guard<std::mutex> lck(_mtx);
_map.erase(key);
}
void SrtTransportManager::addHandshakeItem(const std::string &key, const SrtTransport::Ptr &ptr) {
void SrtTransportManager::addHandshakeItem(const uint32_t key, const SrtTransport::Ptr &ptr) {
std::lock_guard<std::mutex> lck(_handshake_mtx);
_handshake_map[key] = ptr;
}
void SrtTransportManager::removeHandshakeItem(const std::string &key) {
void SrtTransportManager::removeHandshakeItem(const uint32_t key) {
std::lock_guard<std::mutex> lck(_handshake_mtx);
_handshake_map.erase(key);
}
SrtTransport::Ptr SrtTransportManager::getHandshakeItem(const std::string &key) {
if (key.empty()) {
return nullptr;
}
SrtTransport::Ptr SrtTransportManager::getHandshakeItem(const uint32_t key) {
assert(key > 0);
std::lock_guard<std::mutex> lck(_handshake_mtx);
auto it = _handshake_map.find(key);
if (it == _handshake_map.end()) {

View File

@ -45,7 +45,7 @@ public:
virtual void inputSockData(uint8_t *buf, int len, struct sockaddr_storage *addr);
virtual void onSendTSData(const Buffer::Ptr &buffer, bool flush);
std::string getIdentifier();
std::string getIdentifier() const;
void unregisterSelf();
void unregisterSelfHandshake();
@ -89,7 +89,7 @@ private:
void sendShutDown();
void sendMsgDropReq(uint32_t first, uint32_t last);
size_t getPayloadSize();
size_t getPayloadSize() const;
void createTimerForCheckAlive();
@ -164,23 +164,23 @@ private:
class SrtTransportManager {
public:
static SrtTransportManager &Instance();
SrtTransport::Ptr getItem(const std::string &key);
void addItem(const std::string &key, const SrtTransport::Ptr &ptr);
void removeItem(const std::string &key);
SrtTransport::Ptr getItem(const uint32_t key);
void addItem(const uint32_t key, const SrtTransport::Ptr &ptr);
void removeItem(const uint32_t key);
void addHandshakeItem(const std::string &key, const SrtTransport::Ptr &ptr);
void removeHandshakeItem(const std::string &key);
SrtTransport::Ptr getHandshakeItem(const std::string &key);
void addHandshakeItem(const uint32_t key, const SrtTransport::Ptr &ptr);
void removeHandshakeItem(const uint32_t key);
SrtTransport::Ptr getHandshakeItem(const uint32_t key);
private:
SrtTransportManager() = default;
private:
std::mutex _mtx;
std::unordered_map<std::string, std::weak_ptr<SrtTransport>> _map;
std::unordered_map<uint32_t , std::weak_ptr<SrtTransport>> _map;
std::mutex _handshake_mtx;
std::unordered_map<std::string, std::weak_ptr<SrtTransport>> _handshake_map;
std::unordered_map<uint32_t, std::weak_ptr<SrtTransport>> _handshake_map;
};
} // namespace SRT

View File

@ -24,6 +24,32 @@ SrtTransportImp::~SrtTransportImp() {
}
}
SrtTransport::Ptr querySrtTransport(uint8_t *data, size_t size, const EventPoller::Ptr& poller) {
if (DataPacket::isDataPacket(data, size)) {
uint32_t socket_id = DataPacket::getSocketID(data, size);
return SrtTransportManager::Instance().getItem(socket_id);
}
if (HandshakePacket::isHandshakePacket(data, size)) {
auto type = HandshakePacket::getHandshakeType(data, size);
if (type == HandshakePacket::HS_TYPE_INDUCTION) {
// 握手第一阶段
return poller ? std::make_shared<SrtTransportImp>(poller) : nullptr;
}
if (type == HandshakePacket::HS_TYPE_CONCLUSION) {
// 握手第二阶段
uint32_t sync_cookie = HandshakePacket::getSynCookie(data, size);
return SrtTransportManager::Instance().getHandshakeItem(sync_cookie);
}
}
uint32_t socket_id = ControlPacket::getSocketID(data, size);
return SrtTransportManager::Instance().getItem(socket_id);
}
void SrtTransportImp::onHandShakeFinished(std::string &streamid, struct sockaddr_storage *addr) {
SrtTransport::onHandShakeFinished(streamid,addr);
// TODO parse stream id like this zlmediakit.com/live/test?token=1213444&type=push
@ -37,8 +63,8 @@ void SrtTransportImp::onHandShakeFinished(std::string &streamid, struct sockaddr
return;
}
auto params = Parser::parseArgs(_media_info.param_strs);
if (params["m"] == "publish") {
auto kv = Parser::parseArgs(_media_info.params);
if (kv["m"] == "publish") {
_is_pusher = true;
_decoder = DecoderImp::createDecoder(DecoderImp::decoder_ts, this);
emitOnPublish();
@ -72,10 +98,10 @@ bool SrtTransportImp::parseStreamid(std::string &streamid) {
app = tmps[0];
stream_name = tmps[1];
} else {
if (_media_info.param_strs.empty()) {
_media_info.param_strs = it.first + "=" + it.second;
if (_media_info.params.empty()) {
_media_info.params = it.first + "=" + it.second;
} else {
_media_info.param_strs += "&" + it.first + "=" + it.second;
_media_info.params += "&" + it.first + "=" + it.second;
}
}
}
@ -92,7 +118,7 @@ bool SrtTransportImp::parseStreamid(std::string &streamid) {
_media_info.app = app;
_media_info.stream = stream_name;
TraceL << " mediainfo=" << _media_info.shortUrl() << " params=" << _media_info.param_strs;
TraceL << " mediainfo=" << _media_info.shortUrl() << " params=" << _media_info.params;
return true;
}

View File

@ -180,7 +180,7 @@ int main(int argc, char *argv[]) {
auto pusher = std::make_shared<MediaPusher>(src);
pusher->setOnCreateSocket([](const EventPoller::Ptr &poller) {
//socket关闭互斥锁提高性能
return std::make_shared<Socket>(poller, false);
return Socket::createSocket(poller, false);
});
//设置推流失败监听
pusher->setOnPublished([&mtx, &pusher_map, index](const SockException &ex) {

View File

@ -123,7 +123,7 @@ int main(int argc, char *argv[]) {
auto tag = player.get();
player->setOnCreateSocket([](const EventPoller::Ptr &poller) {
//socket关闭互斥锁提高性能
return std::make_shared<Socket>(poller, false);
return Socket::createSocket(poller, false);
});
//设置播放失败监听
player->setOnPlayResult([&mtx, &player_map, tag](const SockException &ex) {

View File

@ -166,7 +166,7 @@ int main(int argc, char *argv[]) {
auto tag = pusher.get();
pusher->setOnCreateSocket([](const EventPoller::Ptr &poller) {
//socket关闭互斥锁提高性能
return std::make_shared<Socket>(poller, false);
return Socket::createSocket(poller, false);
});
//设置推流失败监听
pusher->setOnPublished([&mtx, &pusher_map, tag](const SockException &ex) {

View File

@ -27,7 +27,7 @@ using namespace mediakit;
class FlvSplitterImp : public FlvSplitter {
public:
FlvSplitterImp() {
_src = std::make_shared<RtmpMediaSourceImp>(MediaTuple{DEFAULT_VHOST, "live", "test"});
_src = std::make_shared<RtmpMediaSourceImp>(MediaTuple{DEFAULT_VHOST, "live", "test", ""});
}
~FlvSplitterImp() override = default;

View File

@ -10,11 +10,8 @@
#include <map>
#include <iostream>
#include "Util/MD5.h"
#include "Util/File.h"
#include "Util/logger.h"
#include "Util/SSLBox.h"
#include "Util/util.h"
#include "Util/logger.h"
#include "Network/TcpServer.h"
#include "Common/config.h"
#include "Rtsp/RtspSession.h"
@ -29,102 +26,102 @@ using namespace mediakit;
static semaphore sem;
#if defined(ENABLE_RTPPROXY)
static bool loadFile(const char *path, const EventPoller::Ptr &poller){
FILE *fp = fopen(path, "rb");
static bool loadFile(const char *path, const EventPoller::Ptr &poller) {
std::shared_ptr<FILE> fp(fopen(path, "rb"), [](FILE *fp) {
sem.post();
if (fp) {
fclose(fp);
}
});
if (!fp) {
WarnL << "open file failed:" << path;
return false;
}
uint64_t timeStamp_last = 0;
uint16_t len;
char rtp[0xFFFF];
struct sockaddr_storage addr;
memset(&addr, 0, sizeof(addr));
addr.ss_family = AF_INET;
auto sock = Socket::createSocket(poller);
size_t total_size = 0;
RtpProcess::Ptr process;
uint32_t ssrc = 0;
while (true) {
if (2 != fread(&len, 1, 2, fp)) {
WarnL;
break;
}
len = ntohs(len);
if (len < 12 || len > sizeof(rtp)) {
WarnL << len;
break;
}
auto process = RtpSelector::Instance().getProcess("test", true);
if (len != fread(rtp, 1, len, fp)) {
WarnL;
break;
}
total_size += len;
uint64_t timeStamp = 0;
if (!process) {
if (!RtpSelector::getSSRC(rtp, len, ssrc)) {
WarnL << "get ssrc from rtp failed:" << len;
return false;
uint64_t stamp_last = 0;
auto total_size = std::make_shared<size_t>(0);
auto do_read = [fp, total_size, sock, addr, process, stamp_last]() mutable -> int {
uint16_t len;
char rtp[0xFFFF];
while (true) {
if (2 != fread(&len, 1, 2, fp.get())) {
WarnL << "Read rtp size failed";
// 重新播放
fseek(fp.get(), 0, SEEK_SET);
return 1;
}
process = RtpSelector::Instance().getProcess(printSSRC(ssrc), true);
}
if (process) {
len = ntohs(len);
if (len < 12 || len > sizeof(rtp)) {
WarnL << "Invalid rtp size: " << len;
return 0;
}
if (len != fread(rtp, 1, len, fp.get())) {
WarnL << "Read rtp data failed";
return 0;
}
(*total_size) += len;
uint64_t stamp = 0;
try {
process->inputRtp(true, sock, rtp, len, (struct sockaddr *)&addr, &timeStamp);
} catch (...) {
RtpSelector::Instance().delProcess(printSSRC(ssrc), process.get());
throw;
process->inputRtp(true, sock, rtp, len, (struct sockaddr *)&addr, &stamp);
} catch (std::exception &ex) {
WarnL << "Input rtp failed: " << ex.what();
return 0;
}
auto diff = stamp - stamp_last;
if (diff < 0 || diff > 500) {
diff = 1;
}
if (diff) {
stamp_last = stamp;
return diff;
}
}
auto diff = timeStamp - timeStamp_last;
if (diff > 0 && diff < 500) {
usleep(diff * 1000);
} else {
usleep(1 * 1000);
};
poller->doDelayTask(1, [do_read, total_size, process]() mutable {
auto ret = do_read();
if (!ret) {
WarnL << *total_size / 1024 << "KB";
RtpSelector::Instance().delProcess("test", process.get());
}
timeStamp_last = timeStamp;
}
WarnL << total_size / 1024 << "KB";
fclose(fp);
return ret;
});
return true;
}
#endif//#if defined(ENABLE_RTPPROXY)
#endif // #if defined(ENABLE_RTPPROXY)
int main(int argc,char *argv[]) {
//设置日志
int main(int argc, char *argv[]) {
// 设置日志
Logger::Instance().add(std::make_shared<ConsoleChannel>("ConsoleChannel"));
#if defined(ENABLE_RTPPROXY)
//启动异步日志线程
// 启动异步日志线程
Logger::Instance().setWriter(std::make_shared<AsyncLogWriter>());
loadIniConfig((exeDir() + "config.ini").data());
TcpServer::Ptr rtspSrv(new TcpServer());
TcpServer::Ptr rtmpSrv(new TcpServer());
TcpServer::Ptr httpSrv(new TcpServer());
rtspSrv->start<RtspSession>(554);//默认554
rtmpSrv->start<RtmpSession>(1935);//默认1935
httpSrv->start<HttpSession>(80);//默认80
//此处选择是否导出调试文件
// mINI::Instance()[RtpProxy::kDumpDir] = "/Users/xzl/Desktop/";
rtspSrv->start<RtspSession>(554); // 默认554
rtmpSrv->start<RtmpSession>(1935); // 默认1935
httpSrv->start<HttpSession>(80); // 默认80
// 此处选择是否导出调试文件
// mINI::Instance()[RtpProxy::kDumpDir] = "/Users/xzl/Desktop/";
if (argc == 2){
auto poller = EventPollerPool::Instance().getPoller();
poller->async_first([poller,argv](){
loadFile(argv[1],poller);
sem.post();
});
if (argc == 2) {
loadFile(argv[1], EventPollerPool::Instance().getPoller());
sem.wait();
sleep(1);
} else {
ErrorL << "parameter error.";
}
else
ErrorL << "parameter error.";
#else
ErrorL << "please ENABLE_RTPPROXY and then test";
#endif//#if defined(ENABLE_RTPPROXY)
#endif // #if defined(ENABLE_RTPPROXY)
return 0;
}

View File

@ -90,7 +90,7 @@ void initEventListener() {
static onceToken s_token([]() {
//监听kBroadcastOnGetRtspRealm事件决定rtsp链接是否需要鉴权(传统的rtsp鉴权方案)才能访问
NoticeCenter::Instance().addListener(nullptr, Broadcast::kBroadcastOnGetRtspRealm, [](BroadcastOnGetRtspRealmArgs) {
DebugL << "RTSP是否需要鉴权事件" << args.getUrl() << " " << args.param_strs;
DebugL << "RTSP是否需要鉴权事件" << args.getUrl() << " " << args.params;
if (string("1") == args.stream) {
// live/1需要认证
//该流需要认证并且设置realm
@ -104,7 +104,7 @@ void initEventListener() {
//监听kBroadcastOnRtspAuth事件返回正确的rtsp鉴权用户密码
NoticeCenter::Instance().addListener(nullptr, Broadcast::kBroadcastOnRtspAuth, [](BroadcastOnRtspAuthArgs) {
DebugL << "RTSP播放鉴权:" << args.getUrl() << " " << args.param_strs;
DebugL << "RTSP播放鉴权:" << args.getUrl() << " " << args.params;
DebugL << "RTSP用户" << user_name << (must_no_encrypt ? " Base64" : " MD5") << " 方式登录";
string user = user_name;
//假设我们异步读取数据库
@ -134,14 +134,14 @@ void initEventListener() {
//监听rtsp/rtmp推流事件返回结果告知是否有推流权限
NoticeCenter::Instance().addListener(nullptr, Broadcast::kBroadcastMediaPublish, [](BroadcastMediaPublishArgs) {
DebugL << "推流鉴权:" << args.getUrl() << " " << args.param_strs;
DebugL << "推流鉴权:" << args.getUrl() << " " << args.params;
invoker("", ProtocolOption());//鉴权成功
//invoker("this is auth failed message");//鉴权失败
});
//监听rtsp/rtsps/rtmp/http-flv播放事件返回结果告知是否有播放权限(rtsp通过kBroadcastOnRtspAuth或此事件都可以实现鉴权)
NoticeCenter::Instance().addListener(nullptr, Broadcast::kBroadcastMediaPlayed, [](BroadcastMediaPlayedArgs) {
DebugL << "播放鉴权:" << args.getUrl() << " " << args.param_strs;
DebugL << "播放鉴权:" << args.getUrl() << " " << args.params;
invoker("");//鉴权成功
//invoker("this is auth failed message");//鉴权失败
});
@ -183,13 +183,13 @@ void initEventListener() {
*
* ZLMediaKit会把其立即转发给播放器(55)
*/
DebugL << "未找到流事件:" << args.getUrl() << " " << args.param_strs;
DebugL << "未找到流事件:" << args.getUrl() << " " << args.params;
});
//监听播放或推流结束时消耗流量事件
NoticeCenter::Instance().addListener(nullptr, Broadcast::kBroadcastFlowReport, [](BroadcastFlowReportArgs) {
DebugL << "播放器(推流器)断开连接事件:" << args.getUrl() << " " << args.param_strs << "\r\n使用流量:" << totalBytes << " bytes,连接时长:" << totalDuration << "";
DebugL << "播放器(推流器)断开连接事件:" << args.getUrl() << " " << args.params << "\r\n使用流量:" << totalBytes << " bytes,连接时长:" << totalDuration << "";
});

Some files were not shown because too many files have changed in this diff Show More