Merge branch 'ZLMediaKit:master' into master

This commit is contained in:
Dw9 2023-07-24 14:07:55 +08:00 committed by GitHub
commit b696894f6b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
37 changed files with 747 additions and 482 deletions

View File

@ -201,8 +201,8 @@ if(CMAKE_HOST_SYSTEM_NAME STREQUAL "Darwin")
endif()
# mediakit runtime
update_cached_list(MK_LINK_LIBRARIES "")
update_cached_list(MK_COMPILE_DEFINITIONS ENABLE_VERSION)
update_cached(MK_LINK_LIBRARIES "")
update_cached(MK_COMPILE_DEFINITIONS ENABLE_VERSION)
if (DISABLE_REPORT)
update_cached_list(MK_COMPILE_DEFINITIONS DISABLE_REPORT)
@ -462,7 +462,7 @@ if(ENABLE_SERVER)
endif()
# Android add_subdirectory
if(ENABLE_SERVER_LIB)
if(ENABLE_SERVER_LIB AND NOT CMAKE_PARENT_LIST_FILE STREQUAL CMAKE_CURRENT_LIST_FILE)
set(MK_LINK_LIBRARIES ${MK_LINK_LIBRARIES} PARENT_SCOPE)
endif()

View File

@ -69,6 +69,7 @@
- 支持H264/H265/AAC/G711/OPUS编码其他编码能转发但不能转协议
- 支持[RTMP-H265](https://github.com/ksvc/FFmpeg/wiki)
- 支持[RTMP-OPUS](https://github.com/ZLMediaKit/ZLMediaKit/wiki/RTMP%E5%AF%B9H265%E5%92%8COPUS%E7%9A%84%E6%94%AF%E6%8C%81)
- 支持[enhanced-rtmp(H265)](https://github.com/veovera/enhanced-rtmp)
- HLS
- 支持HLS文件(mpegts/fmp4)生成自带HTTP文件服务器

View File

@ -68,6 +68,7 @@
- Supports H264/H265/AAC/G711/OPUS encoding. Other encodings can be forwarded but cannot be converted to protocol
- Supports [RTMP-H265](https://github.com/ksvc/FFmpeg/wiki)
- Supports [RTMP-OPUS](https://github.com/ZLMediaKit/ZLMediaKit/wiki/RTMP%E5%AF%B9H265%E5%92%8COPUS%E7%9A%84%E6%94%AF%E6%8C%81)
- Supports [enhanced-rtmp(H265)](https://github.com/veovera/enhanced-rtmp)
- HLS
- Supports HLS file(mpegts/fmp4) generation and comes with an HTTP file server

View File

@ -142,9 +142,6 @@ deleteDelaySec=10
segKeep=0
[hook]
#在推流时如果url参数匹对admin_params那么可以不经过hook鉴权直接推流成功播放时亦然
#该配置项的目的是为了开发者自己调试测试,该参数暴露后会有泄露隐私的安全隐患
admin_params=secret=035c73f7-bb6b-4889-a715-d9eb2d1925cc
#是否启用hook事件启用后推拉流都将进行鉴权
enable=0
#播放器或推流器使用流量事件,置空则关闭
@ -273,8 +270,6 @@ handshakeSecond=15
#rtmp超时时间如果该时间内未收到客户端的数据
#或者tcp发送缓存超过这个时间则会断开连接单位秒
keepAliveSecond=15
#在接收rtmp推流时是否重新生成时间戳(很多推流器的时间戳着实很烂)
modifyStamp=0
#rtmp服务器监听端口
port=1935
#rtmps服务器监听地址

View File

@ -34,7 +34,7 @@ if(ENABLE_SERVER_LIB)
PRIVATE ${COMPILE_OPTIONS_DEFAULT})
target_link_libraries(MediaServer
PRIVATE ${MK_LINK_LIBRARIES})
update_cached(MK_LINK_LIBRARIES MediaServer)
update_cached_list(MK_LINK_LIBRARIES MediaServer)
return()
endif()

View File

@ -44,6 +44,8 @@ typedef enum {
OtherFailed = -1,//业务代码执行失败,
Success = 0//执行成功
} ApiErr;
extern const std::string kSecret;
}//namespace API
class ApiRetException: public std::runtime_error {

View File

@ -48,7 +48,6 @@ const string kOnServerExited = HOOK_FIELD "on_server_exited";
const string kOnServerKeepalive = HOOK_FIELD "on_server_keepalive";
const string kOnSendRtpStopped = HOOK_FIELD "on_send_rtp_stopped";
const string kOnRtpServerTimeout = HOOK_FIELD "on_rtp_server_timeout";
const string kAdminParams = HOOK_FIELD "admin_params";
const string kAliveInterval = HOOK_FIELD "alive_interval";
const string kRetry = HOOK_FIELD "retry";
const string kRetryDelay = HOOK_FIELD "retry_delay";
@ -74,7 +73,6 @@ static onceToken token([]() {
mINI::Instance()[kOnServerKeepalive] = "";
mINI::Instance()[kOnSendRtpStopped] = "";
mINI::Instance()[kOnRtpServerTimeout] = "";
mINI::Instance()[kAdminParams] = "secret=035c73f7-bb6b-4889-a715-d9eb2d1925cc";
mINI::Instance()[kAliveInterval] = 30.0;
mINI::Instance()[kRetry] = 1;
mINI::Instance()[kRetryDelay] = 3.0;
@ -331,11 +329,10 @@ static mINI jsonToMini(const Value &obj) {
void installWebHook() {
GET_CONFIG(bool, hook_enable, Hook::kEnable);
GET_CONFIG(string, hook_adminparams, Hook::kAdminParams);
NoticeCenter::Instance().addListener(&web_hook_tag, Broadcast::kBroadcastMediaPublish, [](BroadcastMediaPublishArgs) {
GET_CONFIG(string, hook_publish, Hook::kOnPublish);
if (!hook_enable || args.param_strs == hook_adminparams || hook_publish.empty() || sender.get_peer_ip() == "127.0.0.1") {
if (!hook_enable || hook_publish.empty() || sender.get_peer_ip() == "127.0.0.1") {
invoker("", ProtocolOption());
return;
}
@ -360,7 +357,7 @@ void installWebHook() {
NoticeCenter::Instance().addListener(&web_hook_tag, Broadcast::kBroadcastMediaPlayed, [](BroadcastMediaPlayedArgs) {
GET_CONFIG(string, hook_play, Hook::kOnPlay);
if (!hook_enable || args.param_strs == hook_adminparams || hook_play.empty() || sender.get_peer_ip() == "127.0.0.1") {
if (!hook_enable || hook_play.empty() || sender.get_peer_ip() == "127.0.0.1") {
invoker("");
return;
}
@ -374,7 +371,7 @@ void installWebHook() {
NoticeCenter::Instance().addListener(&web_hook_tag, Broadcast::kBroadcastFlowReport, [](BroadcastFlowReportArgs) {
GET_CONFIG(string, hook_flowreport, Hook::kOnFlowReport);
if (!hook_enable || args.param_strs == hook_adminparams || hook_flowreport.empty() || sender.get_peer_ip() == "127.0.0.1") {
if (!hook_enable || hook_flowreport.empty() || sender.get_peer_ip() == "127.0.0.1") {
return;
}
auto body = make_json(args);
@ -393,7 +390,7 @@ void installWebHook() {
// 监听kBroadcastOnGetRtspRealm事件决定rtsp链接是否需要鉴权(传统的rtsp鉴权方案)才能访问
NoticeCenter::Instance().addListener(&web_hook_tag, Broadcast::kBroadcastOnGetRtspRealm, [](BroadcastOnGetRtspRealmArgs) {
GET_CONFIG(string, hook_rtsp_realm, Hook::kOnRtspRealm);
if (!hook_enable || args.param_strs == hook_adminparams || hook_rtsp_realm.empty() || sender.get_peer_ip() == "127.0.0.1") {
if (!hook_enable || hook_rtsp_realm.empty() || sender.get_peer_ip() == "127.0.0.1") {
// 无需认证
invoker("");
return;
@ -620,7 +617,7 @@ void installWebHook() {
// 追踪用户的目的是为了缓存上次鉴权结果,减少鉴权次数,提高性能
NoticeCenter::Instance().addListener(&web_hook_tag, Broadcast::kBroadcastHttpAccess, [](BroadcastHttpAccessArgs) {
GET_CONFIG(string, hook_http_access, Hook::kOnHttpAccess);
if (sender.get_peer_ip() == "127.0.0.1" || parser.params() == hook_adminparams) {
if (sender.get_peer_ip() == "127.0.0.1") {
// 如果是本机或超级管理员访问那么不做访问鉴权权限有效期1个小时
invoker("", "", 60 * 60);
return;

View File

@ -352,6 +352,11 @@ int start_main(int argc,char *argv[]) {
#endif //defined(ENABLE_SRT)
try {
auto secret = mINI::Instance()[API::kSecret];
if (secret == "035c73f7-bb6b-4889-a715-d9eb2d1925cc" || secret.empty()) {
// 使用默认secret被禁止启动
throw std::invalid_argument("please modify the configuration named " + API::kSecret + " in " + g_ini_file);
}
//rtsp服务器端口默认554
if (rtspPort) { rtspSrv->start<RtspSession>(rtspPort); }
//rtsps服务器端口默认322
@ -389,8 +394,7 @@ int start_main(int argc,char *argv[]) {
#endif//defined(ENABLE_SRT)
} catch (std::exception &ex) {
WarnL << "端口占用或无权限:" << ex.what();
ErrorL << "程序启动失败,请修改配置文件中端口号后重试!";
ErrorL << "Start server failed: " << ex.what();
sleep(1);
#if !defined(_WIN32)
if (pid != getpid() && kill_parent_if_failed) {

View File

@ -436,6 +436,7 @@ FFmpegDecoder::FFmpegDecoder(const Track::Ptr &track, int thread_num, const std:
av_dict_set(&dict, "zerolatency", "1", 0);
av_dict_set(&dict, "strict", "-2", 0);
#ifdef AV_CODEC_CAP_TRUNCATED
if (codec->capabilities & AV_CODEC_CAP_TRUNCATED) {
/* we do not send complete frames */
_context->flags |= AV_CODEC_FLAG_TRUNCATED;
@ -443,6 +444,7 @@ FFmpegDecoder::FFmpegDecoder(const Track::Ptr &track, int thread_num, const std:
// 此时业务层应该需要合帧
_do_merger = true;
}
#endif
int ret = avcodec_open2(_context.get(), codec, &dict);
av_dict_free(&dict);

View File

@ -48,8 +48,13 @@ void Parser::parse(const char *buf, size_t size) {
clear();
auto ptr = buf;
while (true) {
auto next_line = strstr(ptr, "\r\n");
CHECK(next_line);
auto next_line = strchr(ptr, '\n');
auto offset = 1;
CHECK(next_line && next_line > ptr);
if (*(next_line - 1) == '\r') {
next_line -= 1;
offset = 2;
}
if (ptr == buf) {
auto blank = strchr(ptr, ' ');
CHECK(blank > ptr && blank < next_line);
@ -76,7 +81,7 @@ void Parser::parse(const char *buf, size_t size) {
}
_headers.emplace_force(trim(std::move(key)), trim(std::move(value)));
}
ptr = next_line + 2;
ptr = next_line + offset;
if (strncmp(ptr, "\r\n", 2) == 0) { // 协议解析完毕
_content.assign(ptr + 2, buf + size);
break;

View File

@ -246,7 +246,7 @@ AACTrack::AACTrack(const string &aac_cfg) {
onReady();
}
const string &AACTrack::getAacCfg() const {
const string &AACTrack::getConfig() const {
return _cfg;
}
@ -342,7 +342,7 @@ Sdp::Ptr AACTrack::getSdp() {
WarnL << getCodecName() << " Track未准备好";
return nullptr;
}
return std::make_shared<AACSdp>(getAacCfg(), getAudioSampleRate(), getAudioChannel(), getBitRate() / 1024);
return std::make_shared<AACSdp>(getConfig(), getAudioSampleRate(), getAudioChannel(), getBitRate() / 1024);
}
}//namespace mediakit

View File

@ -44,7 +44,7 @@ public:
/**
* aac
*/
const std::string &getAacCfg() const;
const std::string &getConfig() const;
bool ready() override;
CodecId getCodecId() const override;

View File

@ -16,12 +16,9 @@ using namespace toolkit;
namespace mediakit {
static string getAacCfg(const RtmpPacket &thiz) {
static string getConfig(const RtmpPacket &thiz) {
string ret;
if (thiz.getMediaType() != FLV_CODEC_AAC) {
return ret;
}
if (!thiz.isCfgFrame()) {
if ((RtmpAudioCodec)thiz.getRtmpCodecId() != RtmpAudioCodec::aac) {
return ret;
}
if (thiz.buffer.size() < 4) {
@ -33,8 +30,8 @@ static string getAacCfg(const RtmpPacket &thiz) {
}
void AACRtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt) {
if (pkt->isCfgFrame()) {
_aac_cfg = getAacCfg(*pkt);
if (pkt->isConfigFrame()) {
_aac_cfg = getConfig(*pkt);
if (!_aac_cfg.empty()) {
onGetAAC(nullptr, 0, 0);
}
@ -82,7 +79,7 @@ AACRtmpEncoder::AACRtmpEncoder(const Track::Ptr &track) {
void AACRtmpEncoder::makeConfigPacket() {
if (_track && _track->ready()) {
//从track中和获取aac配置信息
_aac_cfg = _track->getAacCfg();
_aac_cfg = _track->getConfig();
}
if (!_aac_cfg.empty()) {
@ -93,51 +90,45 @@ void AACRtmpEncoder::makeConfigPacket() {
bool AACRtmpEncoder::inputFrame(const Frame::Ptr &frame) {
if (_aac_cfg.empty()) {
if (frame->prefixSize()) {
//包含adts头,从adts头获取aac配置信息
_aac_cfg = makeAacConfig((uint8_t *) (frame->data()), frame->prefixSize());
// 包含adts头,从adts头获取aac配置信息
_aac_cfg = makeAacConfig((uint8_t *)(frame->data()), frame->prefixSize());
}
makeConfigPacket();
}
if(_aac_cfg.empty()){
if (_aac_cfg.empty()) {
return false;
}
auto rtmpPkt = RtmpPacket::create();
//header
uint8_t is_config = false;
rtmpPkt->buffer.push_back(_audio_flv_flags);
rtmpPkt->buffer.push_back(!is_config);
//aac data
rtmpPkt->buffer.append(frame->data() + frame->prefixSize(), frame->size() - frame->prefixSize());
rtmpPkt->body_size = rtmpPkt->buffer.size();
rtmpPkt->chunk_id = CHUNK_AUDIO;
rtmpPkt->stream_index = STREAM_MEDIA;
rtmpPkt->time_stamp = frame->dts();
rtmpPkt->type_id = MSG_AUDIO;
RtmpCodec::inputRtmp(rtmpPkt);
auto pkt = RtmpPacket::create();
// header
pkt->buffer.push_back(_audio_flv_flags);
pkt->buffer.push_back((uint8_t)RtmpAACPacketType::aac_raw);
// aac data
pkt->buffer.append(frame->data() + frame->prefixSize(), frame->size() - frame->prefixSize());
pkt->body_size = pkt->buffer.size();
pkt->chunk_id = CHUNK_AUDIO;
pkt->stream_index = STREAM_MEDIA;
pkt->time_stamp = frame->dts();
pkt->type_id = MSG_AUDIO;
RtmpCodec::inputRtmp(pkt);
return true;
}
void AACRtmpEncoder::makeAudioConfigPkt() {
_audio_flv_flags = getAudioRtmpFlags(std::make_shared<AACTrack>(_aac_cfg));
auto rtmpPkt = RtmpPacket::create();
//header
uint8_t is_config = true;
rtmpPkt->buffer.push_back(_audio_flv_flags);
rtmpPkt->buffer.push_back(!is_config);
//aac config
rtmpPkt->buffer.append(_aac_cfg);
rtmpPkt->body_size = rtmpPkt->buffer.size();
rtmpPkt->chunk_id = CHUNK_AUDIO;
rtmpPkt->stream_index = STREAM_MEDIA;
rtmpPkt->time_stamp = 0;
rtmpPkt->type_id = MSG_AUDIO;
RtmpCodec::inputRtmp(rtmpPkt);
auto pkt = RtmpPacket::create();
// header
pkt->buffer.push_back(_audio_flv_flags);
pkt->buffer.push_back((uint8_t)RtmpAACPacketType::aac_config_header);
// aac config
pkt->buffer.append(_aac_cfg);
pkt->body_size = pkt->buffer.size();
pkt->chunk_id = CHUNK_AUDIO;
pkt->stream_index = STREAM_MEDIA;
pkt->time_stamp = 0;
pkt->type_id = MSG_AUDIO;
RtmpCodec::inputRtmp(pkt);
}
}//namespace mediakit

View File

@ -64,7 +64,7 @@ AACRtpDecoder::AACRtpDecoder(const Track::Ptr &track) {
if (!aacTrack || !aacTrack->ready()) {
WarnL << "该aac track无效!";
} else {
_aac_cfg = aacTrack->getAacCfg();
_aac_cfg = aacTrack->getConfig();
}
obtainFrame();
}

View File

@ -201,17 +201,17 @@ static CodecId getVideoCodecIdByAmf(const AMFValue &val){
}
if (val.type() != AMF_NULL) {
auto type_id = val.as_integer();
auto type_id = (RtmpVideoCodec)val.as_integer();
switch (type_id) {
case FLV_CODEC_H264 : return CodecH264;
case FLV_CODEC_H265 : return CodecH265;
default : WarnL << "暂不支持该视频Amf:" << type_id; return CodecInvalid;
case RtmpVideoCodec::h264: return CodecH264;
case RtmpVideoCodec::h265: return CodecH265;
default: WarnL << "暂不支持该视频Amf:" << (int)type_id; return CodecInvalid;
}
}
return CodecInvalid;
}
Track::Ptr getTrackByCodecId(CodecId codecId, int sample_rate = 0, int channels = 0, int sample_bit = 0) {
Track::Ptr Factory::getTrackByCodecId(CodecId codecId, int sample_rate, int channels, int sample_bit) {
switch (codecId){
case CodecH264 : return std::make_shared<H264Track>();
case CodecH265 : return std::make_shared<H265Track>();
@ -243,13 +243,13 @@ static CodecId getAudioCodecIdByAmf(const AMFValue &val) {
}
if (val.type() != AMF_NULL) {
auto type_id = val.as_integer();
auto type_id = (RtmpAudioCodec)val.as_integer();
switch (type_id) {
case FLV_CODEC_AAC : return CodecAAC;
case FLV_CODEC_G711A : return CodecG711A;
case FLV_CODEC_G711U : return CodecG711U;
case FLV_CODEC_OPUS : return CodecOpus;
default : WarnL << "暂不支持该音频Amf:" << type_id; return CodecInvalid;
case RtmpAudioCodec::aac : return CodecAAC;
case RtmpAudioCodec::g711a : return CodecG711A;
case RtmpAudioCodec::g711u : return CodecG711U;
case RtmpAudioCodec::opus : return CodecOpus;
default : WarnL << "暂不支持该音频Amf:" << (int)type_id; return CodecInvalid;
}
}
@ -291,13 +291,13 @@ RtmpCodec::Ptr Factory::getRtmpCodecByTrack(const Track::Ptr &track, bool is_enc
}
AMFValue Factory::getAmfByCodecId(CodecId codecId) {
switch (codecId){
case CodecAAC: return AMFValue(FLV_CODEC_AAC);
case CodecH264: return AMFValue(FLV_CODEC_H264);
case CodecH265: return AMFValue(FLV_CODEC_H265);
case CodecG711A: return AMFValue(FLV_CODEC_G711A);
case CodecG711U: return AMFValue(FLV_CODEC_G711U);
case CodecOpus: return AMFValue(FLV_CODEC_OPUS);
switch (codecId) {
case CodecAAC: return AMFValue((int)RtmpAudioCodec::aac);
case CodecH264: return AMFValue((int)RtmpVideoCodec::h264);
case CodecH265: return AMFValue((int)RtmpVideoCodec::h265);
case CodecG711A: return AMFValue((int)RtmpAudioCodec::g711a);
case CodecG711U: return AMFValue((int)RtmpAudioCodec::g711u);
case CodecOpus: return AMFValue((int)RtmpAudioCodec::opus);
default: return AMFValue(AMF_NULL);
}
}

View File

@ -21,6 +21,16 @@ namespace mediakit{
class Factory {
public:
/**
* codec_id track
* @param codecId id
* @param sample_rate 90000
* @param channels
* @param sample_bit
*/
static Track::Ptr getTrackByCodecId(CodecId codecId, int sample_rate = 0, int channels = 0, int sample_bit = 0);
////////////////////////////////rtsp相关//////////////////////////////////
/**
* sdp生成Track对象

View File

@ -12,9 +12,10 @@
#include "SPSParser.h"
#include "Util/logger.h"
#include "Util/base64.h"
#include "Common/config.h"
using namespace toolkit;
using namespace std;
using namespace toolkit;
namespace mediakit {
@ -248,7 +249,14 @@ public:
_printer << "b=AS:" << bitrate << "\r\n";
}
_printer << "a=rtpmap:" << payload_type << " " << getCodecName() << "/" << 90000 << "\r\n";
_printer << "a=fmtp:" << payload_type << " packetization-mode=1; profile-level-id=";
/**
Single NAI Unit Mode = 0. // Single NAI mode (Only nals from 1-23 are allowed)
Non Interleaved Mode = 1// Non-interleaved Mode: 1-2324 (STAP-A)28 (FU-A) are allowed
Interleaved Mode = 2, // 25 (STAP-B)26 (MTAP16)27 (MTAP24)28 (EU-A)and 29 (EU-B) are allowed.
**/
GET_CONFIG(bool, h264_stap_a, Rtp::kH264StapA);
_printer << "a=fmtp:" << payload_type << " packetization-mode=" << h264_stap_a << "; profile-level-id=";
uint32_t profile_level_id = 0;
if (strSPS.length() >= 4) { // sanity check

View File

@ -30,10 +30,7 @@ H264Frame::Ptr H264RtmpDecoder::obtainFrame() {
* 0x00 00 00 01sps pps
*/
static bool getH264Config(const RtmpPacket &thiz, string &sps, string &pps) {
if (thiz.getMediaType() != FLV_CODEC_H264) {
return false;
}
if (!thiz.isCfgFrame()) {
if ((RtmpVideoCodec)thiz.getRtmpCodecId() != RtmpVideoCodec::h264) {
return false;
}
if (thiz.buffer.size() < 13) {
@ -59,7 +56,7 @@ static bool getH264Config(const RtmpPacket &thiz, string &sps, string &pps) {
}
void H264RtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt) {
if (pkt->isCfgFrame()) {
if (pkt->isConfigFrame()) {
//缓存sps pps后续插入到I帧之前
if (!getH264Config(*pkt, _sps, _pps)) {
WarnL << "get h264 sps/pps failed, rtmp packet is: " << hexdump(pkt->data(), pkt->size());
@ -159,26 +156,21 @@ bool H264RtmpEncoder::inputFrame(const Frame::Ptr &frame) {
}
return _merger.inputFrame(frame, [this](uint64_t dts, uint64_t pts, const Buffer::Ptr &, bool have_key_frame) {
//flags
_rtmp_packet->buffer[0] = FLV_CODEC_H264 | ((have_key_frame ? FLV_KEY_FRAME : FLV_INTER_FRAME) << 4);
//not config
_rtmp_packet->buffer[1] = true;
int32_t cts = pts - dts;
if (cts < 0) {
cts = 0;
}
//cts
set_be24(&_rtmp_packet->buffer[2], cts);
_rtmp_packet->time_stamp = dts;
_rtmp_packet->body_size = _rtmp_packet->buffer.size();
_rtmp_packet->chunk_id = CHUNK_VIDEO;
_rtmp_packet->stream_index = STREAM_MEDIA;
_rtmp_packet->type_id = MSG_VIDEO;
//输出rtmp packet
RtmpCodec::inputRtmp(_rtmp_packet);
_rtmp_packet = nullptr;
}, &_rtmp_packet->buffer);
// flags
_rtmp_packet->buffer[0] = (uint8_t)RtmpVideoCodec::h264 | ((uint8_t)(have_key_frame ? RtmpFrameType::key_frame : RtmpFrameType::inter_frame) << 4);
_rtmp_packet->buffer[1] = (uint8_t)RtmpH264PacketType::h264_nalu;
int32_t cts = pts - dts;
// cts
set_be24(&_rtmp_packet->buffer[2], cts);
_rtmp_packet->time_stamp = dts;
_rtmp_packet->body_size = _rtmp_packet->buffer.size();
_rtmp_packet->chunk_id = CHUNK_VIDEO;
_rtmp_packet->stream_index = STREAM_MEDIA;
_rtmp_packet->type_id = MSG_VIDEO;
// 输出rtmp packet
RtmpCodec::inputRtmp(_rtmp_packet);
_rtmp_packet = nullptr;
}, &_rtmp_packet->buffer);
}
void H264RtmpEncoder::makeVideoConfigPkt() {
@ -186,42 +178,39 @@ void H264RtmpEncoder::makeVideoConfigPkt() {
WarnL << "sps长度不足4字节";
return;
}
int8_t flags = FLV_CODEC_H264;
flags |= (FLV_KEY_FRAME << 4);
bool is_config = true;
auto rtmpPkt = RtmpPacket::create();
//header
rtmpPkt->buffer.push_back(flags);
rtmpPkt->buffer.push_back(!is_config);
//cts
rtmpPkt->buffer.append("\x0\x0\x0", 3);
//AVCDecoderConfigurationRecord start
rtmpPkt->buffer.push_back(1); // version
rtmpPkt->buffer.push_back(_sps[1]); // profile
rtmpPkt->buffer.push_back(_sps[2]); // compat
rtmpPkt->buffer.push_back(_sps[3]); // level
rtmpPkt->buffer.push_back((char)0xff); // 6 bits reserved + 2 bits nal size length - 1 (11)
rtmpPkt->buffer.push_back((char)0xe1); // 3 bits reserved + 5 bits number of sps (00001)
//sps
auto flags = (uint8_t)RtmpVideoCodec::h264;
flags |= ((uint8_t)RtmpFrameType::key_frame << 4);
auto pkt = RtmpPacket::create();
// header
pkt->buffer.push_back(flags);
pkt->buffer.push_back((uint8_t)RtmpH264PacketType::h264_config_header);
// cts
pkt->buffer.append("\x0\x0\x0", 3);
// AVCDecoderConfigurationRecord start
pkt->buffer.push_back(1); // version
pkt->buffer.push_back(_sps[1]); // profile
pkt->buffer.push_back(_sps[2]); // compat
pkt->buffer.push_back(_sps[3]); // level
pkt->buffer.push_back((char)0xff); // 6 bits reserved + 2 bits nal size length - 1 (11)
pkt->buffer.push_back((char)0xe1); // 3 bits reserved + 5 bits number of sps (00001)
// sps
uint16_t size = (uint16_t)_sps.size();
size = htons(size);
rtmpPkt->buffer.append((char *) &size, 2);
rtmpPkt->buffer.append(_sps);
//pps
rtmpPkt->buffer.push_back(1); // version
pkt->buffer.append((char *)&size, 2);
pkt->buffer.append(_sps);
// pps
pkt->buffer.push_back(1); // version
size = (uint16_t)_pps.size();
size = htons(size);
rtmpPkt->buffer.append((char *) &size, 2);
rtmpPkt->buffer.append(_pps);
pkt->buffer.append((char *)&size, 2);
pkt->buffer.append(_pps);
rtmpPkt->body_size = rtmpPkt->buffer.size();
rtmpPkt->chunk_id = CHUNK_VIDEO;
rtmpPkt->stream_index = STREAM_MEDIA;
rtmpPkt->time_stamp = 0;
rtmpPkt->type_id = MSG_VIDEO;
RtmpCodec::inputRtmp(rtmpPkt);
pkt->body_size = pkt->buffer.size();
pkt->chunk_id = CHUNK_VIDEO;
pkt->stream_index = STREAM_MEDIA;
pkt->time_stamp = 0;
pkt->type_id = MSG_VIDEO;
RtmpCodec::inputRtmp(pkt);
}
}//namespace mediakit

View File

@ -12,12 +12,12 @@
#include "H265Rtmp.h"
#ifdef ENABLE_MP4
#include "mpeg4-hevc.h"
#endif//ENABLE_MP4
#endif // ENABLE_MP4
using namespace std;
using namespace toolkit;
namespace mediakit{
namespace mediakit {
H265RtmpDecoder::H265RtmpDecoder() {
_h265frame = obtainFrame();
@ -30,46 +30,105 @@ H265Frame::Ptr H265RtmpDecoder::obtainFrame() {
}
#ifdef ENABLE_MP4
static bool decode_HEVCDecoderConfigurationRecord(uint8_t *extra, size_t bytes, string &frame) {
struct mpeg4_hevc_t hevc;
memset(&hevc, 0, sizeof(hevc));
if (mpeg4_hevc_decoder_configuration_record_load((uint8_t *)extra, bytes, &hevc) > 0) {
uint8_t *config = new uint8_t[bytes * 2];
int size = mpeg4_hevc_to_nalu(&hevc, config, bytes * 2);
if (size > 4) {
frame.assign((char *)config + 4, size - 4);
}
delete[] config;
return size > 4;
}
return false;
}
/**
* 0x00 00 00 01sps
* @return
*/
static bool getH265ConfigFrame(const RtmpPacket &thiz,string &frame) {
if (thiz.getMediaType() != FLV_CODEC_H265) {
return false;
}
if (!thiz.isCfgFrame()) {
static bool getH265ConfigFrame(const RtmpPacket &thiz, string &frame) {
if ((RtmpVideoCodec)thiz.getRtmpCodecId() != RtmpVideoCodec::h265) {
return false;
}
if (thiz.buffer.size() < 6) {
WarnL << "bad H265 cfg!";
return false;
}
auto extra = thiz.buffer.data() + 5;
auto bytes = thiz.buffer.size() - 5;
struct mpeg4_hevc_t hevc;
memset(&hevc, 0, sizeof(hevc));
if (mpeg4_hevc_decoder_configuration_record_load((uint8_t *) extra, bytes, &hevc) > 0) {
uint8_t *config = new uint8_t[bytes * 2];
int size = mpeg4_hevc_to_nalu(&hevc, config, bytes * 2);
if (size > 4) {
frame.assign((char *) config + 4, size - 4);
}
delete [] config;
return size > 4;
}
return false;
return decode_HEVCDecoderConfigurationRecord((uint8_t *)thiz.buffer.data() + 5, thiz.buffer.size() - 5, frame);
}
#endif
void H265RtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt) {
if (pkt->isCfgFrame()) {
if (_info.codec == CodecInvalid) {
// 先判断是否为增强型rtmp
parseVideoRtmpPacket((uint8_t *)pkt->data(), pkt->size(), &_info);
}
if (_info.is_enhanced) {
// 增强型rtmp
parseVideoRtmpPacket((uint8_t *)pkt->data(), pkt->size(), &_info);
if (!_info.is_enhanced || _info.codec != CodecH265) {
throw std::invalid_argument("Invalid enhanced-rtmp hevc packet!");
}
auto data = (uint8_t *)pkt->data() + 5;
auto size = pkt->size() - 5;
switch (_info.video.pkt_type) {
case RtmpPacketType::PacketTypeSequenceStart: {
#ifdef ENABLE_MP4
string config;
if (decode_HEVCDecoderConfigurationRecord(data, size, config)) {
onGetH265(config.data(), config.size(), pkt->time_stamp, pkt->time_stamp);
}
#else
WarnL << "请开启MP4相关功能并使能\"ENABLE_MP4\",否则对H265-RTMP支持不完善";
#endif
break;
}
case RtmpPacketType::PacketTypeCodedFramesX:
case RtmpPacketType::PacketTypeCodedFrames: {
auto pts = pkt->time_stamp;
if (RtmpPacketType::PacketTypeCodedFrames == _info.video.pkt_type) {
// SI24 = [CompositionTime Offset]
CHECK(size > 7);
int32_t cts = (((data[0] << 16) | (data[1] << 8) | (data[2])) + 0xff800000) ^ 0xff800000;
pts += cts;
data += 3;
size -= 3;
}
splitFrame(data, size, pkt->time_stamp, pts);
break;
}
case RtmpPacketType::PacketTypeMetadata: {
// The body does not contain video data. The body is an AMF encoded metadata.
// The metadata will be represented by a series of [name, value] pairs.
// For now the only defined [name, value] pair is [“colorInfo”, Object]
// See Metadata Frame section for more details of this object.
//
// For a deeper understanding of the encoding please see description
// of SCRIPTDATA and SSCRIPTDATAVALUE in the FLV file spec.
// DATA = [“colorInfo”, Object]
break;
}
case RtmpPacketType::PacketTypeSequenceEnd: {
// signals end of sequence
break;
}
default: break;
}
return;
}
// 国内扩展(12) H265 rtmp
if (pkt->isConfigFrame()) {
#ifdef ENABLE_MP4
string config;
if(getH265ConfigFrame(*pkt,config)){
onGetH265(config.data(), config.size(), pkt->time_stamp , pkt->time_stamp);
if (getH265ConfigFrame(*pkt, config)) {
onGetH265(config.data(), config.size(), pkt->time_stamp, pkt->time_stamp);
}
#else
WarnL << "请开启MP4相关功能并使能\"ENABLE_MP4\",否则对H265-RTMP支持不完善";
@ -78,41 +137,42 @@ void H265RtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt) {
}
if (pkt->buffer.size() > 9) {
auto total_len = pkt->buffer.size();
size_t offset = 5;
uint8_t *cts_ptr = (uint8_t *) (pkt->buffer.data() + 2);
uint8_t *cts_ptr = (uint8_t *)(pkt->buffer.data() + 2);
int32_t cts = (((cts_ptr[0] << 16) | (cts_ptr[1] << 8) | (cts_ptr[2])) + 0xff800000) ^ 0xff800000;
auto pts = pkt->time_stamp + cts;
while (offset + 4 < total_len) {
uint32_t frame_len;
memcpy(&frame_len, pkt->buffer.data() + offset, 4);
frame_len = ntohl(frame_len);
offset += 4;
if (frame_len + offset > total_len) {
break;
}
onGetH265(pkt->buffer.data() + offset, frame_len, pkt->time_stamp, pts);
offset += frame_len;
}
splitFrame((uint8_t *)pkt->data() + 5, pkt->size() - 5, pkt->time_stamp, pts);
}
}
inline void H265RtmpDecoder::onGetH265(const char* pcData, size_t iLen, uint32_t dts,uint32_t pts) {
if(iLen == 0){
void H265RtmpDecoder::splitFrame(const uint8_t *data, size_t size, uint32_t dts, uint32_t pts) {
auto end = data + size;
while (data + 4 < end) {
uint32_t frame_len = load_be32(data);
data += 4;
if (data + frame_len > end) {
break;
}
onGetH265((const char *)data, frame_len, dts, pts);
data += frame_len;
}
}
inline void H265RtmpDecoder::onGetH265(const char *data, size_t size, uint32_t dts, uint32_t pts) {
if (size == 0) {
return;
}
#if 1
_h265frame->_dts = dts;
_h265frame->_pts = pts;
_h265frame->_buffer.assign("\x00\x00\x00\x01", 4); //添加265头
_h265frame->_buffer.append(pcData, iLen);
_h265frame->_buffer.assign("\x00\x00\x00\x01", 4); // 添加265头
_h265frame->_buffer.append(data, size);
//写入环形缓存
// 写入环形缓存
RtmpCodec::inputFrame(_h265frame);
_h265frame = obtainFrame();
#else
//防止内存拷贝这样产生的265帧不会有0x00 00 01头
auto frame = std::make_shared<H265FrameNoCacheAble>((char *)pcData,iLen,dts,pts,0);
// 防止内存拷贝这样产生的265帧不会有0x00 00 01头
auto frame = std::make_shared<H265FrameNoCacheAble>((char *)data, size, dts, pts, 0);
RtmpCodec::inputFrame(frame);
#endif
}
@ -123,16 +183,16 @@ H265RtmpEncoder::H265RtmpEncoder(const Track::Ptr &track) {
_track = dynamic_pointer_cast<H265Track>(track);
}
void H265RtmpEncoder::makeConfigPacket(){
void H265RtmpEncoder::makeConfigPacket() {
if (_track && _track->ready()) {
//尝试从track中获取sps pps信息
// 尝试从track中获取sps pps信息
_sps = _track->getSps();
_pps = _track->getPps();
_vps = _track->getVps();
}
if (!_sps.empty() && !_pps.empty() && !_vps.empty()) {
//获取到sps/pps
// 获取到sps/pps
makeVideoConfigPkt();
_got_config_frame = true;
}
@ -175,50 +235,42 @@ bool H265RtmpEncoder::inputFrame(const Frame::Ptr &frame) {
if (!_rtmp_packet) {
_rtmp_packet = RtmpPacket::create();
//flags/not_config/cts预占位
// flags/not_config/cts预占位
_rtmp_packet->buffer.resize(5);
}
return _merger.inputFrame(frame, [this](uint64_t dts, uint64_t pts, const Buffer::Ptr &, bool have_key_frame) {
//flags
_rtmp_packet->buffer[0] = FLV_CODEC_H265 | ((have_key_frame ? FLV_KEY_FRAME : FLV_INTER_FRAME) << 4);
//not config
_rtmp_packet->buffer[1] = true;
int32_t cts = pts - dts;
if (cts < 0) {
cts = 0;
}
//cts
set_be24(&_rtmp_packet->buffer[2], cts);
_rtmp_packet->time_stamp = dts;
_rtmp_packet->body_size = _rtmp_packet->buffer.size();
_rtmp_packet->chunk_id = CHUNK_VIDEO;
_rtmp_packet->stream_index = STREAM_MEDIA;
_rtmp_packet->type_id = MSG_VIDEO;
//输出rtmp packet
RtmpCodec::inputRtmp(_rtmp_packet);
_rtmp_packet = nullptr;
// flags
_rtmp_packet->buffer[0] = (uint8_t)RtmpVideoCodec::h265 | ((uint8_t)(have_key_frame ? RtmpFrameType::key_frame : RtmpFrameType::inter_frame) << 4);
_rtmp_packet->buffer[1] = (uint8_t)RtmpH264PacketType::h264_nalu;
int32_t cts = pts - dts;
// cts
set_be24(&_rtmp_packet->buffer[2], cts);
_rtmp_packet->time_stamp = dts;
_rtmp_packet->body_size = _rtmp_packet->buffer.size();
_rtmp_packet->chunk_id = CHUNK_VIDEO;
_rtmp_packet->stream_index = STREAM_MEDIA;
_rtmp_packet->type_id = MSG_VIDEO;
// 输出rtmp packet
RtmpCodec::inputRtmp(_rtmp_packet);
_rtmp_packet = nullptr;
}, &_rtmp_packet->buffer);
}
void H265RtmpEncoder::makeVideoConfigPkt() {
#ifdef ENABLE_MP4
int8_t flags = FLV_CODEC_H265;
flags |= (FLV_KEY_FRAME << 4);
bool is_config = true;
auto rtmpPkt = RtmpPacket::create();
//header
rtmpPkt->buffer.push_back(flags);
rtmpPkt->buffer.push_back(!is_config);
//cts
rtmpPkt->buffer.append("\x0\x0\x0", 3);
auto flags = (uint8_t)RtmpVideoCodec::h265;
flags |= ((uint8_t)RtmpFrameType::key_frame << 4);
auto pkt = RtmpPacket::create();
// header
pkt->buffer.push_back(flags);
pkt->buffer.push_back((uint8_t)RtmpH264PacketType::h264_config_header);
// cts
pkt->buffer.append("\x0\x0\x0", 3);
struct mpeg4_hevc_t hevc;
memset(&hevc, 0, sizeof(hevc));
string vps_sps_pps = string("\x00\x00\x00\x01", 4) + _vps +
string("\x00\x00\x00\x01", 4) + _sps +
string("\x00\x00\x00\x01", 4) + _pps;
string vps_sps_pps = string("\x00\x00\x00\x01", 4) + _vps + string("\x00\x00\x00\x01", 4) + _sps + string("\x00\x00\x00\x01", 4) + _pps;
h265_annexbtomp4(&hevc, vps_sps_pps.data(), (int)vps_sps_pps.size(), NULL, 0, NULL, NULL);
uint8_t extra_data[1024];
int extra_data_size = mpeg4_hevc_decoder_configuration_record_save(&hevc, extra_data, sizeof(extra_data));
@ -226,17 +278,17 @@ void H265RtmpEncoder::makeVideoConfigPkt() {
WarnL << "生成H265 extra_data 失败";
return;
}
//HEVCDecoderConfigurationRecord
rtmpPkt->buffer.append((char *)extra_data, extra_data_size);
rtmpPkt->body_size = rtmpPkt->buffer.size();
rtmpPkt->chunk_id = CHUNK_VIDEO;
rtmpPkt->stream_index = STREAM_MEDIA;
rtmpPkt->time_stamp = 0;
rtmpPkt->type_id = MSG_VIDEO;
RtmpCodec::inputRtmp(rtmpPkt);
// HEVCDecoderConfigurationRecord
pkt->buffer.append((char *)extra_data, extra_data_size);
pkt->body_size = pkt->buffer.size();
pkt->chunk_id = CHUNK_VIDEO;
pkt->stream_index = STREAM_MEDIA;
pkt->time_stamp = 0;
pkt->type_id = MSG_VIDEO;
RtmpCodec::inputRtmp(pkt);
#else
WarnL << "请开启MP4相关功能并使能\"ENABLE_MP4\",否则对H265-RTMP支持不完善";
#endif
}
}//namespace mediakit
} // namespace mediakit

View File

@ -15,7 +15,7 @@
#include "Extension/Track.h"
#include "Extension/H265.h"
namespace mediakit{
namespace mediakit {
/**
* h265 Rtmp解码类
* h265 over rtmp h265-Frame
@ -25,7 +25,7 @@ public:
using Ptr = std::shared_ptr<H265RtmpDecoder>;
H265RtmpDecoder();
~H265RtmpDecoder() {}
~H265RtmpDecoder() = default;
/**
* 265 Rtmp包
@ -33,22 +33,23 @@ public:
*/
void inputRtmp(const RtmpPacket::Ptr &rtmp) override;
CodecId getCodecId() const override{
return CodecH265;
}
CodecId getCodecId() const override { return CodecH265; }
protected:
void onGetH265(const char *pcData, size_t iLen, uint32_t dts,uint32_t pts);
H265Frame::Ptr obtainFrame();
void onGetH265(const char *data, size_t size, uint32_t dts, uint32_t pts);
void splitFrame(const uint8_t *data, size_t size, uint32_t dts, uint32_t pts);
protected:
RtmpPacketInfo _info;
H265Frame::Ptr _h265frame;
};
/**
* 265 Rtmp打包类
*/
class H265RtmpEncoder : public H265RtmpDecoder{
class H265RtmpEncoder : public H265RtmpDecoder {
public:
using Ptr = std::shared_ptr<H265RtmpEncoder>;
@ -87,9 +88,9 @@ private:
std::string _pps;
H265Track::Ptr _track;
RtmpPacket::Ptr _rtmp_packet;
FrameMerger _merger{FrameMerger::mp4_nal_size};
FrameMerger _merger { FrameMerger::mp4_nal_size };
};
}//namespace mediakit
} // namespace mediakit
#endif //ZLMEDIAKIT_H265RTMPCODEC_H
#endif // ZLMEDIAKIT_H265RTMPCODEC_H

View File

@ -247,7 +247,7 @@ Frame::Ptr MP4Demuxer::makeFrame(uint32_t track_id, const Buffer::Ptr &buf, int6
AACTrack::Ptr track = dynamic_pointer_cast<AACTrack>(it->second);
assert(track);
//加上adts头
dumpAacConfig(track->getAacCfg(), buf->size() - DATA_OFFSET, (uint8_t *) buf->data() + (DATA_OFFSET - ADTS_HEADER_LEN), ADTS_HEADER_LEN);
dumpAacConfig(track->getConfig(), buf->size() - DATA_OFFSET, (uint8_t *) buf->data() + (DATA_OFFSET - ADTS_HEADER_LEN), ADTS_HEADER_LEN);
ret = std::make_shared<FrameWrapper<FrameFromPtr> >(buf, (uint64_t)dts, (uint64_t)pts, ADTS_HEADER_LEN, DATA_OFFSET - ADTS_HEADER_LEN, codec);
break;
}

View File

@ -234,8 +234,8 @@ bool MP4MuxerInterface::addTrack(const Track::Ptr &track) {
audio_track->getAudioChannel(),
audio_track->getAudioSampleBit() * audio_track->getAudioChannel(),
audio_track->getAudioSampleRate(),
audio_track->getAacCfg().data(),
audio_track->getAacCfg().size());
audio_track->getConfig().data(),
audio_track->getConfig().size());
if (track_id < 0) {
WarnL << "添加AAC Track失败:" << track_id;
return false;

View File

@ -107,14 +107,12 @@ void FlvMuxer::onWriteFlvHeader(const RtmpMediaSource::Ptr &src) {
//flv header
onWrite(buffer, false);
auto &metadata = src->getMetaData();
if (metadata) {
//在有metadata的情况下才发送metadata
//其实metadata没什么用有些推流器不产生metadata
// metadata
src->getMetaData([&](const AMFValue &metadata) {
AMFEncoder invoke;
invoke << "onMetaData" << metadata;
onWriteFlvTag(MSG_DATA, std::make_shared<BufferString>(invoke.data()), 0, false);
}
});
//config frame
src->getConfigFrame([&](const RtmpPacket::Ptr &pkt) {

View File

@ -62,7 +62,7 @@ bool FlvPlayer::onRecvMetadata(const AMFValue &metadata) {
}
void FlvPlayer::onRecvRtmpPacket(RtmpPacket::Ptr packet) {
if (!_play_result && !packet->isCfgFrame()) {
if (!_play_result && !packet->isConfigFrame()) {
_play_result = true;
onPlayResult(SockException(Err_success, "play http-flv success"));
}

View File

@ -91,21 +91,26 @@ void FlvSplitter::onRecvContent(const char *data, size_t len) {
case MSG_DATA3: {
BufferLikeString buffer(string(data, len));
AMFDecoder dec(buffer, _type == MSG_DATA3 ? 3 : 0);
std::string type = dec.load<std::string>();
auto first = dec.load<AMFValue>();
bool flag = true;
if (type == "@setDataFrame") {
std::string type = dec.load<std::string>();
if (type == "onMetaData") {
if (first.type() == AMFType::AMF_STRING) {
auto type = first.as_string();
if (type == "@setDataFrame") {
type = dec.load<std::string>();
if (type == "onMetaData") {
flag = onRecvMetadata(dec.load<AMFValue>());
} else {
WarnL << "unknown type:" << type;
}
} else if (type == "onMetaData") {
flag = onRecvMetadata(dec.load<AMFValue>());
} else {
WarnL << "unknown type:" << type;
WarnL << "unknown notify:" << type;
}
} else if (type == "onMetaData") {
flag = onRecvMetadata(dec.load<AMFValue>());
} else {
WarnL << "unknown notify:" << type;
WarnL << "Parse flv script data failed, invalid amf value: " << first.to_string();
}
if(!flag){
if (!flag) {
throw std::invalid_argument("check rtmp metadata failed");
}
return;

View File

@ -10,10 +10,10 @@
#include "Rtmp.h"
#include "Extension/Factory.h"
namespace mediakit{
TitleMeta::TitleMeta(float dur_sec, size_t fileSize, const std::map<std::string, std::string> &header)
{
namespace mediakit {
TitleMeta::TitleMeta(float dur_sec, size_t fileSize, const std::map<std::string, std::string> &header) {
_metadata.set("duration", dur_sec);
_metadata.set("fileSize", (int)fileSize);
_metadata.set("title", std::string("Streamed by ") + kServerName);
@ -22,14 +22,14 @@ TitleMeta::TitleMeta(float dur_sec, size_t fileSize, const std::map<std::string,
}
}
VideoMeta::VideoMeta(const VideoTrack::Ptr &video){
if(video->getVideoWidth() > 0 ){
VideoMeta::VideoMeta(const VideoTrack::Ptr &video) {
if (video->getVideoWidth() > 0) {
_metadata.set("width", video->getVideoWidth());
}
if(video->getVideoHeight() > 0 ){
if (video->getVideoHeight() > 0) {
_metadata.set("height", video->getVideoHeight());
}
if(video->getVideoFps() > 0 ){
if (video->getVideoFps() > 0) {
_metadata.set("framerate", video->getVideoFps());
}
if (video->getBitRate()) {
@ -39,26 +39,26 @@ VideoMeta::VideoMeta(const VideoTrack::Ptr &video){
_metadata.set("videocodecid", Factory::getAmfByCodecId(_codecId));
}
AudioMeta::AudioMeta(const AudioTrack::Ptr &audio){
AudioMeta::AudioMeta(const AudioTrack::Ptr &audio) {
if (audio->getBitRate()) {
_metadata.set("audiodatarate", audio->getBitRate() / 1024);
}
if(audio->getAudioSampleRate() > 0){
if (audio->getAudioSampleRate() > 0) {
_metadata.set("audiosamplerate", audio->getAudioSampleRate());
}
if(audio->getAudioSampleBit() > 0){
if (audio->getAudioSampleBit() > 0) {
_metadata.set("audiosamplesize", audio->getAudioSampleBit());
}
if(audio->getAudioChannel() > 0){
if (audio->getAudioChannel() > 0) {
_metadata.set("stereo", audio->getAudioChannel() > 1);
}
_codecId = audio->getCodecId();
_metadata.set("audiocodecid", Factory::getAmfByCodecId(_codecId));
}
uint8_t getAudioRtmpFlags(const Track::Ptr &track){
switch (track->getTrackType()){
case TrackAudio : {
uint8_t getAudioRtmpFlags(const Track::Ptr &track) {
switch (track->getTrackType()) {
case TrackAudio: {
auto audioTrack = std::dynamic_pointer_cast<AudioTrack>(track);
if (!audioTrack) {
WarnL << "获取AudioTrack失败";
@ -68,21 +68,21 @@ uint8_t getAudioRtmpFlags(const Track::Ptr &track){
auto iChannel = audioTrack->getAudioChannel();
auto iSampleBit = audioTrack->getAudioSampleBit();
uint8_t flvAudioType ;
switch (track->getCodecId()){
case CodecG711A : flvAudioType = FLV_CODEC_G711A; break;
case CodecG711U : flvAudioType = FLV_CODEC_G711U; break;
case CodecOpus : {
flvAudioType = FLV_CODEC_OPUS;
//opus不通过flags获取音频相关信息
uint8_t flvAudioType;
switch (track->getCodecId()) {
case CodecG711A: flvAudioType = (uint8_t)RtmpAudioCodec::g711a; break;
case CodecG711U: flvAudioType = (uint8_t)RtmpAudioCodec::g711u; break;
case CodecOpus: {
flvAudioType = (uint8_t)RtmpAudioCodec::opus;
// opus不通过flags获取音频相关信息
iSampleRate = 44100;
iSampleBit = 16;
iChannel = 2;
break;
}
case CodecAAC : {
flvAudioType = FLV_CODEC_AAC;
//aac不通过flags获取音频相关信息
case CodecAAC: {
flvAudioType = (uint8_t)RtmpAudioCodec::aac;
// aac不通过flags获取音频相关信息
iSampleRate = 44100;
iSampleBit = 16;
iChannel = 2;
@ -93,23 +93,15 @@ uint8_t getAudioRtmpFlags(const Track::Ptr &track){
uint8_t flvSampleRate;
switch (iSampleRate) {
case 44100:
flvSampleRate = 3;
break;
case 22050:
flvSampleRate = 2;
break;
case 11025:
flvSampleRate = 1;
break;
case 44100: flvSampleRate = 3; break;
case 22050: flvSampleRate = 2; break;
case 11025: flvSampleRate = 1; break;
case 16000: // nellymoser only
case 8000: // nellymoser only
case 5512: // not MP3
flvSampleRate = 0;
break;
default:
WarnL << "FLV does not support sample rate " << iSampleRate << " ,choose from (44100, 22050, 11025)";
return 0;
default: WarnL << "FLV does not support sample rate " << iSampleRate << " ,choose from (44100, 22050, 11025)"; return 0;
}
uint8_t flvStereoOrMono = (iChannel > 1);
@ -117,32 +109,28 @@ uint8_t getAudioRtmpFlags(const Track::Ptr &track){
return (flvAudioType << 4) | (flvSampleRate << 2) | (flvSampleBit << 1) | flvStereoOrMono;
}
default : return 0;
default: return 0;
}
}
void Metadata::addTrack(AMFValue &metadata, const Track::Ptr &track) {
Metadata::Ptr new_metadata;
switch (track->getTrackType()) {
case TrackVideo: {
new_metadata = std::make_shared<VideoMeta>(std::dynamic_pointer_cast<VideoTrack>(track));
}
break;
}
case TrackAudio: {
new_metadata = std::make_shared<AudioMeta>(std::dynamic_pointer_cast<AudioTrack>(track));
}
break;
default:
return;
}
default: return;
}
new_metadata->getMetadata().object_for_each([&](const std::string &key, const AMFValue &value) {
metadata.set(key, value);
});
new_metadata->getMetadata().object_for_each([&](const std::string &key, const AMFValue &value) { metadata.set(key, value); });
}
RtmpPacket::Ptr RtmpPacket::create(){
RtmpPacket::Ptr RtmpPacket::create() {
#if 0
static ResourcePool<RtmpPacket> packet_pool;
static onceToken token([]() {
@ -156,8 +144,7 @@ RtmpPacket::Ptr RtmpPacket::create(){
#endif
}
void RtmpPacket::clear()
{
void RtmpPacket::clear() {
is_abs_stamp = false;
time_stamp = 0;
ts_field = 0;
@ -165,36 +152,56 @@ void RtmpPacket::clear()
buffer.clear();
}
bool RtmpPacket::isVideoKeyFrame() const
{
return type_id == MSG_VIDEO && (uint8_t)buffer[0] >> 4 == FLV_KEY_FRAME && (uint8_t)buffer[1] == 1;
bool RtmpPacket::isVideoKeyFrame() const {
if (type_id != MSG_VIDEO) {
return false;
}
RtmpFrameType frame_type;
if (buffer[0] >> 7) {
// IsExHeader == 1
frame_type = (RtmpFrameType)((buffer[0] >> 4) & 0x07);
} else {
// IsExHeader == 0
frame_type = (RtmpFrameType)(buffer[0] >> 4);
}
return frame_type == RtmpFrameType::key_frame;
}
bool RtmpPacket::isCfgFrame() const
{
bool RtmpPacket::isConfigFrame() const {
switch (type_id) {
case MSG_VIDEO: return buffer[1] == 0;
case MSG_AUDIO: {
switch (getMediaType()) {
case FLV_CODEC_AAC: return buffer[1] == 0;
default: return false;
case MSG_AUDIO: {
return (RtmpAudioCodec)getRtmpCodecId() == RtmpAudioCodec::aac && (RtmpAACPacketType)buffer[1] == RtmpAACPacketType::aac_config_header;
}
}
default: return false;
case MSG_VIDEO: {
if (!isVideoKeyFrame()) {
return false;
}
if (buffer[0] >> 7) {
// IsExHeader == 1
return (RtmpPacketType)(buffer[0] & 0x0f) == RtmpPacketType::PacketTypeSequenceStart;
}
// IsExHeader == 0
switch ((RtmpVideoCodec)getRtmpCodecId()) {
case RtmpVideoCodec::h265:
case RtmpVideoCodec::h264: {
return (RtmpH264PacketType)buffer[1] == RtmpH264PacketType::h264_config_header;
}
default: return false;
}
}
default: return false;
}
}
int RtmpPacket::getMediaType() const
{
int RtmpPacket::getRtmpCodecId() const {
switch (type_id) {
case MSG_VIDEO: return (uint8_t)buffer[0] & 0x0F;
case MSG_AUDIO: return (uint8_t)buffer[0] >> 4;
default: return 0;
case MSG_VIDEO: return (uint8_t)buffer[0] & 0x0F;
case MSG_AUDIO: return (uint8_t)buffer[0] >> 4;
default: return 0;
}
}
int RtmpPacket::getAudioSampleRate() const
{
int RtmpPacket::getAudioSampleRate() const {
if (type_id != MSG_AUDIO) {
return 0;
}
@ -203,8 +210,7 @@ int RtmpPacket::getAudioSampleRate() const
return sampleRate[flvSampleRate];
}
int RtmpPacket::getAudioSampleBit() const
{
int RtmpPacket::getAudioSampleBit() const {
if (type_id != MSG_AUDIO) {
return 0;
}
@ -213,8 +219,7 @@ int RtmpPacket::getAudioSampleBit() const
return sampleBit[flvSampleBit];
}
int RtmpPacket::getAudioChannel() const
{
int RtmpPacket::getAudioChannel() const {
if (type_id != MSG_AUDIO) {
return 0;
}
@ -223,8 +228,7 @@ int RtmpPacket::getAudioChannel() const
return channel[flvStereoOrMono];
}
RtmpPacket & RtmpPacket::operator=(const RtmpPacket &that)
{
RtmpPacket &RtmpPacket::operator=(const RtmpPacket &that) {
is_abs_stamp = that.is_abs_stamp;
stream_index = that.stream_index;
body_size = that.body_size;
@ -234,32 +238,76 @@ RtmpPacket & RtmpPacket::operator=(const RtmpPacket &that)
return *this;
}
RtmpHandshake::RtmpHandshake(uint32_t _time, uint8_t *_random /*= nullptr*/)
{
RtmpHandshake::RtmpHandshake(uint32_t _time, uint8_t *_random /*= nullptr*/) {
_time = htonl(_time);
memcpy(time_stamp, &_time, 4);
if (!_random) {
random_generate((char *)random, sizeof(random));
}
else {
} else {
memcpy(random, _random, sizeof(random));
}
}
void RtmpHandshake::random_generate(char *bytes, int size)
{
static char cdata[] = { 0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x2d, 0x72,
0x74, 0x6d, 0x70, 0x2d, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72,
0x2d, 0x77, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x2d, 0x77, 0x69,
0x6e, 0x74, 0x65, 0x72, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72,
0x40, 0x31, 0x32, 0x36, 0x2e, 0x63, 0x6f, 0x6d };
// Fill `bytes` with `size` pseudo-random ASCII characters for the RTMP handshake.
// Characters are drawn from a fixed alphabet ("simple-rtmp-server-winlin-winterserver@126.com").
// Uses rand() without seeding here — NOT cryptographically secure; acceptable only
// because handshake padding carries no security requirement.
void RtmpHandshake::random_generate(char *bytes, int size) {
static char cdata[] = { 0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x2d, 0x72, 0x74, 0x6d, 0x70, 0x2d, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2d, 0x77, 0x69, 0x6e,
0x6c, 0x69, 0x6e, 0x2d, 0x77, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x40, 0x31, 0x32, 0x36, 0x2e, 0x63,
0x6f, 0x6d };
// NOTE(review): cdata is brace-initialized (no implicit NUL), so sizeof(cdata) - 1
// excludes the last element ('m') from selection — looks like a leftover from a
// string-literal version where -1 skipped the terminator; harmless for padding.
for (int i = 0; i < size; i++) {
bytes[i] = cdata[rand() % (sizeof(cdata) - 1)];
}
}
}//namespace mediakit
// Parse the leading bytes of an RTMP/FLV video tag body and detect the codec.
// Supports both the legacy FLV layout and enhanced-rtmp (IsExHeader + fourCC),
// see https://github.com/veovera/enhanced-rtmp.
// @param data  tag body (first byte is the frame-type/codec byte)
// @param size  number of valid bytes at data; must be >= 1
// @param info  optional out parameter; frame type, packet type and the
//              enhanced flag are filled in when provided
// @return the detected CodecId, or CodecInvalid for unsupported codecs
CodecId parseVideoRtmpPacket(const uint8_t *data, size_t size, RtmpPacketInfo *info) {
    RtmpPacketInfo save;
    info = info ? info : &save;
    info->codec = CodecInvalid;
    CHECK(size > 0);
    if (data[0] >> 7) {
        // IsExHeader == 1: enhanced-rtmp, codec identified by a 4-byte fourCC
        CHECK(size >= 5, "Invalid rtmp buffer size: ", size);
        info->is_enhanced = true;
        info->video.frame_type = (RtmpFrameType)((data[0] >> 4) & 0x07);
        info->video.pkt_type = (RtmpPacketType)(data[0] & 0x0f);
        if (memcmp(data + 1, "av01", 4) == 0) {
            // AV1
            info->codec = CodecAV1;
        } else if (memcmp(data + 1, "vp09", 4) == 0) {
            // VP9
            info->codec = CodecVP9;
        } else if (memcmp(data + 1, "hvc1", 4) == 0) {
            // HEVC(H265)
            info->codec = CodecH265;
        } else {
            WarnL << "Rtmp video codec not supported: " << std::string((char *)data + 1, 4);
        }
    } else {
        // IsExHeader == 0: legacy FLV layout, codec id in the low nibble
        info->is_enhanced = false;
        info->video.frame_type = (RtmpFrameType)(data[0] >> 4);
        auto rtmp_codec = (RtmpVideoCodec)(data[0] & 0x0f);
        switch (rtmp_codec) {
            case RtmpVideoCodec::h264: {
                // data[1] (AVCPacketType) is read below, so 2 bytes are required
                CHECK(size >= 2, "Invalid rtmp buffer size: ", size);
                info->codec = CodecH264;
                info->video.h264_pkt_type = (RtmpH264PacketType)data[1];
                break;
            }
            case RtmpVideoCodec::h265: {
                // data[1] (HEVCPacketType) is read below, so 2 bytes are required
                CHECK(size >= 2, "Invalid rtmp buffer size: ", size);
                info->codec = CodecH265;
                // HEVC reuses the AVC packet-type field in the legacy layout
                info->video.h264_pkt_type = (RtmpH264PacketType)data[1];
                break;
            }
            default: WarnL << "Rtmp video codec not supported: " << (int)rtmp_codec; break;
        }
    }
    return info->codec;
}
} // namespace mediakit
namespace toolkit {
StatisticImp(mediakit::RtmpPacket);
StatisticImp(mediakit::RtmpPacket);
}

View File

@ -63,18 +63,6 @@
#define CHUNK_AUDIO 6 /*音频chunkID*/
#define CHUNK_VIDEO 7 /*视频chunkID*/
#define FLV_KEY_FRAME 1
#define FLV_INTER_FRAME 2
#define FLV_CODEC_AAC 10
#define FLV_CODEC_H264 7
//金山扩展: https://github.com/ksvc/FFmpeg/wiki
#define FLV_CODEC_H265 12
#define FLV_CODEC_G711A 7
#define FLV_CODEC_G711U 8
//参考学而思网校: https://github.com/notedit/rtmp/commit/6e314ac5b29611431f8fb5468596b05815743c10
#define FLV_CODEC_OPUS 13
namespace mediakit {
#if defined(_WIN32)
@ -182,11 +170,15 @@ public:
void clear();
// video config frame和key frame都返回true
// 用于gop缓存定位
bool isVideoKeyFrame() const;
bool isCfgFrame() const;
int getMediaType() const;
// aac config或h264/h265 config返回true支持增强型rtmp
// 用于缓存解码配置信息
bool isConfigFrame() const;
int getRtmpCodecId() const;
int getAudioSampleRate() const;
int getAudioSampleBit() const;
int getAudioChannel() const;
@ -269,5 +261,114 @@ private:
//根据音频track获取flags
uint8_t getAudioRtmpFlags(const Track::Ptr &track);
////////////////// rtmp video //////////////////////////
//https://rtmp.veriskope.com/pdf/video_file_format_spec_v10_1.pdf
// UB [4]; Type of video frame.
// Video frame type, stored in the upper nibble of the first FLV video-tag byte
// (bits 4-6 only when the enhanced-rtmp IsExHeader bit is set).
enum class RtmpFrameType : uint8_t {
    reserved = 0,
    key_frame = 1, // key frame (for AVC, a seekable frame)
    inter_frame = 2, // inter frame (for AVC, a non-seekable frame)
    disposable_inter_frame = 3, // disposable inter frame (H.263 only)
    generated_key_frame = 4, // generated key frame (reserved for server use only)
    video_info_frame = 5, // video info/command frame
};
// UB [4]; Codec Identifier.
// UB [4]; Codec Identifier — low nibble of the first FLV video-tag byte
// (legacy layout only; enhanced-rtmp uses a fourCC instead).
enum class RtmpVideoCodec : uint8_t {
    h263 = 2, // Sorenson H.263
    screen_video = 3, // Screen video
    vp6 = 4, // On2 VP6
    vp6_alpha = 5, // On2 VP6 with alpha channel
    screen_video2 = 6, // Screen video version 2
    h264 = 7, // avc
    h265 = 12, // Chinese-industry extension (ksvc/FFmpeg), not part of the FLV spec
};
// UI8;
// UI8; AVCPacketType — second byte of a legacy (non-enhanced) H.264/H.265 video tag.
enum class RtmpH264PacketType : uint8_t {
    h264_config_header = 0, // AVC or HEVC sequence header(sps/pps)
    h264_nalu = 1, // AVC or HEVC NALU
    h264_end_seq = 2, // AVC or HEVC end of sequence (lower level NALU sequence ender is not REQUIRED or supported)
};
// https://github.com/veovera/enhanced-rtmp/blob/main/enhanced-rtmp.pdf
// UB[4]
// UB[4]; enhanced-rtmp packet type — low nibble of the first video-tag byte
// when IsExHeader == 1. See https://github.com/veovera/enhanced-rtmp/blob/main/enhanced-rtmp.pdf
enum class RtmpPacketType : uint8_t {
    PacketTypeSequenceStart = 0,
    PacketTypeCodedFrames = 1,
    PacketTypeSequenceEnd = 2,
    // CompositionTime Offset is implied to equal zero. This is
    // an optimization to save putting SI24 composition time value of zero on
    // the wire. See pseudo code below in the VideoTagBody section
    PacketTypeCodedFramesX = 3,
    // VideoTagBody does not contain video data. VideoTagBody
    // instead contains an AMF encoded metadata. See Metadata Frame
    // section for an illustration of its usage. As an example, the metadata
    // can be HDR information. This is a good way to signal HDR
    // information. This also opens up future ways to express additional
    // metadata that is meant for the next video sequence.
    //
    // note: presence of PacketTypeMetadata means that FrameType
    // flags at the top of this table should be ignored
    PacketTypeMetadata = 4,
    // Carriage of bitstream in MPEG-2 TS format
    // note: PacketTypeSequenceStart and PacketTypeMPEG2TSSequenceStart
    // are mutually exclusive
    PacketTypeMPEG2TSSequenceStart = 5,
};
////////////////// rtmp audio //////////////////////////
//https://rtmp.veriskope.com/pdf/video_file_format_spec_v10_1.pdf
// UB [4]; Format of SoundData
// UB [4]; Format of SoundData — high nibble of the first FLV audio-tag byte.
// Only the codecs this project handles are named; see the table below for the
// full FLV SoundFormat assignment.
enum class RtmpAudioCodec : uint8_t {
    /**
    0 = Linear PCM, platform endian
    1 = ADPCM
    2 = MP3
    3 = Linear PCM, little endian
    4 = Nellymoser 16 kHz mono
    5 = Nellymoser 8 kHz mono
    6 = Nellymoser
    7 = G.711 A-law logarithmic PCM
    8 = G.711 mu-law logarithmic PCM
    9 = reserved
    10 = AAC
    11 = Speex
    14 = MP3 8 kHz
    15 = Device-specific sound
    */
    g711a = 7,
    g711u = 8,
    aac = 10,
    opus = 13 // Chinese-industry extension, not part of the FLV spec
};
// UI8;
// UI8; AACPacketType — second byte of an AAC audio tag.
enum class RtmpAACPacketType : uint8_t {
    aac_config_header = 0, // AAC sequence header
    aac_raw = 1, // AAC raw
};
////////////////////////////////////////////
// Parsed summary of an RTMP video tag header, filled by parseVideoRtmpPacket().
struct RtmpPacketInfo {
    CodecId codec = CodecInvalid;
    // True when the tag uses the enhanced-rtmp IsExHeader layout.
    // Default-initialized so the flag is never read as garbage if parsing
    // bails out before reaching the layout branch.
    bool is_enhanced = false;
    union {
        struct {
            RtmpFrameType frame_type;
            RtmpPacketType pkt_type; // valid when is_enhanced == true
            RtmpH264PacketType h264_pkt_type; // valid when is_enhanced == false
        } video;
    };
};
// https://github.com/veovera/enhanced-rtmp
CodecId parseVideoRtmpPacket(const uint8_t *data, size_t size, RtmpPacketInfo *info = nullptr);
}//namespace mediakit
#endif//__rtmp_h

View File

@ -19,12 +19,12 @@ size_t RtmpDemuxer::trackCount(const AMFValue &metadata) {
size_t ret = 0;
metadata.object_for_each([&](const string &key, const AMFValue &val) {
if (key == "videocodecid") {
//找到视频
// 找到视频
++ret;
return;
}
if (key == "audiocodecid") {
//找到音频
// 找到音频
++ret;
return;
}
@ -32,7 +32,7 @@ size_t RtmpDemuxer::trackCount(const AMFValue &metadata) {
return ret;
}
bool RtmpDemuxer::loadMetaData(const AMFValue &val){
bool RtmpDemuxer::loadMetaData(const AMFValue &val) {
bool ret = false;
try {
int audiosamplerate = 0;
@ -60,12 +60,12 @@ bool RtmpDemuxer::loadMetaData(const AMFValue &val){
return;
}
if (key == "videocodecid") {
//找到视频
// 找到视频
videocodecid = &val;
return;
}
if (key == "audiocodecid") {
//找到音频
// 找到音频
audiocodecid = &val;
return;
}
@ -75,16 +75,22 @@ bool RtmpDemuxer::loadMetaData(const AMFValue &val){
}
if (key == "videodatarate") {
videodatarate = val.as_integer();
_videodatarate = videodatarate * 1024;
return;
}
});
if (videocodecid) {
//有视频
// 有视频
ret = true;
makeVideoTrack(*videocodecid, videodatarate * 1024);
if (videocodecid->type() == AMF_NUMBER && videocodecid->as_integer() == (int)RtmpVideoCodec::h264) {
// https://github.com/veovera/enhanced-rtmp/issues/8
_complete_delay = true;
} else {
makeVideoTrack(*videocodecid, videodatarate * 1024);
}
}
if (audiocodecid) {
//有音频
// 有音频
ret = true;
makeAudioTrack(*audiocodecid, audiosamplerate, audiochannels, audiosamplesize, audiodatarate * 1024);
}
@ -92,8 +98,8 @@ bool RtmpDemuxer::loadMetaData(const AMFValue &val){
WarnL << ex.what();
}
if (ret) {
//metadata中存在track相关的描述那么我们根据metadata判断有多少个track
if (ret && !_complete_delay) {
// metadata中存在track相关的描述那么我们根据metadata判断有多少个track
addTrackCompleted();
}
return ret;
@ -108,8 +114,14 @@ void RtmpDemuxer::inputRtmp(const RtmpPacket::Ptr &pkt) {
case MSG_VIDEO: {
if (!_try_get_video_track) {
_try_get_video_track = true;
auto codec = AMFValue(pkt->getMediaType());
makeVideoTrack(codec, 0);
RtmpPacketInfo info;
auto codec_id = parseVideoRtmpPacket((uint8_t *)pkt->data(), pkt->size(), &info);
if (codec_id != CodecInvalid) {
makeVideoTrack(Factory::getTrackByCodecId(codec_id), _videodatarate);
if (_complete_delay) {
addTrackCompleted();
}
}
}
if (_video_rtmp_decoder) {
_video_rtmp_decoder->inputRtmp(pkt);
@ -120,7 +132,7 @@ void RtmpDemuxer::inputRtmp(const RtmpPacket::Ptr &pkt) {
case MSG_AUDIO: {
if (!_try_get_audio_track) {
_try_get_audio_track = true;
auto codec = AMFValue(pkt->getMediaType());
auto codec = AMFValue(pkt->getRtmpCodecId());
makeAudioTrack(codec, pkt->getAudioSampleRate(), pkt->getAudioChannel(), pkt->getAudioSampleBit(), 0);
}
if (_audio_rtmp_decoder) {
@ -128,51 +140,55 @@ void RtmpDemuxer::inputRtmp(const RtmpPacket::Ptr &pkt) {
}
break;
}
default : break;
default: break;
}
}
void RtmpDemuxer::makeVideoTrack(const AMFValue &videoCodec, int bit_rate) {
makeVideoTrack(Factory::getVideoTrackByAmf(videoCodec), bit_rate);
}
void RtmpDemuxer::makeVideoTrack(const Track::Ptr &track, int bit_rate) {
if (_video_rtmp_decoder) {
return;
}
//生成Track对象
_video_track = dynamic_pointer_cast<VideoTrack>(Factory::getVideoTrackByAmf(videoCodec));
// 生成Track对象
_video_track = dynamic_pointer_cast<VideoTrack>(track);
if (!_video_track) {
return;
}
//生成rtmpCodec对象以便解码rtmp
// 生成rtmpCodec对象以便解码rtmp
_video_rtmp_decoder = Factory::getRtmpCodecByTrack(_video_track, false);
if (!_video_rtmp_decoder) {
//找不到相应的rtmp解码器该track无效
// 找不到相应的rtmp解码器该track无效
_video_track.reset();
return;
}
_video_track->setBitRate(bit_rate);
//设置rtmp解码器代理生成的frame写入该Track
// 设置rtmp解码器代理生成的frame写入该Track
_video_rtmp_decoder->addDelegate(_video_track);
addTrack(_video_track);
_try_get_video_track = true;
}
void RtmpDemuxer::makeAudioTrack(const AMFValue &audioCodec,int sample_rate, int channels, int sample_bit, int bit_rate) {
void RtmpDemuxer::makeAudioTrack(const AMFValue &audioCodec, int sample_rate, int channels, int sample_bit, int bit_rate) {
if (_audio_rtmp_decoder) {
return;
}
//生成Track对象
// 生成Track对象
_audio_track = dynamic_pointer_cast<AudioTrack>(Factory::getAudioTrackByAmf(audioCodec, sample_rate, channels, sample_bit));
if (!_audio_track) {
return;
}
//生成rtmpCodec对象以便解码rtmp
// 生成rtmpCodec对象以便解码rtmp
_audio_rtmp_decoder = Factory::getRtmpCodecByTrack(_audio_track, false);
if (!_audio_rtmp_decoder) {
//找不到相应的rtmp解码器该track无效
// 找不到相应的rtmp解码器该track无效
_audio_track.reset();
return;
}
_audio_track->setBitRate(bit_rate);
//设置rtmp解码器代理生成的frame写入该Track
// 设置rtmp解码器代理生成的frame写入该Track
_audio_rtmp_decoder->addDelegate(_audio_track);
addTrack(_audio_track);
_try_get_audio_track = true;

View File

@ -45,12 +45,15 @@ public:
private:
void makeVideoTrack(const AMFValue &val, int bit_rate);
void makeVideoTrack(const Track::Ptr &val, int bit_rate);
void makeAudioTrack(const AMFValue &val, int sample_rate, int channels, int sample_bit, int bit_rate);
private:
bool _try_get_video_track = false;
bool _try_get_audio_track = false;
bool _complete_delay = false;
float _duration = 0;
int _videodatarate = 0;
AudioTrack::Ptr _audio_track;
VideoTrack::Ptr _video_track;
RtmpCodec::Ptr _audio_rtmp_decoder;

View File

@ -73,42 +73,29 @@ public:
/**
* metadata
*/
const AMFValue &getMetaData() const {
template <typename FUNC>
void getMetaData(const FUNC &func) const {
std::lock_guard<std::recursive_mutex> lock(_mtx);
return _metadata;
if (_metadata) {
func(_metadata);
}
}
/**
* config帧
*/
template<typename FUNC>
void getConfigFrame(const FUNC &f) {
template <typename FUNC>
void getConfigFrame(const FUNC &func) {
std::lock_guard<std::recursive_mutex> lock(_mtx);
for (auto &pr : _config_frame_map) {
f(pr.second);
func(pr.second);
}
}
/**
* metadata
*/
virtual void setMetaData(const AMFValue &metadata) {
_metadata = metadata;
_metadata.set("title", std::string("Streamed by ") + kServerName);
_have_video = _metadata["videocodecid"];
_have_audio = _metadata["audiocodecid"];
if (_ring) {
regist();
}
}
/**
* metadata
*/
void updateMetaData(const AMFValue &metadata) {
std::lock_guard<std::recursive_mutex> lock(_mtx);
_metadata = metadata;
}
virtual void setMetaData(const AMFValue &metadata);
/**
* rtmp包

View File

@ -2,15 +2,15 @@
#include "RtmpMediaSourceImp.h"
namespace mediakit {
uint32_t RtmpMediaSource::getTimeStamp(TrackType trackType)
{
uint32_t RtmpMediaSource::getTimeStamp(TrackType trackType) {
assert(trackType >= TrackInvalid && trackType < TrackMax);
if (trackType != TrackInvalid) {
//获取某track的时间戳
// 获取某track的时间戳
return _track_stamps[trackType];
}
//获取所有track的最小时间戳
// 获取所有track的最小时间戳
uint32_t ret = UINT32_MAX;
for (auto &stamp : _track_stamps) {
if (stamp > 0 && stamp < ret) {
@ -20,38 +20,61 @@ uint32_t RtmpMediaSource::getTimeStamp(TrackType trackType)
return ret;
}
void RtmpMediaSource::onWrite(RtmpPacket::Ptr pkt, bool /*= true*/)
{
bool is_video = pkt->type_id == MSG_VIDEO;
_speed[is_video ? TrackVideo : TrackAudio] += pkt->size();
//保存当前时间戳
switch (pkt->type_id) {
case MSG_VIDEO: _track_stamps[TrackVideo] = pkt->time_stamp, _have_video = true; break;
case MSG_AUDIO: _track_stamps[TrackAudio] = pkt->time_stamp, _have_audio = true; break;
default: break;
void RtmpMediaSource::setMetaData(const AMFValue &metadata) {
{
std::lock_guard<std::recursive_mutex> lock(_mtx);
_metadata = metadata;
_metadata.set("title", std::string("Streamed by ") + kServerName);
}
if (pkt->isCfgFrame()) {
_have_video = _metadata["videocodecid"];
_have_audio = _metadata["audiocodecid"];
if (_ring) {
regist();
AMFEncoder enc;
enc << "onMetaData" << _metadata;
RtmpPacket::Ptr packet = RtmpPacket::create();
packet->buffer = enc.data();
packet->type_id = MSG_DATA;
packet->time_stamp = 0;
packet->chunk_id = CHUNK_CLIENT_REQUEST_AFTER;
packet->stream_index = STREAM_MEDIA;
onWrite(std::move(packet));
}
}
void RtmpMediaSource::onWrite(RtmpPacket::Ptr pkt, bool /*= true*/) {
bool is_video = pkt->type_id == MSG_VIDEO;
_speed[is_video ? TrackVideo : TrackAudio] += pkt->size();
// 保存当前时间戳
switch (pkt->type_id) {
case MSG_VIDEO: _track_stamps[TrackVideo] = pkt->time_stamp, _have_video = true; break;
case MSG_AUDIO: _track_stamps[TrackAudio] = pkt->time_stamp, _have_audio = true; break;
default: break;
}
if (pkt->isConfigFrame()) {
std::lock_guard<std::recursive_mutex> lock(_mtx);
_config_frame_map[pkt->type_id] = pkt;
if (!_ring) {
//注册后收到config帧更新到各播放器
// 注册后收到config帧更新到各播放器
return;
}
}
if (!_ring) {
std::weak_ptr<RtmpMediaSource> weakSelf = std::static_pointer_cast<RtmpMediaSource>(shared_from_this());
auto lam = [weakSelf](int size) {
auto strongSelf = weakSelf.lock();
if (!strongSelf) {
std::weak_ptr<RtmpMediaSource> weak_self = std::static_pointer_cast<RtmpMediaSource>(shared_from_this());
auto lam = [weak_self](int size) {
auto strong_self = weak_self.lock();
if (!strong_self) {
return;
}
strongSelf->onReaderChanged(size);
strong_self->onReaderChanged(size);
};
//GOP默认缓冲512组RTMP包每组RTMP包时间戳相同(如果开启合并写了那么每组为合并写时间内的RTMP包),
//每次遇到关键帧第一个RTMP包则会清空GOP缓存(因为有新的关键帧了,同样可以实现秒开)
// GOP默认缓冲512组RTMP包每组RTMP包时间戳相同(如果开启合并写了那么每组为合并写时间内的RTMP包),
// 每次遇到关键帧第一个RTMP包则会清空GOP缓存(因为有新的关键帧了,同样可以实现秒开)
_ring = std::make_shared<RingType>(_ring_size, std::move(lam));
if (_metadata) {
regist();
@ -62,47 +85,42 @@ void RtmpMediaSource::onWrite(RtmpPacket::Ptr pkt, bool /*= true*/)
PacketCache<RtmpPacket>::inputPacket(stamp, is_video, std::move(pkt), key);
}
RtmpMediaSourceImp::RtmpMediaSourceImp(const MediaTuple& tuple, int ringSize) : RtmpMediaSource(tuple, ringSize)
{
RtmpMediaSourceImp::RtmpMediaSourceImp(const MediaTuple &tuple, int ringSize)
: RtmpMediaSource(tuple, ringSize) {
_demuxer = std::make_shared<RtmpDemuxer>();
_demuxer->setTrackListener(this);
}
void RtmpMediaSourceImp::setMetaData(const AMFValue &metadata)
{
void RtmpMediaSourceImp::setMetaData(const AMFValue &metadata) {
if (!_demuxer->loadMetaData(metadata)) {
//该metadata无效需要重新生成
// 该metadata无效需要重新生成
_metadata = metadata;
_recreate_metadata = true;
}
RtmpMediaSource::setMetaData(metadata);
}
void RtmpMediaSourceImp::onWrite(RtmpPacket::Ptr pkt, bool /*= true*/)
{
void RtmpMediaSourceImp::onWrite(RtmpPacket::Ptr pkt, bool /*= true*/) {
if (!_all_track_ready || _muxer->isEnabled()) {
//未获取到所有Track后或者开启转协议那么需要解复用rtmp
// 未获取到所有Track后或者开启转协议那么需要解复用rtmp
_demuxer->inputRtmp(pkt);
}
RtmpMediaSource::onWrite(std::move(pkt));
}
int RtmpMediaSourceImp::totalReaderCount()
{
int RtmpMediaSourceImp::totalReaderCount() {
return readerCount() + (_muxer ? _muxer->totalReaderCount() : 0);
}
void RtmpMediaSourceImp::setProtocolOption(const ProtocolOption &option)
{
//不重复生成rtmp
void RtmpMediaSourceImp::setProtocolOption(const ProtocolOption &option) {
// 不重复生成rtmp
_option = option;
//不重复生成rtmp协议
// 不重复生成rtmp协议
_option.enable_rtmp = false;
_muxer = std::make_shared<MultiMediaSourceMuxer>(_tuple, _demuxer->getDuration(), _option);
_muxer->setMediaListener(getListener());
_muxer->setTrackListener(std::static_pointer_cast<RtmpMediaSourceImp>(shared_from_this()));
//让_muxer对象拦截一部分事件(比如说录像相关事件)
// 让_muxer对象拦截一部分事件(比如说录像相关事件)
MediaSource::setListener(_muxer);
for (auto &track : _demuxer->getTracks(false)) {
@ -111,8 +129,7 @@ void RtmpMediaSourceImp::setProtocolOption(const ProtocolOption &option)
}
}
bool RtmpMediaSourceImp::addTrack(const Track::Ptr &track)
{
bool RtmpMediaSourceImp::addTrack(const Track::Ptr &track) {
if (_muxer) {
if (_muxer->addTrack(track)) {
track->addDelegate(_muxer);
@ -122,45 +139,38 @@ bool RtmpMediaSourceImp::addTrack(const Track::Ptr &track)
return false;
}
void RtmpMediaSourceImp::addTrackCompleted()
{
void RtmpMediaSourceImp::addTrackCompleted() {
if (_muxer) {
_muxer->addTrackCompleted();
}
}
void RtmpMediaSourceImp::resetTracks()
{
void RtmpMediaSourceImp::resetTracks() {
if (_muxer) {
_muxer->resetTracks();
}
}
void RtmpMediaSourceImp::onAllTrackReady()
{
void RtmpMediaSourceImp::onAllTrackReady() {
_all_track_ready = true;
if (_recreate_metadata) {
//更新metadata
// 更新metadata
for (auto &track : _muxer->getTracks()) {
Metadata::addTrack(_metadata, track);
}
RtmpMediaSource::updateMetaData(_metadata);
RtmpMediaSource::setMetaData(_metadata);
}
}
void RtmpMediaSourceImp::setListener(const std::weak_ptr<MediaSourceEvent> &listener)
{
void RtmpMediaSourceImp::setListener(const std::weak_ptr<MediaSourceEvent> &listener) {
if (_muxer) {
//_muxer对象不能处理的事件再给listener处理
_muxer->setMediaListener(listener);
}
else {
//未创建_muxer对象事件全部给listener处理
} else {
// 未创建_muxer对象事件全部给listener处理
MediaSource::setListener(listener);
}
}
}
} // namespace mediakit

View File

@ -191,6 +191,13 @@ void RtmpPlayer::send_connect() {
obj.set("audioCodecs", (double) (0x0400));
//只支持H264
obj.set("videoCodecs", (double) (0x0080));
AMFValue fourCcList(AMF_STRICT_ARRAY);
fourCcList.add("av01");
fourCcList.add("vp09");
fourCcList.add("hvc1");
obj.set("fourCcList", fourCcList);
sendInvoke("connect", obj);
addOnResultCB([this](AMFDecoder &dec) {
//TraceL << "connect result";
@ -332,7 +339,7 @@ void RtmpPlayer::onMediaData_l(RtmpPacket::Ptr chunk_data) {
return;
}
if (chunk_data->isCfgFrame()) {
if (chunk_data->isConfigFrame()) {
//输入配置帧以便初始化完成各个track
onRtmpPacket(chunk_data);
} else {

View File

@ -135,6 +135,13 @@ void RtmpPusher::send_connect() {
obj.set("type", "nonprivate");
obj.set("tcUrl", _tc_url);
obj.set("swfUrl", _tc_url);
AMFValue fourCcList(AMF_STRICT_ARRAY);
fourCcList.add("av01");
fourCcList.add("vp09");
fourCcList.add("hvc1");
obj.set("fourCcList", fourCcList);
sendInvoke("connect", obj);
addOnResultCB([this](AMFDecoder &dec) {
//TraceL << "connect result";
@ -183,10 +190,14 @@ void RtmpPusher::send_metaData(){
throw std::runtime_error("the media source was released");
}
AMFEncoder enc;
enc << "@setDataFrame" << "onMetaData" << src->getMetaData();
sendRequest(MSG_DATA, enc.data());
// metadata
src->getMetaData([&](const AMFValue &metadata) {
AMFEncoder enc;
enc << "@setDataFrame" << "onMetaData" << metadata;
sendRequest(MSG_DATA, enc.data());
});
// config frame
src->getConfigFrame([&](const RtmpPacket::Ptr &pkt) {
sendRtmp(pkt->type_id, _stream_index, pkt, pkt->time_stamp, pkt->chunk_id);
});
@ -207,7 +218,16 @@ void RtmpPusher::send_metaData(){
if (++i == size) {
strong_self->setSendFlushFlag(true);
}
strong_self->sendRtmp(rtmp->type_id, strong_self->_stream_index, rtmp, rtmp->time_stamp, rtmp->chunk_id);
if (rtmp->type_id == MSG_DATA) {
// update metadata
AMFEncoder enc;
enc << "@setDataFrame";
auto pkt = enc.data();
pkt.append(rtmp->data(), rtmp->size());
strong_self->sendRequest(MSG_DATA, pkt);
} else {
strong_self->sendRtmp(rtmp->type_id, strong_self->_stream_index, rtmp, rtmp->time_stamp, rtmp->chunk_id);
}
});
});
_rtmp_reader->setDetachCB([weak_self]() {

View File

@ -291,17 +291,14 @@ void RtmpSession::sendPlayResponse(const string &err, const RtmpMediaSource::Ptr
"description", "Now published." ,
"details", _media_info.stream,
"clientid", "0"});
auto &metadata = src->getMetaData();
if(metadata){
//在有metadata的情况下才发送metadata
//其实metadata没什么用有些推流器不产生metadata
// onMetaData
// metadata
src->getMetaData([&](const AMFValue &metadata) {
invoke.clear();
invoke << "onMetaData" << metadata;
sendResponse(MSG_DATA, invoke.data());
}
});
// config frame
src->getConfigFrame([&](const RtmpPacket::Ptr &pkt) {
onSendMedia(pkt);
});
@ -481,6 +478,7 @@ void RtmpSession::setMetaData(AMFDecoder &dec) {
throw std::runtime_error("can only set metadata");
}
_push_metadata = dec.load<AMFValue>();
_set_meta_data = false;
}
void RtmpSession::onProcessCmd(AMFDecoder &dec) {
@ -528,6 +526,7 @@ void RtmpSession::onRtmpChunk(RtmpPacket::Ptr packet) {
} else if (type == "onMetaData") {
//兼容某些不规范的推流器
_push_metadata = dec.load<AMFValue>();
_set_meta_data = false;
} else {
TraceP(this) << "unknown notify:" << type;
}

View File

@ -1816,11 +1816,17 @@ bool RtcConfigure::onCheckCodecProfile(const RtcCodecPlan &plan, CodecId codec)
return true;
}
/**
Single NAL Unit Mode = 0,   // Single NAL unit mode (only NAL types 1-23 are allowed)
Non Interleaved Mode = 1,   // Non-interleaved mode: 1-23, 24 (STAP-A) and 28 (FU-A) are allowed
Interleaved Mode = 2,       // 25 (STAP-B), 26 (MTAP16), 27 (MTAP24), 28 (FU-A) and 29 (FU-B) are allowed.
**/
void RtcConfigure::onSelectPlan(RtcCodecPlan &plan, CodecId codec) const {
if (_rtsp_video_plan && codec == CodecH264 && getCodecId(_rtsp_video_plan->codec) == CodecH264) {
//h264时设置packetization-mod为一致
        // h264时设置packetization-mode为一致
auto mode = _rtsp_video_plan->fmtp[kMode];
plan.fmtp[kMode] = mode.empty() ? "0" : mode;
GET_CONFIG(bool, h264_stap_a, Rtp::kH264StapA);
plan.fmtp[kMode] = mode.empty() ? std::to_string(h264_stap_a) : mode;
}
}

View File

@ -149,7 +149,7 @@ void WebRtcPusher::onRtcConfigure(RtcConfigure &configure) const {
configure.audio.direction = configure.video.direction = RtpDirection::recvonly;
}
float WebRtcPusher::getLossRate(MediaSource &sender,TrackType type){
float WebRtcPusher::getLossRate(MediaSource &sender,TrackType type) {
return WebRtcTransportImp::getLossRate(type);
}
@ -159,8 +159,13 @@ void WebRtcPusher::OnDtlsTransportClosed(const RTC::DtlsTransport *dtlsTransport
WebRtcTransportImp::OnDtlsTransportClosed(dtlsTransport);
}
void WebRtcPusher::onRtcpBye(){
void WebRtcPusher::onRtcpBye() {
WebRtcTransportImp::onRtcpBye();
}
void WebRtcPusher::onShutdown(const SockException &ex) {
_push_src = nullptr;
WebRtcTransportImp::onShutdown(ex);
}
}// namespace mediakit

View File

@ -23,15 +23,17 @@ public:
static Ptr create(const EventPoller::Ptr &poller, const RtspMediaSource::Ptr &src,
const std::shared_ptr<void> &ownership, const MediaInfo &info, const ProtocolOption &option, bool preferred_tcp = false);
protected:
///////WebRtcTransportImp override///////
void onStartWebRTC() override;
void onDestory() override;
void onRtcConfigure(RtcConfigure &configure) const override;
void onRecvRtp(MediaTrack &track, const std::string &rid, RtpPacket::Ptr rtp) override;
void onShutdown(const SockException &ex) override;
void onRtcpBye() override;
//// dtls相关的回调 ////
void OnDtlsTransportClosed(const RTC::DtlsTransport *dtlsTransport) override;
void OnDtlsTransportClosed(const RTC::DtlsTransport *dtlsTransport) override;
protected:
///////MediaSourceEvent override///////