重构代码

This commit is contained in:
xia-chu 2023-12-08 21:45:24 +08:00
parent 44c82752b3
commit fbe159db0e
167 changed files with 1075 additions and 1817 deletions

View File

@ -47,7 +47,7 @@ set(MediaServer_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/media-server")
# TODO:
# movflv MP4
if (ENABLE_MP4 OR ENABLE_HLS_FMP4)
if (ENABLE_MP4)
# MOV
set(MediaServer_MOV_ROOT ${MediaServer_ROOT}/libmov)
aux_source_directory(${MediaServer_MOV_ROOT}/include MOV_SRC_LIST)
@ -80,10 +80,6 @@ if (ENABLE_MP4 OR ENABLE_HLS_FMP4)
message(STATUS "ENABLE_MP4 defined")
update_cached_list(MK_COMPILE_DEFINITIONS ENABLE_MP4)
endif ()
if (ENABLE_HLS_FMP4)
message(STATUS "ENABLE_HLS_FMP4 defined")
update_cached_list(MK_COMPILE_DEFINITIONS ENABLE_HLS_FMP4)
endif ()
endif ()
# mpeg ts

View File

@ -44,7 +44,6 @@ option(ENABLE_JEMALLOC_STATIC "Enable static linking to the jemalloc library" OF
option(ENABLE_JEMALLOC_DUMP "Enable jemalloc to dump malloc statistics" OFF)
option(ENABLE_MEM_DEBUG "Enable Memory Debug" OFF)
option(ENABLE_MP4 "Enable MP4" ON)
option(ENABLE_HLS_FMP4 "Enable HLS-FMP4" ON)
option(ENABLE_MSVC_MT "Enable MSVC Mt/Mtd lib" ON)
option(ENABLE_MYSQL "Enable MySQL" OFF)
option(ENABLE_OPENSSL "Enable OpenSSL" ON)

View File

@ -280,7 +280,6 @@ API_EXPORT uint16_t API_CALL mk_rtc_server_start(uint16_t port) {
class WebRtcArgsUrl : public mediakit::WebRtcArgs {
public:
WebRtcArgsUrl(std::string url) { _url = std::move(url); }
~WebRtcArgsUrl() = default;
toolkit::variant operator[](const std::string &key) const override {
if (key == "url") {
@ -305,7 +304,8 @@ API_EXPORT void API_CALL mk_webrtc_get_answer_sdp2(void *user_data, on_user_data
auto session = std::make_shared<HttpSession>(Socket::createSocket());
std::string offer_str = offer;
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
WebRtcPluginManager::Instance().getAnswerSdp(*session, type, WebRtcArgsUrl(url),
auto args = std::make_shared<WebRtcArgsUrl>(url);
WebRtcPluginManager::Instance().getAnswerSdp(*session, type, *args,
[offer_str, session, ptr, cb](const WebRtcInterface &exchanger) mutable {
try {
auto sdp_answer = exchangeSdp(exchanger, offer_str);

View File

@ -24,7 +24,6 @@ public:
MediaPlayerForC(){
_player = std::make_shared<MediaPlayer>();
}
~MediaPlayerForC() = default;
MediaPlayer *operator->(){
return _player.get();

View File

@ -33,9 +33,8 @@ private:
class SessionForC : public toolkit::Session {
public:
SessionForC(const toolkit::Socket::Ptr &pSock) ;
~SessionForC() override = default;
void onRecv(const toolkit::Buffer::Ptr &buffer) override ;
SessionForC(const toolkit::Socket::Ptr &pSock);
void onRecv(const toolkit::Buffer::Ptr &buffer) override;
void onError(const toolkit::SockException &err) override;
void onManager() override;
std::shared_ptr<void> _user_data;

View File

@ -80,8 +80,6 @@ public:
_user_data = std::move(user_data);
}
~TimerForC() = default;
uint64_t operator()(){
lock_guard<recursive_mutex> lck(_mxt);
if(!_cb){
@ -135,8 +133,6 @@ API_EXPORT void API_CALL mk_timer_release(mk_timer ctx){
class WorkThreadPoolForC : public TaskExecutorGetterImp {
public:
~WorkThreadPoolForC() override = default;
WorkThreadPoolForC(const char *name, size_t n_thread, int priority) {
//最低优先级
addPoller(name, n_thread, (ThreadPool::Priority) priority, false);

View File

@ -1,4 +1,4 @@
/*
/*
* Copyright (c) 2016 The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/xia-chu/ZLMediaKit).
@ -27,8 +27,6 @@ public:
}
}
~VideoTrackForC() override = default;
int getVideoHeight() const override {
return _args.video.height;
}
@ -45,16 +43,16 @@ public:
return _codec_id;
}
bool ready() override {
bool ready() const override {
return true;
}
Track::Ptr clone() override {
auto track_in = std::shared_ptr<Track>(shared_from_this());
Track::Ptr clone() const override {
auto track_in = std::shared_ptr<Track>(const_cast<VideoTrackForC *>(this)->shared_from_this());
return Factory::getTrackByAbstractTrack(track_in);
}
Sdp::Ptr getSdp() override {
Sdp::Ptr getSdp(uint8_t) const override {
return nullptr;
}
@ -65,17 +63,15 @@ private:
class AudioTrackForC : public AudioTrackImp, public std::enable_shared_from_this<AudioTrackForC> {
public:
~AudioTrackForC() override = default;
AudioTrackForC(int codec_id, codec_args *args) :
AudioTrackImp((CodecId) codec_id, args->audio.sample_rate, args->audio.channels, 16) {}
Track::Ptr clone() override {
auto track_in = std::shared_ptr<Track>(shared_from_this());
Track::Ptr clone() const override {
auto track_in = std::shared_ptr<Track>(const_cast<AudioTrackForC *>(this)->shared_from_this());
return Factory::getTrackByAbstractTrack(track_in);
}
Sdp::Ptr getSdp() override {
Sdp::Ptr getSdp(uint8_t payload_type) const override {
return nullptr;
}
};

View File

@ -286,6 +286,10 @@ keepAliveSecond=15
port=1935
#rtmps服务器监听地址
sslport=0
# rtmp是否直接代理模式
directProxy=1
#h265 rtmp打包采用增强型rtmp标准还是国内拓展标准
enhanced=1
[rtp]
#音频mtu大小该参数限制rtp最大字节数推荐不要超过1400
@ -347,6 +351,11 @@ preferredCodecA=PCMU,PCMA,opus,mpeg4-generic
#以下范例为所有支持的视频codec
preferredCodecV=H264,H265,AV1,VP9,VP8
#webrtc比特率设置
start_bitrate=0
max_bitrate=0
min_bitrate=0
[srt]
#srt播放推流、播放超时时间,单位秒
timeoutSec=5

View File

@ -157,12 +157,12 @@ void Process::run(const string &cmd, string log_file) {
WarnL << "clone process failed:" << get_uv_errmsg();
free(_process_stack);
_process_stack = nullptr;
throw std::runtime_error(StrPrinter << "fork child process failed,err:" << get_uv_errmsg());
throw std::runtime_error(StrPrinter << "clone child process failed, cmd: " << cmd << ",err:" << get_uv_errmsg());
}
#else
_pid = fork();
if (_pid == -1) {
throw std::runtime_error(StrPrinter << "fork child process failed,err:" << get_uv_errmsg());
throw std::runtime_error(StrPrinter << "fork child process failed, cmd: " << cmd << ",err:" << get_uv_errmsg());
}
if (_pid == 0) {
//子进程

View File

@ -53,7 +53,6 @@ public:
ApiRetException(const char *str = "success" ,int code = API::Success):runtime_error(str){
_code = code;
}
~ApiRetException() = default;
int code(){ return _code; }
private:
int _code;
@ -62,19 +61,16 @@ private:
class AuthException : public ApiRetException {
public:
AuthException(const char *str):ApiRetException(str,API::AuthFailed){}
~AuthException() = default;
};
class InvalidArgsException: public ApiRetException {
public:
InvalidArgsException(const char *str):ApiRetException(str,API::InvalidArgs){}
~InvalidArgsException() = default;
};
class SuccessException: public ApiRetException {
public:
SuccessException():ApiRetException("success",API::Success){}
~SuccessException() = default;
};
using ApiArgsType = std::map<std::string, std::string, mediakit::StrCaseCompare>;
@ -155,8 +151,6 @@ public:
}
}
~HttpAllArgs() = default;
template<typename Key>
toolkit::variant operator[](const Key &key) const {
return (toolkit::variant)_get_value(*(HttpAllArgs*)this, key);

View File

@ -174,7 +174,7 @@ void do_http_hook(const string &url, const ArgsType &body, const function<void(c
GET_CONFIG(float, retry_delay, Hook::kRetryDelay);
const_cast<ArgsType &>(body)["mediaServerId"] = mediaServerId;
const_cast<ArgsType &>(body)["hook_index"] = s_hook_index++;
const_cast<ArgsType &>(body)["hook_index"] = (Json::UInt64)(s_hook_index++);
auto requester = std::make_shared<HttpRequester>();
requester->setMethod("POST");

View File

@ -203,11 +203,6 @@ public:
"日志保存文件夹路径",/*该选项说明文字*/
nullptr);
}
~CMD_main() override{}
const char *description() const override{
return "主程序命令参数";
}
};
//全局变量在WebApi中用于保存配置文件用
@ -264,13 +259,13 @@ int start_main(int argc,char *argv[]) {
loadIniConfig(g_ini_file.data());
if (!File::is_dir(ssl_file)) {
//不是文件夹,加载证书,证书包含公钥和私钥
// 不是文件夹,加载证书,证书包含公钥和私钥
SSL_Initor::Instance().loadCertificate(ssl_file.data());
} else {
//加载文件夹下的所有证书
File::scanDir(ssl_file, [](const string &path, bool isDir) {
File::scanDir(ssl_file,[](const string &path, bool isDir){
if (!isDir) {
//最后的一个证书会当做默认证书(客户端ssl握手时未指定主机)
// 最后的一个证书会当做默认证书(客户端ssl握手时未指定主机)
SSL_Initor::Instance().loadCertificate(path.data());
}
return true;
@ -290,6 +285,7 @@ int start_main(int argc,char *argv[]) {
//如果需要调用getSnap和addFFmpegSource接口可以关闭cpu亲和性
EventPollerPool::setPoolSize(threads);
WorkThreadPool::setPoolSize(threads);
EventPollerPool::enableCpuAffinity(affinity);
//简单的telnet服务器可用于服务器调试但是不能使用23端口否则telnet上了莫名其妙的现象

View File

@ -65,7 +65,6 @@ private:
class TaskManager {
public:
TaskManager() = default;
virtual ~TaskManager();
void setMaxTaskSize(size_t size);
@ -84,7 +83,6 @@ private:
class ThreadExitException : public std::runtime_error {
public:
ThreadExitException() : std::runtime_error("exit") {}
~ThreadExitException() = default;
};
private:

View File

@ -49,7 +49,6 @@ public:
//fDuration<=0为直播否则为点播
DevChannel(const MediaTuple& tuple, float duration = 0, const ProtocolOption &option = ProtocolOption())
: MultiMediaSourceMuxer(tuple, duration, option) {}
~DevChannel() override = default;
/**
* Track

View File

@ -16,9 +16,6 @@
namespace mediakit {
class JemallocUtil {
public:
JemallocUtil() = default;
~JemallocUtil() = default;
static void enable_profiling();
static void disable_profiling();

View File

@ -205,7 +205,6 @@ class FrameFromStaticPtr : public FrameFromPtr {
public:
template<typename ... ARGS>
FrameFromStaticPtr(ARGS &&...args) : FrameFromPtr(std::forward<ARGS>(args)...) {};
~FrameFromStaticPtr() override = default;
bool cacheAble() const override {
return true;
@ -260,7 +259,7 @@ bool MediaSink::addMuteAudioTrack() {
if (_track_map.find(TrackAudio) != _track_map.end()) {
return false;
}
auto audio = std::make_shared<AACTrack>(makeAacConfig(MUTE_ADTS_DATA, ADTS_HEADER_LEN));
auto audio = std::make_shared<AACTrack>(MUTE_ADTS_DATA, ADTS_HEADER_LEN);
_track_map[audio->getTrackType()] = std::make_pair(audio, true);
audio->addDelegate([this](const Frame::Ptr &frame) {
return onTrackFrame(frame);

View File

@ -21,7 +21,6 @@ namespace mediakit{
class TrackListener {
public:
TrackListener() = default;
virtual ~TrackListener() = default;
/**
@ -45,9 +44,6 @@ public:
class MediaSinkInterface : public FrameWriterInterface, public TrackListener {
public:
using Ptr = std::shared_ptr<MediaSinkInterface>;
MediaSinkInterface() = default;
~MediaSinkInterface() override = default;
};
/**
@ -56,8 +52,6 @@ public:
class MuteAudioMaker : public FrameDispatcher {
public:
using Ptr = std::shared_ptr<MuteAudioMaker>;
MuteAudioMaker() = default;
~MuteAudioMaker() override = default;
bool inputFrame(const Frame::Ptr &frame) override;
private:
@ -71,9 +65,6 @@ private:
class MediaSink : public MediaSinkInterface, public TrackSource{
public:
using Ptr = std::shared_ptr<MediaSink>;
MediaSink() = default;
~MediaSink() override = default;
/**
* frame
* @param frame
@ -112,7 +103,7 @@ public:
std::vector<Track::Ptr> getTracks(bool trackReady = true) const override;
/**
* track已经准备完成
* onAllTrackReady事件
*/
bool isAllTrackReady() const;
@ -188,9 +179,6 @@ private:
class MediaSinkDelegate : public MediaSink {
public:
MediaSinkDelegate() = default;
~MediaSinkDelegate() override = default;
/**
* track监听器
*/
@ -207,9 +195,6 @@ private:
class Demuxer : protected TrackListener, public TrackSource {
public:
Demuxer() = default;
~Demuxer() override = default;
void setTrackListener(TrackListener *listener, bool wait_track_ready = false);
std::vector<Track::Ptr> getTracks(bool trackReady = true) const override;

View File

@ -50,10 +50,8 @@ public:
public:
template<typename ...T>
NotImplemented(T && ...args) : std::runtime_error(std::forward<T>(args)...) {}
~NotImplemented() override = default;
};
MediaSourceEvent() = default;
virtual ~MediaSourceEvent() = default;
// 获取媒体源类型
@ -254,9 +252,6 @@ private:
//该对象用于拦截感兴趣的MediaSourceEvent事件
class MediaSourceEventInterceptor : public MediaSourceEvent {
public:
MediaSourceEventInterceptor() = default;
~MediaSourceEventInterceptor() override = default;
void setDelegate(const std::weak_ptr<MediaSourceEvent> &listener);
std::shared_ptr<MediaSourceEvent> getDelegate() const;
@ -289,7 +284,6 @@ private:
*/
class MediaInfo: public MediaTuple {
public:
~MediaInfo() = default;
MediaInfo() = default;
MediaInfo(const std::string &url) { parse(url); }
void parse(const std::string &url);

View File

@ -32,13 +32,11 @@ public:
class Listener {
public:
Listener() = default;
virtual ~Listener() = default;
virtual void onAllTrackReady() = 0;
};
MultiMediaSourceMuxer(const MediaTuple& tuple, float dur_sec = 0.0,const ProtocolOption &option = ProtocolOption());
~MultiMediaSourceMuxer() override = default;
/**
*

View File

@ -18,9 +18,6 @@ namespace mediakit {
/// 缓存刷新策略类
class FlushPolicy {
public:
FlushPolicy() = default;
~FlushPolicy() = default;
bool isFlushAble(bool is_video, bool is_key, uint64_t new_stamp, size_t cache_size);
private:
@ -79,13 +76,8 @@ private:
// 但是却对性能提升很大,这样做还是比较划算的
GET_CONFIG(int, mergeWriteMS, General::kMergeWriteMS);
GET_CONFIG(int, rtspLowLatency, Rtsp::kLowLatency);
if (std::is_same<packet, RtpPacket>::value && rtspLowLatency) {
return true;
}
return std::is_same<packet, RtpPacket>::value ? false : (mergeWriteMS <= 0);
return std::is_same<packet, RtpPacket>::value ? rtspLowLatency : (mergeWriteMS <= 0);
}
private:

View File

@ -293,7 +293,6 @@ void RtspUrl::setup(bool is_ssl, const string &url, const string &user, const st
uint16_t port = is_ssl ? 322 : 554;
splitUrl(ip, ip, port);
_url = std::move(url);
_user = strCoding::UrlDecode(std::move(user));
_passwd = strCoding::UrlDecode(std::move(passwd));

View File

@ -31,8 +31,6 @@ struct StrCaseCompare {
class StrCaseMap : public std::multimap<std::string, std::string, StrCaseCompare> {
public:
using Super = multimap<std::string, std::string, StrCaseCompare>;
StrCaseMap() = default;
~StrCaseMap() = default;
std::string &operator[](const std::string &k) {
auto it = find(k);
@ -60,9 +58,6 @@ public:
// rtsp/http/sip解析类
class Parser {
public:
Parser() = default;
~Parser() = default;
// 解析http/rtsp/sip请求需要确保buf以\0结尾
void parse(const char *buf, size_t size);
@ -132,8 +127,6 @@ public:
std::string _host;
public:
RtspUrl() = default;
~RtspUrl() = default;
void parse(const std::string &url);
private:

View File

@ -17,10 +17,10 @@
namespace mediakit {
class DeltaStamp{
class DeltaStamp {
public:
DeltaStamp();
~DeltaStamp() = default;
virtual ~DeltaStamp() = default;
/**
*
@ -48,9 +48,6 @@ protected:
//计算相对时间戳或者产生平滑时间戳
class Stamp : public DeltaStamp{
public:
Stamp() = default;
~Stamp() = default;
/**
* ,dts回退等功能
* @param dts dts0
@ -116,8 +113,6 @@ private:
//pts排序后就是dts
class DtsGenerator{
public:
DtsGenerator() = default;
~DtsGenerator() = default;
bool getDts(uint64_t pts, uint64_t &dts);
private:
@ -136,9 +131,6 @@ private:
class NtpStamp {
public:
NtpStamp() = default;
~NtpStamp() = default;
void setNtpStamp(uint32_t rtp_stamp, uint64_t ntp_stamp_ms);
uint64_t getNtpStamp(uint32_t rtp_stamp, uint32_t sample_rate);

View File

@ -235,10 +235,14 @@ namespace Rtmp {
#define RTMP_FIELD "rtmp."
const string kHandshakeSecond = RTMP_FIELD "handshakeSecond";
const string kKeepAliveSecond = RTMP_FIELD "keepAliveSecond";
const string kDirectProxy = RTMP_FIELD "directProxy";
const string kEnhanced = RTMP_FIELD "enhanced";
static onceToken token([]() {
mINI::Instance()[kHandshakeSecond] = 15;
mINI::Instance()[kKeepAliveSecond] = 15;
mINI::Instance()[kDirectProxy] = 1;
mINI::Instance()[kEnhanced] = 1;
});
} // namespace Rtmp

View File

@ -297,6 +297,10 @@ namespace Rtmp {
extern const std::string kHandshakeSecond;
// 维持链接超时时间默认15秒
extern const std::string kKeepAliveSecond;
// 是否直接代理
extern const std::string kDirectProxy;
// h265-rtmp是否采用增强型(或者国内扩展)
extern const std::string kEnhanced;
} // namespace Rtmp
////////////RTP配置///////////

View File

@ -75,7 +75,6 @@ class AssertFailedException : public std::runtime_error {
public:
template<typename ...T>
AssertFailedException(T && ...args) : std::runtime_error(std::forward<T>(args)...) {}
~AssertFailedException() override = default;
};
extern const char kServerName[];

View File

@ -205,39 +205,31 @@ public:
/**
*
* @param aac_cfg aac两个字节的配置描述
* @param payload_type rtp payload type
* @param sample_rate
* @param payload_type rtp payload type 98
* @param channels
* @param bitrate
*/
AACSdp(const string &aac_cfg,
int sample_rate,
int channels,
int bitrate = 128,
int payload_type = 98) : Sdp(sample_rate,payload_type){
AACSdp(const string &aac_cfg, int payload_type, int sample_rate, int channels, int bitrate)
: Sdp(sample_rate, payload_type) {
_printer << "m=audio 0 RTP/AVP " << payload_type << "\r\n";
if (bitrate) {
_printer << "b=AS:" << bitrate << "\r\n";
}
_printer << "a=rtpmap:" << payload_type << " " << getCodecName() << "/" << sample_rate << "/" << channels << "\r\n";
_printer << "a=rtpmap:" << payload_type << " " << getCodecName(CodecAAC) << "/" << sample_rate << "/" << channels << "\r\n";
string configStr;
char buf[4] = {0};
for(auto &ch : aac_cfg){
char buf[4] = { 0 };
for (auto &ch : aac_cfg) {
snprintf(buf, sizeof(buf), "%02X", (uint8_t)ch);
configStr.append(buf);
}
_printer << "a=fmtp:" << payload_type << " streamtype=5;profile-level-id=1;mode=AAC-hbr;"
<< "sizelength=13;indexlength=3;indexdeltalength=3;config=" << configStr << "\r\n";
_printer << "a=control:trackID=" << (int)TrackAudio << "\r\n";
}
string getSdp() const override {
return _printer;
}
string getSdp() const override { return _printer; }
CodecId getCodecId() const override {
return CodecAAC;
}
private:
_StrPrinter _printer;
};
@ -249,18 +241,16 @@ AACTrack::AACTrack(const string &aac_cfg) {
throw std::invalid_argument("adts配置必须最少2个字节");
}
_cfg = aac_cfg;
onReady();
update();
}
const string &AACTrack::getConfig() const {
return _cfg;
}
AACTrack::AACTrack(const uint8_t *adts, size_t size) : AACTrack(makeAacConfig(adts, size)) {}
CodecId AACTrack::getCodecId() const {
return CodecAAC;
}
bool AACTrack::ready() {
bool AACTrack::ready() const {
return !_cfg.empty();
}
@ -276,9 +266,23 @@ int AACTrack::getAudioChannel() const {
return _channel;
}
// Wraps a raw AAC payload with an ADTS header derived from the track's
// 2-byte AAC config, producing a new self-contained frame.
// The dts is copied from the input frame; the ADTS header becomes the prefix.
static Frame::Ptr addADTSHeader(const Frame::Ptr &frame_in, const std::string &aac_config) {
auto frame = FrameImp::create();
frame->_codec_id = CodecAAC;
// Build the ADTS header for this payload size from the AAC config.
char adts_header[32] = { 0 };
auto size = dumpAacConfig(aac_config, frame_in->size(), (uint8_t *)adts_header, sizeof(adts_header));
CHECK(size > 0, "Invalid adts config");
// Mark the header bytes as frame prefix so consumers can skip them.
frame->_prefix_size = size;
frame->_dts = frame_in->dts();
frame->_buffer.assign(adts_header, size);
frame->_buffer.append(frame_in->data(), frame_in->size());
return frame;
}
bool AACTrack::inputFrame(const Frame::Ptr &frame) {
if (!frame->prefixSize()) {
return inputFrame_l(frame);
return inputFrame_l(addADTSHeader(frame, getExtraData()->toString()));
}
bool ret = false;
@ -289,14 +293,14 @@ bool AACTrack::inputFrame(const Frame::Ptr &frame) {
auto ptr = frame->data();
auto end = frame->data() + frame->size();
while (ptr < end) {
auto frame_len = getAacFrameLength((uint8_t *) ptr, end - ptr);
auto frame_len = getAacFrameLength((uint8_t *)ptr, end - ptr);
if (frame_len < ADTS_HEADER_LEN) {
break;
}
if (frame_len == (int)frame->size()) {
return inputFrame_l(frame);
}
auto sub_frame = std::make_shared<FrameTSInternal<FrameFromPtr> >(frame, (char *) ptr, frame_len, ADTS_HEADER_LEN,dts,pts);
auto sub_frame = std::make_shared<FrameTSInternal<FrameFromPtr>>(frame, (char *)ptr, frame_len, ADTS_HEADER_LEN, dts, pts);
ptr += frame_len;
if (ptr > end) {
WarnL << "invalid aac length in adts header: " << frame_len
@ -307,52 +311,51 @@ bool AACTrack::inputFrame(const Frame::Ptr &frame) {
if (inputFrame_l(sub_frame)) {
ret = true;
}
dts += 1024*1000/getAudioSampleRate();
pts += 1024*1000/getAudioSampleRate();
dts += 1024 * 1000 / getAudioSampleRate();
pts += 1024 * 1000 / getAudioSampleRate();
}
return ret;
}
bool AACTrack::inputFrame_l(const Frame::Ptr &frame) {
if (_cfg.empty()) {
//未获取到aac_cfg信息
if (frame->prefixSize()) {
//根据7个字节的adts头生成aac config
_cfg = makeAacConfig((uint8_t *) (frame->data()), frame->prefixSize());
onReady();
} else {
WarnL << "无法获取adts头!";
}
if (_cfg.empty() && frame->prefixSize()) {
// 未获取到aac_cfg信息根据7个字节的adts头生成aac config
_cfg = makeAacConfig((uint8_t *)(frame->data()), frame->prefixSize());
update();
}
if (frame->size() > frame->prefixSize()) {
//除adts头外有实际负载
// 除adts头外有实际负载
return AudioTrack::inputFrame(frame);
}
return false;
}
// Returns the stored AAC config (_cfg) as a buffer.
// Precondition: the track must be ready, i.e. _cfg is non-empty.
toolkit::Buffer::Ptr AACTrack::getExtraData() const {
CHECK(ready());
return std::make_shared<BufferString>(_cfg);
}
// Installs the AAC config (must be at least 2 bytes) and re-parses it
// via update() to refresh the cached sample rate / channel count.
void AACTrack::setExtraData(const uint8_t *data, size_t size) {
CHECK(size >= 2);
_cfg.assign((char *)data, size);
update();
}
// Re-parses _cfg into _sampleRate/_channel; returns false on invalid config.
bool AACTrack::update() {
return parseAacConfig(_cfg, _sampleRate, _channel);
}
void AACTrack::onReady() {
if (!parseAacConfig(_cfg, _sampleRate, _channel)) {
_cfg.clear();
}
Track::Ptr AACTrack::clone() const {
return std::make_shared<AACTrack>(*this);
}
Track::Ptr AACTrack::clone() {
return std::make_shared<std::remove_reference<decltype(*this)>::type>(*this);
}
Sdp::Ptr AACTrack::getSdp() {
if(!ready()){
Sdp::Ptr AACTrack::getSdp(uint8_t payload_type) const {
if (!ready()) {
WarnL << getCodecName() << " Track未准备好";
return nullptr;
}
update();
return std::make_shared<AACSdp>(getConfig(), getAudioSampleRate(), getAudioChannel(), getBitRate() / 1024);
return std::make_shared<AACSdp>(getExtraData()->toString(), payload_type, getAudioSampleRate(), getAudioChannel(), getBitRate() / 1024);
}
}//namespace mediakit

View File

@ -17,47 +17,40 @@
namespace mediakit{
std::string makeAacConfig(const uint8_t *hex, size_t length);
int getAacFrameLength(const uint8_t *hex, size_t length);
int dumpAacConfig(const std::string &config, size_t length, uint8_t *out, size_t out_size);
bool parseAacConfig(const std::string &config, int &samplerate, int &channels);
/**
* aac音频通道
*/
class AACTrack : public AudioTrack{
class AACTrack : public AudioTrack {
public:
using Ptr = std::shared_ptr<AACTrack>;
/**
* adts头信息
* inputFrame中获取adts头信息
*/
AACTrack() = default;
/**
* aac类型的媒体
* @param aac_cfg aac配置信息
* aac extra data
*/
AACTrack(const std::string &aac_cfg);
/**
* aac
* aac adts头
*/
const std::string &getConfig() const;
AACTrack(const uint8_t *adts, size_t size);
bool ready() override;
bool ready() const override;
CodecId getCodecId() const override;
int getAudioChannel() const override;
int getAudioSampleRate() const override;
int getAudioSampleBit() const override;
bool inputFrame(const Frame::Ptr &frame) override;
toolkit::Buffer::Ptr getExtraData() const override;
void setExtraData(const uint8_t *data, size_t size) override;
bool update() override;
private:
void onReady();
Sdp::Ptr getSdp() override;
Track::Ptr clone() override;
Sdp::Ptr getSdp(uint8_t payload_type) const override;
Track::Ptr clone() const override;
bool inputFrame_l(const Frame::Ptr &frame);
private:

View File

@ -16,90 +16,18 @@ using namespace toolkit;
namespace mediakit {
// Extracts the AAC config from an rtmp audio packet by skipping the
// 2-byte rtmp audio tag header and returning the remainder.
// Returns an empty string for non-AAC packets, or (with a warning)
// for packets too short to carry a config.
static string getConfig(const RtmpPacket &thiz) {
string ret;
if ((RtmpAudioCodec)thiz.getRtmpCodecId() != RtmpAudioCodec::aac) {
return ret;
}
if (thiz.buffer.size() < 4) {
WarnL << "get aac config failed, rtmp packet is: " << hexdump(thiz.data(), thiz.size());
return ret;
}
ret = thiz.buffer.substr(2);
return ret;
}
void AACRtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt) {
CHECK(pkt->size() > 2);
if (pkt->isConfigFrame()) {
_aac_cfg = getConfig(*pkt);
if (!_aac_cfg.empty()) {
onGetAAC(nullptr, 0, 0);
}
getTrack()->setExtraData((uint8_t *)pkt->data() + 2, pkt->size() - 2);
return;
}
if (!_aac_cfg.empty()) {
onGetAAC(pkt->buffer.data() + 2, pkt->buffer.size() - 2, pkt->time_stamp);
}
}
void AACRtmpDecoder::onGetAAC(const char* data, size_t len, uint32_t stamp) {
auto frame = FrameImp::create();
frame->_codec_id = CodecAAC;
//生成adts头
char adts_header[32] = {0};
auto size = dumpAacConfig(_aac_cfg, len, (uint8_t *) adts_header, sizeof(adts_header));
if (size > 0) {
frame->_buffer.assign(adts_header, size);
frame->_prefix_size = size;
} else {
frame->_buffer.clear();
frame->_prefix_size = 0;
}
if(len > 0){
//追加负载数据
frame->_buffer.append(data, len);
frame->_dts = stamp;
}
if(size > 0 || len > 0){
//有adts头或者实际aac负载
RtmpCodec::inputFrame(frame);
}
RtmpCodec::inputFrame(std::make_shared<FrameFromPtr>(CodecAAC, pkt->buffer.data() + 2, pkt->buffer.size() - 2, pkt->time_stamp));
}
/////////////////////////////////////////////////////////////////////////////////////
AACRtmpEncoder::AACRtmpEncoder(const Track::Ptr &track) {
_track = dynamic_pointer_cast<AACTrack>(track);
}
void AACRtmpEncoder::makeConfigPacket() {
if (_track && _track->ready()) {
//从track中和获取aac配置信息
_aac_cfg = _track->getConfig();
}
if (!_aac_cfg.empty()) {
makeAudioConfigPkt();
}
}
bool AACRtmpEncoder::inputFrame(const Frame::Ptr &frame) {
if (_aac_cfg.empty()) {
if (frame->prefixSize()) {
// 包含adts头,从adts头获取aac配置信息
_aac_cfg = makeAacConfig((uint8_t *)(frame->data()), frame->prefixSize());
}
makeConfigPacket();
}
if (_aac_cfg.empty()) {
return false;
}
auto pkt = RtmpPacket::create();
// header
pkt->buffer.push_back(_audio_flv_flags);
@ -115,14 +43,18 @@ bool AACRtmpEncoder::inputFrame(const Frame::Ptr &frame) {
return true;
}
void AACRtmpEncoder::makeAudioConfigPkt() {
_audio_flv_flags = getAudioRtmpFlags(std::make_shared<AACTrack>(_aac_cfg));
void AACRtmpEncoder::makeConfigPacket() {
_audio_flv_flags = getAudioRtmpFlags(getTrack());
auto pkt = RtmpPacket::create();
// header
pkt->buffer.push_back(_audio_flv_flags);
pkt->buffer.push_back((uint8_t)RtmpAACPacketType::aac_config_header);
// aac config
pkt->buffer.append(_aac_cfg);
auto extra_data = getTrack()->getExtraData();
CHECK(extra_data);
pkt->buffer.append(extra_data->data(), extra_data->size());
pkt->body_size = pkt->buffer.size();
pkt->chunk_id = CHUNK_AUDIO;
pkt->stream_index = STREAM_MEDIA;

View File

@ -19,35 +19,23 @@ namespace mediakit{
/**
* aac Rtmp转adts类
*/
class AACRtmpDecoder : public RtmpCodec{
class AACRtmpDecoder : public RtmpCodec {
public:
using Ptr = std::shared_ptr<AACRtmpDecoder>;
AACRtmpDecoder() {}
~AACRtmpDecoder() {}
AACRtmpDecoder(const Track::Ptr &track) : RtmpCodec(track) {}
/**
* Rtmp并解码
* @param rtmp Rtmp数据包
*/
void inputRtmp(const RtmpPacket::Ptr &rtmp) override;
CodecId getCodecId() const override{
return CodecAAC;
}
private:
void onGetAAC(const char *data, size_t len, uint32_t stamp);
private:
std::string _aac_cfg;
};
/**
* aac adts转Rtmp类
*/
class AACRtmpEncoder : public AACRtmpDecoder{
class AACRtmpEncoder : public RtmpCodec {
public:
using Ptr = std::shared_ptr<AACRtmpEncoder>;
@ -57,8 +45,7 @@ public:
* inputFrame时可以不输入adts头
* @param track
*/
AACRtmpEncoder(const Track::Ptr &track);
~AACRtmpEncoder() {}
AACRtmpEncoder(const Track::Ptr &track) : RtmpCodec(track) {}
/**
* aac adts头
@ -72,12 +59,7 @@ public:
void makeConfigPacket() override;
private:
void makeAudioConfigPkt();
private:
uint8_t _audio_flv_flags;
AACTrack::Ptr _track;
std::string _aac_cfg;
uint8_t _audio_flv_flags {0};
};
}//namespace mediakit

View File

@ -12,63 +12,36 @@
namespace mediakit{
AACRtpEncoder::AACRtpEncoder(uint32_t ui32Ssrc,
uint32_t ui32MtuSize,
uint32_t ui32SampleRate,
uint8_t ui8PayloadType,
uint8_t ui8Interleaved) :
RtpInfo(ui32Ssrc,
ui32MtuSize,
ui32SampleRate,
ui8PayloadType,
ui8Interleaved){
}
bool AACRtpEncoder::inputFrame(const Frame::Ptr &frame) {
auto stamp = frame->dts();
auto data = frame->data() + frame->prefixSize();
auto len = frame->size() - frame->prefixSize();
auto ptr = (char *) data;
auto remain_size = len;
auto max_size = getMaxSize() - 4;
auto ptr = (char *)frame->data() + frame->prefixSize();
auto size = frame->size() - frame->prefixSize();
auto remain_size = size;
auto max_size = getRtpInfo().getMaxSize() - 4;
while (remain_size > 0) {
if (remain_size <= max_size) {
_section_buf[0] = 0;
_section_buf[1] = 16;
_section_buf[2] = (len >> 5) & 0xFF;
_section_buf[3] = ((len & 0x1F) << 3) & 0xFF;
memcpy(_section_buf + 4, ptr, remain_size);
makeAACRtp(_section_buf, remain_size + 4, true, stamp);
outputRtp(ptr, remain_size, size, true, frame->dts());
break;
}
_section_buf[0] = 0;
_section_buf[1] = 16;
_section_buf[2] = ((len) >> 5) & 0xFF;
_section_buf[3] = ((len & 0x1F) << 3) & 0xFF;
memcpy(_section_buf + 4, ptr, max_size);
makeAACRtp(_section_buf, max_size + 4, false, stamp);
outputRtp(ptr, max_size, size, false, frame->dts());
ptr += max_size;
remain_size -= max_size;
}
return len > 0;
return true;
}
void AACRtpEncoder::makeAACRtp(const void *data, size_t len, bool mark, uint64_t stamp) {
RtpCodec::inputRtp(makeRtp(getTrackType(), data, len, mark, stamp), false);
void AACRtpEncoder::outputRtp(const char *data, size_t len, size_t total_len, bool mark, uint64_t stamp) {
auto rtp = getRtpInfo().makeRtp(TrackAudio, nullptr, len + 4, mark, stamp);
auto payload = rtp->data() + RtpPacket::kRtpTcpHeaderSize + RtpPacket::kRtpHeaderSize;
payload[0] = 0;
payload[1] = 16;
payload[2] = ((total_len) >> 5) & 0xFF;
payload[3] = ((total_len & 0x1F) << 3) & 0xFF;
memcpy(payload + 4, data, len);
RtpCodec::inputRtp(std::move(rtp), false);
}
/////////////////////////////////////////////////////////////////////////////////////
// Constructs the decoder from an AAC track, caching its config so ADTS
// headers can be rebuilt later; warns (log text is user-facing, kept as-is)
// if the track is missing or not ready. Always prepares the first frame buffer.
AACRtpDecoder::AACRtpDecoder(const Track::Ptr &track) {
auto aacTrack = std::dynamic_pointer_cast<AACTrack>(track);
if (!aacTrack || !aacTrack->ready()) {
WarnL << "该aac track无效!";
} else {
_aac_cfg = aacTrack->getConfig();
}
obtainFrame();
}
// Default constructor: no cached AAC config; just prepares the first frame buffer.
AACRtpDecoder::AACRtpDecoder() {
obtainFrame();
}
@ -145,17 +118,8 @@ bool AACRtpDecoder::inputRtp(const RtpPacket::Ptr &rtp, bool key_pos) {
void AACRtpDecoder::flushData() {
auto ptr = reinterpret_cast<const uint8_t *>(_frame->data());
if ((ptr[0] == 0xFF && (ptr[1] & 0xF0) == 0xF0) && _frame->size() > ADTS_HEADER_LEN) {
//adts头打入了rtp包不符合规范兼容EasyPusher的bug
// adts头打入了rtp包不符合规范兼容EasyPusher的bug
_frame->_prefix_size = ADTS_HEADER_LEN;
} else {
//没有adts头则插入adts头
char adts_header[128] = {0};
auto size = dumpAacConfig(_aac_cfg, _frame->_buffer.size(), (uint8_t *) adts_header, sizeof(adts_header));
if (size > 0) {
//插入adts头
_frame->_buffer.insert(0, adts_header, size);
_frame->_prefix_size = size;
}
}
RtpCodec::inputFrame(_frame);
obtainFrame();

View File

@ -21,8 +21,7 @@ class AACRtpDecoder : public RtpCodec {
public:
using Ptr = std::shared_ptr<AACRtpDecoder>;
AACRtpDecoder(const Track::Ptr &track);
~AACRtpDecoder() {}
AACRtpDecoder();
/**
* rtp并解码
@ -31,20 +30,12 @@ public:
*/
bool inputRtp(const RtpPacket::Ptr &rtp, bool key_pos = false) override;
CodecId getCodecId() const override {
return CodecAAC;
}
protected:
AACRtpDecoder();
private:
void obtainFrame();
void flushData();
private:
uint64_t _last_dts = 0;
std::string _aac_cfg;
FrameImp::Ptr _frame;
};
@ -52,24 +43,10 @@ private:
/**
* aac adts转rtp类
*/
class AACRtpEncoder : public AACRtpDecoder , public RtpInfo {
class AACRtpEncoder : public RtpCodec {
public:
using Ptr = std::shared_ptr<AACRtpEncoder>;
/**
* @param ui32Ssrc ssrc
* @param ui32MtuSize mtu
* @param ui32SampleRate
* @param ui8PayloadType pt类型
* @param ui8Interleaved rtsp interleaved
*/
AACRtpEncoder(uint32_t ui32Ssrc,
uint32_t ui32MtuSize,
uint32_t ui32SampleRate,
uint8_t ui8PayloadType = 97,
uint8_t ui8Interleaved = TrackAudio * 2);
~AACRtpEncoder() {}
/**
* aac adts头
* @param frame adts头的aac数据
@ -77,10 +54,8 @@ public:
bool inputFrame(const Frame::Ptr &frame) override;
private:
void makeAACRtp(const void *data, size_t len, bool mark, uint64_t stamp);
void outputRtp(const char *data, size_t len, size_t total_len, bool mark, uint64_t stamp);
private:
unsigned char _section_buf[1600];
};
}//namespace mediakit

View File

@ -10,46 +10,26 @@
#include "CommonRtmp.h"
namespace mediakit{
// Constructs the decoder for the given codec id and prepares the first frame buffer.
CommonRtmpDecoder::CommonRtmpDecoder(CodecId codec) {
_codec = codec;
obtainFrame();
}
// Returns the codec id this decoder was constructed with.
CodecId CommonRtmpDecoder::getCodecId() const {
return _codec;
}
// Allocates a fresh output frame tagged with this decoder's codec id.
void CommonRtmpDecoder::obtainFrame() {
_frame = FrameImp::create();
_frame->_codec_id = _codec;
}
namespace mediakit {
void CommonRtmpDecoder::inputRtmp(const RtmpPacket::Ptr &rtmp) {
//拷贝负载
_frame->_buffer.assign(rtmp->buffer.data() + 1, rtmp->buffer.size() - 1);
_frame->_dts = rtmp->time_stamp;
//写入环形缓存
RtmpCodec::inputFrame(_frame);
//创建下一帧
obtainFrame();
auto frame = FrameImp::create();
frame->_codec_id = getTrack()->getCodecId();
frame->_buffer.assign(rtmp->buffer.data() + 1, rtmp->buffer.size() - 1);
frame->_dts = rtmp->time_stamp;
RtmpCodec::inputFrame(frame);
}
/////////////////////////////////////////////////////////////////////////////////////
CommonRtmpEncoder::CommonRtmpEncoder(const Track::Ptr &track) : CommonRtmpDecoder(track->getCodecId()) {
_audio_flv_flags = getAudioRtmpFlags(track);
}
bool CommonRtmpEncoder::inputFrame(const Frame::Ptr &frame) {
if (!_audio_flv_flags) {
return false;
_audio_flv_flags = getAudioRtmpFlags(getTrack());
}
auto rtmp = RtmpPacket::create();
//header
// header
rtmp->buffer.push_back(_audio_flv_flags);
//data
// data
rtmp->buffer.append(frame->data() + frame->prefixSize(), frame->size() - frame->prefixSize());
rtmp->body_size = rtmp->buffer.size();
rtmp->chunk_id = CHUNK_AUDIO;

View File

@ -23,42 +23,26 @@ class CommonRtmpDecoder : public RtmpCodec {
public:
using Ptr = std::shared_ptr<CommonRtmpDecoder>;
~CommonRtmpDecoder() override {}
/**
*
* @param codec id
*/
CommonRtmpDecoder(CodecId codec);
/**
* ID
*/
CodecId getCodecId() const override;
CommonRtmpDecoder(const Track::Ptr &track) : RtmpCodec(track) {}
/**
* Rtmp并解码
* @param rtmp Rtmp数据包
*/
void inputRtmp(const RtmpPacket::Ptr &rtmp) override;
private:
void obtainFrame();
private:
CodecId _codec;
FrameImp::Ptr _frame;
};
/**
* rtmp编码类
*/
class CommonRtmpEncoder : public CommonRtmpDecoder {
class CommonRtmpEncoder : public RtmpCodec {
public:
using Ptr = std::shared_ptr<CommonRtmpEncoder>;
CommonRtmpEncoder(const Track::Ptr &track);
~CommonRtmpEncoder() override{}
CommonRtmpEncoder(const Track::Ptr &track) : RtmpCodec(track) {}
/**
*
@ -66,7 +50,7 @@ public:
bool inputFrame(const Frame::Ptr &frame) override;
private:
uint8_t _audio_flv_flags = 0;
uint8_t _audio_flv_flags { 0 };
};
}//namespace mediakit

View File

@ -18,10 +18,6 @@ CommonRtpDecoder::CommonRtpDecoder(CodecId codec, size_t max_frame_size ){
obtainFrame();
}
CodecId CommonRtpDecoder::getCodecId() const {
return _codec;
}
void CommonRtpDecoder::obtainFrame() {
_frame = FrameImp::create();
_frame->_codec_id = _codec;
@ -66,17 +62,12 @@ bool CommonRtpDecoder::inputRtp(const RtpPacket::Ptr &rtp, bool){
////////////////////////////////////////////////////////////////
CommonRtpEncoder::CommonRtpEncoder(CodecId codec, uint32_t ssrc, uint32_t mtu_size,
uint32_t sample_rate, uint8_t payload_type, uint8_t interleaved)
: CommonRtpDecoder(codec), RtpInfo(ssrc, mtu_size, sample_rate, payload_type, interleaved) {
}
bool CommonRtpEncoder::inputFrame(const Frame::Ptr &frame){
auto stamp = frame->pts();
auto ptr = frame->data() + frame->prefixSize();
auto len = frame->size() - frame->prefixSize();
auto remain_size = len;
auto max_size = getMaxSize();
auto max_size = getRtpInfo().getMaxSize();
bool is_key = frame->keyFrame();
bool mark = false;
while (remain_size > 0) {
@ -87,7 +78,7 @@ bool CommonRtpEncoder::inputFrame(const Frame::Ptr &frame){
rtp_size = remain_size;
mark = true;
}
RtpCodec::inputRtp(makeRtp(getTrackType(), ptr, rtp_size, mark, stamp), is_key);
RtpCodec::inputRtp(getRtpInfo().makeRtp(frame->getTrackType(), ptr, rtp_size, mark, stamp), is_key);
ptr += rtp_size;
remain_size -= rtp_size;
is_key = false;

View File

@ -23,8 +23,6 @@ class CommonRtpDecoder : public RtpCodec {
public:
using Ptr = std::shared_ptr <CommonRtpDecoder>;
~CommonRtpDecoder() override {}
/**
*
* @param codec id
@ -32,11 +30,6 @@ public:
*/
CommonRtpDecoder(CodecId codec, size_t max_frame_size = 2 * 1024);
/**
* ID
*/
CodecId getCodecId() const override;
/**
* rtp并解码
* @param rtp rtp数据包
@ -59,23 +52,10 @@ private:
/**
* rtp编码类
*/
class CommonRtpEncoder : public CommonRtpDecoder, public RtpInfo {
class CommonRtpEncoder : public RtpCodec {
public:
using Ptr = std::shared_ptr <CommonRtpEncoder>;
~CommonRtpEncoder() override {}
/**
*
* @param codec
* @param ssrc ssrc
* @param mtu_size mtu
* @param sample_rate
* @param payload_type pt类型
* @param interleaved rtsp interleaved
*/
CommonRtpEncoder(CodecId codec, uint32_t ssrc, uint32_t mtu_size, uint32_t sample_rate, uint8_t payload_type, uint8_t interleaved);
/**
* rtp
*/

View File

@ -91,9 +91,7 @@ Track::Ptr Factory::getTrackBySdp(const SdpTrack::Ptr &track) {
return std::make_shared<H265Track>(vps, sps, pps, 0, 0, 0);
}
case CodecJPEG : {
return std::make_shared<JPEGTrack>();
}
case CodecJPEG : return std::make_shared<JPEGTrack>();
default: {
//其他codec不支持
@ -129,53 +127,30 @@ Track::Ptr Factory::getTrackByAbstractTrack(const Track::Ptr& track) {
}
}
RtpCodec::Ptr Factory::getRtpEncoderByCodecId(CodecId codec_id, uint32_t sample_rate, uint8_t pt, uint32_t ssrc) {
GET_CONFIG(uint32_t, audio_mtu, Rtp::kAudioMtuSize);
GET_CONFIG(uint32_t, video_mtu, Rtp::kVideoMtuSize);
auto type = getTrackType(codec_id);
auto mtu = type == TrackVideo ? video_mtu : audio_mtu;
auto interleaved = type * 2;
RtpCodec::Ptr Factory::getRtpEncoderByCodecId(CodecId codec_id, uint8_t pt) {
switch (codec_id) {
case CodecH264: return std::make_shared<H264RtpEncoder>(ssrc, mtu, sample_rate, pt, interleaved);
case CodecH265: return std::make_shared<H265RtpEncoder>(ssrc, mtu, sample_rate, pt, interleaved);
case CodecAAC: return std::make_shared<AACRtpEncoder>(ssrc, mtu, sample_rate, pt, interleaved);
case CodecH264: return std::make_shared<H264RtpEncoder>();
case CodecH265: return std::make_shared<H265RtpEncoder>();
case CodecAAC: return std::make_shared<AACRtpEncoder>();
case CodecL16:
case CodecOpus: return std::make_shared<CommonRtpEncoder>(codec_id, ssrc, mtu, sample_rate, pt, interleaved);
case CodecOpus: return std::make_shared<CommonRtpEncoder>();
case CodecG711A:
case CodecG711U: {
if (pt == Rtsp::PT_PCMA || pt == Rtsp::PT_PCMU) {
return std::make_shared<G711RtpEncoder>(codec_id, ssrc, mtu, sample_rate, pt, interleaved, 1);
return std::make_shared<G711RtpEncoder>(codec_id, 1);
}
return std::make_shared<CommonRtpEncoder>(codec_id, ssrc, mtu, sample_rate, pt, interleaved);
return std::make_shared<CommonRtpEncoder>();
}
case CodecJPEG: return std::make_shared<JPEGRtpEncoder>(ssrc, mtu, sample_rate, pt, interleaved);
case CodecJPEG: return std::make_shared<JPEGRtpEncoder>();
default: WarnL << "暂不支持该CodecId:" << codec_id; return nullptr;
}
}
RtpCodec::Ptr Factory::getRtpEncoderBySdp(const Sdp::Ptr &sdp) {
// ssrc不冲突即可,可以为任意的32位整形
static atomic<uint32_t> s_ssrc(0);
uint32_t ssrc = s_ssrc++;
if (!ssrc) {
// ssrc不能为0
ssrc = s_ssrc++;
}
if (sdp->getTrackType() == TrackVideo) {
//视频的ssrc是偶数方便调试
ssrc = 2 * ssrc;
} else {
//音频ssrc是奇数
ssrc = 2 * ssrc + 1;
}
return getRtpEncoderByCodecId(sdp->getCodecId(), sdp->getSampleRate(), sdp->getPayloadType(), ssrc);
}
RtpCodec::Ptr Factory::getRtpDecoderByTrack(const Track::Ptr &track) {
switch (track->getCodecId()){
case CodecH264 : return std::make_shared<H264RtpDecoder>();
case CodecH265 : return std::make_shared<H265RtpDecoder>();
case CodecAAC : return std::make_shared<AACRtpDecoder>(track->clone());
case CodecAAC : return std::make_shared<AACRtpDecoder>();
case CodecL16 :
case CodecOpus :
case CodecG711A :
@ -267,7 +242,19 @@ Track::Ptr Factory::getAudioTrackByAmf(const AMFValue& amf, int sample_rate, int
return getTrackByCodecId(codecId, sample_rate, channels, sample_bit);
}
RtmpCodec::Ptr Factory::getRtmpCodecByTrack(const Track::Ptr &track, bool is_encode) {
RtmpCodec::Ptr Factory::getRtmpDecoderByTrack(const Track::Ptr &track) {
switch (track->getCodecId()){
case CodecH264 : return std::make_shared<H264RtmpDecoder>(track);
case CodecAAC : return std::make_shared<AACRtmpDecoder>(track);
case CodecH265 : return std::make_shared<H265RtmpDecoder>(track);
case CodecOpus :
case CodecG711A :
case CodecG711U : return std::make_shared<CommonRtmpDecoder>(track);
default : WarnL << "暂不支持该CodecId:" << track->getCodecName(); return nullptr;
}
}
RtmpCodec::Ptr Factory::getRtmpEncoderByTrack(const Track::Ptr &track) {
switch (track->getCodecId()){
case CodecH264 : return std::make_shared<H264RtmpEncoder>(track);
case CodecAAC : return std::make_shared<AACRtmpEncoder>(track);
@ -276,9 +263,7 @@ RtmpCodec::Ptr Factory::getRtmpCodecByTrack(const Track::Ptr &track, bool is_enc
case CodecG711A :
case CodecG711U : {
auto audio_track = dynamic_pointer_cast<AudioTrack>(track);
if (is_encode && (audio_track->getAudioSampleRate() != 8000 ||
audio_track->getAudioChannel() != 1 ||
audio_track->getAudioSampleBit() != 16)) {
if (audio_track->getAudioSampleRate() != 8000 || audio_track->getAudioChannel() != 1 || audio_track->getAudioSampleBit() != 16) {
//rtmp对g711只支持8000/1/16规格但是ZLMediaKit可以解析其他规格的G711
WarnL << "RTMP只支持8000/1/16规格的G711,目前规格是:"
<< audio_track->getAudioSampleRate() << "/"
@ -294,13 +279,16 @@ RtmpCodec::Ptr Factory::getRtmpCodecByTrack(const Track::Ptr &track, bool is_enc
}
AMFValue Factory::getAmfByCodecId(CodecId codecId) {
GET_CONFIG(bool, enhanced, Rtmp::kEnhanced);
switch (codecId) {
case CodecAAC: return AMFValue((int)RtmpAudioCodec::aac);
case CodecH264: return AMFValue((int)RtmpVideoCodec::h264);
case CodecH265: return AMFValue((int)RtmpVideoCodec::h265);
case CodecH265: return enhanced ? AMFValue((int)RtmpVideoCodec::fourcc_hevc) : AMFValue((int)RtmpVideoCodec::h265);
case CodecG711A: return AMFValue((int)RtmpAudioCodec::g711a);
case CodecG711U: return AMFValue((int)RtmpAudioCodec::g711u);
case CodecOpus: return AMFValue((int)RtmpAudioCodec::opus);
case CodecAV1: return AMFValue((int)RtmpVideoCodec::fourcc_av1);
case CodecVP9: return AMFValue((int)RtmpVideoCodec::fourcc_vp9);
default: return AMFValue(AMF_NULL);
}
}

View File

@ -42,20 +42,12 @@ public:
*/
static Track::Ptr getTrackByAbstractTrack(const Track::Ptr& track);
/**
* sdp生成rtp编码器
* @param sdp sdp对象
*/
static RtpCodec::Ptr getRtpEncoderBySdp(const Sdp::Ptr &sdp);
/**
* codec id生成rtp编码器
* @param codec_id id
* @param sample_rate 90000
* @param pt rtp payload type
* @param ssrc rtp ssrc
*/
static RtpCodec::Ptr getRtpEncoderByCodecId(CodecId codec_id, uint32_t sample_rate, uint8_t pt, uint32_t ssrc);
static RtpCodec::Ptr getRtpEncoderByCodecId(CodecId codec_id, uint8_t pt);
/**
* Track生成Rtp解包器
@ -78,11 +70,16 @@ public:
static Track::Ptr getAudioTrackByAmf(const AMFValue& amf, int sample_rate, int channels, int sample_bit);
/**
* Track获取Rtmp的编码器
* Track获取Rtmp的编码器
* @param track
* @param is_encode
*/
static RtmpCodec::Ptr getRtmpCodecByTrack(const Track::Ptr &track, bool is_encode);
static RtmpCodec::Ptr getRtmpEncoderByTrack(const Track::Ptr &track);
/**
* Track获取Rtmp的解码器
* @param track
*/
static RtmpCodec::Ptr getRtmpDecoderByTrack(const Track::Ptr &track);
/**
* codecId获取rtmp的codec描述

View File

@ -94,6 +94,10 @@ TrackType CodecInfo::getTrackType() const {
return mediakit::getTrackType(getCodecId());
}
std::string CodecInfo::getTrackTypeStr() const {
return getTrackString(getTrackType());
}
static size_t constexpr kMaxFrameCacheSize = 100;
bool FrameMerger::willFlush(const Frame::Ptr &frame) const{
@ -165,7 +169,19 @@ void FrameMerger::doMerge(BufferLikeString &merged, const Frame::Ptr &frame) con
}
}
static bool isNeedMerge(CodecId codec){
switch (codec) {
case CodecH264:
case CodecH265: return true;
default: return false;
}
}
bool FrameMerger::inputFrame(const Frame::Ptr &frame, onOutput cb, BufferLikeString *buffer) {
if (frame && !isNeedMerge(frame->getCodecId())) {
cb(frame->dts(), frame->pts(), frame, true);
return true;
}
if (willFlush(frame)) {
Frame::Ptr back = _frame_cache.back();
Buffer::Ptr merged_frame = back;
@ -233,8 +249,6 @@ public:
*/
FrameWriterInterfaceHelper(onWriteFrame cb) { _callback = std::move(cb); }
virtual ~FrameWriterInterfaceHelper() = default;
/**
*
*/

View File

@ -85,7 +85,6 @@ class CodecInfo {
public:
using Ptr = std::shared_ptr<CodecInfo>;
CodecInfo() = default;
virtual ~CodecInfo() = default;
/**
@ -102,6 +101,7 @@ public:
*
*/
TrackType getTrackType() const;
std::string getTrackTypeStr() const;
};
/**
@ -110,7 +110,6 @@ public:
class Frame : public toolkit::Buffer, public CodecInfo {
public:
using Ptr = std::shared_ptr<Frame>;
virtual ~Frame() = default;
/**
*
@ -267,7 +266,6 @@ private:
class FrameWriterInterface {
public:
using Ptr = std::shared_ptr<FrameWriterInterface>;
FrameWriterInterface() = default;
virtual ~FrameWriterInterface() = default;
/**
@ -287,8 +285,6 @@ public:
class FrameDispatcher : public FrameWriterInterface {
public:
using Ptr = std::shared_ptr<FrameDispatcher>;
FrameDispatcher() = default;
~FrameDispatcher() override = default;
/**
*
@ -404,10 +400,8 @@ class FrameFromPtr : public Frame {
public:
using Ptr = std::shared_ptr<FrameFromPtr>;
FrameFromPtr(
CodecId codec_id, char *ptr, size_t size, uint64_t dts, uint64_t pts = 0, size_t prefix_size = 0,
bool is_key = false)
: FrameFromPtr(ptr, size, dts, pts, prefix_size, is_key) {
FrameFromPtr(CodecId codec_id, char *ptr, size_t size, uint64_t dts, uint64_t pts = 0, size_t prefix_size = 0, bool is_key = false)
: FrameFromPtr(ptr, size, dts, pts, prefix_size,is_key) {
_codec_id = codec_id;
}
@ -477,8 +471,6 @@ public:
_decode_able = frame->decodeAble();
}
~FrameCacheAble() override = default;
/**
*
*/
@ -528,8 +520,6 @@ private:
template <typename Parent>
class FrameWrapper : public Parent {
public:
~FrameWrapper() = default;
/**
* frame
* @param buf
@ -580,7 +570,6 @@ public:
};
FrameMerger(int type);
~FrameMerger() = default;
/**
* FrameMerger::inputFrame传入的onOutput回调

View File

@ -23,41 +23,34 @@ public:
/**
* G711采样率固定为8000
* @param codecId G711A G711U
* @param payload_type rtp payload type
* @param sample_rate
* @param payload_type rtp payload
* @param channels
* @param bitrate
*/
G711Sdp(CodecId codecId,
int sample_rate,
int channels,
int bitrate = 128,
int payload_type = 98) : Sdp(sample_rate,payload_type), _codecId(codecId){
G711Sdp(CodecId codecId, int payload_type, int sample_rate, int channels, int bitrate)
: Sdp(sample_rate, payload_type) {
_printer << "m=audio 0 RTP/AVP " << payload_type << "\r\n";
if (bitrate) {
_printer << "b=AS:" << bitrate << "\r\n";
}
_printer << "a=rtpmap:" << payload_type << " " << getCodecName() << "/" << sample_rate << "/" << channels << "\r\n";
_printer << "a=control:trackID=" << (int)TrackAudio << "\r\n";
_printer << "a=rtpmap:" << payload_type << " " << getCodecName(codecId) << "/" << sample_rate << "/" << channels << "\r\n";
}
string getSdp() const override {
return _printer;
}
CodecId getCodecId() const override {
return _codecId;
}
private:
_StrPrinter _printer;
CodecId _codecId;
};
Track::Ptr G711Track::clone() {
return std::make_shared<std::remove_reference<decltype(*this)>::type>(*this);
Track::Ptr G711Track::clone() const {
return std::make_shared<G711Track>(*this);
}
Sdp::Ptr G711Track::getSdp() {
if(!ready()){
Sdp::Ptr G711Track::getSdp(uint8_t payload_type) const {
if (!ready()) {
WarnL << getCodecName() << " Track未准备好";
return nullptr;
}
@ -66,13 +59,12 @@ Sdp::Ptr G711Track::getSdp() {
const auto sample_rate = getAudioSampleRate();
const auto audio_channel = getAudioChannel();
const auto bitrate = getBitRate() >> 10;
auto payload_type = 98;
if (sample_rate == 8000 && audio_channel == 1) {
// https://datatracker.ietf.org/doc/html/rfc3551#section-6
payload_type = (codec == CodecG711U) ? Rtsp::PT_PCMU : Rtsp::PT_PCMA;
}
return std::make_shared<G711Sdp>(codec, sample_rate, audio_channel, bitrate, payload_type);
return std::make_shared<G711Sdp>(codec, payload_type, sample_rate, audio_channel, bitrate);
}
}//namespace mediakit

View File

@ -25,8 +25,8 @@ public:
G711Track(CodecId codecId, int sample_rate, int channels, int sample_bit) : AudioTrackImp(codecId, 8000, 1, 16) {}
private:
Sdp::Ptr getSdp() override;
Track::Ptr clone() override;
Sdp::Ptr getSdp(uint8_t payload_type) const override;
Track::Ptr clone() const override;
};
}//namespace mediakit

View File

@ -2,11 +2,7 @@
namespace mediakit {
G711RtpEncoder::G711RtpEncoder(
CodecId codec, uint32_t ssrc, uint32_t mtu_size, uint32_t sample_rate, uint8_t payload_type, uint8_t interleaved,
uint32_t channels)
: CommonRtpDecoder(codec)
, RtpInfo(ssrc, mtu_size, sample_rate, payload_type, interleaved) {
G711RtpEncoder::G711RtpEncoder(CodecId codec, uint32_t channels){
_cache_frame = FrameImp::create();
_cache_frame->_codec_id = codec;
_channels = channels;
@ -40,7 +36,7 @@ bool G711RtpEncoder::inputFrame(const Frame::Ptr &frame) {
}
n++;
stamp += 20;
RtpCodec::inputRtp(makeRtp(getTrackType(), ptr, rtp_size, mark, stamp), false);
RtpCodec::inputRtp(getRtpInfo().makeRtp(TrackAudio, ptr, rtp_size, mark, stamp), false);
ptr += rtp_size;
remain_size -= rtp_size;
}

View File

@ -20,23 +20,16 @@ namespace mediakit {
/**
* G711 rtp编码类
*/
class G711RtpEncoder : public CommonRtpDecoder, public RtpInfo {
class G711RtpEncoder : public RtpCodec {
public:
using Ptr = std::shared_ptr<G711RtpEncoder>;
~G711RtpEncoder() override = default;
/**
*
* @param codec
* @param ssrc ssrc
* @param mtu_size mtu
* @param sample_rate
* @param payload_type pt类型
* @param interleaved rtsp interleaved
* @param channels
*/
G711RtpEncoder(CodecId codec, uint32_t ssrc, uint32_t mtu_size, uint32_t sample_rate, uint8_t payload_type,
uint8_t interleaved, uint32_t channels);
G711RtpEncoder(CodecId codec, uint32_t channels);
/**
* rtp

View File

@ -14,6 +14,10 @@
#include "Util/base64.h"
#include "Common/config.h"
#ifdef ENABLE_MP4
#include "mpeg4-avc.h"
#endif
using namespace std;
using namespace toolkit;
@ -109,24 +113,7 @@ size_t prefixSize(const char *ptr, size_t len) {
H264Track::H264Track(const string &sps, const string &pps, int sps_prefix_len, int pps_prefix_len) {
_sps = sps.substr(sps_prefix_len);
_pps = pps.substr(pps_prefix_len);
onReady();
}
H264Track::H264Track(const Frame::Ptr &sps, const Frame::Ptr &pps) {
if (sps->getCodecId() != CodecH264 || pps->getCodecId() != CodecH264) {
throw std::invalid_argument("必须输入H264类型的帧");
}
_sps = string(sps->data() + sps->prefixSize(), sps->size() - sps->prefixSize());
_pps = string(pps->data() + pps->prefixSize(), pps->size() - pps->prefixSize());
onReady();
}
const string &H264Track::getSps() const {
return _sps;
}
const string &H264Track::getPps() const {
return _pps;
update();
}
CodecId H264Track::getCodecId() const {
@ -145,7 +132,7 @@ float H264Track::getVideoFps() const {
return _fps;
}
bool H264Track::ready() {
bool H264Track::ready() const {
return !_sps.empty() && !_pps.empty();
}
@ -168,19 +155,87 @@ bool H264Track::inputFrame(const Frame::Ptr &frame) {
return ret;
}
toolkit::Buffer::Ptr H264Track::getExtraData() const {
CHECK(ready());
#ifdef ENABLE_MP4
struct mpeg4_avc_t avc;
memset(&avc, 0, sizeof(avc));
string sps_pps = string("\x00\x00\x00\x01", 4) + _sps + string("\x00\x00\x00\x01", 4) + _pps;
h264_annexbtomp4(&avc, sps_pps.data(), (int)sps_pps.size(), NULL, 0, NULL, NULL);
std::string extra_data;
extra_data.resize(1024);
auto extra_data_size = mpeg4_avc_decoder_configuration_record_save(&avc, (uint8_t *)extra_data.data(), extra_data.size());
if (extra_data_size == -1) {
WarnL << "生成H264 extra_data 失败";
return nullptr;
}
extra_data.resize(extra_data_size);
return std::make_shared<BufferString>(std::move(extra_data));
#else
std::string extra_data;
// AVCDecoderConfigurationRecord start
extra_data.push_back(1); // version
extra_data.push_back(_sps[1]); // profile
extra_data.push_back(_sps[2]); // compat
extra_data.push_back(_sps[3]); // level
extra_data.push_back((char)0xff); // 6 bits reserved + 2 bits nal size length - 1 (11)
extra_data.push_back((char)0xe1); // 3 bits reserved + 5 bits number of sps (00001)
// sps
uint16_t size = (uint16_t)_sps.size();
size = htons(size);
extra_data.append((char *)&size, 2);
extra_data.append(_sps);
// pps
extra_data.push_back(1); // version
size = (uint16_t)_pps.size();
size = htons(size);
extra_data.append((char *)&size, 2);
extra_data.append(_pps);
return std::make_shared<BufferString>(std::move(extra_data));
#endif
}
void H264Track::setExtraData(const uint8_t *data, size_t bytes) {
#ifdef ENABLE_MP4
struct mpeg4_avc_t avc;
memset(&avc, 0, sizeof(avc));
if (mpeg4_avc_decoder_configuration_record_load(data, bytes, &avc) > 0) {
std::vector<uint8_t> config(bytes * 2);
int size = mpeg4_avc_to_nalu(&avc, config.data(), bytes * 2);
if (size > 4) {
splitH264((char *)config.data(), size, 4, [&](const char *ptr, size_t len, size_t prefix) {
inputFrame_l(std::make_shared<H264FrameNoCacheAble>((char *)ptr, len, 0, 0, prefix));
});
update();
}
}
#else
CHECK(bytes >= 8); // 6 + 2
size_t offset = 6;
uint16_t sps_size = data[offset] << 8 | data[offset + 1];
auto sps_ptr = data + offset + 2;
offset += (2 + sps_size);
CHECK(bytes >= offset + 2); // + pps_size
_sps.assign((char *)sps_ptr, sps_size);
uint16_t pps_size = data[offset] << 8 | data[offset + 1];
auto pps_ptr = data + offset + 2;
offset += (2 + pps_size);
CHECK(bytes >= offset);
_pps.assign((char *)pps_ptr, pps_size);
update();
#endif
}
bool H264Track::update() {
return getAVCInfo(_sps, _width, _height, _fps);
}
void H264Track::onReady() {
if (!getAVCInfo(_sps, _width, _height, _fps)) {
_sps.clear();
_pps.clear();
}
}
Track::Ptr H264Track::clone() {
return std::make_shared<std::remove_reference<decltype(*this)>::type>(*this);
Track::Ptr H264Track::clone() const {
return std::make_shared<H264Track>(*this);
}
bool H264Track::inputFrame_l(const Frame::Ptr &frame) {
@ -218,7 +273,7 @@ bool H264Track::inputFrame_l(const Frame::Ptr &frame) {
}
if (_width == 0 && ready()) {
onReady();
update();
}
return ret;
}
@ -245,14 +300,12 @@ void H264Track::insertConfigFrame(const Frame::Ptr &frame) {
class H264Sdp : public Sdp {
public:
H264Sdp(const string &strSPS, const string &strPPS, int bitrate, int payload_type = 96)
: Sdp(90000, payload_type) {
//视频通道
H264Sdp(const string &strSPS, const string &strPPS, int payload_type, int bitrate) : Sdp(90000, payload_type) {
_printer << "m=video 0 RTP/AVP " << payload_type << "\r\n";
if (bitrate) {
_printer << "b=AS:" << bitrate << "\r\n";
}
_printer << "a=rtpmap:" << payload_type << " " << getCodecName() << "/" << 90000 << "\r\n";
_printer << "a=rtpmap:" << payload_type << " " << getCodecName(CodecH264) << "/" << 90000 << "\r\n";
/**
Single NAI Unit Mode = 0. // Single NAI mode (Only nals from 1-23 are allowed)
@ -275,23 +328,20 @@ public:
_printer << "; sprop-parameter-sets=";
_printer << encodeBase64(strSPS) << ",";
_printer << encodeBase64(strPPS) << "\r\n";
_printer << "a=control:trackID=" << (int)TrackVideo << "\r\n";
}
string getSdp() const { return _printer; }
CodecId getCodecId() const { return CodecH264; }
private:
_StrPrinter _printer;
};
Sdp::Ptr H264Track::getSdp() {
Sdp::Ptr H264Track::getSdp(uint8_t payload_type) const {
if (!ready()) {
WarnL << getCodecName() << " Track未准备好";
return nullptr;
}
return std::make_shared<H264Sdp>(getSps(), getPps(), getBitRate() / 1024);
return std::make_shared<H264Sdp>(_sps, _pps, payload_type, getBitRate() / 1024);
}
} // namespace mediakit

View File

@ -18,7 +18,6 @@
namespace mediakit{
bool getAVCInfo(const std::string &strSps,int &iVideoWidth, int &iVideoHeight, float &iVideoFps);
void splitH264(const char *ptr, size_t len, size_t prefix, const std::function<void(const char *, size_t, size_t)> &cb);
size_t prefixSize(const char *ptr, size_t len);
@ -43,8 +42,6 @@ public:
this->_codec_id = CodecH264;
}
~H264FrameHelper() override = default;
bool keyFrame() const override {
auto nal_ptr = (uint8_t *) this->data() + this->prefixSize();
return H264_TYPE(*nal_ptr) == NAL_IDR && decodeAble();
@ -107,33 +104,21 @@ public:
* @param sps_prefix_len 264340x00 00 00 01
* @param pps_prefix_len 264340x00 00 00 01
*/
H264Track(const std::string &sps,const std::string &pps,int sps_prefix_len = 4,int pps_prefix_len = 4);
H264Track(const std::string &sps, const std::string &pps, int sps_prefix_len = 4, int pps_prefix_len = 4);
/**
* h264类型的媒体
* @param sps sps帧
* @param pps pps帧
*/
H264Track(const Frame::Ptr &sps,const Frame::Ptr &pps);
/**
* 0x00 00 00 01sps/pps
*/
const std::string &getSps() const;
const std::string &getPps() const;
bool ready() override;
bool ready() const override;
CodecId getCodecId() const override;
int getVideoHeight() const override;
int getVideoWidth() const override;
float getVideoFps() const override;
bool inputFrame(const Frame::Ptr &frame) override;
toolkit::Buffer::Ptr getExtraData() const override;
void setExtraData(const uint8_t *data, size_t size) override;
bool update() override;
private:
void onReady();
Sdp::Ptr getSdp() override;
Track::Ptr clone() override;
Sdp::Ptr getSdp(uint8_t payload_type) const override;
Track::Ptr clone() const override;
bool inputFrame_l(const Frame::Ptr &frame);
void insertConfigFrame(const Frame::Ptr &frame);

View File

@ -16,139 +16,50 @@ using namespace toolkit;
namespace mediakit {
H264RtmpDecoder::H264RtmpDecoder() {
_h264frame = obtainFrame();
}
H264Frame::Ptr H264RtmpDecoder::obtainFrame() {
auto frame = FrameImp::create<H264Frame>();
frame->_prefix_size = 4;
return frame;
}
/**
* 0x00 00 00 01sps pps
*/
static bool getH264Config(const RtmpPacket &thiz, string &sps, string &pps) {
if ((RtmpVideoCodec)thiz.getRtmpCodecId() != RtmpVideoCodec::h264) {
return false;
}
if (thiz.buffer.size() < 13) {
return false;
}
uint16_t sps_size;
memcpy(&sps_size, thiz.buffer.data() + 11, 2);
sps_size = ntohs(sps_size);
if ((int) thiz.buffer.size() < 13 + sps_size + 1 + 2) {
return false;
}
uint16_t pps_size;
memcpy(&pps_size, thiz.buffer.data() + 13 + sps_size + 1, 2);
pps_size = ntohs(pps_size);
if ((int) thiz.buffer.size() < 13 + sps_size + 1 + 2 + pps_size) {
return false;
}
sps.assign(thiz.buffer.data() + 13, sps_size);
pps.assign(thiz.buffer.data() + 13 + sps_size + 1 + 2, pps_size);
return true;
}
void H264RtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt) {
if (pkt->isConfigFrame()) {
//缓存sps pps后续插入到I帧之前
if (!getH264Config(*pkt, _sps, _pps)) {
WarnL << "get h264 sps/pps failed, rtmp packet is: " << hexdump(pkt->data(), pkt->size());
return;
}
onGetH264(_sps.data(), _sps.size(), pkt->time_stamp, pkt->time_stamp);
onGetH264(_pps.data(), _pps.size(), pkt->time_stamp, pkt->time_stamp);
CHECK(pkt->size() > 5);
getTrack()->setExtraData((uint8_t *)pkt->data() + 5, pkt->size() - 5);
return;
}
if (pkt->buffer.size() > 9) {
auto total_len = pkt->buffer.size();
size_t offset = 5;
uint8_t *cts_ptr = (uint8_t *) (pkt->buffer.data() + 2);
int32_t cts = (((cts_ptr[0] << 16) | (cts_ptr[1] << 8) | (cts_ptr[2])) + 0xff800000) ^ 0xff800000;
auto pts = pkt->time_stamp + cts;
while (offset + 4 < total_len) {
uint32_t frame_len;
memcpy(&frame_len, pkt->buffer.data() + offset, 4);
frame_len = ntohl(frame_len);
offset += 4;
if (frame_len + offset > total_len) {
break;
}
onGetH264(pkt->buffer.data() + offset, frame_len, pkt->time_stamp, pts);
offset += frame_len;
CHECK(pkt->size() > 9);
uint8_t *cts_ptr = (uint8_t *)(pkt->buffer.data() + 2);
int32_t cts = (((cts_ptr[0] << 16) | (cts_ptr[1] << 8) | (cts_ptr[2])) + 0xff800000) ^ 0xff800000;
auto pts = pkt->time_stamp + cts;
splitFrame((uint8_t *)pkt->data() + 5, pkt->size() - 5, pkt->time_stamp, pts);
}
void H264RtmpDecoder::splitFrame(const uint8_t *data, size_t size, uint32_t dts, uint32_t pts) {
auto end = data + size;
while (data + 4 < end) {
uint32_t frame_len = load_be32(data);
data += 4;
if (data + frame_len > end) {
break;
}
outputFrame((const char *)data, frame_len, dts, pts);
data += frame_len;
}
}
inline void H264RtmpDecoder::onGetH264(const char* data, size_t len, uint32_t dts, uint32_t pts) {
if (!len) {
return;
}
_h264frame->_dts = dts;
_h264frame->_pts = pts;
_h264frame->_buffer.assign("\x00\x00\x00\x01", 4); //添加264头
_h264frame->_buffer.append(data, len);
//写入环形缓存
RtmpCodec::inputFrame(_h264frame);
_h264frame = obtainFrame();
void H264RtmpDecoder::outputFrame(const char *data, size_t len, uint32_t dts, uint32_t pts) {
auto frame = FrameImp::create<H264Frame>();
frame->_prefix_size = 4;
frame->_dts = dts;
frame->_pts = pts;
frame->_buffer.assign("\x00\x00\x00\x01", 4); // 添加264头
frame->_buffer.append(data, len);
RtmpCodec::inputFrame(frame);
}
////////////////////////////////////////////////////////////////////////
H264RtmpEncoder::H264RtmpEncoder(const Track::Ptr &track) {
_track = dynamic_pointer_cast<H264Track>(track);
}
void H264RtmpEncoder::makeConfigPacket(){
if (_track && _track->ready()) {
//尝试从track中获取sps pps信息
_sps = _track->getSps();
_pps = _track->getPps();
}
if (!_sps.empty() && !_pps.empty()) {
//获取到sps/pps
makeVideoConfigPkt();
_got_config_frame = true;
}
}
void H264RtmpEncoder::flush() {
inputFrame(nullptr);
}
bool H264RtmpEncoder::inputFrame(const Frame::Ptr &frame) {
if (frame) {
auto data = frame->data() + frame->prefixSize();
auto len = frame->size() - frame->prefixSize();
auto type = H264_TYPE(data[0]);
switch (type) {
case H264Frame::NAL_SPS: {
if (!_got_config_frame) {
_sps = string(data, len);
makeConfigPacket();
}
break;
}
case H264Frame::NAL_PPS: {
if (!_got_config_frame) {
_pps = string(data, len);
makeConfigPacket();
}
break;
}
default: break;
}
}
if (!_rtmp_packet) {
_rtmp_packet = RtmpPacket::create();
//flags/not config/cts预占位
@ -173,11 +84,7 @@ bool H264RtmpEncoder::inputFrame(const Frame::Ptr &frame) {
}, &_rtmp_packet->buffer);
}
void H264RtmpEncoder::makeVideoConfigPkt() {
if (_sps.size() < 4) {
WarnL << "sps长度不足4字节";
return;
}
void H264RtmpEncoder::makeConfigPacket() {
auto flags = (uint8_t)RtmpVideoCodec::h264;
flags |= ((uint8_t)RtmpFrameType::key_frame << 4);
auto pkt = RtmpPacket::create();
@ -187,23 +94,9 @@ void H264RtmpEncoder::makeVideoConfigPkt() {
// cts
pkt->buffer.append("\x0\x0\x0", 3);
// AVCDecoderConfigurationRecord start
pkt->buffer.push_back(1); // version
pkt->buffer.push_back(_sps[1]); // profile
pkt->buffer.push_back(_sps[2]); // compat
pkt->buffer.push_back(_sps[3]); // level
pkt->buffer.push_back((char)0xff); // 6 bits reserved + 2 bits nal size length - 1 (11)
pkt->buffer.push_back((char)0xe1); // 3 bits reserved + 5 bits number of sps (00001)
// sps
uint16_t size = (uint16_t)_sps.size();
size = htons(size);
pkt->buffer.append((char *)&size, 2);
pkt->buffer.append(_sps);
// pps
pkt->buffer.push_back(1); // version
size = (uint16_t)_pps.size();
size = htons(size);
pkt->buffer.append((char *)&size, 2);
pkt->buffer.append(_pps);
auto extra_data = getTrack()->getExtraData();
CHECK(extra_data);
pkt->buffer.append(extra_data->data(), extra_data->size());
pkt->body_size = pkt->buffer.size();
pkt->chunk_id = CHUNK_VIDEO;

View File

@ -24,8 +24,7 @@ class H264RtmpDecoder : public RtmpCodec {
public:
using Ptr = std::shared_ptr<H264RtmpDecoder>;
H264RtmpDecoder();
~H264RtmpDecoder() {}
H264RtmpDecoder(const Track::Ptr &track) : RtmpCodec(track) {}
/**
* 264 Rtmp包
@ -33,24 +32,15 @@ public:
*/
void inputRtmp(const RtmpPacket::Ptr &rtmp) override;
CodecId getCodecId() const override{
return CodecH264;
}
protected:
void onGetH264(const char *data, size_t len, uint32_t dts, uint32_t pts);
H264Frame::Ptr obtainFrame();
protected:
H264Frame::Ptr _h264frame;
std::string _sps;
std::string _pps;
private:
void outputFrame(const char *data, size_t len, uint32_t dts, uint32_t pts);
void splitFrame(const uint8_t *data, size_t size, uint32_t dts, uint32_t pts);
};
/**
* 264 Rtmp打包类
*/
class H264RtmpEncoder : public H264RtmpDecoder{
class H264RtmpEncoder : public RtmpCodec {
public:
using Ptr = std::shared_ptr<H264RtmpEncoder>;
@ -60,8 +50,7 @@ public:
* inputFrame时可以不输入sps pps
* @param track
*/
H264RtmpEncoder(const Track::Ptr &track);
~H264RtmpEncoder() = default;
H264RtmpEncoder(const Track::Ptr &track) : RtmpCodec(track) {}
/**
* 264sps pps
@ -80,13 +69,8 @@ public:
void makeConfigPacket() override;
private:
void makeVideoConfigPkt();
private:
bool _got_config_frame = false;
H264Track::Ptr _track;
RtmpPacket::Ptr _rtmp_packet;
FrameMerger _merger{FrameMerger::mp4_nal_size};
FrameMerger _merger { FrameMerger::mp4_nal_size };
};
}//namespace mediakit

View File

@ -192,10 +192,6 @@ void H264RtpDecoder::outputFrame(const RtpPacket::Ptr &rtp, const H264Frame::Ptr
////////////////////////////////////////////////////////////////////////
H264RtpEncoder::H264RtpEncoder(uint32_t ssrc, uint32_t mtu, uint32_t sample_rate, uint8_t pt, uint8_t interleaved)
: RtpInfo(ssrc, mtu, sample_rate, pt, interleaved) {
}
void H264RtpEncoder::insertConfigFrame(uint64_t pts){
if (!_sps || !_pps) {
return;
@ -206,7 +202,7 @@ void H264RtpEncoder::insertConfigFrame(uint64_t pts){
}
void H264RtpEncoder::packRtp(const char *ptr, size_t len, uint64_t pts, bool is_mark, bool gop_pos){
if (len + 3 <= getMaxSize()) {
if (len + 3 <= getRtpInfo().getMaxSize()) {
// 采用STAP-A/Single NAL unit packet per H.264 模式
packRtpSmallFrame(ptr, len, pts, is_mark, gop_pos);
} else {
@ -216,7 +212,7 @@ void H264RtpEncoder::packRtp(const char *ptr, size_t len, uint64_t pts, bool is_
}
void H264RtpEncoder::packRtpFu(const char *ptr, size_t len, uint64_t pts, bool is_mark, bool gop_pos){
auto packet_size = getMaxSize() - 2;
auto packet_size = getRtpInfo().getMaxSize() - 2;
if (len <= packet_size + 1) {
// 小于FU-A打包最小字节长度要求采用STAP-A/Single NAL unit packet per H.264 模式
packRtpSmallFrame(ptr, len, pts, is_mark, gop_pos);
@ -238,7 +234,7 @@ void H264RtpEncoder::packRtpFu(const char *ptr, size_t len, uint64_t pts, bool i
}
//传入nullptr先不做payload的内存拷贝
auto rtp = makeRtp(getTrackType(), nullptr, packet_size + 2, fu_flags->end_bit && is_mark, pts);
auto rtp = getRtpInfo().makeRtp(TrackVideo, nullptr, packet_size + 2, fu_flags->end_bit && is_mark, pts);
//rtp payload 负载部分
uint8_t *payload = rtp->getPayload();
//FU-A 第1个字节
@ -266,7 +262,7 @@ void H264RtpEncoder::packRtpSmallFrame(const char *data, size_t len, uint64_t pt
void H264RtpEncoder::packRtpStapA(const char *ptr, size_t len, uint64_t pts, bool is_mark, bool gop_pos){
// 如果帧长度不超过mtu,为了兼容性 webrtc采用STAP-A模式打包
auto rtp = makeRtp(getTrackType(), nullptr, len + 3, is_mark, pts);
auto rtp = getRtpInfo().makeRtp(TrackVideo, nullptr, len + 3, is_mark, pts);
uint8_t *payload = rtp->getPayload();
//STAP-A
payload[0] = (ptr[0] & (~0x1F)) | 24;
@ -279,7 +275,7 @@ void H264RtpEncoder::packRtpStapA(const char *ptr, size_t len, uint64_t pts, boo
void H264RtpEncoder::packRtpSingleNalu(const char *data, size_t len, uint64_t pts, bool is_mark, bool gop_pos) {
// Single NAL unit packet per H.264 模式
RtpCodec::inputRtp(makeRtp(getTrackType(), data, len, is_mark, pts), gop_pos);
RtpCodec::inputRtp(getRtpInfo().makeRtp(TrackVideo, data, len, is_mark, pts), gop_pos);
}
bool H264RtpEncoder::inputFrame(const Frame::Ptr &frame) {

View File

@ -28,7 +28,6 @@ public:
using Ptr = std::shared_ptr<H264RtpDecoder>;
H264RtpDecoder();
~H264RtpDecoder() override = default;
/**
* 264 rtp包
@ -37,10 +36,6 @@ public:
*/
bool inputRtp(const RtpPacket::Ptr &rtp, bool key_pos = true) override;
CodecId getCodecId() const override{
return CodecH264;
}
private:
bool singleFrame(const RtpPacket::Ptr &rtp, const uint8_t *ptr, ssize_t size, uint64_t stamp);
bool unpackStapA(const RtpPacket::Ptr &rtp, const uint8_t *ptr, ssize_t size, uint64_t stamp);
@ -62,26 +57,11 @@ private:
/**
* 264 rtp打包类
*/
class H264RtpEncoder : public H264RtpDecoder ,public RtpInfo{
class H264RtpEncoder : public RtpCodec {
public:
using Ptr = std::shared_ptr<H264RtpEncoder>;
/**
* @param ssrc ssrc
* @param mtu mtu大小
* @param sample_rate 90000
* @param pt pt类型
* @param interleaved rtsp interleaved
*/
H264RtpEncoder(uint32_t ssrc,
uint32_t mtu = 1400,
uint32_t sample_rate = 90000,
uint8_t pt = 96,
uint8_t interleaved = TrackVideo * 2);
~H264RtpEncoder() override = default;
/**
* 264
* @param frame
*/

View File

@ -12,6 +12,10 @@
#include "SPSParser.h"
#include "Util/base64.h"
#ifdef ENABLE_MP4
#include "mpeg4-hevc.h"
#endif
using namespace std;
using namespace toolkit;
@ -58,19 +62,7 @@ H265Track::H265Track(const string &vps,const string &sps, const string &pps,int
_vps = vps.substr(vps_prefix_len);
_sps = sps.substr(sps_prefix_len);
_pps = pps.substr(pps_prefix_len);
onReady();
}
const string &H265Track::getVps() const {
return _vps;
}
const string &H265Track::getSps() const {
return _sps;
}
const string &H265Track::getPps() const {
return _pps;
update();
}
CodecId H265Track::getCodecId() const {
@ -89,7 +81,7 @@ float H265Track::getVideoFps() const {
return _fps;
}
bool H265Track::ready() {
bool H265Track::ready() const {
return !_vps.empty() && !_sps.empty() && !_pps.empty();
}
@ -139,25 +131,58 @@ bool H265Track::inputFrame_l(const Frame::Ptr &frame) {
}
}
if (_width == 0 && ready()) {
onReady();
update();
}
return ret;
}
toolkit::Buffer::Ptr H265Track::getExtraData() const {
CHECK(ready());
#ifdef ENABLE_MP4
struct mpeg4_hevc_t hevc;
memset(&hevc, 0, sizeof(hevc));
string vps_sps_pps = string("\x00\x00\x00\x01", 4) + _vps + string("\x00\x00\x00\x01", 4) + _sps + string("\x00\x00\x00\x01", 4) + _pps;
h265_annexbtomp4(&hevc, vps_sps_pps.data(), (int) vps_sps_pps.size(), NULL, 0, NULL, NULL);
std::string extra_data;
extra_data.resize(1024);
auto extra_data_size = mpeg4_hevc_decoder_configuration_record_save(&hevc, (uint8_t *)extra_data.data(), extra_data.size());
if (extra_data_size == -1) {
WarnL << "生成H265 extra_data 失败";
return nullptr;
}
return std::make_shared<BufferString>(std::move(extra_data));
#else
WarnL << "请开启MP4相关功能并使能\"ENABLE_MP4\",否则对H265的支持不完善";
return nullptr;
#endif
}
void H265Track::setExtraData(const uint8_t *data, size_t bytes) {
#ifdef ENABLE_MP4
struct mpeg4_hevc_t hevc;
memset(&hevc, 0, sizeof(hevc));
if (mpeg4_hevc_decoder_configuration_record_load(data, bytes, &hevc) > 0) {
std::vector<uint8_t> config(bytes * 2);
int size = mpeg4_hevc_to_nalu(&hevc, config.data(), bytes * 2);
if (size > 4) {
splitH264((char *)config.data(), size, 4, [&](const char *ptr, size_t len, size_t prefix) {
inputFrame_l(std::make_shared<H265FrameNoCacheAble>((char *)ptr, len, 0, 0, prefix));
});
update();
}
}
#else
WarnL << "请开启MP4相关功能并使能\"ENABLE_MP4\",否则对H265的支持不完善";
#endif
}
bool H265Track::update() {
return getHEVCInfo(_vps, _sps, _width, _height, _fps);
}
void H265Track::onReady() {
if (!getHEVCInfo(_vps, _sps, _width, _height, _fps)) {
_vps.clear();
_sps.clear();
_pps.clear();
}
}
Track::Ptr H265Track::clone() {
return std::make_shared<std::remove_reference<decltype(*this)>::type>(*this);
Track::Ptr H265Track::clone() const {
return std::make_shared<H265Track>(*this);
}
void H265Track::insertConfigFrame(const Frame::Ptr &frame) {
@ -205,17 +230,13 @@ public:
* @param payload_type rtp payload type 96
* @param bitrate
*/
H265Sdp(const string &strVPS,
const string &strSPS,
const string &strPPS,
int bitrate = 4000,
int payload_type = 96) : Sdp(90000,payload_type) {
H265Sdp(const string &strVPS, const string &strSPS, const string &strPPS, int payload_type, int bitrate) : Sdp(90000, payload_type) {
//视频通道
_printer << "m=video 0 RTP/AVP " << payload_type << "\r\n";
if (bitrate) {
_printer << "b=AS:" << bitrate << "\r\n";
}
_printer << "a=rtpmap:" << payload_type << " " << getCodecName() << "/" << 90000 << "\r\n";
_printer << "a=rtpmap:" << payload_type << " " << getCodecName(CodecH265) << "/" << 90000 << "\r\n";
_printer << "a=fmtp:" << payload_type << " ";
_printer << "sprop-vps=";
_printer << encodeBase64(strVPS) << "; ";
@ -223,26 +244,20 @@ public:
_printer << encodeBase64(strSPS) << "; ";
_printer << "sprop-pps=";
_printer << encodeBase64(strPPS) << "\r\n";
_printer << "a=control:trackID=" << (int)TrackVideo << "\r\n";
}
string getSdp() const override {
return _printer;
}
string getSdp() const override { return _printer; }
CodecId getCodecId() const override {
return CodecH265;
}
private:
_StrPrinter _printer;
};
Sdp::Ptr H265Track::getSdp() {
if(!ready()){
Sdp::Ptr H265Track::getSdp(uint8_t payload_type) const {
if (!ready()) {
WarnL << getCodecName() << " Track未准备好";
return nullptr;
}
return std::make_shared<H265Sdp>(getVps(), getSps(), getPps(), getBitRate() / 1024);
return std::make_shared<H265Sdp>(_vps, _sps, _pps, payload_type, getBitRate() / 1024);
}
}//namespace mediakit

View File

@ -19,8 +19,6 @@
namespace mediakit {
bool getHEVCInfo(const std::string &strVps, const std::string &strSps, int &iVideoWidth, int &iVideoHeight, float &iVideoFps);
template<typename Parent>
class H265FrameHelper : public Parent{
public:
@ -64,8 +62,6 @@ public:
this->_codec_id = CodecH265;
}
~H265FrameHelper() override = default;
bool keyFrame() const override {
auto nal_ptr = (uint8_t *) this->data() + this->prefixSize();
auto type = H265_TYPE(*nal_ptr);
@ -137,25 +133,19 @@ public:
*/
H265Track(const std::string &vps,const std::string &sps, const std::string &pps,int vps_prefix_len = 4, int sps_prefix_len = 4, int pps_prefix_len = 4);
/**
* 0x00 00 00 01vps/sps/pps
*/
const std::string &getVps() const;
const std::string &getSps() const;
const std::string &getPps() const;
bool ready() override;
bool ready() const override;
CodecId getCodecId() const override;
int getVideoWidth() const override;
int getVideoHeight() const override;
float getVideoFps() const override;
bool inputFrame(const Frame::Ptr &frame) override;
toolkit::Buffer::Ptr getExtraData() const override;
void setExtraData(const uint8_t *data, size_t size) override;
bool update() override;
private:
void onReady();
Sdp::Ptr getSdp() override;
Track::Ptr clone() override;
Sdp::Ptr getSdp(uint8_t payload_type) const override;
Track::Ptr clone() const override;
bool inputFrame_l(const Frame::Ptr &frame);
void insertConfigFrame(const Frame::Ptr &frame);

View File

@ -8,8 +8,9 @@
* may be found in the AUTHORS file in the root of the source tree.
*/
#include "Rtmp/utils.h"
#include "H265Rtmp.h"
#include "Rtmp/utils.h"
#include "Common/config.h"
#ifdef ENABLE_MP4
#include "mpeg4-hevc.h"
#endif // ENABLE_MP4
@ -19,48 +20,6 @@ using namespace toolkit;
namespace mediakit {
H265RtmpDecoder::H265RtmpDecoder() {
_h265frame = obtainFrame();
}
H265Frame::Ptr H265RtmpDecoder::obtainFrame() {
auto frame = FrameImp::create<H265Frame>();
frame->_prefix_size = 4;
return frame;
}
#ifdef ENABLE_MP4
static bool decode_HEVCDecoderConfigurationRecord(uint8_t *extra, size_t bytes, string &frame) {
struct mpeg4_hevc_t hevc;
memset(&hevc, 0, sizeof(hevc));
if (mpeg4_hevc_decoder_configuration_record_load((uint8_t *)extra, bytes, &hevc) > 0) {
uint8_t *config = new uint8_t[bytes * 2];
int size = mpeg4_hevc_to_nalu(&hevc, config, bytes * 2);
if (size > 4) {
frame.assign((char *)config + 4, size - 4);
}
delete[] config;
return size > 4;
}
return false;
}
/**
* 0x00 00 00 01sps
*/
static bool getH265ConfigFrame(const RtmpPacket &thiz, string &frame) {
if ((RtmpVideoCodec)thiz.getRtmpCodecId() != RtmpVideoCodec::h265) {
return false;
}
if (thiz.buffer.size() < 6) {
WarnL << "bad H265 cfg!";
return false;
}
return decode_HEVCDecoderConfigurationRecord((uint8_t *)thiz.buffer.data() + 5, thiz.buffer.size() - 5, frame);
}
#endif
void H265RtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt) {
if (_info.codec == CodecInvalid) {
// 先判断是否为增强型rtmp
@ -73,75 +32,47 @@ void H265RtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt) {
if (!_info.is_enhanced || _info.codec != CodecH265) {
throw std::invalid_argument("Invalid enhanced-rtmp hevc packet!");
}
auto data = (uint8_t *)pkt->data() + 5;
auto size = pkt->size() - 5;
switch (_info.video.pkt_type) {
case RtmpPacketType::PacketTypeSequenceStart: {
#ifdef ENABLE_MP4
string config;
if (decode_HEVCDecoderConfigurationRecord(data, size, config)) {
onGetH265(config.data(), config.size(), pkt->time_stamp, pkt->time_stamp);
}
#else
WarnL << "请开启MP4相关功能并使能\"ENABLE_MP4\",否则对H265-RTMP支持不完善";
#endif
getTrack()->setExtraData((uint8_t *)pkt->data() + RtmpPacketInfo::kEnhancedRtmpHeaderSize, pkt->size() - RtmpPacketInfo::kEnhancedRtmpHeaderSize);
break;
}
case RtmpPacketType::PacketTypeCodedFramesX:
case RtmpPacketType::PacketTypeCodedFrames: {
auto data = (uint8_t *)pkt->data() + RtmpPacketInfo::kEnhancedRtmpHeaderSize;
auto size = pkt->size() - RtmpPacketInfo::kEnhancedRtmpHeaderSize;
auto pts = pkt->time_stamp;
CHECK(size > 3);
if (RtmpPacketType::PacketTypeCodedFrames == _info.video.pkt_type) {
// SI24 = [CompositionTime Offset]
CHECK(size > 7);
int32_t cts = (((data[0] << 16) | (data[1] << 8) | (data[2])) + 0xff800000) ^ 0xff800000;
pts += cts;
data += 3;
size -= 3;
}
CHECK(size > 4);
splitFrame(data, size, pkt->time_stamp, pts);
break;
}
case RtmpPacketType::PacketTypeMetadata: {
// The body does not contain video data. The body is an AMF encoded metadata.
// The metadata will be represented by a series of [name, value] pairs.
// For now the only defined [name, value] pair is [“colorInfo”, Object]
// See Metadata Frame section for more details of this object.
//
// For a deeper understanding of the encoding please see description
// of SCRIPTDATA and SSCRIPTDATAVALUE in the FLV file spec.
// DATA = [“colorInfo”, Object]
break;
}
case RtmpPacketType::PacketTypeSequenceEnd: {
// signals end of sequence
break;
}
default: break;
default: WarnL << "Unknown pkt_type: " << (int)_info.video.pkt_type; break;
}
return;
}
// 国内扩展(12) H265 rtmp
if (pkt->isConfigFrame()) {
#ifdef ENABLE_MP4
string config;
if (getH265ConfigFrame(*pkt, config)) {
onGetH265(config.data(), config.size(), pkt->time_stamp, pkt->time_stamp);
}
#else
WarnL << "请开启MP4相关功能并使能\"ENABLE_MP4\",否则对H265-RTMP支持不完善";
#endif
CHECK(pkt->size() > 5);
getTrack()->setExtraData((uint8_t *)pkt->data() + 5, pkt->size() - 5);
return;
}
if (pkt->buffer.size() > 9) {
uint8_t *cts_ptr = (uint8_t *)(pkt->buffer.data() + 2);
int32_t cts = (((cts_ptr[0] << 16) | (cts_ptr[1] << 8) | (cts_ptr[2])) + 0xff800000) ^ 0xff800000;
auto pts = pkt->time_stamp + cts;
splitFrame((uint8_t *)pkt->data() + 5, pkt->size() - 5, pkt->time_stamp, pts);
}
CHECK(pkt->size() > 9);
uint8_t *cts_ptr = (uint8_t *)(pkt->buffer.data() + 2);
int32_t cts = (((cts_ptr[0] << 16) | (cts_ptr[1] << 8) | (cts_ptr[2])) + 0xff800000) ^ 0xff800000;
auto pts = pkt->time_stamp + cts;
splitFrame((uint8_t *)pkt->data() + 5, pkt->size() - 5, pkt->time_stamp, pts);
}
void H265RtmpDecoder::splitFrame(const uint8_t *data, size_t size, uint32_t dts, uint32_t pts) {
@ -152,100 +83,51 @@ void H265RtmpDecoder::splitFrame(const uint8_t *data, size_t size, uint32_t dts,
if (data + frame_len > end) {
break;
}
onGetH265((const char *)data, frame_len, dts, pts);
outputFrame((const char *)data, frame_len, dts, pts);
data += frame_len;
}
}
inline void H265RtmpDecoder::onGetH265(const char *data, size_t size, uint32_t dts, uint32_t pts) {
if (size == 0) {
return;
}
#if 1
_h265frame->_dts = dts;
_h265frame->_pts = pts;
_h265frame->_buffer.assign("\x00\x00\x00\x01", 4); // 添加265头
_h265frame->_buffer.append(data, size);
// 写入环形缓存
RtmpCodec::inputFrame(_h265frame);
_h265frame = obtainFrame();
#else
// 防止内存拷贝这样产生的265帧不会有0x00 00 01头
auto frame = std::make_shared<H265FrameNoCacheAble>((char *)data, size, dts, pts, 0);
inline void H265RtmpDecoder::outputFrame(const char *data, size_t size, uint32_t dts, uint32_t pts) {
auto frame = FrameImp::create<H265Frame>();
frame->_prefix_size = 4;
frame->_dts = dts;
frame->_pts = pts;
frame->_buffer.assign("\x00\x00\x00\x01", 4); // 添加265头
frame->_buffer.append(data, size);
RtmpCodec::inputFrame(frame);
#endif
}
////////////////////////////////////////////////////////////////////////
H265RtmpEncoder::H265RtmpEncoder(const Track::Ptr &track) {
_track = dynamic_pointer_cast<H265Track>(track);
}
void H265RtmpEncoder::makeConfigPacket() {
if (_track && _track->ready()) {
// 尝试从track中获取sps pps信息
_sps = _track->getSps();
_pps = _track->getPps();
_vps = _track->getVps();
}
if (!_sps.empty() && !_pps.empty() && !_vps.empty()) {
// 获取到sps/pps
makeVideoConfigPkt();
_got_config_frame = true;
}
}
void H265RtmpEncoder::flush() {
inputFrame(nullptr);
}
bool H265RtmpEncoder::inputFrame(const Frame::Ptr &frame) {
if (frame) {
auto data = frame->data() + frame->prefixSize();
auto len = frame->size() - frame->prefixSize();
auto type = H265_TYPE(data[0]);
switch (type) {
case H265Frame::NAL_SPS: {
if (!_got_config_frame) {
_sps = string(data, len);
makeConfigPacket();
}
break;
}
case H265Frame::NAL_PPS: {
if (!_got_config_frame) {
_pps = string(data, len);
makeConfigPacket();
}
break;
}
case H265Frame::NAL_VPS: {
if (!_got_config_frame) {
_vps = string(data, len);
makeConfigPacket();
}
break;
}
default: break;
}
}
if (!_rtmp_packet) {
_rtmp_packet = RtmpPacket::create();
// flags/not_config/cts预占位
_rtmp_packet->buffer.resize(5);
GET_CONFIG(bool, enhanced, Rtmp::kEnhanced);
_rtmp_packet->buffer.resize((enhanced ? RtmpPacketInfo::kEnhancedRtmpHeaderSize : 2) + 3);
}
return _merger.inputFrame(frame, [this](uint64_t dts, uint64_t pts, const Buffer::Ptr &, bool have_key_frame) {
// flags
_rtmp_packet->buffer[0] = (uint8_t)RtmpVideoCodec::h265 | ((uint8_t)(have_key_frame ? RtmpFrameType::key_frame : RtmpFrameType::inter_frame) << 4);
_rtmp_packet->buffer[1] = (uint8_t)RtmpH264PacketType::h264_nalu;
GET_CONFIG(bool, enhanced, Rtmp::kEnhanced);
if (enhanced) {
auto header = (RtmpVideoHeaderEnhanced *)_rtmp_packet->data();
header->enhanced = 1;
header->pkt_type = (int)RtmpPacketType::PacketTypeCodedFrames;
header->frame_type = have_key_frame ? (int)RtmpFrameType::key_frame : (int)RtmpFrameType::inter_frame;
header->fourcc = htonl((uint32_t)RtmpVideoCodec::fourcc_hevc);
} else {
// flags
_rtmp_packet->buffer[0] = (uint8_t)RtmpVideoCodec::h265 | ((uint8_t)(have_key_frame ? RtmpFrameType::key_frame : RtmpFrameType::inter_frame) << 4);
_rtmp_packet->buffer[1] = (uint8_t)RtmpH264PacketType::h264_nalu;
}
int32_t cts = pts - dts;
// cts
set_be24(&_rtmp_packet->buffer[2], cts);
set_be24(&_rtmp_packet->buffer[enhanced ? 5 : 2], cts);
_rtmp_packet->time_stamp = dts;
_rtmp_packet->body_size = _rtmp_packet->buffer.size();
_rtmp_packet->chunk_id = CHUNK_VIDEO;
@ -257,38 +139,36 @@ bool H265RtmpEncoder::inputFrame(const Frame::Ptr &frame) {
}, &_rtmp_packet->buffer);
}
void H265RtmpEncoder::makeVideoConfigPkt() {
#ifdef ENABLE_MP4
auto flags = (uint8_t)RtmpVideoCodec::h265;
flags |= ((uint8_t)RtmpFrameType::key_frame << 4);
void H265RtmpEncoder::makeConfigPacket() {
auto pkt = RtmpPacket::create();
// header
pkt->buffer.push_back(flags);
pkt->buffer.push_back((uint8_t)RtmpH264PacketType::h264_config_header);
// cts
pkt->buffer.append("\x0\x0\x0", 3);
struct mpeg4_hevc_t hevc;
memset(&hevc, 0, sizeof(hevc));
string vps_sps_pps = string("\x00\x00\x00\x01", 4) + _vps + string("\x00\x00\x00\x01", 4) + _sps + string("\x00\x00\x00\x01", 4) + _pps;
h265_annexbtomp4(&hevc, vps_sps_pps.data(), (int)vps_sps_pps.size(), NULL, 0, NULL, NULL);
uint8_t extra_data[1024];
int extra_data_size = mpeg4_hevc_decoder_configuration_record_save(&hevc, extra_data, sizeof(extra_data));
if (extra_data_size == -1) {
WarnL << "生成H265 extra_data 失败";
return;
GET_CONFIG(bool, enhanced, Rtmp::kEnhanced);
if (enhanced) {
pkt->buffer.resize(RtmpPacketInfo::kEnhancedRtmpHeaderSize);
auto header = (RtmpVideoHeaderEnhanced *)pkt->data();
header->enhanced = 1;
header->pkt_type = (int)RtmpPacketType::PacketTypeSequenceStart;
header->frame_type = (int)RtmpFrameType::key_frame;
header->fourcc = htonl((uint32_t)RtmpVideoCodec::fourcc_hevc);
} else {
auto flags = (uint8_t)RtmpVideoCodec::h265;
flags |= ((uint8_t)RtmpFrameType::key_frame << 4);
// header
pkt->buffer.push_back(flags);
pkt->buffer.push_back((uint8_t)RtmpH264PacketType::h264_config_header);
// cts
pkt->buffer.append("\x0\x0\x0", 3);
}
// HEVCDecoderConfigurationRecord
pkt->buffer.append((char *)extra_data, extra_data_size);
auto extra_data = getTrack()->getExtraData();
CHECK(extra_data);
pkt->buffer.append(extra_data->data(), extra_data->size());
pkt->body_size = pkt->buffer.size();
pkt->chunk_id = CHUNK_VIDEO;
pkt->stream_index = STREAM_MEDIA;
pkt->time_stamp = 0;
pkt->type_id = MSG_VIDEO;
RtmpCodec::inputRtmp(pkt);
#else
WarnL << "请开启MP4相关功能并使能\"ENABLE_MP4\",否则对H265-RTMP支持不完善";
#endif
}
} // namespace mediakit

View File

@ -24,8 +24,7 @@ class H265RtmpDecoder : public RtmpCodec {
public:
using Ptr = std::shared_ptr<H265RtmpDecoder>;
H265RtmpDecoder();
~H265RtmpDecoder() = default;
H265RtmpDecoder(const Track::Ptr &track) : RtmpCodec(track) {}
/**
* 265 Rtmp包
@ -33,23 +32,18 @@ public:
*/
void inputRtmp(const RtmpPacket::Ptr &rtmp) override;
CodecId getCodecId() const override { return CodecH265; }
protected:
H265Frame::Ptr obtainFrame();
void onGetH265(const char *data, size_t size, uint32_t dts, uint32_t pts);
void outputFrame(const char *data, size_t size, uint32_t dts, uint32_t pts);
void splitFrame(const uint8_t *data, size_t size, uint32_t dts, uint32_t pts);
protected:
RtmpPacketInfo _info;
H265Frame::Ptr _h265frame;
};
/**
* 265 Rtmp打包类
*/
class H265RtmpEncoder : public H265RtmpDecoder {
class H265RtmpEncoder : public RtmpCodec {
public:
using Ptr = std::shared_ptr<H265RtmpEncoder>;
@ -59,8 +53,7 @@ public:
* inputFrame时可以不输入sps pps
* @param track
*/
H265RtmpEncoder(const Track::Ptr &track);
~H265RtmpEncoder() = default;
H265RtmpEncoder(const Track::Ptr &track) : RtmpCodec(track) {}
/**
* 265sps pps
@ -79,14 +72,6 @@ public:
void makeConfigPacket() override;
private:
void makeVideoConfigPkt();
private:
bool _got_config_frame = false;
std::string _vps;
std::string _sps;
std::string _pps;
H265Track::Ptr _track;
RtmpPacket::Ptr _rtmp_packet;
FrameMerger _merger { FrameMerger::mp4_nal_size };
};

View File

@ -248,20 +248,8 @@ void H265RtpDecoder::outputFrame(const RtpPacket::Ptr &rtp, const H265Frame::Ptr
////////////////////////////////////////////////////////////////////////
H265RtpEncoder::H265RtpEncoder(uint32_t ui32Ssrc,
uint32_t ui32MtuSize,
uint32_t ui32SampleRate,
uint8_t ui8PayloadType,
uint8_t ui8Interleaved) :
RtpInfo(ui32Ssrc,
ui32MtuSize,
ui32SampleRate,
ui8PayloadType,
ui8Interleaved) {
}
void H265RtpEncoder::packRtpFu(const char *ptr, size_t len, uint64_t pts, bool is_mark, bool gop_pos){
auto max_size = getMaxSize() - 3;
auto max_size = getRtpInfo().getMaxSize() - 3;
auto nal_type = H265_TYPE(ptr[0]); //获取NALU的5bit 帧类型
unsigned char s_e_flags;
bool fu_start = true;
@ -283,7 +271,7 @@ void H265RtpEncoder::packRtpFu(const char *ptr, size_t len, uint64_t pts, bool i
{
// 传入nullptr先不做payload的内存拷贝
auto rtp = makeRtp(getTrackType(), nullptr, max_size + 3, mark_bit, pts);
auto rtp = getRtpInfo().makeRtp(TrackVideo, nullptr, max_size + 3, mark_bit, pts);
// rtp payload 负载部分
uint8_t *payload = rtp->getPayload();
// FU 第1个字节表明为FU
@ -304,9 +292,9 @@ void H265RtpEncoder::packRtpFu(const char *ptr, size_t len, uint64_t pts, bool i
}
void H265RtpEncoder::packRtp(const char *ptr, size_t len, uint64_t pts, bool is_mark, bool gop_pos){
if (len <= getMaxSize()) {
if (len <= getRtpInfo().getMaxSize()) {
//signal-nalu
RtpCodec::inputRtp(makeRtp(getTrackType(), ptr, len, is_mark, pts), gop_pos);
RtpCodec::inputRtp(getRtpInfo().makeRtp(TrackVideo, ptr, len, is_mark, pts), gop_pos);
} else {
//FU-A模式
packRtpFu(ptr, len, pts, is_mark, gop_pos);

View File

@ -28,7 +28,6 @@ public:
using Ptr = std::shared_ptr<H265RtpDecoder>;
H265RtpDecoder();
~H265RtpDecoder() {}
/**
* 265 rtp包
@ -37,10 +36,6 @@ public:
*/
bool inputRtp(const RtpPacket::Ptr &rtp, bool key_pos = true) override;
CodecId getCodecId() const override{
return CodecH265;
}
private:
bool unpackAp(const RtpPacket::Ptr &rtp, const uint8_t *ptr, ssize_t size, uint64_t stamp);
bool mergeFu(const RtpPacket::Ptr &rtp, const uint8_t *ptr, ssize_t size, uint64_t stamp, uint16_t seq);
@ -63,24 +58,10 @@ private:
/**
* 265 rtp打包类
*/
class H265RtpEncoder : public H265RtpDecoder ,public RtpInfo{
class H265RtpEncoder : public RtpCodec {
public:
using Ptr = std::shared_ptr<H265RtpEncoder>;
/**
* @param ui32Ssrc ssrc
* @param ui32MtuSize mtu大小
* @param ui32SampleRate 90000
* @param ui8PayloadType pt类型
* @param ui8Interleaved rtsp interleaved
*/
H265RtpEncoder(uint32_t ui32Ssrc,
uint32_t ui32MtuSize = 1400,
uint32_t ui32SampleRate = 90000,
uint8_t ui8PayloadType = 96,
uint8_t ui8Interleaved = TrackVideo * 2);
~H265RtpEncoder() {}
/**
* 265
* @param frame

View File

@ -31,23 +31,20 @@ void JPEGTrack::getVideoResolution(const uint8_t *buf, int len) {
class JPEGSdp : public Sdp {
public:
JPEGSdp(int bitrate): Sdp(90000, Rtsp::PT_JPEG) {
JPEGSdp(int bitrate) : Sdp(90000, Rtsp::PT_JPEG) {
_printer << "m=video 0 RTP/AVP " << (int)getPayloadType() << "\r\n";
if (bitrate) {
_printer << "b=AS:" << bitrate << "\r\n";
}
_printer << "a=control:trackID=" << (int)TrackVideo << "\r\n";
}
std::string getSdp() const { return _printer; }
CodecId getCodecId() const { return CodecJPEG; }
private:
_StrPrinter _printer;
};
Sdp::Ptr JPEGTrack::getSdp() {
Sdp::Ptr JPEGTrack::getSdp(uint8_t) const {
return std::make_shared<JPEGSdp>(getBitRate() / 1024);
}
} // namespace mediakit

View File

@ -14,12 +14,12 @@ public:
int getVideoHeight() const override { return _height; }
int getVideoWidth() const override { return _width; }
float getVideoFps() const override { return _fps; }
bool ready() override { return _fps > 0; }
bool ready() const override { return _fps > 0; }
bool inputFrame(const Frame::Ptr &frame) override;
private:
Sdp::Ptr getSdp() override;
Track::Ptr clone() override { return std::make_shared<std::remove_reference<decltype(*this)>::type>(*this); }
Sdp::Ptr getSdp(uint8_t payload_type) const override;
Track::Ptr clone() const override { return std::make_shared<JPEGTrack>(*this); }
void getVideoResolution(const uint8_t *buf, int len);
private:
@ -31,35 +31,36 @@ private:
class JPEGFrame : public Frame {
public:
static constexpr auto kJFIFSize = 20u;
/**
* JPEG/MJPEG帧
* @param buffer
* @param dts ,
* @param pix_type pixel format type; AV_PIX_FMT_YUVJ422P || (AVCOL_RANGE_JPEG && AV_PIX_FMT_YUV422P) : 1; AV_PIX_FMT_YUVJ420P || (AVCOL_RANGE_JPEG && AV_PIX_FMT_YUV420P) : 0
* @param prefix_size JFIF头大小
* @param offset buffer有效帧数据偏移量
*/
JPEGFrame(toolkit::Buffer::Ptr buffer, uint64_t dts, uint8_t pix_type = 0, size_t prefix_size = 0) {
JPEGFrame(toolkit::Buffer::Ptr buffer, uint64_t dts, uint8_t pix_type = 0, size_t offset = 0) {
_buffer = std::move(buffer);
_dts = dts;
_pix_type = pix_type;
_prefix_size = prefix_size;
_offset = offset;
// JFIF头固定20个字节长度
CHECK(_buffer->size() > _offset + kJFIFSize);
}
~JPEGFrame() override = default;
uint64_t dts() const override { return _dts; }
size_t prefixSize() const override { return _prefix_size; }
size_t prefixSize() const override { return 0; }
bool keyFrame() const override { return true; }
bool configFrame() const override { return false; }
CodecId getCodecId() const override { return CodecJPEG; }
char *data() const override { return _buffer->data(); }
size_t size() const override { return _buffer->size(); }
char *data() const override { return _buffer->data() + _offset; }
size_t size() const override { return _buffer->size() - _offset; }
uint8_t pixType() const {return _pix_type; }
uint8_t pixType() const { return _pix_type; }
private:
uint8_t _pix_type;
size_t _prefix_size;
size_t _offset;
uint64_t _dts;
toolkit::Buffer::Ptr _buffer;
};

View File

@ -745,10 +745,10 @@ void JPEGRtpEncoder::rtpSendJpeg(const uint8_t *buf, int size, uint64_t pts, uin
hdr_size += 4 + 64 * nb_qtables;
/* payload max in one packet */
len = MIN(size, (int)getMaxSize() - hdr_size);
len = MIN(size, (int)getRtpInfo().getMaxSize() - hdr_size);
/* marker bit is last packet in frame */
auto rtp_packet = makeRtp(getTrackType(), nullptr, len + hdr_size, size == len, pts);
auto rtp_packet = getRtpInfo().makeRtp(TrackVideo, nullptr, len + hdr_size, size == len, pts);
p = rtp_packet->getPayload();
/* set main header */
@ -788,10 +788,6 @@ JPEGRtpDecoder::JPEGRtpDecoder() {
memset(&_ctx.timestamp, 0, sizeof(_ctx) - offsetof(decltype(_ctx), timestamp));
}
CodecId JPEGRtpDecoder::getCodecId() const {
return CodecJPEG;
}
bool JPEGRtpDecoder::inputRtp(const RtpPacket::Ptr &rtp, bool) {
auto payload = rtp->getPayload();
auto size = rtp->getPayloadSize();
@ -807,7 +803,7 @@ bool JPEGRtpDecoder::inputRtp(const RtpPacket::Ptr &rtp, bool) {
if (0 == jpeg_parse_packet(nullptr, &_ctx, &stamp, payload, size, seq, marker ? RTP_FLAG_MARKER : 0, &type)) {
auto buffer = std::make_shared<toolkit::BufferString>(std::move(_ctx.frame));
// JFIF头固定20个字节长度
auto frame = std::make_shared<JPEGFrame>(std::move(buffer), stamp / 90, type, 20);
auto frame = std::make_shared<JPEGFrame>(std::move(buffer), stamp / 90, type);
_ctx.frame.clear();
RtpCodec::inputFrame(std::move(frame));
}
@ -817,14 +813,10 @@ bool JPEGRtpDecoder::inputRtp(const RtpPacket::Ptr &rtp, bool) {
////////////////////////////////////////////////////////////////////////
JPEGRtpEncoder::JPEGRtpEncoder(
uint32_t ssrc, uint32_t mtu, uint32_t sample_rate, uint8_t payload_type, uint8_t interleaved)
: RtpInfo(ssrc, mtu, sample_rate, payload_type, interleaved) {}
bool JPEGRtpEncoder::inputFrame(const Frame::Ptr &frame) {
auto ptr = (uint8_t *)frame->data() + frame->prefixSize();
auto len = frame->size() - frame->prefixSize();
// JFIF头固定20个字节长度
auto ptr = (uint8_t *)frame->data() + frame->prefixSize() + JPEGFrame::kJFIFSize;
auto len = frame->size() - frame->prefixSize() - JPEGFrame::kJFIFSize;
auto pts = frame->pts();
auto type = 1;
auto jpeg = dynamic_pointer_cast<JPEGFrame>(frame);

View File

@ -25,12 +25,6 @@ public:
typedef std::shared_ptr <JPEGRtpDecoder> Ptr;
JPEGRtpDecoder();
~JPEGRtpDecoder() override = default;
/**
* ID
*/
CodecId getCodecId() const override;
/**
* rtp并解码
@ -43,13 +37,10 @@ private:
struct PayloadContext _ctx;
};
class JPEGRtpEncoder : public JPEGRtpDecoder, public RtpInfo {
class JPEGRtpEncoder : public RtpCodec {
public:
using Ptr = std::shared_ptr<JPEGRtpEncoder>;
JPEGRtpEncoder(uint32_t ssrc, uint32_t mtu = 1400, uint32_t sample_rate = 90000, uint8_t payload_type = 96, uint8_t interleaved = TrackVideo * 2);
~JPEGRtpEncoder() = default;
bool inputFrame(const Frame::Ptr &frame) override;
private:

View File

@ -22,47 +22,36 @@ class L16Sdp : public Sdp {
public:
/**
* L16采样位数固定为16位
* @param codecId CodecL16
* @param payload_type rtp payload type
* @param channels
* @param sample_rate
* @param payload_type rtp payload
* @param bitrate
*/
L16Sdp(CodecId codecId,
int sample_rate,
int channels,
int bitrate = 128,
int payload_type = 98) : Sdp(sample_rate,payload_type), _codecId(codecId){
L16Sdp(int payload_type, int sample_rate, int channels, int bitrate) : Sdp(sample_rate, payload_type) {
_printer << "m=audio 0 RTP/AVP " << payload_type << "\r\n";
if (bitrate) {
_printer << "b=AS:" << bitrate << "\r\n";
}
_printer << "a=rtpmap:" << payload_type << " " << getCodecName() << "/" << sample_rate << "/" << channels << "\r\n";
_printer << "a=control:trackID=" << (int)TrackAudio << "\r\n";
_printer << "a=rtpmap:" << payload_type << " " << getCodecName(CodecL16) << "/" << sample_rate << "/" << channels << "\r\n";
}
string getSdp() const override {
return _printer;
}
string getSdp() const override { return _printer; }
CodecId getCodecId() const override {
return _codecId;
}
private:
_StrPrinter _printer;
CodecId _codecId;
};
Sdp::Ptr L16Track::getSdp() {
Sdp::Ptr L16Track::getSdp(uint8_t payload_type) const {
WarnL << "Enter L16Track::getSdp function";
if(!ready()){
if (!ready()) {
WarnL << getCodecName() << " Track未准备好";
return nullptr;
}
return std::make_shared<L16Sdp>(getCodecId(), getAudioSampleRate(), getAudioChannel(), getBitRate() / 1024);
return std::make_shared<L16Sdp>(payload_type, getAudioSampleRate(), getAudioChannel(), getBitRate() / 1024);
}
Track::Ptr L16Track::clone() {
return std::make_shared<std::remove_reference<decltype(*this)>::type >(*this);
Track::Ptr L16Track::clone() const {
return std::make_shared<L16Track>(*this);
}
}//namespace mediakit

View File

@ -25,8 +25,8 @@ public:
L16Track(int sample_rate, int channels) : AudioTrackImp(CodecL16,sample_rate,channels,16){}
private:
Sdp::Ptr getSdp() override;
Track::Ptr clone() override;
Sdp::Ptr getSdp(uint8_t payload_type) const override;
Track::Ptr clone() const override;
};
}//namespace mediakit

View File

@ -22,39 +22,33 @@ class OpusSdp : public Sdp {
public:
/**
* opus sdp
* @param payload_type rtp payload type
* @param sample_rate
* @param payload_type rtp payload
* @param channels
* @param bitrate
*/
OpusSdp(int sample_rate,
int channels,
int bitrate = 128,
int payload_type = 98) : Sdp(sample_rate,payload_type){
OpusSdp(int payload_type, int sample_rate, int channels, int bitrate) : Sdp(sample_rate, payload_type) {
_printer << "m=audio 0 RTP/AVP " << payload_type << "\r\n";
if (bitrate) {
_printer << "b=AS:" << bitrate << "\r\n";
}
_printer << "a=rtpmap:" << payload_type << " " << getCodecName() << "/" << sample_rate << "/" << channels << "\r\n";
_printer << "a=control:trackID=" << (int)TrackAudio << "\r\n";
_printer << "a=rtpmap:" << payload_type << " " << getCodecName(CodecOpus) << "/" << sample_rate << "/" << channels << "\r\n";
}
string getSdp() const override {
return _printer;
}
CodecId getCodecId() const override {
return CodecOpus;
}
private:
_StrPrinter _printer;
};
Sdp::Ptr OpusTrack::getSdp() {
if(!ready()){
Sdp::Ptr OpusTrack::getSdp(uint8_t payload_type) const {
if (!ready()) {
WarnL << getCodecName() << " Track未准备好";
return nullptr;
}
return std::make_shared<OpusSdp>(getAudioSampleRate(), getAudioChannel(), getBitRate() / 1024);
return std::make_shared<OpusSdp>(payload_type, getAudioSampleRate(), getAudioChannel(), getBitRate() / 1024);
}
}//namespace mediakit

View File

@ -26,11 +26,11 @@ public:
private:
//克隆该Track
Track::Ptr clone() override {
return std::make_shared<std::remove_reference<decltype(*this)>::type >(*this);
Track::Ptr clone() const override {
return std::make_shared<OpusTrack>(*this);
}
//生成sdp
Sdp::Ptr getSdp() override ;
Sdp::Ptr getSdp(uint8_t payload_type) const override ;
};
}//namespace mediakit

View File

@ -21,23 +21,32 @@ namespace mediakit{
/**
*
*/
class Track : public FrameDispatcher , public CodecInfo{
class Track : public FrameDispatcher, public CodecInfo {
public:
using Ptr = std::shared_ptr<Track>;
/**
*
*/
Track() = default;
virtual ~Track() = default;
/**
*
*
*/
Track(const Track &that) { _bit_rate = that._bit_rate; }
/**
* sps pps等信息
*/
virtual bool ready() = 0;
virtual bool ready() const = 0;
/**
*
*
*
*/
virtual Track::Ptr clone() = 0;
virtual Track::Ptr clone() const = 0;
/**
* track信息sps/pps解析
@ -46,9 +55,19 @@ public:
/**
* sdp
* @return sdp对象
* @return sdp对象
*/
virtual Sdp::Ptr getSdp() = 0;
virtual Sdp::Ptr getSdp(uint8_t payload_type) const = 0;
/**
* extra data, rtmp/mp4生成
*/
virtual toolkit::Buffer::Ptr getExtraData() const { return nullptr; }
/**
* extra data
*/
virtual void setExtraData(const uint8_t *data, size_t size) {}
/**
*
@ -62,14 +81,6 @@ public:
*/
virtual void setBitRate(int bit_rate) { _bit_rate = bit_rate; }
/**
*
*
*/
Track(const Track &that){
_bit_rate = that._bit_rate;
}
private:
int _bit_rate = 0;
};
@ -97,6 +108,40 @@ public:
virtual float getVideoFps() const { return 0; }
};
class VideoTrackImp : public VideoTrack {
public:
    using Ptr = std::shared_ptr<VideoTrackImp>;

    /**
     * Construct a generic video track that only carries static metadata
     * (no codec-specific configuration such as sps/pps).
     * @param codec_id codec type of this track
     * @param width video width in pixels
     * @param height video height in pixels
     * @param fps video frame rate
     */
    VideoTrackImp(CodecId codec_id, int width, int height, int fps) {
        _codec_id = codec_id;
        _width = width;
        _height = height;
        _fps = fps;
    }

    // Bug fix: the original returned _width from getVideoHeight() and
    // _height from getVideoWidth() — the two getters were swapped.
    int getVideoHeight() const override { return _height; }
    int getVideoWidth() const override { return _width; }
    float getVideoFps() const override { return _fps; }

    // No codec config is required, so this track is always ready.
    bool ready() const override { return true; }
    Track::Ptr clone() const override { return std::make_shared<VideoTrackImp>(*this); }
    // Cannot generate an SDP without codec-specific data.
    Sdp::Ptr getSdp(uint8_t payload_type) const override { return nullptr; }
    CodecId getCodecId() const override { return _codec_id; }

private:
    CodecId _codec_id;
    int _width = 0;
    int _height = 0;
    float _fps = 0;
};
/**
* Track派生类
*/
@ -131,7 +176,7 @@ public:
* @param channels
* @param sample_bit 16
*/
AudioTrackImp(CodecId codecId,int sample_rate, int channels, int sample_bit){
AudioTrackImp(CodecId codecId, int sample_rate, int channels, int sample_bit){
_codecid = codecId;
_sample_rate = sample_rate;
_channels = channels;
@ -148,7 +193,7 @@ public:
/**
*
*/
bool ready() override {
bool ready() const override {
return true;
}
@ -172,6 +217,10 @@ public:
int getAudioChannel() const override{
return _channels;
}
Track::Ptr clone() const override { return std::make_shared<AudioTrackImp>(*this); }
Sdp::Ptr getSdp(uint8_t payload_type) const override { return nullptr; }
private:
CodecId _codecid;
int _sample_rate;
@ -179,9 +228,8 @@ private:
int _sample_bit;
};
class TrackSource{
class TrackSource {
public:
TrackSource() = default;
virtual ~TrackSource() = default;
/**

View File

@ -26,7 +26,6 @@ public:
template<typename ...ARGS>
FMP4Packet(ARGS && ...args) : toolkit::BufferString(std::forward<ARGS>(args)...) {};
~FMP4Packet() override = default;
public:
uint64_t time_stamp = 0;

View File

@ -36,9 +36,6 @@ typedef struct{
class HlsParser {
public:
HlsParser() = default;
~HlsParser() = default;
bool parse(const std::string &http_url,const std::string &m3u8);
/**

View File

@ -25,7 +25,6 @@ namespace mediakit {
class HlsDemuxer : public MediaSinkInterface , public TrackSource, public std::enable_shared_from_this<HlsDemuxer> {
public:
HlsDemuxer() = default;
~HlsDemuxer() override { _timer = nullptr; }
void start(const toolkit::EventPoller::Ptr &poller, TrackListener *listener);
@ -53,7 +52,6 @@ private:
class HlsPlayer : public HttpClientImp , public PlayerBase , public HlsParser{
public:
HlsPlayer(const toolkit::EventPoller::Ptr &poller);
~HlsPlayer() override = default;
/**
*
@ -120,7 +118,6 @@ class HlsPlayerImp : public PlayerImp<HlsPlayer, PlayerBase>, private TrackListe
public:
using Ptr = std::shared_ptr<HlsPlayerImp>;
HlsPlayerImp(const toolkit::EventPoller::Ptr &poller = nullptr);
~HlsPlayerImp() override = default;
private:
//// HlsPlayer override////

View File

@ -189,7 +189,6 @@ public:
_data = map_addr.get() + offset;
_size = size;
}
~BufferMmap() override = default;
//返回数据长度
char *data() const override { return _data; }
size_t size() const override { return _size; }

View File

@ -30,8 +30,6 @@ namespace mediakit {
class HttpBody : public std::enable_shared_from_this<HttpBody>{
public:
using Ptr = std::shared_ptr<HttpBody>;
HttpBody() = default;
virtual ~HttpBody() = default;
/**
@ -75,7 +73,6 @@ class HttpStringBody : public HttpBody{
public:
using Ptr = std::shared_ptr<HttpStringBody>;
HttpStringBody(std::string str);
~HttpStringBody() override = default;
int64_t remainSize() override;
toolkit::Buffer::Ptr readData(size_t size) override ;
@ -92,7 +89,6 @@ class HttpBufferBody : public HttpBody{
public:
using Ptr = std::shared_ptr<HttpBufferBody>;
HttpBufferBody(toolkit::Buffer::Ptr buffer);
~HttpBufferBody() override = default;
int64_t remainSize() override;
toolkit::Buffer::Ptr readData(size_t size) override;
@ -114,7 +110,6 @@ public:
* @param use_mmap 使mmap方式访问文件
*/
HttpFileBody(const std::string &file_path, bool use_mmap = true);
~HttpFileBody() override = default;
/**
*
@ -151,7 +146,6 @@ public:
* @param boundary boundary字符串
*/
HttpMultiFormBody(const HttpArgs &args,const std::string &filePath,const std::string &boundary = "0xKhTmLbOuNdArY");
virtual ~HttpMultiFormBody() = default;
int64_t remainSize() override ;
toolkit::Buffer::Ptr readData(size_t size) override;

View File

@ -235,7 +235,7 @@ ssize_t HttpClient::onRecvHeader(const char *data, size_t len) {
_total_body_size = _recved_body_size;
if (_recved_body_size > 0) {
onResponseCompleted_l(SockException(Err_success, "success"));
}else{
} else {
onResponseCompleted_l(SockException(Err_other, "no body"));
}
}

View File

@ -29,9 +29,6 @@ namespace mediakit {
class HttpArgs : public std::map<std::string, toolkit::variant, StrCaseCompare> {
public:
HttpArgs() = default;
~HttpArgs() = default;
std::string make() const {
std::string ret;
for (auto &pr : *this) {
@ -52,9 +49,6 @@ public:
using HttpHeader = StrCaseMap;
using Ptr = std::shared_ptr<HttpClient>;
HttpClient() = default;
~HttpClient() override = default;
/**
* http[s]
* @param url url

View File

@ -19,8 +19,6 @@ namespace mediakit {
class HttpClientImp : public toolkit::TcpClientWithSSL<HttpClient> {
public:
using Ptr = std::shared_ptr<HttpClientImp>;
HttpClientImp() = default;
~HttpClientImp() override = default;
protected:
void onConnect(const toolkit::SockException &ex) override;

View File

@ -27,8 +27,6 @@ class HttpCookie {
public:
using Ptr = std::shared_ptr<HttpCookie>;
friend class HttpCookieStorage;
HttpCookie() = default;
~HttpCookie() = default;
void setPath(const std::string &path);
void setHost(const std::string &host);
@ -52,7 +50,6 @@ private:
*/
class HttpCookieStorage{
public:
~HttpCookieStorage() = default;
static HttpCookieStorage &Instance();
void set(const HttpCookie::Ptr &cookie);
std::vector<HttpCookie::Ptr> get(const std::string &host,const std::string &path);

View File

@ -113,8 +113,6 @@ private:
*/
class RandStrGenerator {
public:
RandStrGenerator() = default;
~RandStrGenerator() = default;
/**
*

View File

@ -20,7 +20,6 @@ public:
using Ptr = std::shared_ptr<HttpDownloader>;
using onDownloadResult = std::function<void(const toolkit::SockException &ex, const std::string &filePath)>;
HttpDownloader() = default;
~HttpDownloader() override;
/**

View File

@ -300,8 +300,6 @@ static bool emitHlsPlayed(const Parser &parser, const MediaInfo &media_info, con
class SockInfoImp : public SockInfo{
public:
using Ptr = std::shared_ptr<SockInfoImp>;
SockInfoImp() = default;
~SockInfoImp() override = default;
string get_local_ip() override {
return _local_ip;

View File

@ -24,8 +24,6 @@ public:
typedef std::function<void(int code, const StrCaseMap &headerOut, const HttpBody::Ptr &body)> HttpResponseInvokerLambda0;
typedef std::function<void(int code, const StrCaseMap &headerOut, const std::string &body)> HttpResponseInvokerLambda1;
HttpResponseInvokerImp() = default;
~HttpResponseInvokerImp() = default;
template<typename C>
HttpResponseInvokerImp(const C &c):HttpResponseInvokerImp(typename toolkit::function_traits<C>::stl_function_type(c)) {}
HttpResponseInvokerImp(const HttpResponseInvokerLambda0 &lambda);

View File

@ -22,7 +22,7 @@ namespace mediakit {
void HttpRequestSplitter::input(const char *data,size_t len) {
{
auto size = remainDataSize();
if (size > kMaxCacheSize) {
if (size > _max_cache_size) {
//缓存太多数据无法处理则上抛异常
reset();
throw std::out_of_range("remain data size is too huge, now cleared:" + to_string(size));
@ -142,6 +142,16 @@ const char *HttpRequestSplitter::remainData() const {
return _remain_data.data();
}
/**
 * Set the upper bound on buffered (not yet parsed) data.
 * @param max_cache_size limit in bytes; 0 selects the compile-time
 *        default kMaxCacheSize.
 */
void HttpRequestSplitter::setMaxCacheSize(size_t max_cache_size) {
    _max_cache_size = max_cache_size ? max_cache_size : kMaxCacheSize;
}
// Construct with the default cache limit (passing 0 makes
// setMaxCacheSize fall back to kMaxCacheSize).
HttpRequestSplitter::HttpRequestSplitter() {
    setMaxCacheSize(0);
}
} /* namespace mediakit */

View File

@ -18,7 +18,7 @@ namespace mediakit {
class HttpRequestSplitter {
public:
HttpRequestSplitter() = default;
HttpRequestSplitter();
virtual ~HttpRequestSplitter() = default;
/**
@ -44,6 +44,11 @@ public:
*/
const char *remainData() const;
/**
*
*/
void setMaxCacheSize(size_t max_cache_size);
protected:
/**
*
@ -80,6 +85,7 @@ protected:
private:
ssize_t _content_len = 0;
size_t _max_cache_size = 0;
size_t _remain_data_size = 0;
toolkit::BufferLikeString _remain_data;
};

View File

@ -20,9 +20,6 @@ public:
using Ptr = std::shared_ptr<HttpRequester>;
using HttpRequesterResult = std::function<void(const toolkit::SockException &ex, const Parser &response)>;
HttpRequester() = default;
~HttpRequester() override = default;
void setOnResult(const HttpRequesterResult &onResult);
void startRequester(const std::string &url, const HttpRequesterResult &on_result, float timeout_sec = 10);
void setRetry(size_t count, size_t delay);

View File

@ -24,12 +24,11 @@ using namespace toolkit;
namespace mediakit {
HttpSession::HttpSession(const Socket::Ptr &pSock) : Session(pSock) {
GET_CONFIG(uint32_t, keep_alive_sec, Http::kKeepAliveSecond);
pSock->setSendTimeOutSecond(keep_alive_sec);
//设置默认参数
setMaxReqSize(0);
setTimeoutSec(0);
}
HttpSession::~HttpSession() = default;
void HttpSession::onHttpRequest_HEAD() {
// 暂时全部返回200 OK因为HTTP GET存在按需生成流的操作所以不能按照HTTP GET的流程返回
// 如果直接返回404那么又会导致按需生成流的逻辑失效所以HTTP HEAD在静态文件或者已存在资源时才有效
@ -99,11 +98,10 @@ ssize_t HttpSession::onRecvHeader(const char *header, size_t len) {
return _on_recv_body ? -1 : 0;
}
GET_CONFIG(size_t, maxReqSize, Http::kMaxReqSize);
if (content_len > maxReqSize) {
if (content_len > _max_req_size) {
//// 不定长body或超大body ////
if (content_len != SIZE_MAX) {
WarnL << "Http body size is too huge: " << content_len << " > " << maxReqSize
WarnL << "Http body size is too huge: " << content_len << " > " << _max_req_size
<< ", please set " << Http::kMaxReqSize << " in config.ini file.";
}
@ -176,11 +174,27 @@ void HttpSession::onError(const SockException &err) {
}
}
void HttpSession::onManager() {
GET_CONFIG(uint32_t, keepAliveSec, Http::kKeepAliveSecond);
/**
 * Set the session keep-alive timeout.
 * @param keep_alive_sec timeout in seconds; 0 selects the value of the
 *        Http::kKeepAliveSecond config entry.
 */
void HttpSession::setTimeoutSec(size_t keep_alive_sec) {
    if (!keep_alive_sec) {
        // Caller passed 0: fall back to the configured default.
        GET_CONFIG(size_t, s_keep_alive_sec, Http::kKeepAliveSecond);
        keep_alive_sec = s_keep_alive_sec;
    }
    _keep_alive_sec = keep_alive_sec;
    // Also bound how long a send may block on the underlying socket.
    getSock()->setSendTimeOutSecond(keep_alive_sec);
}
if (_ticker.elapsedTime() > keepAliveSec * 1000) {
// 1分钟超时
/**
 * Set the maximum accepted HTTP request size.
 * @param max_req_size limit in bytes; 0 selects the value of the
 *        Http::kMaxReqSize config entry.
 */
void HttpSession::setMaxReqSize(size_t max_req_size) {
    if (!max_req_size) {
        // Caller passed 0: fall back to the configured default.
        GET_CONFIG(size_t, s_max_req_size, Http::kMaxReqSize);
        max_req_size = s_max_req_size;
    }
    _max_req_size = max_req_size;
    // Keep the request splitter's cache limit in sync so oversized
    // requests are rejected before they are fully buffered.
    setMaxCacheSize(max_req_size);
}
// Periodic housekeeping: close the session once it has been idle longer
// than the configured keep-alive timeout.
void HttpSession::onManager() {
    if (_ticker.elapsedTime() > _keep_alive_sec * 1000) {
        // HTTP session timed out
        shutdown(SockException(Err_timeout, "session timeout"));
    }
}
@ -509,7 +523,6 @@ public:
_body = body;
_close_when_complete = close_when_complete;
}
~AsyncSenderData() = default;
private:
std::weak_ptr<HttpSession> _session;

View File

@ -40,12 +40,13 @@ public:
using HttpAccessPathInvoker = std::function<void(const std::string &errMsg,const std::string &accessPath, int cookieLifeSecond)>;
HttpSession(const toolkit::Socket::Ptr &pSock);
~HttpSession() override;
void onRecv(const toolkit::Buffer::Ptr &) override;
void onError(const toolkit::SockException &err) override;
void onManager() override;
static std::string urlDecode(const std::string &str);
void setTimeoutSec(size_t second);
void setMaxReqSize(size_t max_req_size);
protected:
//FlvMuxer override
@ -129,6 +130,10 @@ protected:
private:
bool _is_live_stream = false;
bool _live_over_websocket = false;
//超时时间
size_t _keep_alive_sec = 0;
//最大http请求字节大小
size_t _max_req_size = 0;
//消耗的总流量
uint64_t _total_bytes_usage = 0;
Parser _parser;

View File

@ -24,7 +24,6 @@ public:
using onComplete = std::function<void(const toolkit::SockException &)>;
HttpTSPlayer(const toolkit::EventPoller::Ptr &poller = nullptr);
~HttpTSPlayer() override = default;
/**
*

View File

@ -19,7 +19,6 @@ namespace mediakit {
class TsPlayer : public HttpTSPlayer, public PlayerBase {
public:
TsPlayer(const toolkit::EventPoller::Ptr &poller);
~TsPlayer() override = default;
/**
*

View File

@ -21,7 +21,6 @@ public:
using Ptr = std::shared_ptr<TsPlayerImp>;
TsPlayerImp(const toolkit::EventPoller::Ptr &poller = nullptr);
~TsPlayerImp() override = default;
private:
//// TsPlayer override////

View File

@ -37,9 +37,7 @@ public:
template <typename... ArgsType>
ClientTypeImp(ArgsType &&...args) : ClientType(std::forward<ArgsType>(args)...) {}
~ClientTypeImp() override = default;
protected:
/**
* websocket协议
*/
@ -50,6 +48,7 @@ protected:
return ClientType::send(std::move(buf));
}
protected:
/**
*
* @param cb
@ -74,7 +73,6 @@ public:
_Sec_WebSocket_Key = encodeBase64(toolkit::makeRandStr(16, false));
setPoller(delegate->getPoller());
}
~HttpWsClient() = default;
/**
* ws握手

View File

@ -21,7 +21,6 @@ class SendInterceptor{
public:
using onBeforeSendCB =std::function<ssize_t (const toolkit::Buffer::Ptr &buf)>;
SendInterceptor() = default;
virtual ~SendInterceptor() = default;
virtual void setOnBeforeSendCB(const onBeforeSendCB &cb) = 0;
};
@ -38,8 +37,6 @@ public:
SessionTypeImp(const mediakit::Parser &header, const mediakit::HttpSession &parent, const toolkit::Socket::Ptr &pSock) :
SessionType(pSock) {}
~SessionTypeImp() = default;
/**
*
* @param cb
@ -82,7 +79,6 @@ template<typename Creator, typename HttpSessionType = mediakit::HttpSession, med
class WebSocketSessionBase : public HttpSessionType {
public:
WebSocketSessionBase(const toolkit::Socket::Ptr &pSock) : HttpSessionType(pSock){}
virtual ~WebSocketSessionBase() = default;
//收到eof或其他导致脱离TcpServer事件的回调
void onError(const toolkit::SockException &err) override{
@ -249,7 +245,6 @@ template<typename SessionType,typename HttpSessionType = mediakit::HttpSession,
class WebSocketSession : public WebSocketSessionBase<SessionCreator<SessionType>,HttpSessionType,DataType>{
public:
WebSocketSession(const toolkit::Socket::Ptr &pSock) : WebSocketSessionBase<SessionCreator<SessionType>,HttpSessionType,DataType>(pSock){}
virtual ~WebSocketSession() = default;
};
#endif //ZLMEDIAKIT_WEBSOCKETSESSION_H

View File

@ -51,6 +51,7 @@ public:
//根据内存地址设置掩码随机数
_mask.assign((uint8_t*)(&ptr), (uint8_t*)(&ptr) + 4);
}
virtual ~WebSocketHeader() = default;
public:
@ -71,8 +72,6 @@ public:
WebSocketBuffer(WebSocketHeader::Type headType, bool fin, ARGS &&...args)
: toolkit::BufferString(std::forward<ARGS>(args)...), _fin(fin), _head_type(headType){}
~WebSocketBuffer() override = default;
WebSocketHeader::Type headType() const { return _head_type; }
bool isFinished() const { return _fin; };
@ -84,9 +83,6 @@ private:
class WebSocketSplitter : public WebSocketHeader{
public:
WebSocketSplitter() = default;
virtual ~WebSocketSplitter() = default;
/**
* 便webSocket数据以及处理粘包问题
* onWebSocketDecodeHeader和onWebSocketDecodePayload回调

View File

@ -22,7 +22,6 @@ public:
using Ptr = std::shared_ptr<MediaPlayer>;
MediaPlayer(const toolkit::EventPoller::Ptr &poller = nullptr);
~MediaPlayer() override = default;
void play(const std::string &url) override;
toolkit::EventPoller::Ptr getPoller();

View File

@ -32,7 +32,6 @@ public:
static Ptr createPlayer(const toolkit::EventPoller::Ptr &poller, const std::string &strUrl);
PlayerBase();
~PlayerBase() override = default;
/**
*
@ -128,7 +127,6 @@ public:
template<typename ...ArgsType>
PlayerImp(ArgsType &&...args) : Parent(std::forward<ArgsType>(args)...) {}
~PlayerImp() override = default;
void play(const std::string &url) override {
return _delegate ? _delegate->play(url) : Parent::play(url);

View File

@ -191,8 +191,11 @@ void PlayerProxy::setDirectProxy() {
mediaSource = std::make_shared<RtspMediaSource>(_tuple);
}
} else if (dynamic_pointer_cast<RtmpPlayer>(_delegate)) {
// rtmp拉流,rtmp强制直接代理
mediaSource = std::make_shared<RtmpMediaSource>(_tuple);
// rtmp拉流
GET_CONFIG(bool, directProxy, Rtmp::kDirectProxy);
if (directProxy) {
mediaSource = std::make_shared<RtmpMediaSource>(_tuple);
}
}
if (mediaSource) {
setMediaSource(mediaSource);

View File

@ -31,8 +31,6 @@ MediaPusher::MediaPusher(const string &schema,
MediaPusher(MediaSource::find(schema, vhost, app, stream), poller){
}
MediaPusher::~MediaPusher() = default;
static void setOnCreateSocket_l(const std::shared_ptr<PusherBase> &delegate, const Socket::onCreateSocket &cb){
auto helper = dynamic_pointer_cast<SocketHelper>(delegate);
if (helper) {

View File

@ -30,8 +30,6 @@ public:
MediaPusher(const MediaSource::Ptr &src,
const toolkit::EventPoller::Ptr &poller = nullptr);
virtual ~MediaPusher();
void publish(const std::string &url) override;
toolkit::EventPoller::Ptr getPoller();
void setOnCreateSocket(toolkit::Socket::onCreateSocket cb);

View File

@ -66,7 +66,6 @@ public:
template<typename ...ArgsType>
PusherImp(ArgsType &&...args) : Parent(std::forward<ArgsType>(args)...) {}
~PusherImp() override = default;
/**
*

View File

@ -26,7 +26,6 @@ public:
using Ptr = std::shared_ptr<HlsMediaSource>;
HlsMediaSource(const std::string &schema, const MediaTuple &tuple) : MediaSource(schema, tuple) {}
~HlsMediaSource() override = default;
/**
*

View File

@ -33,8 +33,6 @@ public:
_hls->clearCache();
}
~HlsRecorderBase() override = default;
void setMediaSource(const MediaTuple& tuple) {
_hls->setMediaSource(tuple.vhost, tuple.app, tuple.stream);
}

View File

@ -8,7 +8,7 @@
* may be found in the AUTHORS file in the root of the source tree.
*/
#if defined(ENABLE_MP4) || defined(ENABLE_HLS_FMP4)
#if defined(ENABLE_MP4)
#include "MP4.h"
#include "Util/File.h"
@ -177,4 +177,4 @@ int MP4FileMemory::onWrite(const void *data, size_t bytes){
}
}//namespace mediakit
#endif // defined(ENABLE_MP4) || defined(ENABLE_HLS_FMP4)
#endif // defined(ENABLE_MP4)

View File

@ -11,7 +11,7 @@
#ifndef ZLMEDIAKIT_MP4_H
#define ZLMEDIAKIT_MP4_H
#if defined(ENABLE_MP4) || defined(ENABLE_HLS_FMP4)
#if defined(ENABLE_MP4)
#include <memory>
#include <string>
@ -33,7 +33,6 @@ public:
using Writer = std::shared_ptr<mp4_writer_t>;
using Reader = std::shared_ptr<mov_reader_t>;
MP4FileIO() = default;
virtual ~MP4FileIO() = default;
/**
@ -83,8 +82,6 @@ public:
class MP4FileDisk : public MP4FileIO {
public:
using Ptr = std::shared_ptr<MP4FileDisk>;
MP4FileDisk() = default;
~MP4FileDisk() override = default;
/**
*
@ -111,8 +108,6 @@ private:
class MP4FileMemory : public MP4FileIO{
public:
using Ptr = std::shared_ptr<MP4FileMemory>;
MP4FileMemory() = default;
~MP4FileMemory() override = default;
/**
*
@ -136,5 +131,5 @@ private:
};
}//namespace mediakit
#endif //defined(ENABLE_MP4) || defined(ENABLE_HLS_FMP4)
#endif //defined(ENABLE_MP4)
#endif //ZLMEDIAKIT_MP4_H

View File

@ -23,8 +23,6 @@ using namespace toolkit;
namespace mediakit {
MP4Demuxer::MP4Demuxer() = default;
// Release demuxer resources by closing the underlying MP4 reader.
MP4Demuxer::~MP4Demuxer() {
    closeMP4();
}
@ -86,80 +84,65 @@ static const char *getObjectName(int obj_id) {
SWITCH_CASE(MOV_OBJECT_G711a);
SWITCH_CASE(MOV_OBJECT_G711u);
SWITCH_CASE(MOV_OBJECT_AV1);
default:
return "unknown mp4 object";
default: return "unknown mp4 object";
}
}
void MP4Demuxer::onVideoTrack(uint32_t track, uint8_t object, int width, int height, const void *extra, size_t bytes) {
Track::Ptr video;
switch (object) {
case MOV_OBJECT_H264: {
auto video = std::make_shared<H264Track>();
_track_to_codec.emplace(track,video);
struct mpeg4_avc_t avc;
memset(&avc, 0, sizeof(avc));
if (mpeg4_avc_decoder_configuration_record_load((uint8_t *) extra, bytes, &avc) > 0) {
uint8_t config[1024 * 10] = {0};
int size = mpeg4_avc_to_nalu(&avc, config, sizeof(config));
if (size > 0) {
video->inputFrame(std::make_shared<H264FrameNoCacheAble>((char *)config, size, 0, 0,4));
}
}
video = std::make_shared<H264Track>();
_track_to_codec.emplace(track, video);
break;
}
case MOV_OBJECT_HEVC: {
auto video = std::make_shared<H265Track>();
_track_to_codec.emplace(track,video);
struct mpeg4_hevc_t hevc;
memset(&hevc, 0, sizeof(hevc));
if (mpeg4_hevc_decoder_configuration_record_load((uint8_t *) extra, bytes, &hevc) > 0) {
uint8_t config[1024 * 10] = {0};
int size = mpeg4_hevc_to_nalu(&hevc, config, sizeof(config));
if (size > 0) {
video->inputFrame(std::make_shared<H265FrameNoCacheAble>((char *) config, size, 0, 0,4));
}
}
video = std::make_shared<H265Track>();
_track_to_codec.emplace(track, video);
break;
}
case MOV_OBJECT_JPEG: {
auto video = std::make_shared<JPEGTrack>();
_track_to_codec.emplace(track,video);
video = std::make_shared<JPEGTrack>();
_track_to_codec.emplace(track, video);
break;
}
default: WarnL << "不支持该编码类型的MP4,已忽略:" << getObjectName(object); break;
}
if (extra && bytes) {
video->setExtraData((uint8_t *)extra, bytes);
}
}
void MP4Demuxer::onAudioTrack(uint32_t track_id, uint8_t object, int channel_count, int bit_per_sample, int sample_rate, const void *extra, size_t bytes) {
Track::Ptr audio;
switch(object){
case MOV_OBJECT_AAC:{
auto audio = std::make_shared<AACTrack>(bytes > 0 ? string((char *)extra,bytes) : "");
audio = std::make_shared<AACTrack>();
_track_to_codec.emplace(track_id, audio);
break;
}
case MOV_OBJECT_G711a:
case MOV_OBJECT_G711u:{
auto audio = std::make_shared<G711Track>(object == MOV_OBJECT_G711a ? CodecG711A : CodecG711U, sample_rate, channel_count, bit_per_sample / channel_count );
audio = std::make_shared<G711Track>(object == MOV_OBJECT_G711a ? CodecG711A : CodecG711U, sample_rate, channel_count, bit_per_sample / channel_count );
_track_to_codec.emplace(track_id, audio);
break;
}
case MOV_OBJECT_OPUS: {
auto audio = std::make_shared<OpusTrack>();
audio = std::make_shared<OpusTrack>();
_track_to_codec.emplace(track_id, audio);
break;
}
default:
WarnL << "不支持该编码类型的MP4,已忽略:" << getObjectName(object);
break;
default: WarnL << "不支持该编码类型的MP4,已忽略:" << getObjectName(object); break;
}
if (extra && bytes) {
audio->setExtraData((uint8_t *)extra, bytes);
}
}
@ -180,8 +163,6 @@ struct Context {
BufferRaw::Ptr buffer;
};
#define DATA_OFFSET ADTS_HEADER_LEN
Frame::Ptr MP4Demuxer::readFrame(bool &keyFrame, bool &eof) {
keyFrame = false;
eof = false;
@ -194,9 +175,9 @@ Frame::Ptr MP4Demuxer::readFrame(bool &keyFrame, bool &eof) {
ctx->track_id = track_id;
ctx->buffer = ctx->thiz->_buffer_pool.obtain2();
ctx->buffer->setCapacity(bytes + DATA_OFFSET + 1);
ctx->buffer->setSize(bytes + DATA_OFFSET);
return ctx->buffer->data() + DATA_OFFSET;
ctx->buffer->setCapacity(bytes + 1);
ctx->buffer->setSize(bytes);
return ctx->buffer->data();
};
Context ctx(this);
@ -225,14 +206,14 @@ Frame::Ptr MP4Demuxer::makeFrame(uint32_t track_id, const Buffer::Ptr &buf, int6
if (it == _track_to_codec.end()) {
return nullptr;
}
auto bytes = buf->size() - DATA_OFFSET;
auto data = buf->data() + DATA_OFFSET;
auto codec = it->second->getCodecId();
Frame::Ptr ret;
auto codec = it->second->getCodecId();
switch (codec) {
case CodecH264 :
case CodecH265 : {
uint32_t offset = 0;
auto bytes = buf->size();
auto data = buf->data();
auto offset = 0u;
while (offset < bytes) {
uint32_t frame_len;
memcpy(&frame_len, data + offset, 4);
@ -244,35 +225,22 @@ Frame::Ptr MP4Demuxer::makeFrame(uint32_t track_id, const Buffer::Ptr &buf, int6
offset += (frame_len + 4);
}
if (codec == CodecH264) {
ret = std::make_shared<FrameWrapper<H264FrameNoCacheAble> >(buf, (uint64_t)dts, (uint64_t)pts, 4, DATA_OFFSET);
ret = std::make_shared<FrameWrapper<H264FrameNoCacheAble> >(buf, (uint64_t)dts, (uint64_t)pts, 4, 0);
break;
}
ret = std::make_shared<FrameWrapper<H265FrameNoCacheAble> >(buf, (uint64_t)dts, (uint64_t)pts, 4, DATA_OFFSET);
ret = std::make_shared<FrameWrapper<H265FrameNoCacheAble> >(buf, (uint64_t)dts, (uint64_t)pts, 4, 0);
break;
}
case CodecJPEG: {
ret = std::make_shared<JPEGFrame>(buf, (uint64_t)dts, 0, DATA_OFFSET);
ret = std::make_shared<JPEGFrame>(buf, (uint64_t)dts, 0, 0);
break;
}
case CodecAAC: {
AACTrack::Ptr track = dynamic_pointer_cast<AACTrack>(it->second);
assert(track);
//加上adts头
dumpAacConfig(track->getConfig(), buf->size() - DATA_OFFSET, (uint8_t *) buf->data() + (DATA_OFFSET - ADTS_HEADER_LEN), ADTS_HEADER_LEN);
ret = std::make_shared<FrameWrapper<FrameFromPtr> >(buf, (uint64_t)dts, (uint64_t)pts, ADTS_HEADER_LEN, DATA_OFFSET - ADTS_HEADER_LEN, codec);
default: {
ret = std::make_shared<FrameWrapper<FrameFromPtr>>(buf, (uint64_t)dts, (uint64_t)pts, 0, 0, codec);
break;
}
case CodecOpus:
case CodecG711A:
case CodecG711U: {
ret = std::make_shared<FrameWrapper<FrameFromPtr> >(buf, (uint64_t)dts, (uint64_t)pts, 0, DATA_OFFSET, codec);
break;
}
default: return nullptr;
}
if (ret) {
it->second->inputFrame(ret);
@ -283,7 +251,7 @@ Frame::Ptr MP4Demuxer::makeFrame(uint32_t track_id, const Buffer::Ptr &buf, int6
vector<Track::Ptr> MP4Demuxer::getTracks(bool trackReady) const {
vector<Track::Ptr> ret;
for (auto &pr : _track_to_codec) {
if(trackReady && !pr.second->ready()){
if (trackReady && !pr.second->ready()) {
continue;
}
ret.push_back(pr.second);

Some files were not shown because too many files have changed in this diff Show More