Merge branch 'master' of github.com:ZLMediaKit/ZLMediaKit into feature/transcode

This commit is contained in:
cqm 2023-02-20 17:07:15 +08:00
commit d27e577b14
82 changed files with 2360 additions and 387 deletions

View File

@ -34,6 +34,6 @@ jobs:
run: mkdir -p build && cd build && cmake .. && make -j $(nproc)
- name: 运行MediaServer
run: pwd && cd release/linux/Debug && sudo ./MediaServer -d &
run: pwd && cd release/darwin/Debug && sudo ./MediaServer -d &

@ -1 +1 @@
Subproject commit c2137bb4c07cbefd62a7cf38f75c29d40b26e516
Subproject commit 57901f9d341478378b1526f7efe99ebc79b2ddb5

@ -1 +1 @@
Subproject commit 357ef885afa5112c8cb873eab21d94d26744c0ab
Subproject commit 57a64570644afad8e76303b60a7b4fc1b658a78d

View File

@ -64,4 +64,5 @@ WuPeng <wp@zafu.edu.cn>
[custompal](https://github.com/custompal)
[PioLing](https://github.com/PioLing)
[KevinZang](https://github.com/ZSC714725)
[gongluck](https://github.com/gongluck)
[gongluck](https://github.com/gongluck)
[a-ucontrol](https://github.com/a-ucontrol)

View File

@ -50,6 +50,7 @@ option(ENABLE_SERVER "Enable Server" ON)
option(ENABLE_SERVER_LIB "Enable server as android static library" OFF)
option(ENABLE_SRT "Enable SRT" ON)
option(ENABLE_TESTS "Enable Tests" ON)
option(ENABLE_SCTP "Enable SCTP" ON)
option(ENABLE_WEBRTC "Enable WebRTC" ON)
option(ENABLE_X264 "Enable x264" OFF)
option(ENABLE_WEPOLL "Enable wepoll" ON)

View File

@ -53,7 +53,7 @@
- 服务器/客户端完整支持Basic/Digest方式的登录鉴权全异步可配置化的鉴权接口
- 支持H265编码
- 服务器支持RTSP推流(包括`rtp over udp` `rtp over tcp`方式)
- 支持H264/H265/AAC/G711/OPUS编码其他编码能转发但不能转协议
- 支持H264/H265/AAC/G711/OPUS/MJPEG编码,其他编码能转发但不能转协议
- RTMP[S]
- RTMP[S] 播放服务器支持RTSP/MP4/HLS转RTMP
@ -80,7 +80,7 @@
- fMP4
- 支持http[s]-fmp4直播
- 支持ws[s]-fmp4直播
- 支持H264/H265/AAC/G711/OPUS编码
- 支持H264/H265/AAC/G711/OPUS/MJPEG编码
- HTTP[S]与WebSocket
- 服务器支持`目录索引生成`,`文件下载`,`表单提交请求`
@ -290,6 +290,7 @@ bash build_docker_images.sh
[PioLing](https://github.com/PioLing)
[KevinZang](https://github.com/ZSC714725)
[gongluck](https://github.com/gongluck)
[a-ucontrol](https://github.com/a-ucontrol)
## 使用案例

View File

@ -47,6 +47,9 @@ extern "C" {
//输出日志到回调函数(mk_events::on_mk_log)
#define LOG_CALLBACK (1 << 2)
//回调user_data回调函数
typedef void(API_CALL *on_user_data_free)(void *user_data);
typedef struct {
// 线程数
int thread_num;
@ -117,6 +120,7 @@ API_EXPORT void API_CALL mk_set_log(int file_max_size, int file_max_count);
/**
*
* @deprecated 使mk_ini_set_option替代
* @param key
* @param val
*/
@ -124,6 +128,7 @@ API_EXPORT void API_CALL mk_set_option(const char *key, const char *val);
/**
*
* @deprecated 使mk_ini_get_option替代
* @param key
*/
API_EXPORT const char * API_CALL mk_get_option(const char *key);
@ -181,6 +186,9 @@ typedef void(API_CALL *on_mk_webrtc_get_answer_sdp)(void *user_data, const char
API_EXPORT void API_CALL mk_webrtc_get_answer_sdp(void *user_data, on_mk_webrtc_get_answer_sdp cb, const char *type,
const char *offer, const char *url);
API_EXPORT void API_CALL mk_webrtc_get_answer_sdp2(void *user_data, on_user_data_free user_data_free, on_mk_webrtc_get_answer_sdp cb, const char *type,
const char *offer, const char *url);
/**
* srt服务器
* @param port srt监听端口

View File

@ -120,6 +120,7 @@ typedef void(API_CALL *on_mk_media_source_send_rtp_result)(void *user_data, uint
//MediaSource::startSendRtp,请参考mk_media_start_send_rtp,注意ctx参数类型不一样
API_EXPORT void API_CALL mk_media_source_start_send_rtp(const mk_media_source ctx, const char *dst_url, uint16_t dst_port, const char *ssrc, int is_udp, on_mk_media_source_send_rtp_result cb, void *user_data);
API_EXPORT void API_CALL mk_media_source_start_send_rtp2(const mk_media_source ctx, const char *dst_url, uint16_t dst_port, const char *ssrc, int is_udp, on_mk_media_source_send_rtp_result cb, void *user_data, on_user_data_free user_data_free);
//MediaSource::stopSendRtp请参考mk_media_stop_send_rtp,注意ctx参数类型不一样
API_EXPORT int API_CALL mk_media_source_stop_send_rtp(const mk_media_source ctx);
@ -145,6 +146,13 @@ typedef void* mk_http_body;
*/
API_EXPORT mk_http_body API_CALL mk_http_body_from_string(const char *str,size_t len);
/**
* HttpBufferBody
* @param buffer mk_buffer对象
*/
API_EXPORT mk_http_body API_CALL mk_http_body_from_buffer(mk_buffer buffer);
/**
* HttpFileBody
* @param file_path

View File

@ -57,7 +57,8 @@ typedef void(API_CALL *on_mk_frame_data_release)(void *user_data, char *ptr);
*/
API_EXPORT mk_frame API_CALL mk_frame_create(int codec_id, uint64_t dts, uint64_t pts, const char *data, size_t size,
on_mk_frame_data_release cb, void *user_data);
API_EXPORT mk_frame API_CALL mk_frame_create2(int codec_id, uint64_t dts, uint64_t pts, const char *data, size_t size,
on_mk_frame_data_release cb, void *user_data, on_user_data_free user_data_free);
/**
* frame对象
* @param frame

View File

@ -32,9 +32,11 @@ typedef void(API_CALL *on_mk_h264_splitter_frame)(void *user_data, mk_h264_split
* h264分帧器
* @param cb
* @param user_data
* @param is_h265 265
* @return
*/
API_EXPORT mk_h264_splitter API_CALL mk_h264_splitter_create(on_mk_h264_splitter_frame cb, void *user_data);
API_EXPORT mk_h264_splitter API_CALL mk_h264_splitter_create(on_mk_h264_splitter_frame cb, void *user_data, int is_h265);
API_EXPORT mk_h264_splitter API_CALL mk_h264_splitter_create2(on_mk_h264_splitter_frame cb, void *user_data, on_user_data_free user_data_free, int is_h265);
/**
* h264分帧器

View File

@ -51,7 +51,7 @@ API_EXPORT void API_CALL mk_http_downloader_release(mk_http_downloader ctx);
* @param user_data
*/
API_EXPORT void API_CALL mk_http_downloader_start(mk_http_downloader ctx, const char *url, const char *file, on_mk_download_complete cb, void *user_data);
API_EXPORT void API_CALL mk_http_downloader_start2(mk_http_downloader ctx, const char *url, const char *file, on_mk_download_complete cb, void *user_data, on_user_data_free user_data_free);
///////////////////////////////////////////HttpRequester/////////////////////////////////////////////
typedef void *mk_http_requester;
@ -143,6 +143,7 @@ API_EXPORT mk_parser API_CALL mk_http_requester_get_response(mk_http_requester c
* @param user_data
*/
API_EXPORT void API_CALL mk_http_requester_set_cb(mk_http_requester ctx,on_mk_http_requester_complete cb, void *user_data);
API_EXPORT void API_CALL mk_http_requester_set_cb2(mk_http_requester ctx,on_mk_http_requester_complete cb, void *user_data, on_user_data_free user_data_free);
/**
* url请求

View File

@ -16,6 +16,7 @@
#include "mk_frame.h"
#include "mk_events_objects.h"
#include "mk_thread.h"
#include "mk_util.h"
#ifdef __cplusplus
extern "C" {
@ -36,6 +37,17 @@ typedef void *mk_media;
API_EXPORT mk_media API_CALL mk_media_create(const char *vhost, const char *app, const char *stream,
float duration, int hls_enabled, int mp4_enabled);
/**
*
* @param vhost __defaultVhost__
* @param app live
* @param stream idcamera
* @param duration ()0
* @param option ProtocolOption相关配置
* @return
*/
API_EXPORT mk_media API_CALL mk_media_create2(const char *vhost, const char *app, const char *stream, float duration, mk_ini option);
/**
*
* @param ctx
@ -171,6 +183,7 @@ typedef void(API_CALL *on_mk_media_close)(void *user_data);
* @param user_data
*/
API_EXPORT void API_CALL mk_media_set_on_close(mk_media ctx, on_mk_media_close cb, void *user_data);
API_EXPORT void API_CALL mk_media_set_on_close2(mk_media ctx, on_mk_media_close cb, void *user_data, on_user_data_free user_data_free);
/**
* seek请求时触发该回调
@ -201,6 +214,7 @@ typedef int(API_CALL* on_mk_media_speed)(void* user_data, float speed);
* @param user_data
*/
API_EXPORT void API_CALL mk_media_set_on_seek(mk_media ctx, on_mk_media_seek cb, void *user_data);
API_EXPORT void API_CALL mk_media_set_on_seek2(mk_media ctx, on_mk_media_seek cb, void *user_data, on_user_data_free user_data_free);
/**
* pause请求事件
@ -208,7 +222,8 @@ API_EXPORT void API_CALL mk_media_set_on_seek(mk_media ctx, on_mk_media_seek cb,
* @param cb
* @param user_data
*/
API_EXPORT void API_CALL mk_media_set_on_pause(mk_media ctx, on_mk_media_pause cb, void* user_data);
API_EXPORT void API_CALL mk_media_set_on_pause(mk_media ctx, on_mk_media_pause cb, void *user_data);
API_EXPORT void API_CALL mk_media_set_on_pause2(mk_media ctx, on_mk_media_pause cb, void *user_data, on_user_data_free user_data_free);
/**
* pause请求事件
@ -216,7 +231,8 @@ API_EXPORT void API_CALL mk_media_set_on_pause(mk_media ctx, on_mk_media_pause c
* @param cb
* @param user_data
*/
API_EXPORT void API_CALL mk_media_set_on_speed(mk_media ctx, on_mk_media_speed cb, void* user_data);
API_EXPORT void API_CALL mk_media_set_on_speed(mk_media ctx, on_mk_media_speed cb, void *user_data);
API_EXPORT void API_CALL mk_media_set_on_speed2(mk_media ctx, on_mk_media_speed cb, void *user_data, on_user_data_free user_data_free);
/**
*
@ -240,6 +256,7 @@ typedef void(API_CALL *on_mk_media_source_regist)(void *user_data, mk_media_sour
* @param user_data
*/
API_EXPORT void API_CALL mk_media_set_on_regist(mk_media ctx, on_mk_media_source_regist cb, void *user_data);
API_EXPORT void API_CALL mk_media_set_on_regist2(mk_media ctx, on_mk_media_source_regist cb, void *user_data, on_user_data_free user_data_free);
/**
* rtp推流成功与否的回调()
@ -257,6 +274,7 @@ typedef on_mk_media_source_send_rtp_result on_mk_media_send_rtp_result;
* @param user_data
*/
API_EXPORT void API_CALL mk_media_start_send_rtp(mk_media ctx, const char *dst_url, uint16_t dst_port, const char *ssrc, int is_udp, on_mk_media_send_rtp_result cb, void *user_data);
API_EXPORT void API_CALL mk_media_start_send_rtp2(mk_media ctx, const char *dst_url, uint16_t dst_port, const char *ssrc, int is_udp, on_mk_media_send_rtp_result cb, void *user_data, on_user_data_free user_data_free);
/**
* ps-rtp发送api线程安全

View File

@ -94,6 +94,7 @@ API_EXPORT void API_CALL mk_player_seekto_pos(mk_player ctx, int seek_pos);
* @param user_data
*/
API_EXPORT void API_CALL mk_player_set_on_result(mk_player ctx, on_mk_play_event cb, void *user_data);
API_EXPORT void API_CALL mk_player_set_on_result2(mk_player ctx, on_mk_play_event cb, void *user_data, on_user_data_free user_data_free);
/**
*
@ -102,6 +103,7 @@ API_EXPORT void API_CALL mk_player_set_on_result(mk_player ctx, on_mk_play_event
* @param user_data
*/
API_EXPORT void API_CALL mk_player_set_on_shutdown(mk_player ctx, on_mk_play_event cb, void *user_data);
API_EXPORT void API_CALL mk_player_set_on_shutdown2(mk_player ctx, on_mk_play_event cb, void *user_data, on_user_data_free user_data_free);
///////////////////////////获取音视频相关信息接口在播放成功回调触发后才有效///////////////////////////////

View File

@ -70,6 +70,7 @@ typedef void(API_CALL *on_mk_proxy_player_close)(void *user_data, int err, const
* @param user_data
*/
API_EXPORT void API_CALL mk_proxy_player_set_on_close(mk_proxy_player ctx, on_mk_proxy_player_close cb, void *user_data);
API_EXPORT void API_CALL mk_proxy_player_set_on_close2(mk_proxy_player ctx, on_mk_proxy_player_close cb, void *user_data, on_user_data_free user_data_free);
/**
*

View File

@ -79,6 +79,7 @@ API_EXPORT void API_CALL mk_pusher_publish(mk_pusher ctx,const char *url);
* @param user_data
*/
API_EXPORT void API_CALL mk_pusher_set_on_result(mk_pusher ctx, on_mk_push_event cb, void *user_data);
API_EXPORT void API_CALL mk_pusher_set_on_result2(mk_pusher ctx, on_mk_push_event cb, void *user_data, on_user_data_free user_data_free);
/**
*
@ -87,6 +88,7 @@ API_EXPORT void API_CALL mk_pusher_set_on_result(mk_pusher ctx, on_mk_push_event
* @param user_data
*/
API_EXPORT void API_CALL mk_pusher_set_on_shutdown(mk_pusher ctx, on_mk_push_event cb, void *user_data);
API_EXPORT void API_CALL mk_pusher_set_on_shutdown2(mk_pusher ctx, on_mk_push_event cb, void *user_data, on_user_data_free user_data_free);
#ifdef __cplusplus
}

View File

@ -40,6 +40,7 @@ typedef void(API_CALL *on_mk_rtp_server_connected)(void *user_data, int err, con
* @return
*/
API_EXPORT void API_CALL mk_rtp_server_connect(mk_rtp_server ctx, const char *dst_url, uint16_t dst_port, on_mk_rtp_server_connected cb, void *user_data);
API_EXPORT void API_CALL mk_rtp_server_connect2(mk_rtp_server ctx, const char *dst_url, uint16_t dst_port, on_mk_rtp_server_connected cb, void *user_data, on_user_data_free user_data_free);
/**
* GB28181 RTP
@ -67,7 +68,7 @@ typedef void(API_CALL *on_mk_rtp_server_detach)(void *user_data);
* @param user_data
*/
API_EXPORT void API_CALL mk_rtp_server_set_on_detach(mk_rtp_server ctx, on_mk_rtp_server_detach cb, void *user_data);
API_EXPORT void API_CALL mk_rtp_server_set_on_detach2(mk_rtp_server ctx, on_mk_rtp_server_detach cb, void *user_data, on_user_data_free user_data_free);
#ifdef __cplusplus
}

View File

@ -31,6 +31,7 @@ typedef void(API_CALL *on_mk_buffer_free)(void *user_data, void *data);
* @return buffer对象
*/
API_EXPORT mk_buffer API_CALL mk_buffer_from_char(const char *data, size_t len, on_mk_buffer_free cb, void *user_data);
API_EXPORT mk_buffer API_CALL mk_buffer_from_char2(const char *data, size_t len, on_mk_buffer_free cb, void *user_data, on_user_data_free user_data_free);
API_EXPORT mk_buffer API_CALL mk_buffer_ref(mk_buffer buffer);
API_EXPORT void API_CALL mk_buffer_unref(mk_buffer buffer);
API_EXPORT const char* API_CALL mk_buffer_get_data(mk_buffer buffer);
@ -141,7 +142,8 @@ typedef enum {
* @param session
* @param user_data
*/
API_EXPORT void API_CALL mk_tcp_session_set_user_data(mk_tcp_session session,void *user_data);
API_EXPORT void API_CALL mk_tcp_session_set_user_data(mk_tcp_session session, void *user_data);
API_EXPORT void API_CALL mk_tcp_session_set_user_data2(mk_tcp_session session, void *user_data, on_user_data_free user_data_free);
/**
* tcp会话对象上附着的用户数据
@ -250,7 +252,8 @@ API_EXPORT void API_CALL mk_tcp_client_send_buffer_safe(mk_tcp_client ctx, mk_bu
* @param ctx
* @param user_data
*/
API_EXPORT void API_CALL mk_tcp_client_set_user_data(mk_tcp_client ctx,void *user_data);
API_EXPORT void API_CALL mk_tcp_client_set_user_data(mk_tcp_client ctx, void *user_data);
API_EXPORT void API_CALL mk_tcp_client_set_user_data2(mk_tcp_client ctx, void *user_data, on_user_data_free user_data_free);
/**
*

View File

@ -87,6 +87,7 @@ typedef void (API_CALL *on_mk_async)(void *user_data);
* @param user_data
*/
API_EXPORT void API_CALL mk_async_do(mk_thread ctx, on_mk_async cb, void *user_data);
API_EXPORT void API_CALL mk_async_do2(mk_thread ctx, on_mk_async cb, void *user_data, on_user_data_free user_data_free);
/**
* 线
@ -96,6 +97,7 @@ API_EXPORT void API_CALL mk_async_do(mk_thread ctx, on_mk_async cb, void *user_d
* @param user_data
*/
API_EXPORT void API_CALL mk_async_do_delay(mk_thread ctx, size_t ms, on_mk_async cb, void *user_data);
API_EXPORT void API_CALL mk_async_do_delay2(mk_thread ctx, size_t ms, on_mk_async cb, void *user_data, on_user_data_free user_data_free);
/**
* 线
@ -123,6 +125,7 @@ typedef uint64_t (API_CALL *on_mk_timer)(void *user_data);
* @return
*/
API_EXPORT mk_timer API_CALL mk_timer_create(mk_thread ctx, uint64_t delay_ms, on_mk_timer cb, void *user_data);
API_EXPORT mk_timer API_CALL mk_timer_create2(mk_thread ctx, uint64_t delay_ms, on_mk_timer cb, void *user_data, on_user_data_free user_data_free);
/**
*

View File

@ -80,6 +80,7 @@ API_EXPORT int API_CALL mk_track_bit_rate(mk_track track);
* @param user_data frame输出回调用户指针参数
*/
API_EXPORT void *API_CALL mk_track_add_delegate(mk_track track, on_mk_frame_out cb, void *user_data);
API_EXPORT void *API_CALL mk_track_add_delegate2(mk_track track, on_mk_frame_out cb, void *user_data, on_user_data_free user_data_free);
/**
* frame输出事件监听

View File

@ -13,6 +13,7 @@
#include "mk_common.h"
#include "mk_track.h"
#include "mk_tcp.h"
#ifdef __cplusplus
extern "C" {
@ -39,6 +40,16 @@ typedef void(API_CALL *on_mk_decode)(void *user_data, mk_frame_pix frame);
*/
API_EXPORT mk_decoder API_CALL mk_decoder_create(mk_track track, int thread_num);
/**
*
* @param track track对象
* @param thread_num 线0
* @param codec_name_list ffmpeg codec name列表NULL结尾{"libopenh264", "h264_nvdec", NULL};
* ;codec不存在mk_track_codec_id类型不匹配时使codec列表
* @return NULL代表失败
*/
API_EXPORT mk_decoder API_CALL mk_decoder_create2(mk_track track, int thread_num, const char *codec_name_list[]);
/**
*
* @param ctx
@ -67,6 +78,7 @@ API_EXPORT void API_CALL mk_decoder_set_max_async_frame_size(mk_decoder ctx, siz
* @param user_data
*/
API_EXPORT void API_CALL mk_decoder_set_cb(mk_decoder ctx, on_mk_decode cb, void *user_data);
API_EXPORT void API_CALL mk_decoder_set_cb2(mk_decoder ctx, on_mk_decode cb, void *user_data, on_user_data_free user_data_free);
/**
* FFmpeg原始AVCodecContext对象
@ -96,6 +108,15 @@ API_EXPORT void API_CALL mk_frame_pix_unref(mk_frame_pix frame);
*/
API_EXPORT mk_frame_pix API_CALL mk_frame_pix_from_av_frame(AVFrame *frame);
/**
* mk_frame_pix对象
* @param plane_data , mk_buffer_get_data获取其数据指针
* @param line_size line size
* @param plane
* @return mk_frame_pix对象
*/
API_EXPORT mk_frame_pix API_CALL mk_frame_pix_from_buffer(mk_buffer plane_data[], int line_size[], int plane);
/**
* FFmpeg AVFrame对象
* @param frame mk_frame_pix
@ -139,13 +160,26 @@ API_EXPORT mk_frame_pix mk_swscale_input_frame2(mk_swscale ctx, mk_frame_pix fra
/////////////////////////////////////////////////////////////////////////////////////////////
API_EXPORT uint8_t** API_CALL mk_get_av_frame_data(AVFrame *frame);
API_EXPORT int* API_CALL mk_get_av_frame_line_size(AVFrame *frame);
API_EXPORT uint8_t **API_CALL mk_get_av_frame_data(AVFrame *frame);
API_EXPORT void API_CALL mk_set_av_frame_data(AVFrame *frame, uint8_t *data, int plane);
API_EXPORT int *API_CALL mk_get_av_frame_line_size(AVFrame *frame);
API_EXPORT void API_CALL mk_set_av_frame_line_size(AVFrame *frame, int line_size, int plane);
API_EXPORT int64_t API_CALL mk_get_av_frame_dts(AVFrame *frame);
API_EXPORT void API_CALL mk_set_av_frame_dts(AVFrame *frame, int64_t dts);
API_EXPORT int64_t API_CALL mk_get_av_frame_pts(AVFrame *frame);
API_EXPORT void API_CALL mk_set_av_frame_pts(AVFrame *frame, int64_t pts);
API_EXPORT int API_CALL mk_get_av_frame_width(AVFrame *frame);
API_EXPORT void API_CALL mk_set_av_frame_width(AVFrame *frame, int width);
API_EXPORT int API_CALL mk_get_av_frame_height(AVFrame *frame);
API_EXPORT void API_CALL mk_set_av_frame_height(AVFrame *frame, int height);
API_EXPORT int API_CALL mk_get_av_frame_format(AVFrame *frame);
API_EXPORT void API_CALL mk_set_av_frame_format(AVFrame *frame, int format);
#ifdef __cplusplus
}

View File

@ -18,16 +18,21 @@
extern "C" {
#endif
/**
* mk api内部malloc的资源
*/
API_EXPORT void API_CALL mk_free(void *ptr);
/**
*
* @return 使free
* @return 使mk_free
*/
API_EXPORT char* API_CALL mk_util_get_exe_path();
/**
*
* @param relative_path ,null
* @return 使free
* @return 使mk_free
*/
API_EXPORT char* API_CALL mk_util_get_exe_dir(const char *relative_path);
@ -40,7 +45,7 @@ API_EXPORT uint64_t API_CALL mk_util_get_current_millisecond();
/**
*
* @param fmt %Y-%m-%d %H:%M:%S
* @return 使free
* @return 使mk_free
*/
API_EXPORT char* API_CALL mk_util_get_current_time_string(const char *fmt);
@ -48,9 +53,82 @@ API_EXPORT char* API_CALL mk_util_get_current_time_string(const char *fmt);
*
* @param buf
* @param len
* @return 使free
* @return 使mk_free
*/
API_EXPORT char* API_CALL mk_util_hex_dump(const void *buf, int len);
///////////////////////////////////////////mk ini/////////////////////////////////////////////
typedef void* mk_ini;
/**
* ini配置对象
*/
API_EXPORT mk_ini API_CALL mk_ini_create();
/**
* ini配置
* @return ini配置mk_ini_release释放它
*/
API_EXPORT mk_ini API_CALL mk_ini_default();
/**
* ini配置文件内容
* @param ini ini对象
* @param str
*/
API_EXPORT void API_CALL mk_ini_load_string(mk_ini ini, const char *str);
/**
* ini配置文件
* @param ini ini对象
* @param file
*/
API_EXPORT void API_CALL mk_ini_load_file(mk_ini ini, const char *file);
/**
* ini配置对象
*/
API_EXPORT void API_CALL mk_ini_release(mk_ini ini);
/**
*
* @param ini
* @param key field.key
* @param value
*/
API_EXPORT void API_CALL mk_ini_set_option(mk_ini ini, const char *key, const char *value);
API_EXPORT void API_CALL mk_ini_set_option_int(mk_ini ini, const char *key, int value);
/**
*
* @param ini
* @param key field.key
* @return NULL
*/
API_EXPORT const char *API_CALL mk_ini_get_option(mk_ini ini, const char *key);
/**
*
* @param ini
* @param key field.key
* @return 1: 0:
*/
API_EXPORT int API_CALL mk_ini_del_option(mk_ini ini, const char *key);
/**
*
* @param ini
* @return mk_free
*/
API_EXPORT char *API_CALL mk_ini_dump_string(mk_ini ini);
/**
*
* @param ini
* @param file
*/
API_EXPORT void API_CALL mk_ini_dump_file(mk_ini ini, const char *file);
///////////////////////////////////////////日志/////////////////////////////////////////////
/**
@ -67,9 +145,9 @@ API_EXPORT void API_CALL mk_log_printf(int level, const char *file, const char *
// 以下宏可以替换printf使用
#define log_printf(lev, ...) mk_log_printf(lev, __FILE__, __FUNCTION__, __LINE__, ##__VA_ARGS__)
#define log_trace(...) log_printf(0, ##__VA_ARGS__)
#define log_debug(...) log_printf(1, ##__VA_ARGS__)
#define log_info(...) log_printf(2, ##__VA_ARGS__)
#define log_warn(...) log_printf(3, ##__VA_ARGS__)
#define log_debug(...) log_printf(1, ##__VA_ARGS__)
#define log_info(...) log_printf(2, ##__VA_ARGS__)
#define log_warn(...) log_printf(3, ##__VA_ARGS__)
#define log_error(...) log_printf(4, ##__VA_ARGS__)
#ifdef __cplusplus

View File

@ -158,6 +158,8 @@ API_EXPORT void API_CALL mk_set_option(const char *key, const char *val) {
return;
}
mINI::Instance()[key] = val;
//广播配置文件热加载
NoticeCenter::Instance().emitEvent(Broadcast::kBroadcastReloadConfig);
}
API_EXPORT const char * API_CALL mk_get_option(const char *key)
@ -292,19 +294,24 @@ private:
};
#endif
API_EXPORT void API_CALL mk_webrtc_get_answer_sdp(void *user_data, on_mk_webrtc_get_answer_sdp cb, const char *type,
API_EXPORT void API_CALL mk_webrtc_get_answer_sdp(void *user_data, on_mk_webrtc_get_answer_sdp cb, const char *type,
const char *offer, const char *url) {
mk_webrtc_get_answer_sdp2(user_data, nullptr, cb, type, offer, url);
}
API_EXPORT void API_CALL mk_webrtc_get_answer_sdp2(void *user_data, on_user_data_free user_data_free, on_mk_webrtc_get_answer_sdp cb, const char *type,
const char *offer, const char *url) {
#ifdef ENABLE_WEBRTC
assert(type && offer && url && cb);
auto session = std::make_shared<HttpSession>(Socket::createSocket());
std::string offer_str = offer;
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
WebRtcPluginManager::Instance().getAnswerSdp(*session, type, WebRtcArgsUrl(url),
[offer_str, session, user_data, cb](const WebRtcInterface &exchanger) mutable {
[offer_str, session, ptr, cb](const WebRtcInterface &exchanger) mutable {
try {
auto sdp_answer = const_cast<WebRtcInterface &>(exchanger).getAnswerSdp(offer_str);
cb(user_data, sdp_answer.data(), nullptr);
cb(ptr.get(), sdp_answer.data(), nullptr);
} catch (std::exception &ex) {
cb(user_data, nullptr, ex.what());
cb(ptr.get(), nullptr, ex.what());
}
});
#else

View File

@ -208,8 +208,11 @@ API_EXPORT int API_CALL mk_media_source_seek_to(const mk_media_source ctx,uint32
MediaSource *src = (MediaSource *)ctx;
return src->seekTo(stamp);
}
API_EXPORT void API_CALL mk_media_source_start_send_rtp(const mk_media_source ctx, const char *dst_url, uint16_t dst_port, const char *ssrc, int is_udp, on_mk_media_source_send_rtp_result cb, void *user_data) {
mk_media_source_start_send_rtp2(ctx, dst_url, dst_port, ssrc, is_udp, cb, user_data, nullptr);
}
API_EXPORT void API_CALL mk_media_source_start_send_rtp(const mk_media_source ctx, const char *dst_url, uint16_t dst_port, const char *ssrc, int is_udp, on_mk_media_source_send_rtp_result cb, void *user_data){
API_EXPORT void API_CALL mk_media_source_start_send_rtp2(const mk_media_source ctx, const char *dst_url, uint16_t dst_port, const char *ssrc, int is_udp, on_mk_media_source_send_rtp_result cb, void *user_data, on_user_data_free user_data_free){
assert(ctx && dst_url && ssrc);
MediaSource *src = (MediaSource *)ctx;
@ -219,9 +222,10 @@ API_EXPORT void API_CALL mk_media_source_start_send_rtp(const mk_media_source ct
args.ssrc = ssrc;
args.is_udp = is_udp;
src->startSendRtp(args, [cb, user_data](uint16_t local_port, const SockException &ex){
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
src->startSendRtp(args, [cb, ptr](uint16_t local_port, const SockException &ex){
if (cb) {
cb(user_data, local_port, ex.getErrCode(), ex.what());
cb(ptr.get(), local_port, ex.getErrCode(), ex.what());
}
});
}
@ -253,6 +257,7 @@ API_EXPORT void API_CALL mk_media_source_for_each(void *user_data, on_mk_media_s
}
///////////////////////////////////////////HttpBody/////////////////////////////////////////////
API_EXPORT mk_http_body API_CALL mk_http_body_from_string(const char *str, size_t len){
assert(str);
if(!len){
@ -261,6 +266,11 @@ API_EXPORT mk_http_body API_CALL mk_http_body_from_string(const char *str, size_
return new HttpBody::Ptr(new HttpStringBody(std::string(str, len)));
}
API_EXPORT mk_http_body API_CALL mk_http_body_from_buffer(mk_buffer buffer) {
assert(buffer);
return new HttpBody::Ptr(new HttpBufferBody(*((Buffer::Ptr *) buffer)));
}
API_EXPORT mk_http_body API_CALL mk_http_body_from_file(const char *file_path){
assert(file_path);
return new HttpBody::Ptr(new HttpFileBody(file_path));

View File

@ -27,17 +27,17 @@ public:
using Ptr = std::shared_ptr<FrameFromPtrForC>;
template<typename ...ARGS>
FrameFromPtrForC(bool cache_able, uint32_t flags, on_mk_frame_data_release cb, void *user_data, ARGS &&...args) : FrameFromPtr(
FrameFromPtrForC(bool cache_able, uint32_t flags, on_mk_frame_data_release cb, std::shared_ptr<void> user_data, ARGS &&...args) : FrameFromPtr(
std::forward<ARGS>(args)...) {
_flags = flags;
_cb = cb;
_user_data = user_data;
_user_data = std::move(user_data);
_cache_able = cache_able;
}
~FrameFromPtrForC() override {
if (_cb) {
_cb(_user_data, _ptr);
_cb(_user_data.get(), _ptr);
}
}
@ -66,43 +66,47 @@ public:
private:
uint32_t _flags;
on_mk_frame_data_release _cb;
void *_user_data;
std::shared_ptr<void> _user_data;
bool _cache_able;
};
static mk_frame mk_frame_create_complex(int codec_id, uint64_t dts, uint64_t pts, uint32_t frame_flags, size_t prefix_size,
char *data, size_t size, on_mk_frame_data_release cb, void *user_data) {
char *data, size_t size, on_mk_frame_data_release cb, std::shared_ptr<void> user_data) {
switch (codec_id) {
case CodecH264:
return new Frame::Ptr(new H264FrameHelper<FrameFromPtrForC>(cb, frame_flags, cb, user_data, (CodecId) codec_id,
data, size, dts, pts, prefix_size));
return new Frame::Ptr(new H264FrameHelper<FrameFromPtrForC>(
cb, frame_flags, cb, std::move(user_data), (CodecId)codec_id, data, size, dts, pts, prefix_size));
case CodecH265:
return new Frame::Ptr(new H265FrameHelper<FrameFromPtrForC>(cb, frame_flags, cb, user_data, (CodecId) codec_id,
data, size, dts, pts, prefix_size));
return new Frame::Ptr(new H265FrameHelper<FrameFromPtrForC>(
cb, frame_flags, cb, std::move(user_data), (CodecId)codec_id, data, size, dts, pts, prefix_size));
default:
return new Frame::Ptr(new FrameFromPtrForC(cb, frame_flags, cb, user_data, (CodecId) codec_id, data,
size, dts, pts, prefix_size));
return new Frame::Ptr(new FrameFromPtrForC(
cb, frame_flags, cb, std::move(user_data), (CodecId)codec_id, data, size, dts, pts, prefix_size));
}
}
API_EXPORT mk_frame API_CALL mk_frame_create(int codec_id, uint64_t dts, uint64_t pts, const char *data, size_t size,
on_mk_frame_data_release cb, void *user_data) {
on_mk_frame_data_release cb, void *user_data) {
return mk_frame_create2(codec_id, dts, pts, data, size, cb, user_data, nullptr);
}
API_EXPORT mk_frame API_CALL mk_frame_create2(int codec_id, uint64_t dts, uint64_t pts, const char *data, size_t size,
on_mk_frame_data_release cb, void *user_data, on_user_data_free user_data_free) {
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
switch (codec_id) {
case CodecH264:
case CodecH265:
return mk_frame_create_complex(codec_id, dts, pts, 0, prefixSize(data, size), (char *)data, size, cb, user_data);
return mk_frame_create_complex(codec_id, dts, pts, 0, prefixSize(data, size), (char *)data, size, cb, std::move(ptr));
case CodecAAC: {
int prefix = 0;
if ((((uint8_t *) data)[0] == 0xFF && (((uint8_t *) data)[1] & 0xF0) == 0xF0) && size > ADTS_HEADER_LEN) {
prefix = ADTS_HEADER_LEN;
}
return mk_frame_create_complex(codec_id, dts, pts, 0, prefix, (char *)data, size, cb, user_data);
return mk_frame_create_complex(codec_id, dts, pts, 0, prefix, (char *)data, size, cb, std::move(ptr));
}
default:
return mk_frame_create_complex(codec_id, dts, pts, 0, 0, (char *)data, size, cb, user_data);
return mk_frame_create_complex(codec_id, dts, pts, 0, 0, (char *)data, size, cb, std::move(ptr));
}
}

View File

@ -10,13 +10,15 @@
#include "mk_h264_splitter.h"
#include "Http/HttpRequestSplitter.h"
#include "Extension/H264.h"
#include "Extension/H265.h"
using namespace mediakit;
class H264Splitter : public HttpRequestSplitter {
public:
using onH264 = std::function<void(const char *data, size_t len)>;
H264Splitter() = default;
H264Splitter(bool h265 = false) { _h265 = h265; }
~H264Splitter() override;
void setOnSplitted(onH264 cb);
@ -25,7 +27,9 @@ protected:
const char *onSearchPacketTail(const char *data, size_t len) override;
private:
bool _h265 = false;
onH264 _cb;
size_t _search_pos = 0;
};
void H264Splitter::setOnSplitted(H264Splitter::onH264 cb) {
@ -43,7 +47,7 @@ ssize_t H264Splitter::onRecvHeader(const char *data, size_t len) {
return 0;
}
const char *H264Splitter::onSearchPacketTail(const char *data, size_t len) {
static const char *onSearchPacketTail_l(const char *data, size_t len) {
for (size_t i = 2; len > 2 && i < len - 2; ++i) {
//判断0x00 00 01
if (data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 1) {
@ -57,15 +61,42 @@ const char *H264Splitter::onSearchPacketTail(const char *data, size_t len) {
return nullptr;
}
const char *H264Splitter::onSearchPacketTail(const char *data, size_t len) {
auto last_frame = data + _search_pos;
auto next_frame = onSearchPacketTail_l(last_frame, len - _search_pos);
if (!next_frame) {
return nullptr;
}
auto last_frame_len = next_frame - last_frame;
Frame::Ptr frame;
if (_h265) {
frame = std::make_shared<H265FrameNoCacheAble>((char *) last_frame, last_frame_len, 0, 0, prefixSize(last_frame, last_frame_len));
} else {
frame = std::make_shared<H264FrameNoCacheAble>((char *) last_frame, last_frame_len, 0, 0, prefixSize(last_frame, last_frame_len));
}
if (frame->decodeAble()) {
_search_pos = 0;
return next_frame;
}
_search_pos += last_frame_len;
return nullptr;
}
////////////////////////////////////////////////////////////////////////////////////////////////////////
API_EXPORT mk_h264_splitter API_CALL mk_h264_splitter_create(on_mk_h264_splitter_frame cb, void *user_data) {
API_EXPORT mk_h264_splitter API_CALL mk_h264_splitter_create(on_mk_h264_splitter_frame cb, void *user_data, int is_h265) {
return mk_h264_splitter_create2(cb, user_data, nullptr, is_h265);
}
API_EXPORT mk_h264_splitter API_CALL mk_h264_splitter_create2(on_mk_h264_splitter_frame cb, void *user_data, on_user_data_free user_data_free, int is_h265) {
assert(cb);
auto ptr = new H264Splitter();
ptr->setOnSplitted([cb, ptr, user_data](const char *data, size_t len) {
cb(user_data, reinterpret_cast<mk_h264_splitter>(ptr), data, len);
auto ret = new H264Splitter(is_h265);
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
ret->setOnSplitted([cb, ptr, ret](const char *data, size_t len) {
cb(ptr.get(), reinterpret_cast<mk_h264_splitter>(ret), data, len);
});
return reinterpret_cast<mk_h264_splitter>(ptr);
return reinterpret_cast<mk_h264_splitter>(ret);
}
API_EXPORT void API_CALL mk_h264_splitter_release(mk_h264_splitter ctx){

View File

@ -30,17 +30,21 @@ API_EXPORT void API_CALL mk_http_downloader_release(mk_http_downloader ctx) {
}
API_EXPORT void API_CALL mk_http_downloader_start(mk_http_downloader ctx, const char *url, const char *file, on_mk_download_complete cb, void *user_data) {
mk_http_downloader_start2(ctx, url, file, cb, user_data, nullptr);
}
API_EXPORT void API_CALL mk_http_downloader_start2(mk_http_downloader ctx, const char *url, const char *file, on_mk_download_complete cb, void *user_data, on_user_data_free user_data_free) {
assert(ctx && url && file);
HttpDownloader::Ptr *obj = (HttpDownloader::Ptr *) ctx;
(*obj)->setOnResult([cb, user_data](const SockException &ex, const string &filePath) {
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
(*obj)->setOnResult([cb, ptr](const SockException &ex, const string &filePath) {
if (cb) {
cb(user_data, ex.getErrCode(), ex.what(), filePath.data());
cb(ptr.get(), ex.getErrCode(), ex.what(), filePath.data());
}
});
(*obj)->startDownload(url, file, false);
}
///////////////////////////////////////////HttpRequester/////////////////////////////////////////////
API_EXPORT mk_http_requester API_CALL mk_http_requester_create(){
HttpRequester::Ptr *ret = new HttpRequester::Ptr(new HttpRequester);
@ -128,11 +132,16 @@ API_EXPORT mk_parser API_CALL mk_http_requester_get_response(mk_http_requester c
return (mk_parser)&((*obj)->response());
}
API_EXPORT void API_CALL mk_http_requester_set_cb(mk_http_requester ctx,on_mk_http_requester_complete cb, void *user_data){
API_EXPORT void API_CALL mk_http_requester_set_cb(mk_http_requester ctx,on_mk_http_requester_complete cb, void *user_data) {
mk_http_requester_set_cb2(ctx, cb, user_data, nullptr);
}
API_EXPORT void API_CALL mk_http_requester_set_cb2(mk_http_requester ctx,on_mk_http_requester_complete cb, void *user_data, on_user_data_free user_data_free) {
assert(ctx && cb);
HttpRequester::Ptr *obj = (HttpRequester::Ptr *)ctx;
(*obj)->setOnResult([cb, user_data](const SockException &ex, const Parser &res) {
cb(user_data, ex.getErrCode(), ex.what());
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
(*obj)->setOnResult([cb, ptr](const SockException &ex, const Parser &res) {
cb(ptr.get(), ex.getErrCode(), ex.what());
});
}

View File

@ -23,7 +23,8 @@ public:
MediaHelper(ArgsType &&...args){
_channel = std::make_shared<DevChannel>(std::forward<ArgsType>(args)...);
}
~MediaHelper(){}
~MediaHelper() = default;
void attachEvent(){
_channel->setMediaListener(shared_from_this());
@ -33,29 +34,29 @@ public:
return _channel;
}
void setOnClose(on_mk_media_close cb, void *user_data){
void setOnClose(on_mk_media_close cb, std::shared_ptr<void> user_data){
_on_close = cb;
_on_close_data = user_data;
_on_close_data = std::move(user_data);
}
void setOnSeek(on_mk_media_seek cb, void *user_data){
void setOnSeek(on_mk_media_seek cb, std::shared_ptr<void> user_data){
_on_seek = cb;
_on_seek_data = user_data;
_on_seek_data = std::move(user_data);
}
void setOnPause(on_mk_media_pause cb, void* user_data) {
void setOnPause(on_mk_media_pause cb, std::shared_ptr<void> user_data) {
_on_pause = cb;
_on_pause_data = user_data;
_on_pause_data = std::move(user_data);
}
void setOnSpeed(on_mk_media_speed cb, void* user_data) {
void setOnSpeed(on_mk_media_speed cb, std::shared_ptr<void> user_data) {
_on_speed = cb;
_on_speed_data = user_data;
_on_speed_data = std::move(user_data);
}
void setOnRegist(on_mk_media_source_regist cb, void *user_data){
void setOnRegist(on_mk_media_source_regist cb, std::shared_ptr<void> user_data){
_on_regist = cb;
_on_regist_data = user_data;
_on_regist_data = std::move(user_data);
}
protected:
@ -67,7 +68,7 @@ protected:
return false;
}
//请在回调中调用mk_media_release函数释放资源,否则MediaSource::close()操作不会生效
_on_close(_on_close_data);
_on_close(_on_close_data.get());
WarnL << "close media: " << sender.getUrl();
return true;
}
@ -76,7 +77,7 @@ protected:
if (!_on_seek) {
return false;
}
return _on_seek(_on_seek_data, stamp);
return _on_seek(_on_seek_data.get(), stamp);
}
// 通知暂停或恢复
@ -84,7 +85,7 @@ protected:
if (!_on_pause) {
return false;
}
return _on_pause(_on_pause_data, pause);
return _on_pause(_on_pause_data.get(), pause);
}
//通知倍数播放
@ -92,12 +93,12 @@ protected:
if (!_on_speed) {
return false;
}
return _on_speed(_on_speed_data, speed);
return _on_speed(_on_speed_data.get(), speed);
}
void onRegist(MediaSource &sender, bool regist) override{
if (_on_regist) {
_on_regist(_on_regist_data, &sender, regist);
_on_regist(_on_regist_data.get(), &sender, regist);
}
}
@ -108,41 +109,66 @@ private:
on_mk_media_pause _on_pause = nullptr;
on_mk_media_speed _on_speed = nullptr;
on_mk_media_source_regist _on_regist = nullptr;
void* _on_seek_data;
void* _on_pause_data;
void* _on_speed_data;
void *_on_close_data;
void *_on_regist_data;
std::shared_ptr<void> _on_seek_data;
std::shared_ptr<void> _on_pause_data;
std::shared_ptr<void> _on_speed_data;
std::shared_ptr<void> _on_close_data;
std::shared_ptr<void> _on_regist_data;
};
API_EXPORT void API_CALL mk_media_set_on_close(mk_media ctx, on_mk_media_close cb, void *user_data){
mk_media_set_on_close2(ctx, cb, user_data, nullptr);
}
API_EXPORT void API_CALL mk_media_set_on_close2(mk_media ctx, on_mk_media_close cb, void *user_data, on_user_data_free user_data_free) {
assert(ctx);
MediaHelper::Ptr *obj = (MediaHelper::Ptr *) ctx;
(*obj)->setOnClose(cb, user_data);
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
(*obj)->setOnClose(cb, std::move(ptr));
}
API_EXPORT void API_CALL mk_media_set_on_seek(mk_media ctx, on_mk_media_seek cb, void *user_data) {
mk_media_set_on_seek2(ctx, cb, user_data, nullptr);
}
API_EXPORT void API_CALL mk_media_set_on_seek2(mk_media ctx, on_mk_media_seek cb, void *user_data, on_user_data_free user_data_free) {
assert(ctx);
MediaHelper::Ptr *obj = (MediaHelper::Ptr *) ctx;
(*obj)->setOnSeek(cb, user_data);
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
(*obj)->setOnSeek(cb, std::move(ptr));
}
API_EXPORT void API_CALL mk_media_set_on_pause(mk_media ctx, on_mk_media_pause cb, void *user_data) {
mk_media_set_on_pause2(ctx, cb, user_data, nullptr);
}
API_EXPORT void API_CALL mk_media_set_on_pause2(mk_media ctx, on_mk_media_pause cb, void *user_data, on_user_data_free user_data_free) {
assert(ctx);
MediaHelper::Ptr *obj = (MediaHelper::Ptr *) ctx;
(*obj)->setOnPause(cb, user_data);
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
(*obj)->setOnPause(cb, std::move(ptr));
}
API_EXPORT void API_CALL mk_media_set_on_speed(mk_media ctx, on_mk_media_speed cb, void *user_data) {
mk_media_set_on_speed2(ctx, cb, user_data, nullptr);
}
API_EXPORT void API_CALL mk_media_set_on_speed2(mk_media ctx, on_mk_media_speed cb, void *user_data, on_user_data_free user_data_free){
assert(ctx);
MediaHelper::Ptr *obj = (MediaHelper::Ptr *) ctx;
(*obj)->setOnSpeed(cb, user_data);
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
(*obj)->setOnSpeed(cb, std::move(ptr));
}
API_EXPORT void API_CALL mk_media_set_on_regist(mk_media ctx, on_mk_media_source_regist cb, void *user_data){
mk_media_set_on_regist2(ctx, cb, user_data, nullptr);
}
API_EXPORT void API_CALL mk_media_set_on_regist2(mk_media ctx, on_mk_media_source_regist cb, void *user_data, on_user_data_free user_data_free) {
assert(ctx);
MediaHelper::Ptr *obj = (MediaHelper::Ptr *) ctx;
(*obj)->setOnRegist(cb, user_data);
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
(*obj)->setOnRegist(cb, std::move(ptr));
}
API_EXPORT int API_CALL mk_media_total_reader_count(mk_media ctx){
@ -163,6 +189,14 @@ API_EXPORT mk_media API_CALL mk_media_create(const char *vhost, const char *app,
return (mk_media) obj;
}
API_EXPORT mk_media API_CALL mk_media_create2(const char *vhost, const char *app, const char *stream, float duration, mk_ini ini) {
    // Variant of mk_media_create that reads protocol options from an ini handle
    // (mk_ini wraps a mINI instance) instead of discrete boolean flags.
    // The returned handle must be released with mk_media_release().
    assert(vhost && app && stream && ini);
    ProtocolOption option(*((mINI *)ini));
    MediaHelper::Ptr *obj(new MediaHelper::Ptr(new MediaHelper(vhost, app, stream, duration, option)));
    (*obj)->attachEvent();
    return (mk_media) obj;
}
API_EXPORT void API_CALL mk_media_release(mk_media ctx) {
assert(ctx);
MediaHelper::Ptr *obj = (MediaHelper::Ptr *) ctx;
@ -246,7 +280,11 @@ API_EXPORT int API_CALL mk_media_input_audio(mk_media ctx, const void* data, int
return (*obj)->getChannel()->inputAudio((const char*)data, len, dts);
}
API_EXPORT void API_CALL mk_media_start_send_rtp(mk_media ctx, const char *dst_url, uint16_t dst_port, const char *ssrc, int is_udp, on_mk_media_send_rtp_result cb, void *user_data){
API_EXPORT void API_CALL mk_media_start_send_rtp(mk_media ctx, const char *dst_url, uint16_t dst_port, const char *ssrc, int is_udp, on_mk_media_send_rtp_result cb, void *user_data) {
mk_media_start_send_rtp2(ctx, dst_url, dst_port, ssrc, is_udp, cb, user_data, nullptr);
}
API_EXPORT void API_CALL mk_media_start_send_rtp2(mk_media ctx, const char *dst_url, uint16_t dst_port, const char *ssrc, int is_udp, on_mk_media_send_rtp_result cb, void *user_data, on_user_data_free user_data_free){
assert(ctx && dst_url && ssrc);
MediaHelper::Ptr* obj = (MediaHelper::Ptr*) ctx;
@ -258,10 +296,11 @@ API_EXPORT void API_CALL mk_media_start_send_rtp(mk_media ctx, const char *dst_u
// sender参数无用
auto ref = *obj;
(*obj)->getChannel()->getOwnerPoller(MediaSource::NullMediaSource())->async([args, ref, cb, user_data]() {
ref->getChannel()->startSendRtp(MediaSource::NullMediaSource(), args, [cb, user_data](uint16_t local_port, const SockException &ex) {
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
(*obj)->getChannel()->getOwnerPoller(MediaSource::NullMediaSource())->async([args, ref, cb, ptr]() {
ref->getChannel()->startSendRtp(MediaSource::NullMediaSource(), args, [cb, ptr](uint16_t local_port, const SockException &ex) {
if (cb) {
cb(user_data, local_port, ex.getErrCode(), ex.what());
cb(ptr.get(), local_port, ex.getErrCode(), ex.what());
}
});
});

View File

@ -61,7 +61,7 @@ public:
if (is_shutdown) {
//播放中断
if (_on_shutdown) {
_on_shutdown(_on_shutdown_data, ex.getErrCode(), ex.what(), nullptr, 0);
_on_shutdown(_on_shutdown_data.get(), ex.getErrCode(), ex.what(), nullptr, 0);
}
return;
}
@ -74,17 +74,17 @@ public:
for (auto &track : cpp_tracks) {
tracks[track_count++] = (mk_track) &track;
}
_on_play(_on_play_data, ex.getErrCode(), ex.what(), tracks, track_count);
_on_play(_on_play_data.get(), ex.getErrCode(), ex.what(), tracks, track_count);
}
}
void setOnEvent(on_mk_play_event cb, void *user_data, int type) {
void setOnEvent(on_mk_play_event cb, std::shared_ptr<void> user_data, int type) {
lock_guard<recursive_mutex> lck(_mtx);
if(type == 0){
_on_play_data = user_data;
if (type == 0) {
_on_play_data = std::move(user_data);
_on_play = cb;
}else{
_on_shutdown_data = user_data;
} else {
_on_shutdown_data = std::move(user_data);
_on_shutdown = cb;
}
}
@ -98,8 +98,8 @@ private:
on_mk_play_event _on_play = nullptr;
on_mk_play_event _on_shutdown = nullptr;
void *_on_play_data = nullptr;
void *_on_shutdown_data = nullptr;
std::shared_ptr<void> _on_play_data;
std::shared_ptr<void> _on_shutdown_data;
};
API_EXPORT mk_player API_CALL mk_player_create() {
@ -175,18 +175,28 @@ API_EXPORT void API_CALL mk_player_seekto_pos(mk_player ctx, int seek_pos) {
});
}
static void mk_player_set_on_event(mk_player ctx, on_mk_play_event cb, void *user_data, int type) {
static void mk_player_set_on_event(mk_player ctx, on_mk_play_event cb, std::shared_ptr<void> user_data, int type) {
assert(ctx);
MediaPlayerForC &obj = **((MediaPlayerForC::Ptr *)ctx);
obj.setOnEvent(cb,user_data, type);
obj.setOnEvent(cb, std::move(user_data), type);
}
API_EXPORT void API_CALL mk_player_set_on_result(mk_player ctx, on_mk_play_event cb, void *user_data) {
mk_player_set_on_event(ctx,cb,user_data,0);
mk_player_set_on_result2(ctx, cb, user_data, nullptr);
}
API_EXPORT void API_CALL mk_player_set_on_result2(mk_player ctx, on_mk_play_event cb, void *user_data, on_user_data_free user_data_free) {
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
mk_player_set_on_event(ctx, cb, std::move(ptr), 0);
}
API_EXPORT void API_CALL mk_player_set_on_shutdown(mk_player ctx, on_mk_play_event cb, void *user_data) {
mk_player_set_on_event(ctx,cb,user_data,1);
mk_player_set_on_shutdown2(ctx, cb, user_data, nullptr);
}
API_EXPORT void API_CALL mk_player_set_on_shutdown2(mk_player ctx, on_mk_play_event cb, void *user_data, on_user_data_free user_data_free){
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
mk_player_set_on_event(ctx, cb, std::move(ptr), 1);
}
API_EXPORT float API_CALL mk_player_duration(mk_player ctx) {

View File

@ -50,13 +50,18 @@ API_EXPORT void API_CALL mk_proxy_player_play(mk_proxy_player ctx, const char *u
}
API_EXPORT void API_CALL mk_proxy_player_set_on_close(mk_proxy_player ctx, on_mk_proxy_player_close cb, void *user_data){
mk_proxy_player_set_on_close2(ctx, cb, user_data, nullptr);
}
API_EXPORT void API_CALL mk_proxy_player_set_on_close2(mk_proxy_player ctx, on_mk_proxy_player_close cb, void *user_data, on_user_data_free user_data_free) {
assert(ctx);
PlayerProxy::Ptr &obj = *((PlayerProxy::Ptr *) ctx);
obj->getPoller()->async([obj,cb,user_data](){
//切换线程再操作
obj->setOnClose([cb,user_data](const SockException &ex){
if(cb){
cb(user_data, ex.getErrCode(), ex.what(), ex.getCustomCode());
PlayerProxy::Ptr &obj = *((PlayerProxy::Ptr *)ctx);
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
obj->getPoller()->async([obj, cb, ptr]() {
// 切换线程再操作
obj->setOnClose([cb, ptr](const SockException &ex) {
if (cb) {
cb(ptr.get(), ex.getErrCode(), ex.what(), ex.getCustomCode());
}
});
});

View File

@ -55,23 +55,29 @@ API_EXPORT void API_CALL mk_pusher_publish(mk_pusher ctx,const char *url){
}
API_EXPORT void API_CALL mk_pusher_set_on_result(mk_pusher ctx, on_mk_push_event cb, void *user_data){
mk_pusher_set_on_result2(ctx, cb, user_data, nullptr);
}
API_EXPORT void API_CALL mk_pusher_set_on_result2(mk_pusher ctx, on_mk_push_event cb, void *user_data, on_user_data_free user_data_free) {
assert(ctx && cb);
MediaPusher::Ptr &obj = *((MediaPusher::Ptr *)ctx);
obj->getPoller()->async([obj,cb,user_data](){
//切换线程再操作
obj->setOnPublished([cb,user_data](const SockException &ex){
cb(user_data,ex.getErrCode(),ex.what());
});
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
obj->getPoller()->async([obj, cb, ptr]() {
// 切换线程再操作
obj->setOnPublished([cb, ptr](const SockException &ex) { cb(ptr.get(), ex.getErrCode(), ex.what()); });
});
}
API_EXPORT void API_CALL mk_pusher_set_on_shutdown(mk_pusher ctx, on_mk_push_event cb, void *user_data){
mk_pusher_set_on_shutdown2(ctx, cb, user_data, nullptr);
}
API_EXPORT void API_CALL mk_pusher_set_on_shutdown2(mk_pusher ctx, on_mk_push_event cb, void *user_data, on_user_data_free user_data_free) {
assert(ctx && cb);
MediaPusher::Ptr &obj = *((MediaPusher::Ptr *)ctx);
obj->getPoller()->async([obj,cb,user_data](){
//切换线程再操作
obj->setOnShutdown([cb,user_data](const SockException &ex){
cb(user_data,ex.getErrCode(),ex.what());
});
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
obj->getPoller()->async([obj, cb, ptr]() {
// 切换线程再操作
obj->setOnShutdown([cb, ptr](const SockException &ex) { cb(ptr.get(), ex.getErrCode(), ex.what()); });
});
}

View File

@ -23,11 +23,16 @@ API_EXPORT mk_rtp_server API_CALL mk_rtp_server_create(uint16_t port, int tcp_mo
}
API_EXPORT void API_CALL mk_rtp_server_connect(mk_rtp_server ctx, const char *dst_url, uint16_t dst_port, on_mk_rtp_server_connected cb, void *user_data) {
mk_rtp_server_connect2(ctx, dst_url, dst_port, cb, user_data, nullptr);
}
API_EXPORT void API_CALL mk_rtp_server_connect2(mk_rtp_server ctx, const char *dst_url, uint16_t dst_port, on_mk_rtp_server_connected cb, void *user_data, on_user_data_free user_data_free){
RtpServer::Ptr *server = (RtpServer::Ptr *)ctx;
if (server) {
(*server)->connectToServer(dst_url, dst_port, [cb, user_data](const SockException &ex) {
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
(*server)->connectToServer(dst_url, dst_port, [cb, ptr](const SockException &ex) {
if (cb) {
cb(user_data, ex.getErrCode(), ex.what(), ex.getCustomCode());
cb(ptr.get(), ex.getErrCode(), ex.what(), ex.getCustomCode());
}
});
}
@ -44,10 +49,15 @@ API_EXPORT uint16_t API_CALL mk_rtp_server_port(mk_rtp_server ctx) {
}
API_EXPORT void API_CALL mk_rtp_server_set_on_detach(mk_rtp_server ctx, on_mk_rtp_server_detach cb, void *user_data) {
mk_rtp_server_set_on_detach2(ctx, cb, user_data, nullptr);
}
API_EXPORT void API_CALL mk_rtp_server_set_on_detach2(mk_rtp_server ctx, on_mk_rtp_server_detach cb, void *user_data, on_user_data_free user_data_free){
RtpServer::Ptr *server = (RtpServer::Ptr *) ctx;
if (cb) {
(*server)->setOnDetach([cb, user_data]() {
cb(user_data);
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
(*server)->setOnDetach([cb, ptr]() {
cb(ptr.get());
});
} else {
(*server)->setOnDetach(nullptr);

View File

@ -20,7 +20,7 @@ using namespace mediakit;
class BufferForC : public Buffer {
public:
BufferForC(const char *data, size_t len, on_mk_buffer_free cb, void *user_data) {
BufferForC(const char *data, size_t len, on_mk_buffer_free cb, std::shared_ptr<void> user_data) {
if (len <= 0) {
len = strlen(data);
}
@ -36,11 +36,11 @@ public:
_data = (char *) data;
_size = len;
_cb = cb;
_user_data = user_data;
_user_data = std::move(user_data);
}
~BufferForC() override {
_cb(_user_data, _data);
_cb(_user_data.get(), _data);
}
char *data() const override {
@ -55,12 +55,17 @@ private:
char *_data;
size_t _size;
on_mk_buffer_free _cb;
void *_user_data;
std::shared_ptr<void> _user_data;
};
API_EXPORT mk_buffer API_CALL mk_buffer_from_char(const char *data, size_t len, on_mk_buffer_free cb, void *user_data) {
return mk_buffer_from_char2(data, len, cb, user_data, nullptr);
}
API_EXPORT mk_buffer API_CALL mk_buffer_from_char2(const char *data, size_t len, on_mk_buffer_free cb, void *user_data, on_user_data_free user_data_free) {
assert(data);
return new Buffer::Ptr(std::make_shared<BufferForC>(data, len, cb, user_data));
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
return new Buffer::Ptr(std::make_shared<BufferForC>(data, len, cb, std::move(ptr)));
}
API_EXPORT mk_buffer API_CALL mk_buffer_ref(mk_buffer buffer) {
@ -200,16 +205,21 @@ void stopAllTcpServer(){
CLEAR_ARR(s_tcp_server);
}
API_EXPORT void API_CALL mk_tcp_session_set_user_data(mk_tcp_session session,void *user_data){
API_EXPORT void API_CALL mk_tcp_session_set_user_data(mk_tcp_session session, void *user_data) {
mk_tcp_session_set_user_data2(session, user_data, nullptr);
}
API_EXPORT void API_CALL mk_tcp_session_set_user_data2(mk_tcp_session session, void *user_data, on_user_data_free user_data_free) {
assert(session);
SessionForC *obj = (SessionForC *)session;
obj->_user_data = user_data;
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
obj->_user_data = std::move(ptr);
}
API_EXPORT void* API_CALL mk_tcp_session_get_user_data(mk_tcp_session session){
assert(session);
SessionForC *obj = (SessionForC *)session;
return obj->_user_data;
return obj->_user_data.get();
}
API_EXPORT void API_CALL mk_tcp_server_events_listen(const mk_tcp_session_events *events){
@ -364,13 +374,18 @@ API_EXPORT void API_CALL mk_tcp_client_send_safe(mk_tcp_client ctx, const char *
}
API_EXPORT void API_CALL mk_tcp_client_set_user_data(mk_tcp_client ctx,void *user_data){
mk_tcp_client_set_user_data2(ctx, user_data, nullptr);
}
API_EXPORT void API_CALL mk_tcp_client_set_user_data2(mk_tcp_client ctx, void *user_data, on_user_data_free user_data_free) {
assert(ctx);
TcpClientForC::Ptr *client = (TcpClientForC::Ptr *)ctx;
(*client)->_user_data = user_data;
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
(*client)->_user_data = std::move(ptr);
}
API_EXPORT void* API_CALL mk_tcp_client_get_user_data(mk_tcp_client ctx){
assert(ctx);
TcpClientForC::Ptr *client = (TcpClientForC::Ptr *)ctx;
return (*client)->_user_data;
return (*client)->_user_data.get();
}

View File

@ -25,7 +25,7 @@ public:
void onManager() override;
void onConnect(const toolkit::SockException &ex) override;
void setClient(mk_tcp_client client);
void *_user_data;
std::shared_ptr<void> _user_data;
private:
mk_tcp_client_events _events;
mk_tcp_client _client;
@ -38,7 +38,7 @@ public:
void onRecv(const toolkit::Buffer::Ptr &buffer) override ;
void onError(const toolkit::SockException &err) override;
void onManager() override;
void *_user_data;
std::shared_ptr<void> _user_data;
uint16_t _local_port;
};

View File

@ -44,11 +44,23 @@ API_EXPORT void API_CALL mk_async_do(mk_thread ctx,on_mk_async cb, void *user_da
});
}
API_EXPORT void API_CALL mk_async_do2(mk_thread ctx, on_mk_async cb, void *user_data, on_user_data_free user_data_free){
    // Queue cb for execution on the poller thread. user_data is kept alive by a
    // shared_ptr whose deleter is user_data_free (or a no-op when none is given).
    assert(ctx && cb);
    auto *poller = (EventPoller *)ctx;
    std::shared_ptr<void> holder;
    if (user_data_free) {
        holder = std::shared_ptr<void>(user_data, user_data_free);
    } else {
        holder = std::shared_ptr<void>(user_data, [](void *) {});
    }
    poller->async([cb, holder]() {
        cb(holder.get());
    });
}
API_EXPORT void API_CALL mk_async_do_delay(mk_thread ctx, size_t ms, on_mk_async cb, void *user_data) {
mk_async_do_delay2(ctx, ms, cb, user_data, nullptr);
}
API_EXPORT void API_CALL mk_async_do_delay2(mk_thread ctx, size_t ms, on_mk_async cb, void *user_data, on_user_data_free user_data_free){
assert(ctx && cb && ms);
EventPoller *poller = (EventPoller *) ctx;
poller->doDelayTask(ms, [cb, user_data]() {
cb(user_data);
EventPoller *poller = (EventPoller *)ctx;
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
poller->doDelayTask(ms, [cb, ptr]() {
cb(ptr.get());
return 0;
});
}
@ -56,28 +68,26 @@ API_EXPORT void API_CALL mk_async_do_delay(mk_thread ctx, size_t ms, on_mk_async
API_EXPORT void API_CALL mk_sync_do(mk_thread ctx,on_mk_async cb, void *user_data){
assert(ctx && cb);
EventPoller *poller = (EventPoller *)ctx;
poller->sync([cb,user_data](){
cb(user_data);
});
poller->sync([cb, user_data]() { cb(user_data); });
}
class TimerForC : public std::enable_shared_from_this<TimerForC>{
public:
using Ptr = std::shared_ptr<TimerForC>;
TimerForC(on_mk_timer cb, void *user_data){
TimerForC(on_mk_timer cb, std::shared_ptr<void> user_data) {
_cb = cb;
_user_data = user_data;
_user_data = std::move(user_data);
}
~TimerForC(){}
~TimerForC() = default;
uint64_t operator()(){
lock_guard<recursive_mutex> lck(_mxt);
if(!_cb){
return 0;
}
return _cb(_user_data);
return _cb(_user_data.get());
}
void cancel(){
@ -98,15 +108,20 @@ public:
}
private:
on_mk_timer _cb = nullptr;
void *_user_data = nullptr;
std::shared_ptr<void> _user_data;
recursive_mutex _mxt;
EventPoller::DelayTask::Ptr _task;
};
API_EXPORT mk_timer API_CALL mk_timer_create(mk_thread ctx,uint64_t delay_ms,on_mk_timer cb, void *user_data){
API_EXPORT mk_timer API_CALL mk_timer_create(mk_thread ctx, uint64_t delay_ms, on_mk_timer cb, void *user_data) {
return mk_timer_create2(ctx, delay_ms, cb, user_data, nullptr);
}
API_EXPORT mk_timer API_CALL mk_timer_create2(mk_thread ctx, uint64_t delay_ms, on_mk_timer cb, void *user_data, on_user_data_free user_data_free){
assert(ctx && cb);
EventPoller *poller = (EventPoller *)ctx;
TimerForC::Ptr *ret = new TimerForC::Ptr(new TimerForC(cb, user_data));
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
TimerForC::Ptr *ret = new TimerForC::Ptr(new TimerForC(cb, ptr));
(*ret)->start(delay_ms,*poller);
return ret;
}

View File

@ -114,9 +114,14 @@ API_EXPORT int API_CALL mk_track_bit_rate(mk_track track) {
}
API_EXPORT void *API_CALL mk_track_add_delegate(mk_track track, on_mk_frame_out cb, void *user_data) {
    // Compatibility wrapper: same as mk_track_add_delegate2 but without a
    // user_data free callback (caller retains ownership of user_data).
    return mk_track_add_delegate2(track, cb, user_data, nullptr);
}
API_EXPORT void *API_CALL mk_track_add_delegate2(mk_track track, on_mk_frame_out cb, void *user_data, on_user_data_free user_data_free){
assert(track && cb);
return (*((Track::Ptr *) track))->addDelegate([cb, user_data](const Frame::Ptr &frame) {
cb(user_data, (mk_frame) &frame);
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
return (*((Track::Ptr *) track))->addDelegate([cb, ptr](const Frame::Ptr &frame) {
cb(ptr.get(), (mk_frame) &frame);
return true;
});
}

View File

@ -13,18 +13,33 @@
using namespace mediakit;
std::vector<std::string> toCodecList(const char *codec_name_list[]) {
    // Convert a NULL-terminated array of C strings into a vector of std::string.
    std::vector<std::string> ret;
    for (auto item = codec_name_list; *item != nullptr; ++item) {
        ret.emplace_back(*item);
    }
    return ret;
}
#ifdef ENABLE_FFMPEG
#include "Codec/Transcode.h"
API_EXPORT mk_decoder API_CALL mk_decoder_create(mk_track track, int thread_num) {
assert(track);
return new FFmpegDecoder(*((Track::Ptr *)track), thread_num);
return new FFmpegDecoder(*((Track::Ptr *) track), thread_num);
}
API_EXPORT mk_decoder API_CALL mk_decoder_create2(mk_track track, int thread_num, const char *codec_name_list[]) {
assert(track && codec_name_list);
return new FFmpegDecoder(*((Track::Ptr *) track), thread_num, toCodecList(codec_name_list));
}
API_EXPORT void API_CALL mk_decoder_release(mk_decoder ctx, int flush_frame) {
assert(ctx);
auto decoder = (FFmpegDecoder *)ctx;
auto decoder = (FFmpegDecoder *) ctx;
if (flush_frame) {
decoder->stopThread(false);
}
@ -33,94 +48,147 @@ API_EXPORT void API_CALL mk_decoder_release(mk_decoder ctx, int flush_frame) {
API_EXPORT void API_CALL mk_decoder_decode(mk_decoder ctx, mk_frame frame, int async, int enable_merge) {
assert(ctx && frame);
((FFmpegDecoder *)ctx)->inputFrame(*((Frame::Ptr *)frame), false, async, enable_merge);
((FFmpegDecoder *) ctx)->inputFrame(*((Frame::Ptr *) frame), false, async, enable_merge);
}
API_EXPORT void API_CALL mk_decoder_set_max_async_frame_size(mk_decoder ctx, size_t size) {
assert(ctx && size);
((FFmpegDecoder *)ctx)->setMaxTaskSize(size);
((FFmpegDecoder *) ctx)->setMaxTaskSize(size);
}
API_EXPORT void API_CALL mk_decoder_set_cb(mk_decoder ctx, on_mk_decode cb, void *user_data) {
mk_decoder_set_cb2(ctx, cb, user_data, nullptr);
}
API_EXPORT void API_CALL mk_decoder_set_cb2(mk_decoder ctx, on_mk_decode cb, void *user_data, on_user_data_free user_data_free){
assert(ctx && cb);
((FFmpegDecoder *)ctx)->setOnDecode([cb, user_data](const FFmpegFrame::Ptr &pix_frame) {
cb(user_data, (mk_frame_pix)&pix_frame);
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
((FFmpegDecoder *) ctx)->setOnDecode([cb, ptr](const FFmpegFrame::Ptr &pix_frame) {
cb(ptr.get(), (mk_frame_pix) &pix_frame);
});
}
API_EXPORT const AVCodecContext *API_CALL mk_decoder_get_context(mk_decoder ctx) {
assert(ctx);
return ((FFmpegDecoder *)ctx)->getContext();
return ((FFmpegDecoder *) ctx)->getContext();
}
/////////////////////////////////////////////////////////////////////////////////////////////
API_EXPORT mk_frame_pix API_CALL mk_frame_pix_ref(mk_frame_pix frame) {
assert(frame);
return new FFmpegFrame::Ptr(*(FFmpegFrame::Ptr *)frame);
return new FFmpegFrame::Ptr(*(FFmpegFrame::Ptr *) frame);
}
API_EXPORT mk_frame_pix API_CALL mk_frame_pix_from_av_frame(AVFrame *frame) {
assert(frame);
return new FFmpegFrame::Ptr(std::make_shared<FFmpegFrame>(
std::shared_ptr<AVFrame>(av_frame_clone(frame), [](AVFrame *frame) { av_frame_free(&frame); })));
return new FFmpegFrame::Ptr(std::make_shared<FFmpegFrame>(std::shared_ptr<AVFrame>(av_frame_clone(frame), [](AVFrame *frame){
av_frame_free(&frame);
})));
}
API_EXPORT mk_frame_pix API_CALL mk_frame_pix_from_buffer(mk_buffer plane_data[], int line_size[], int plane) {
    // Wrap caller-provided plane buffers in an AVFrame without copying pixels.
    // Each plane buffer is ref-counted (mk_buffer_ref) so it stays alive for the
    // full lifetime of the returned frame; the refs are released in the custom
    // deleter below together with the FFmpegFrame itself.
    assert(plane <= AV_NUM_DATA_POINTERS);
    std::shared_ptr<AVFrame> frame(av_frame_alloc(), [](AVFrame *ptr) {
        av_frame_free(&ptr);
    });
    std::vector<mk_buffer> buffer_array;
    for (auto i = 0; i < plane; ++i) {
        auto buffer = mk_buffer_ref(plane_data[i]);
        frame->data[i] = (uint8_t *) mk_buffer_get_data(buffer);
        frame->linesize[i] = line_size[i];
        buffer_array.emplace_back(buffer);
    }
    // NOTE(review): width/height/format are not set here; presumably the caller
    // fills them via the mk_set_av_frame_* accessors - confirm against callers.
    return new FFmpegFrame::Ptr(new FFmpegFrame(std::move(frame)), [buffer_array](FFmpegFrame *frame) {
        for (auto &buffer : buffer_array) {
            mk_buffer_unref(buffer);
        }
        delete frame;
    });
}
API_EXPORT void API_CALL mk_frame_pix_unref(mk_frame_pix frame) {
assert(frame);
delete (FFmpegFrame::Ptr *)frame;
delete (FFmpegFrame::Ptr *) frame;
}
API_EXPORT AVFrame *API_CALL mk_frame_pix_get_av_frame(mk_frame_pix frame) {
assert(frame);
return (*(FFmpegFrame::Ptr *)frame)->get();
return (*(FFmpegFrame::Ptr *) frame)->get();
}
//////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////
API_EXPORT mk_swscale mk_swscale_create(int output, int width, int height) {
return new FFmpegSws((AVPixelFormat)output, width, height);
return new FFmpegSws((AVPixelFormat) output, width, height);
}
API_EXPORT void mk_swscale_release(mk_swscale ctx) {
delete (FFmpegSws *)ctx;
delete (FFmpegSws *) ctx;
}
API_EXPORT int mk_swscale_input_frame(mk_swscale ctx, mk_frame_pix frame, uint8_t *data) {
return ((FFmpegSws *)ctx)->inputFrame(*(FFmpegFrame::Ptr *)frame, data);
return ((FFmpegSws *) ctx)->inputFrame(*(FFmpegFrame::Ptr *) frame, data);
}
API_EXPORT mk_frame_pix mk_swscale_input_frame2(mk_swscale ctx, mk_frame_pix frame) {
return new FFmpegFrame::Ptr(((FFmpegSws *)ctx)->inputFrame(*(FFmpegFrame::Ptr *)frame));
API_EXPORT mk_frame_pix mk_swscale_input_frame2(mk_swscale ctx, mk_frame_pix frame){
return new FFmpegFrame::Ptr(((FFmpegSws *) ctx)->inputFrame(*(FFmpegFrame::Ptr *) frame));
}
API_EXPORT uint8_t **API_CALL mk_get_av_frame_data(AVFrame *frame) {
    // Expose the AVFrame plane data pointer array to C callers.
    return frame->data;
}

API_EXPORT void API_CALL mk_set_av_frame_data(AVFrame *frame, uint8_t *data, int plane) {
    // Set the data pointer of a single plane; no ownership is transferred.
    frame->data[plane] = data;
}

API_EXPORT int *API_CALL mk_get_av_frame_line_size(AVFrame *frame) {
    // Expose the per-plane line size (stride) array.
    return frame->linesize;
}
API_EXPORT int64_t API_CALL mk_get_av_frame_dts(AVFrame *frame) {
API_EXPORT void API_CALL mk_set_av_frame_line_size(AVFrame *frame, int line_size, int plane) {
frame->linesize[plane] = line_size;
}
API_EXPORT int64_t API_CALL mk_get_av_frame_dts(AVFrame *frame) {
return frame->pkt_dts;
}
API_EXPORT int64_t API_CALL mk_get_av_frame_pts(AVFrame *frame) {
API_EXPORT void API_CALL mk_set_av_frame_dts(AVFrame *frame, int64_t dts) {
frame->pkt_dts = dts;
}
API_EXPORT int64_t API_CALL mk_get_av_frame_pts(AVFrame *frame) {
return frame->pts;
}
API_EXPORT void API_CALL mk_set_av_frame_pts(AVFrame *frame, int64_t pts) {
frame->pts = pts;
}
API_EXPORT int API_CALL mk_get_av_frame_width(AVFrame *frame) {
    // Read the frame width in pixels.
    return frame->width;
}

API_EXPORT void API_CALL mk_set_av_frame_width(AVFrame *frame, int width) {
    // Set the frame width in pixels.
    frame->width = width;
}

API_EXPORT int API_CALL mk_get_av_frame_height(AVFrame *frame) {
    // Read the frame height in pixels.
    return frame->height;
}

API_EXPORT void API_CALL mk_set_av_frame_height(AVFrame *frame, int height) {
    // Set the frame height in pixels.
    frame->height = height;
}

API_EXPORT int API_CALL mk_get_av_frame_format(AVFrame *frame) {
    // Read the frame format (presumably an AVPixelFormat value stored as int).
    return frame->format;
}

API_EXPORT void API_CALL mk_set_av_frame_format(AVFrame *frame, int format) {
    // Set the frame format (presumably an AVPixelFormat value stored as int).
    frame->format = format;
}
#endif //ENABLE_FFMPEG

View File

@ -13,15 +13,22 @@
#include "mk_util.h"
#include "Util/util.h"
#include "Util/mini.h"
#include "Util/logger.h"
#include "Common/config.h"
using namespace std;
using namespace toolkit;
using namespace mediakit;
#ifndef _WIN32
#define _strdup strdup
#endif
API_EXPORT void API_CALL mk_free(void *ptr) {
    // Free memory returned by mk_* APIs that allocate with malloc/_strdup
    // (e.g. mk_util_get_exe_path), ensuring the same C runtime releases it.
    free(ptr);
}
API_EXPORT char* API_CALL mk_util_get_exe_path(){
return _strdup(exePath().data());
}
@ -47,6 +54,88 @@ API_EXPORT char* API_CALL mk_util_hex_dump(const void *buf, int len){
return _strdup(hexdump(buf,len).data());
}
API_EXPORT mk_ini API_CALL mk_ini_create() {
return new mINI;
}
API_EXPORT mk_ini API_CALL mk_ini_default() {
return &(mINI::Instance());
}
// Fire the config hot-reload broadcast, but only when the modified ini is the global instance.
static void emit_ini_file_reload(mk_ini ini) {
    if (ini == mk_ini_default()) {
        // Broadcast config file hot-reload
        NoticeCenter::Instance().emitEvent(Broadcast::kBroadcastReloadConfig);
    }
}
// Parse ini content from an in-memory string; triggers a config-reload
// broadcast when `ini` is the global instance.
API_EXPORT void API_CALL mk_ini_load_string(mk_ini ini, const char *str) {
    assert(str);
    auto ptr = (mINI *)ini;
    ptr->parse(str);
    emit_ini_file_reload(ini);
}
// Parse ini content from a file path; triggers a config-reload broadcast
// when `ini` is the global instance.
API_EXPORT void API_CALL mk_ini_load_file(mk_ini ini, const char *file) {
    assert(file);
    auto ptr = (mINI *)ini;
    ptr->parseFile(file);
    emit_ini_file_reload(ini);
}
// Destroy an ini object created by mk_ini_create(); must not be called on mk_ini_default().
API_EXPORT void API_CALL mk_ini_release(mk_ini ini) {
    assert(ini);
    delete (mINI *)ini;
}
// Set (or add) a string option; triggers a config-reload broadcast on the global instance.
API_EXPORT void API_CALL mk_ini_set_option(mk_ini ini, const char *key, const char *value) {
    assert(ini && key && value);
    auto ptr = (mINI *)ini;
    (*ptr)[key] = value;
    emit_ini_file_reload(ini);
}
// Set (or add) an integer option; triggers a config-reload broadcast on the global instance.
API_EXPORT void API_CALL mk_ini_set_option_int(mk_ini ini, const char *key, int value) {
    assert(ini && key);
    auto ptr = (mINI *)ini;
    (*ptr)[key] = value;
    emit_ini_file_reload(ini);
}
// Look up an option; returns NULL when the key is absent. The returned pointer
// refers to the ini's internal storage and is invalidated by later modifications.
API_EXPORT const char *API_CALL mk_ini_get_option(mk_ini ini, const char *key) {
    assert(ini && key);
    auto ptr = (mINI *)ini;
    auto it = ptr->find(key);
    if (it == ptr->end()) {
        return nullptr;
    }
    return it->second.data();
}
// Delete an option. Returns 1 (true) when the key existed and was removed,
// 0 (false) otherwise; only a successful delete fires the reload broadcast.
API_EXPORT int API_CALL mk_ini_del_option(mk_ini ini, const char *key) {
    assert(ini && key);
    auto ptr = (mINI *)ini;
    auto it = ptr->find(key);
    if (it == ptr->end()) {
        return false;
    }
    ptr->erase(it);
    emit_ini_file_reload(ini);
    return true;
}
// Serialize the ini to a heap-allocated C string; caller releases it with mk_free().
API_EXPORT char *API_CALL mk_ini_dump_string(mk_ini ini) {
    assert(ini);
    auto ptr = (mINI *)ini;
    return _strdup(ptr->dump().data());
}
// Serialize the ini to a file at `file`.
API_EXPORT void API_CALL mk_ini_dump_file(mk_ini ini, const char *file) {
    assert(ini && file);
    auto ptr = (mINI *)ini;
    ptr->dumpFile(file);
}
API_EXPORT void API_CALL mk_log_printf(int level, const char *file, const char *function, int line, const char *fmt, ...) {
va_list ap;
va_start(ap, fmt);

View File

@ -36,9 +36,108 @@ static void on_h264_frame(void *user_data, mk_h264_splitter splitter, const char
mk_frame_unref(frame);
}
// Escape a webrtc answer sdp for embedding in a JSON string value:
// '\r', '\n' and '\t' are rewritten as the two-character sequences
// "\r", "\n", "\t"; everything else (including the NUL terminator) is
// copied through. Returns a heap buffer the caller must free().
static char *escape_string(const char *ptr) {
    // Worst case every input character expands to two output characters,
    // plus the terminating NUL. The original allocated only 2 * strlen(ptr),
    // which overflows by one byte when every character needs escaping and
    // calls malloc(0) for an empty input.
    char *escaped = (char *)malloc(2 * strlen(ptr) + 1);
    char *ptr_escaped = escaped;
    while (1) {
        switch (*ptr) {
            case '\r': {
                *(ptr_escaped++) = '\\';
                *(ptr_escaped++) = 'r';
                break;
            }
            case '\n': {
                *(ptr_escaped++) = '\\';
                *(ptr_escaped++) = 'n';
                break;
            }
            case '\t': {
                *(ptr_escaped++) = '\\';
                *(ptr_escaped++) = 't';
                break;
            }
            default: {
                // Copy the byte; when it is the NUL terminator we are done.
                *(ptr_escaped++) = *ptr;
                if (!*ptr) {
                    return escaped;
                }
                break;
            }
        }
        ++ptr;
    }
}
// Async callback for mk_webrtc_get_answer_sdp(): wraps the answer sdp
// (json-escaped first) or the error message into a json body and sends it
// through the cloned http invoker carried in user_data.
static void on_mk_webrtc_get_answer_sdp_func(void *user_data, const char *answer, const char *err) {
    const char *response_header[] = { "Content-Type", "application/json", "Access-Control-Allow-Origin", "*" , NULL};
    if (answer) {
        // escape_string() returns a heap copy; freed at the end.
        answer = escape_string(answer);
    }
    // Size the buffer for the payload plus the fixed json wrapper. The original
    // used exactly 2 * strlen(answer), which snprintf silently truncated once
    // the wrapper text pushed the total over, producing invalid json for short
    // sdps; it also truncated error messages longer than 1024 bytes.
    size_t len = 1024 + (answer ? strlen(answer) : 0) + (err ? strlen(err) : 0);
    char *response_content = (char *)malloc(len);
    if (answer) {
        snprintf(response_content, len,
                 "{"
                 "\"sdp\":\"%s\","
                 "\"type\":\"answer\","
                 "\"code\":0"
                 "}",
                 answer);
    } else {
        snprintf(response_content, len,
                 "{"
                 "\"msg\":\"%s\","
                 "\"code\":-1"
                 "}",
                 err);
    }
    mk_http_response_invoker_do_string(user_data, 200, response_header, response_content);
    mk_http_response_invoker_clone_release(user_data);
    free(response_content);
    if (answer) {
        free((void *)answer);
    }
}
/**
 * http api request broadcast (GET/POST)
 * @param parser   http request content object
 * @param invoker  invoker used to send back the http reply
 * @param consumed set to 1 when this handler consumed the request, 0 to fall through
 * @param sender   info about the http client
 */
// Test url: http://127.0.0.1/api/test
void API_CALL on_mk_http_request(const mk_parser parser,
                                 const mk_http_response_invoker invoker,
                                 int *consumed,
                                 const mk_sock_info sender) {
    const char *url = mk_parser_get_url(parser);
    *consumed = 1;
    if (strcmp(url, "/index/api/webrtc") == 0) {
        // Intercept api: /index/api/webrtc — build an internal rtc:// url from the
        // request's Host header and url params, then ask for the answer sdp asynchronously.
        char rtc_url[1024];
        snprintf(rtc_url, sizeof(rtc_url), "rtc://%s/%s/%s?%s", mk_parser_get_header(parser, "Host"),
                 mk_parser_get_url_param(parser, "app"), mk_parser_get_url_param(parser, "stream"),
                 mk_parser_get_url_params(parser));
        // Clone the invoker so the reply can be sent later from the async callback.
        mk_webrtc_get_answer_sdp(mk_http_response_invoker_clone(invoker), on_mk_webrtc_get_answer_sdp_func,
                                 mk_parser_get_url_param(parser, "type"), mk_parser_get_content(parser, NULL), rtc_url);
    } else {
        // Not handled here; let the default http server logic serve it.
        *consumed = 0;
        return;
    }
}
int main(int argc, char *argv[]) {
char *ini_path = mk_util_get_exe_dir("config.ini");
mk_config config = {
.ini = NULL,
.ini = ini_path,
.ini_is_path = 1,
.log_level = 0,
.log_mask = LOG_CONSOLE,
@ -50,11 +149,29 @@ int main(int argc, char *argv[]) {
.thread_num = 0
};
mk_env_init(&config);
mk_free(ini_path);
mk_http_server_start(80, 0);
mk_rtsp_server_start(554, 0);
mk_rtmp_server_start(1935, 0);
mk_rtc_server_start(atoi(mk_get_option("rtc.port")));
signal(SIGINT, s_on_exit);// 设置退出信号
mk_events events = {
.on_mk_media_changed = NULL,
.on_mk_media_publish = NULL,
.on_mk_media_play = NULL,
.on_mk_media_not_found = NULL,
.on_mk_media_no_reader = NULL,
.on_mk_http_request = on_mk_http_request,
.on_mk_http_access = NULL,
.on_mk_http_before_access = NULL,
.on_mk_rtsp_get_realm = NULL,
.on_mk_rtsp_auth = NULL,
.on_mk_record_mp4 = NULL,
.on_mk_shell_login = NULL,
.on_mk_flow_report = NULL
};
mk_events_listen(&events);
FILE *fp = fopen(argv[1], "rb");
if (!fp) {
@ -65,14 +182,15 @@ int main(int argc, char *argv[]) {
mk_media media = mk_media_create("__defaultVhost__", "live", "test", 0, 0, 0);
//h264的codec
//mk_media_init_video(media, 0, 0, 0, 0, 2 * 104 * 1024);
codec_args v_args={0};
mk_track v_track = mk_track_create(MKCodecH264,&v_args);
mk_media_init_track(media,v_track);
codec_args v_args = {0};
mk_track v_track = mk_track_create(MKCodecH264, &v_args);
mk_media_init_track(media, v_track);
mk_media_init_complete(media);
mk_track_unref(v_track);
//创建h264分帧器
mk_h264_splitter splitter = mk_h264_splitter_create(on_h264_frame, media);
mk_h264_splitter splitter = mk_h264_splitter_create(on_h264_frame, media, 0);
signal(SIGINT, s_on_exit);// 设置退出信号
char buf[1024];
while (!exit_flag) {

View File

@ -16,7 +16,7 @@ typedef struct {
mk_http_requester requester;
} Context;
void API_CALL on_requester_complete(void *user_data, int code, const char *err_msg){
static void API_CALL on_requester_complete(void *user_data, int code, const char *err_msg){
Context *ctx = (Context *)user_data;
log_debug("code: %d %s", code, err_msg);
size_t res_len = 0;

View File

@ -293,15 +293,8 @@ h264_pt=98
h265_pt=99
#rtp ps 负载的pt
ps_pt=96
#rtp ts 负载的pt
ts_pt=33
#rtp opus 负载的pt
opus_pt=100
#rtp g711u 负载的pt
g711u_pt=0
#rtp g711a 负载的pt
g711a_pt=8
[rtc]
#rtc播放推流、播放超时时间

View File

@ -1404,6 +1404,12 @@
"value": "0",
"description": "是否指定收流的rtp ssrc, 十进制数字不指定或指定0时则不过滤rtp非必选参数",
"disabled": true
},
{
"key": "only_audio",
"value": "1",
"description": "是否为单音频track用于语音对讲",
"disabled": true
}
]
}
@ -1661,6 +1667,12 @@
"value": "0",
"description": "udp方式推流时是否开启rtcp发送和rtcp接收超时判断开启后(默认关闭)如果接收rr rtcp超时将导致主动停止rtp发送",
"disabled": true
},
{
"key": "recv_stream_id",
"value": "",
"description": "发送rtp同时接收一般用于双向语音对讲, 如果不为空说明开启接收值为接收流的id",
"disabled": true
}
]
}
@ -1737,6 +1749,18 @@
"value": "1",
"description": "rtp es方式打包时是否只打包音频该参数非必选参数",
"disabled": true
},
{
"key": "recv_stream_id",
"value": "",
"description": "发送rtp同时接收一般用于双向语音对讲, 如果不为空说明开启接收值为接收流的id",
"disabled": true
},
{
"key": "close_delay_ms",
"value": "5000",
"description": "等待tcp连接超时时间单位毫秒默认5000毫秒",
"disabled": true
}
]
}

View File

@ -365,6 +365,7 @@ Value makeMediaSourceJson(MediaSource &media){
}
obj["loss"] = loss;
}
obj["frames"] = track->getFrames();
switch(codec_type){
case TrackAudio : {
auto audio_track = dynamic_pointer_cast<AudioTrack>(track);
@ -377,7 +378,16 @@ Value makeMediaSourceJson(MediaSource &media){
auto video_track = dynamic_pointer_cast<VideoTrack>(track);
obj["width"] = video_track->getVideoWidth();
obj["height"] = video_track->getVideoHeight();
obj["fps"] = round(video_track->getVideoFps());
obj["key_frames"] = video_track->getVideoKeyFrames();
int gop_size = video_track->getVideoGopSize();
int gop_interval_ms = video_track->getVideoGopInterval();
float fps = video_track->getVideoFps();
if (fps <= 1) {
fps = gop_size * 1000.0 / gop_interval_ms;
}
obj["fps"] = round(fps);
obj["gop_size"] = gop_size;
obj["gop_interval_ms"] = gop_interval_ms;
break;
}
default:
@ -389,7 +399,7 @@ Value makeMediaSourceJson(MediaSource &media){
}
#if defined(ENABLE_RTPPROXY)
uint16_t openRtpServer(uint16_t local_port, const string &stream_id, int tcp_mode, const string &local_ip, bool re_use_port, uint32_t ssrc) {
uint16_t openRtpServer(uint16_t local_port, const string &stream_id, int tcp_mode, const string &local_ip, bool re_use_port, uint32_t ssrc, bool only_audio) {
lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
if (s_rtpServerMap.find(stream_id) != s_rtpServerMap.end()) {
//为了防止RtpProcess所有权限混乱的问题不允许重复添加相同的stream_id
@ -397,7 +407,7 @@ uint16_t openRtpServer(uint16_t local_port, const string &stream_id, int tcp_mod
}
RtpServer::Ptr server = std::make_shared<RtpServer>();
server->start(local_port, stream_id, (RtpServer::TcpMode)tcp_mode, local_ip.c_str(), re_use_port, ssrc);
server->start(local_port, stream_id, (RtpServer::TcpMode)tcp_mode, local_ip.c_str(), re_use_port, ssrc, only_audio);
server->setOnDetach([stream_id]() {
//设置rtp超时移除事件
lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
@ -1138,7 +1148,7 @@ void installWebApi() {
tcp_mode = 1;
}
auto port = openRtpServer(allArgs["port"], stream_id, tcp_mode, "::", allArgs["re_use_port"].as<bool>(),
allArgs["ssrc"].as<uint32_t>());
allArgs["ssrc"].as<uint32_t>(), allArgs["only_audio"].as<bool>());
if (port == 0) {
throw InvalidArgsException("该stream_id已存在");
}
@ -1203,6 +1213,7 @@ void installWebApi() {
args.use_ps = allArgs["use_ps"].empty() ? true : allArgs["use_ps"].as<bool>();
args.only_audio = allArgs["only_audio"].as<bool>();
args.udp_rtcp_timeout = allArgs["udp_rtcp_timeout"];
args.recv_stream_id = allArgs["recv_stream_id"];
TraceL << "startSendRtp, pt " << int(args.pt) << " ps " << args.use_ps << " audio " << args.only_audio;
src->getOwnerPoller()->async([=]() mutable {
@ -1234,6 +1245,9 @@ void installWebApi() {
args.pt = allArgs["pt"].empty() ? 96 : allArgs["pt"].as<int>();
args.use_ps = allArgs["use_ps"].empty() ? true : allArgs["use_ps"].as<bool>();
args.only_audio = allArgs["only_audio"].as<bool>();
args.recv_stream_id = allArgs["recv_stream_id"];
//tcp被动服务器等待链接超时时间
args.tcp_passive_close_delay_ms = allArgs["close_delay_ms"];
TraceL << "startSendRtpPassive, pt " << int(args.pt) << " ps " << args.use_ps << " audio " << args.only_audio;
src->getOwnerPoller()->async([=]() mutable {

View File

@ -54,13 +54,13 @@ static void on_ffmpeg_log(void *ctx, int level, const char *fmt, va_list args) {
}
LogLevel lev;
switch (level) {
case AV_LOG_FATAL:
case AV_LOG_FATAL: lev = LError; break;
case AV_LOG_ERROR: lev = LError; break;
case AV_LOG_WARNING: lev = LWarn; break;
case AV_LOG_INFO: lev = LInfo; break;
case AV_LOG_VERBOSE:
case AV_LOG_VERBOSE: lev = LDebug; break;
case AV_LOG_DEBUG: lev = LDebug; break;
case AV_LOG_TRACE:
case AV_LOG_TRACE: lev = LTrace; break;
default: lev = LTrace; break;
}
LoggerWrapper::printLogV(::toolkit::getLogger(), lev, __FILE__, ctx ? av_default_item_name(ctx) : "NULL", level, fmt, args);
@ -313,13 +313,31 @@ static inline const AVCodec *getCodec(const std::initializer_list<CodecName> &co
return ret;
}
FFmpegDecoder::FFmpegDecoder(const Track::Ptr &track, int thread_num) {
// Probe a list of ffmpeg codec names in order and return the first codec
// (decoder by default, encoder when decoder == false) that is available,
// or nullptr when none of the names resolve.
template <bool decoder = true>
static inline const AVCodec *getCodecByName(const std::vector<std::string> &codec_list) {
    for (const auto &name : codec_list) {
        if (auto *found = getCodec_l<decoder>(name.data())) {
            return found;
        }
    }
    return nullptr;
}
FFmpegDecoder::FFmpegDecoder(const Track::Ptr &track, int thread_num, const std::vector<std::string> &codec_name) {
setupFFmpeg();
const AVCodec *codec = nullptr;
const AVCodec *codec_default = nullptr;
if (!codec_name.empty()) {
codec = getCodecByName(codec_name);
}
switch (track->getCodecId()) {
case CodecH264:
codec_default = getCodec({AV_CODEC_ID_H264});
if (codec && codec->id == AV_CODEC_ID_H264) {
break;
}
if (checkIfSupportedNvidia()) {
codec = getCodec({{"libopenh264"}, {AV_CODEC_ID_H264}, {"h264_qsv"}, {"h264_videotoolbox"}, {"h264_cuvid"}, {"h264_nvmpi"}});
} else {
@ -328,6 +346,9 @@ FFmpegDecoder::FFmpegDecoder(const Track::Ptr &track, int thread_num) {
break;
case CodecH265:
codec_default = getCodec({AV_CODEC_ID_HEVC});
if (codec && codec->id == AV_CODEC_ID_HEVC) {
break;
}
if (checkIfSupportedNvidia()) {
codec = getCodec({{AV_CODEC_ID_HEVC}, {"hevc_qsv"}, {"hevc_videotoolbox"}, {"hevc_cuvid"}, {"hevc_nvmpi"}});
} else {
@ -335,27 +356,51 @@ FFmpegDecoder::FFmpegDecoder(const Track::Ptr &track, int thread_num) {
}
break;
case CodecAAC:
if (codec && codec->id == AV_CODEC_ID_AAC) {
break;
}
codec = getCodec({AV_CODEC_ID_AAC});
break;
case CodecG711A:
if (codec && codec->id == AV_CODEC_ID_PCM_ALAW) {
break;
}
codec = getCodec({AV_CODEC_ID_PCM_ALAW});
break;
case CodecG711U:
if (codec && codec->id == AV_CODEC_ID_PCM_MULAW) {
break;
}
codec = getCodec({AV_CODEC_ID_PCM_MULAW});
break;
case CodecOpus:
if (codec && codec->id == AV_CODEC_ID_OPUS) {
break;
}
codec = getCodec({AV_CODEC_ID_OPUS});
break;
case CodecJPEG:
if (codec && codec->id == AV_CODEC_ID_MJPEG) {
break;
}
codec = getCodec({AV_CODEC_ID_MJPEG});
break;
case CodecVP8:
if (codec && codec->id == AV_CODEC_ID_VP8) {
break;
}
codec = getCodec({AV_CODEC_ID_VP8});
break;
case CodecVP9:
if (codec && codec->id == AV_CODEC_ID_VP9) {
break;
}
codec = getCodec({AV_CODEC_ID_VP9});
break;
default:
break;
default: codec = nullptr; break;
}
codec = codec ? codec : codec_default;
if (!codec) {
throw std::runtime_error("未找到解码器");
}
@ -459,11 +504,11 @@ void FFmpegDecoder::flush() {
bool FFmpegDecoder::inputFrame_l(const Frame::Ptr &frame, bool live, bool enable_merge) {
if (_do_merger && enable_merge) {
return _merger.inputFrame(frame, [this, live](uint64_t dts, uint64_t pts, const Buffer::Ptr &buffer, bool have_idr) {
decodeFrame(buffer->data(), buffer->size(), dts, pts, live);
decodeFrame(buffer->data(), buffer->size(), dts, pts, live, have_idr);
});
}
return decodeFrame(frame->data(), frame->size(), frame->dts(), frame->pts(), live);
return decodeFrame(frame->data(), frame->size(), frame->dts(), frame->pts(), live, frame->keyFrame());
}
bool FFmpegDecoder::inputFrame(const Frame::Ptr &frame, bool live, bool async, bool enable_merge) {
@ -484,7 +529,7 @@ bool FFmpegDecoder::inputFrame(const Frame::Ptr &frame, bool live, bool async, b
});
}
bool FFmpegDecoder::decodeFrame(const char *data, size_t size, uint64_t dts, uint64_t pts, bool live) {
bool FFmpegDecoder::decodeFrame(const char *data, size_t size, uint64_t dts, uint64_t pts, bool live, bool key_frame) {
TimeTicker2(30, TraceL);
auto pkt = alloc_av_packet();
@ -492,6 +537,9 @@ bool FFmpegDecoder::decodeFrame(const char *data, size_t size, uint64_t dts, uin
pkt->size = size;
pkt->dts = dts;
pkt->pts = pts;
if (key_frame) {
pkt->flags |= AV_PKT_FLAG_KEY;
}
auto ret = avcodec_send_packet(_context.get(), pkt.get());
if (ret < 0) {
@ -673,68 +721,54 @@ FFmpegSws::~FFmpegSws() {
}
int FFmpegSws::inputFrame(const FFmpegFrame::Ptr &frame, uint8_t *data) {
TimeTicker2(30, TraceL);
if (!_target_width) {
_target_width = frame->get()->width;
}
if (!_target_height) {
_target_height = frame->get()->height;
}
AVFrame dst;
memset(&dst, 0, sizeof(dst));
av_image_fill_arrays(dst.data, dst.linesize, data, _target_format, _target_width, _target_height,1);
if (!_ctx) {
_ctx = sws_getContext(frame->get()->width, frame->get()->height, (enum AVPixelFormat) frame->get()->format,
_target_width, _target_height, _target_format, SWS_FAST_BILINEAR, NULL, NULL, NULL);
InfoL << "sws_getContext:" << av_get_pix_fmt_name((enum AVPixelFormat) frame->get()->format) << " -> "
<< av_get_pix_fmt_name(_target_format);
}
assert(_ctx);
int ret = 0;
if (0 >= (ret = sws_scale(_ctx, frame->get()->data, frame->get()->linesize, 0, frame->get()->height, dst.data,
dst.linesize))) {
WarnL << "sws_scale failed:" << ffmpeg_err(ret);
}
int ret;
inputFrame(frame, ret, data);
return ret;
}
FFmpegFrame::Ptr FFmpegSws::inputFrame(const FFmpegFrame::Ptr &frame) {
TimeTicker2(30, TraceL);
int ret;
return inputFrame(frame, ret, nullptr);
}
if (!_target_width) {
_target_width = frame->get()->width;
}
if (!_target_height) {
_target_height = frame->get()->height;
}
if (frame->get()->format == _target_format && frame->get()->width == _target_width
&& frame->get()->height == _target_height) {
FFmpegFrame::Ptr FFmpegSws::inputFrame(const FFmpegFrame::Ptr &frame, int &ret, uint8_t *data) {
ret = -1;
TimeTicker2(30, TraceL);
auto target_width = _target_width ? _target_width : frame->get()->width;
auto target_height = _target_height ? _target_height : frame->get()->height;
if (frame->get()->format == _target_format && frame->get()->width == target_width && frame->get()->height == target_height) {
//不转格式
return frame;
}
if (_ctx && (_src_width != frame->get()->width || _src_height != frame->get()->height || _src_format != (enum AVPixelFormat) frame->get()->format)) {
//输入分辨率发生变化了
sws_freeContext(_ctx);
_ctx = nullptr;
}
if (!_ctx) {
_ctx = sws_getContext(frame->get()->width, frame->get()->height, (enum AVPixelFormat) frame->get()->format,
_target_width, _target_height, _target_format,
SWS_FAST_BILINEAR, NULL, NULL, NULL);
InfoL << "sws_getContext:" << av_get_pix_fmt_name((enum AVPixelFormat) frame->get()->format) << " -> "
<< av_get_pix_fmt_name(_target_format);
_src_format = (enum AVPixelFormat) frame->get()->format;
_src_width = frame->get()->width;
_src_height = frame->get()->height;
_ctx = sws_getContext(frame->get()->width, frame->get()->height, (enum AVPixelFormat) frame->get()->format, target_width, target_height, _target_format, SWS_FAST_BILINEAR, NULL, NULL, NULL);
InfoL << "sws_getContext:" << av_get_pix_fmt_name((enum AVPixelFormat) frame->get()->format) << " -> " << av_get_pix_fmt_name(_target_format);
}
if (_ctx) {
auto out = std::make_shared<FFmpegFrame>();
if (!out->get()->data[0]) {
out->fillPicture(_target_format, _target_width, _target_height);
if (data) {
avpicture_fill((AVPicture *) out->get(), data, _target_format, target_width, target_height);
} else {
out->fillPicture(_target_format, target_width, target_height);
}
}
int ret = 0;
if (0 == (ret = sws_scale(_ctx, frame->get()->data, frame->get()->linesize, 0, frame->get()->height,
out->get()->data, out->get()->linesize))) {
if (0 >= (ret = sws_scale(_ctx, frame->get()->data, frame->get()->linesize, 0, frame->get()->height, out->get()->data, out->get()->linesize))) {
WarnL << "sws_scale failed:" << ffmpeg_err(ret);
return nullptr;
}
out->get()->format = _target_format;
out->get()->width = _target_width;
out->get()->height = _target_height;
out->get()->width = target_width;
out->get()->height = target_height;
out->get()->pkt_dts = frame->get()->pkt_dts;
out->get()->pts = frame->get()->pts;
return out;

View File

@ -120,7 +120,7 @@ public:
using Ptr = std::shared_ptr<FFmpegDecoder>;
using onDec = std::function<void(const FFmpegFrame::Ptr &)>;
FFmpegDecoder(const Track::Ptr &track, int thread_num = 2);
FFmpegDecoder(const Track::Ptr &track, int thread_num = 2, const std::vector<std::string> &codec_name = {});
~FFmpegDecoder() override;
bool inputFrame(const Frame::Ptr &frame, bool live, bool async, bool enable_merge = true);
@ -131,7 +131,7 @@ public:
private:
void onDecode(const FFmpegFrame::Ptr &frame);
bool inputFrame_l(const Frame::Ptr &frame, bool live, bool enable_merge);
bool decodeFrame(const char *data, size_t size, uint64_t dts, uint64_t pts, bool live);
bool decodeFrame(const char *data, size_t size, uint64_t dts, uint64_t pts, bool live, bool key_frame);
private:
bool _do_merger = false;
@ -151,10 +151,16 @@ public:
int inputFrame(const FFmpegFrame::Ptr &frame, uint8_t *data);
private:
int _target_width;
int _target_height;
FFmpegFrame::Ptr inputFrame(const FFmpegFrame::Ptr &frame, int &ret, uint8_t *data);
private:
int _target_width = 0;
int _target_height = 0;
int _src_width = 0;
int _src_height = 0;
SwsContext *_ctx = nullptr;
AVPixelFormat _target_format;
AVPixelFormat _src_format = AV_PIX_FMT_NONE;
AVPixelFormat _target_format = AV_PIX_FMT_NONE;
};
class FFmpegEncoder : public TaskManager, public CodecInfo {

View File

@ -17,16 +17,20 @@ using namespace std;
namespace mediakit{
bool MediaSink::addTrack(const Track::Ptr &track_in) {
if (_only_audio && track_in->getTrackType() != TrackAudio) {
InfoL << "Only audio enabled, track ignored: " << track_in->getCodecName();
return false;
}
if (!_enable_audio) {
//关闭音频时,加快单视频流注册速度
_max_track_size = 1;
// 关闭音频时,加快单视频流注册速度
if (track_in->getTrackType() == TrackAudio) {
//音频被全局忽略
// 音频被全局忽略
InfoL << "Audio disabled, audio track ignored";
return false;
}
}
if (_all_track_ready) {
WarnL << "all track is ready, add this track too late!";
WarnL << "All track is ready, add track too late: " << track_in->getCodecName();
return false;
}
//克隆Track只拷贝其数据不拷贝其数据转发关系
@ -48,7 +52,7 @@ bool MediaSink::addTrack(const Track::Ptr &track_in) {
if (frame_unread.size() > kMaxUnreadyFrame) {
//未就绪的的track不能缓存太多的帧否则可能内存溢出
frame_unread.clear();
WarnL << "cached frame of unready track(" << frame->getCodecName() << ") is too much, now cleared";
WarnL << "Cached frame of unready track(" << frame->getCodecName() << ") is too much, now cleared";
}
//还有Track未就绪先缓存之
frame_unread.emplace_back(Frame::getCacheAbleFrame(frame));
@ -124,8 +128,16 @@ void MediaSink::checkTrackIfReady(){
}
}
void MediaSink::addTrackCompleted(){
_max_track_size = _track_map.size();
// Declare that no more tracks will be added: lock the expected track count
// to the number of tracks already registered.
void MediaSink::addTrackCompleted() {
    setMaxTrackCount(_track_map.size());
}
// Set the expected number of tracks, clamped to [1, 2] (at most one audio plus
// one video). Ignored once all tracks are already ready.
void MediaSink::setMaxTrackCount(size_t i) {
    if (_all_track_ready) {
        WarnL << "All track is ready, set max track count ignored";
        return;
    }
    _max_track_size = MAX(MIN(i, 2), 1);
    // The new budget may already be satisfied by registered tracks.
    checkTrackIfReady();
}
@ -134,14 +146,14 @@ void MediaSink::emitAllTrackReady() {
return;
}
DebugL << "all track ready use " << _ticker.elapsedTime() << "ms";
DebugL << "All track ready use " << _ticker.elapsedTime() << "ms";
if (!_track_ready_callback.empty()) {
//这是超时强制忽略未准备好的Track
_track_ready_callback.clear();
//移除未准备好的Track
for (auto it = _track_map.begin(); it != _track_map.end();) {
if (!it->second.second || !it->second.first->ready()) {
WarnL << "track not ready for a long time, ignored: " << it->second.first->getCodecName();
WarnL << "Track not ready for a long time, ignored: " << it->second.first->getCodecName();
it = _track_map.erase(it);
continue;
}
@ -303,7 +315,7 @@ bool MediaSink::addMuteAudioTrack() {
return audio->inputFrame(frame);
});
onTrackReady(audio);
TraceL << "mute aac track added";
TraceL << "Mute aac track added";
return true;
}
@ -313,6 +325,14 @@ bool MediaSink::isAllTrackReady() const {
// Globally enable/disable audio; the expected track budget becomes
// 2 (audio + video) when enabled, 1 (video only) when disabled.
void MediaSink::enableAudio(bool flag) {
    _enable_audio = flag;
    _max_track_size = flag ? 2 : 1;
}
// Switch to audio-only mode (e.g. voice intercom): accept exactly one audio
// track and disable mute-audio-track injection.
void MediaSink::setOnlyAudio(){
    _only_audio = true;
    _enable_audio = true;
    _add_mute_audio = false;
    _max_track_size = 1;
}
void MediaSink::enableMuteAudio(bool flag) {

View File

@ -97,6 +97,12 @@ public:
*/
void addTrackCompleted() override;
/**
* track数1~2addTrackCompleted类型
* track时
*/
void setMaxTrackCount(size_t i);
/**
* track
*/
@ -118,6 +124,11 @@ public:
*/
void enableAudio(bool flag);
/**
*
*/
void setOnlyAudio();
/**
*
*/
@ -163,6 +174,7 @@ private:
private:
bool _enable_audio = true;
bool _only_audio = false;
bool _add_mute_audio = true;
bool _all_track_ready = false;
size_t _max_track_size = 2;

View File

@ -112,12 +112,15 @@ public:
//udp发送时是否开启rr rtcp接收超时判断
bool udp_rtcp_timeout = false;
//tcp被动发送服务器延时关闭事件单位毫秒
uint32_t tcp_passive_close_delay_ms = 5 * 1000;
//tcp被动发送服务器延时关闭时间单位毫秒设置为0时则使用默认值5000ms
uint32_t tcp_passive_close_delay_ms = 0;
//udp 发送时rr rtcp包接收超时时间单位毫秒
uint32_t rtcp_timeout_ms = 30 * 1000;
//udp 发送时发送sr rtcp包间隔单位毫秒
uint32_t rtcp_send_interval_ms = 5 * 1000;
//发送rtp同时接收一般用于双向语音对讲, 如果不为空,说明开启接收
std::string recv_stream_id;
};
// 开始发送ps-rtp

View File

@ -267,9 +267,9 @@ bool MultiMediaSourceMuxer::isRecording(MediaSource &sender, Recorder::type type
void MultiMediaSourceMuxer::startSendRtp(MediaSource &sender, const MediaSourceEvent::SendRtpArgs &args, const std::function<void(uint16_t, const toolkit::SockException &)> cb) {
#if defined(ENABLE_RTPPROXY)
auto rtp_sender = std::make_shared<RtpSender>(getOwnerPoller(sender));
auto sender_ptr = sender.shared_from_this();
weak_ptr<MediaSource> weak_sender = sender.shared_from_this();
weak_ptr<MultiMediaSourceMuxer> weak_self = shared_from_this();
rtp_sender->startSend(args, [args, weak_self, rtp_sender, cb, sender_ptr](uint16_t local_port, const SockException &ex) mutable {
rtp_sender->startSend(args, [args, weak_self, rtp_sender, cb, weak_sender](uint16_t local_port, const SockException &ex) mutable {
cb(local_port, ex);
auto strong_self = weak_self.lock();
if (!strong_self || ex) {
@ -281,17 +281,23 @@ void MultiMediaSourceMuxer::startSendRtp(MediaSource &sender, const MediaSourceE
rtp_sender->addTrackCompleted();
auto ssrc = args.ssrc;
rtp_sender->setOnClose([weak_self, ssrc, sender_ptr](const toolkit::SockException &ex) {
rtp_sender->setOnClose([weak_self, ssrc, weak_sender](const toolkit::SockException &ex) {
if (auto strong_self = weak_self.lock()) {
WarnL << "stream:" << strong_self->shortUrl() << " stop send rtp:" << ssrc << ", reason:" << ex.what();
strong_self->_rtp_sender.erase(ssrc);
//触发观看人数统计
strong_self->onReaderChanged(*sender_ptr, strong_self->totalReaderCount());
auto strong_sender = weak_sender.lock();
if (strong_sender) {
strong_self->onReaderChanged(*strong_sender, strong_self->totalReaderCount());
}
NoticeCenter::Instance().emitEvent(Broadcast::kBroadcastSendRtpStopped, *strong_self, ssrc, ex);
}
});
strong_self->_rtp_sender[args.ssrc] = std::move(rtp_sender);
strong_self->onReaderChanged(*sender_ptr, strong_self->totalReaderCount());
auto strong_sender = weak_sender.lock();
if (strong_sender) {
strong_self->onReaderChanged(*strong_sender, strong_self->totalReaderCount());
}
});
#else
cb(0, SockException(Err_other, "该功能未启用编译时请打开ENABLE_RTPPROXY宏"));

View File

@ -159,7 +159,81 @@ StrCaseMap Parser::parseArgs(const string &str, const char *pair_delim, const ch
}
return ret;
}
std::string Parser::merge_url(const string &base_url, const string &path) {
    // Resolve `path` against `base_url` to produce a new url. `path` may be
    // fully qualified ("http://..."), protocol-relative ("//host/x"),
    // absolute ("/x/y") or relative ("x", "./x", "../x"); "." and ".."
    // segments are resolved. Note the last component of base_url (after the
    // final '/') is implicitly dropped, matching relative-url semantics.
    if (base_url.empty()) {
        return path;
    }
    if (path.empty()) {
        return base_url;
    }
    // path already carries a scheme: return it untouched.
    if (path.find("://") != string::npos) {
        return path;
    }
    // Scheme of the base url (defaults to "http://" when absent).
    string protocol = "http://";
    size_t protocol_end = base_url.find("://");
    if (protocol_end != string::npos) {
        protocol = base_url.substr(0, protocol_end + 3);
    }
    // Protocol-relative path ("//host/..."): keep only the base scheme.
    if (path.find("//") == 0) {
        return protocol + path.substr(2);
    }
    string host;
    size_t pos = 0;
    if (protocol_end != string::npos) {
        // host = scheme + authority; pos is set to the index just past the
        // first '/' after the authority (or base_url.size() when none).
        pos = base_url.find('/', protocol_end + 3);
        host = base_url.substr(0, pos);
        if (pos == string::npos) {
            pos = base_url.size();
        } else {
            pos++;
        }
    }
    // Absolute path: scheme+host plus path.
    if (path[0] == '/') {
        return host + path;
    }
    // Collect the base url's directory segments (the trailing file component,
    // having no '/' after it, is never pushed), then fold in path segments
    // while resolving "." (skip) and ".." (pop).
    vector<string> path_parts;
    size_t next_pos = 0;
    if (!host.empty()) {
        path_parts.emplace_back(host);
    }
    while ((next_pos = base_url.find('/', pos)) != string::npos) {
        path_parts.emplace_back(base_url.substr(pos, next_pos - pos));
        pos = next_pos + 1;
    }
    pos = 0;
    while ((next_pos = path.find('/', pos)) != string::npos) {
        string part = path.substr(pos, next_pos - pos);
        if (part == "..") {
            // Pop one directory, but never pop the host element itself.
            if (!path_parts.empty() && !path_parts.back().empty()) {
                if (path_parts.size() > 1 || protocol_end == string::npos) {
                    path_parts.pop_back();
                }
            }
        } else if (part != "." && !part.empty()) {
            path_parts.emplace_back(part);
        }
        pos = next_pos + 1;
    }
    // Final (unterminated) segment of path.
    string part = path.substr(pos);
    if (part != ".." && part != "." && !part.empty()) {
        path_parts.emplace_back(part);
    }
    // Re-join all segments with '/'.
    stringstream final_url;
    for (size_t i = 0; i < path_parts.size(); ++i) {
        if (i == 0) {
            final_url << path_parts[i];
        } else {
            final_url << '/' << path_parts[i];
        }
    }
    return final_url.str();
}
void RtspUrl::parse(const string &strUrl) {
auto schema = FindField(strUrl.data(), nullptr, "://");
bool is_ssl = strcasecmp(schema.data(), "rtsps") == 0;
@ -238,4 +312,4 @@ static onceToken token([](){
});
#endif
}//namespace mediakit
}//namespace mediakit

View File

@ -105,6 +105,8 @@ public:
//解析?后面的参数
static StrCaseMap parseArgs(const std::string &str, const char *pair_delim = "&", const char *key_delim = "=");
static std::string merge_url(const std::string &base_url, const std::string &path);
private:
std::string _strMethod;
std::string _strUrl;

View File

@ -327,10 +327,7 @@ const string kPortRange = RTP_PROXY_FIELD "port_range";
const string kH264PT = RTP_PROXY_FIELD "h264_pt";
const string kH265PT = RTP_PROXY_FIELD "h265_pt";
const string kPSPT = RTP_PROXY_FIELD "ps_pt";
const string kTSPT = RTP_PROXY_FIELD "ts_pt";
const string kOpusPT = RTP_PROXY_FIELD "opus_pt";
const string kG711UPT = RTP_PROXY_FIELD "g711u_pt";
const string kG711APT = RTP_PROXY_FIELD "g711a_pt";
static onceToken token([]() {
mINI::Instance()[kDumpDir] = "";
@ -339,10 +336,7 @@ static onceToken token([]() {
mINI::Instance()[kH264PT] = 98;
mINI::Instance()[kH265PT] = 99;
mINI::Instance()[kPSPT] = 96;
mINI::Instance()[kTSPT] = 33;
mINI::Instance()[kOpusPT] = 100;
mINI::Instance()[kG711UPT] = 0;
mINI::Instance()[kG711APT] = 8;
});
} // namespace RtpProxy

View File

@ -355,14 +355,8 @@ extern const std::string kH264PT;
extern const std::string kH265PT;
// rtp server ps 的pt
extern const std::string kPSPT;
// rtp server ts 的pt
extern const std::string kTSPT;
// rtp server opus 的pt
extern const std::string kOpusPT;
// rtp server g711u 的pt
extern const std::string kG711UPT;
// rtp server g711a 的pt
extern const std::string kG711APT;
} // namespace RtpProxy
/**

View File

@ -15,6 +15,7 @@
#include "AACRtmp.h"
#include "CommonRtmp.h"
#include "H264Rtp.h"
#include "JPEGRtp.h"
#include "AACRtp.h"
#include "H265Rtp.h"
#include "CommonRtp.h"
@ -22,6 +23,7 @@
#include "Opus.h"
#include "G711.h"
#include "L16.h"
#include "JPEG.h"
#include "Util/base64.h"
#include "Common/Parser.h"
#include "Common/config.h"
@ -89,6 +91,10 @@ Track::Ptr Factory::getTrackBySdp(const SdpTrack::Ptr &track) {
return std::make_shared<H265Track>(vps, sps, pps, 0, 0, 0);
}
case CodecJPEG : {
return std::make_shared<JPEGTrack>();
}
default: {
//其他codec不支持
WarnL << "暂不支持该rtsp编码类型:" << track->getName();
@ -113,6 +119,7 @@ Track::Ptr Factory::getTrackByAbstractTrack(const Track::Ptr& track) {
case CodecOpus: return std::make_shared<OpusTrack>();
case CodecH265: return std::make_shared<H265Track>();
case CodecH264: return std::make_shared<H264Track>();
case CodecJPEG: return std::make_shared<JPEGTrack>();
default: {
//其他codec不支持
@ -141,6 +148,7 @@ RtpCodec::Ptr Factory::getRtpEncoderByCodecId(CodecId codec_id, uint32_t sample_
}
return std::make_shared<CommonRtpEncoder>(codec_id, ssrc, mtu, sample_rate, pt, interleaved);
}
case CodecJPEG: return std::make_shared<JPEGRtpEncoder>(ssrc, mtu, sample_rate, pt, interleaved);
default: WarnL << "暂不支持该CodecId:" << codec_id; return nullptr;
}
}
@ -172,6 +180,7 @@ RtpCodec::Ptr Factory::getRtpDecoderByTrack(const Track::Ptr &track) {
case CodecOpus :
case CodecG711A :
case CodecG711U : return std::make_shared<CommonRtpDecoder>(track->getCodecId());
case CodecJPEG: return std::make_shared<JPEGRtpDecoder>();
default : WarnL << "暂不支持该CodecId:" << track->getCodecName(); return nullptr;
}
}
@ -210,6 +219,7 @@ Track::Ptr getTrackByCodecId(CodecId codecId, int sample_rate = 0, int channels
case CodecOpus: return std::make_shared<OpusTrack>();
case CodecG711A :
case CodecG711U : return (sample_rate && channels && sample_bit) ? std::make_shared<G711Track>(codecId, sample_rate, channels, sample_bit) : nullptr;
case CodecJPEG : return std::make_shared<JPEGTrack>();
default : WarnL << "暂不支持该CodecId:" << codecId; return nullptr;
}
}

View File

@ -15,6 +15,7 @@
#include <mutex>
#include <functional>
#include "Util/List.h"
#include "Util/TimeTicker.h"
#include "Network/Buffer.h"
namespace mediakit {
@ -38,7 +39,8 @@ typedef enum {
XX(CodecL16, TrackAudio, 6, "L16", PSI_STREAM_RESERVED) \
XX(CodecVP8, TrackVideo, 7, "VP8", PSI_STREAM_VP8) \
XX(CodecVP9, TrackVideo, 8, "VP9", PSI_STREAM_VP9) \
XX(CodecAV1, TrackVideo, 9, "AV1", PSI_STREAM_AV1)
XX(CodecAV1, TrackVideo, 9, "AV1", PSI_STREAM_AV1) \
XX(CodecJPEG, TrackVideo, 10, "JPEG", PSI_STREAM_JPEG_2000)
typedef enum {
CodecInvalid = -1,
@ -291,7 +293,7 @@ public:
*
*/
FrameWriterInterface* addDelegate(FrameWriterInterface::Ptr delegate) {
std::lock_guard<std::mutex> lck(_mtx);
std::lock_guard<std::recursive_mutex> lck(_mtx);
return _delegates.emplace(delegate.get(), std::move(delegate)).first->second.get();
}
@ -301,7 +303,7 @@ public:
*
*/
void delDelegate(FrameWriterInterface *ptr) {
std::lock_guard<std::mutex> lck(_mtx);
std::lock_guard<std::recursive_mutex> lck(_mtx);
_delegates.erase(ptr);
}
@ -309,7 +311,8 @@ public:
*
*/
bool inputFrame(const Frame::Ptr &frame) override {
std::lock_guard<std::mutex> lck(_mtx);
std::lock_guard<std::recursive_mutex> lck(_mtx);
doStatistics(frame);
bool ret = false;
for (auto &pr : _delegates) {
if (pr.second->inputFrame(frame)) {
@ -323,17 +326,65 @@ public:
*
*/
size_t size() const {
std::lock_guard<std::mutex> lck(_mtx);
std::lock_guard<std::recursive_mutex> lck(_mtx);
return _delegates.size();
}
void clear() {
std::lock_guard<std::mutex> lck(_mtx);
std::lock_guard<std::recursive_mutex> lck(_mtx);
_delegates.clear();
}
/**
*
*/
uint64_t getVideoKeyFrames() const {
std::lock_guard<std::recursive_mutex> lck(_mtx);
return _video_key_frames;
}
/**
*
*/
uint64_t getFrames() const {
std::lock_guard<std::recursive_mutex> lck(_mtx);
return _frames;
}
size_t getVideoGopSize() const {
std::lock_guard<std::recursive_mutex> lck(_mtx);
return _gop_size;
}
size_t getVideoGopInterval() const {
std::lock_guard<std::recursive_mutex> lck(_mtx);
return _gop_interval_ms;
}
private:
mutable std::mutex _mtx;
void doStatistics(const Frame::Ptr &frame) {
if (!frame->configFrame() && !frame->dropAble()) {
// 忽略配置帧与可丢弃的帧
++_frames;
if (frame->keyFrame() && frame->getTrackType() == TrackVideo) {
// 遇视频关键帧时统计
++_video_key_frames;
_gop_size = _frames - _last_frames;
_gop_interval_ms = _ticker.elapsedTime();
_last_frames = _frames;
_ticker.resetTime();
}
}
}
private:
toolkit::Ticker _ticker;
size_t _gop_interval_ms = 0;
size_t _gop_size = 0;
uint64_t _last_frames = 0;
uint64_t _frames = 0;
uint64_t _video_key_frames = 0;
mutable std::recursive_mutex _mtx;
std::map<void *, FrameWriterInterface::Ptr> _delegates;
};

View File

@ -151,6 +151,7 @@ bool H264Track::ready() {
bool H264Track::inputFrame(const Frame::Ptr &frame) {
using H264FrameInternal = FrameInternal<H264FrameNoCacheAble>;
int type = H264_TYPE(frame->data()[frame->prefixSize()]);
if ((type == H264Frame::NAL_B_P || type == H264Frame::NAL_IDR) && ready()) {
return inputFrame_l(frame);
}
@ -204,7 +205,9 @@ bool H264Track::inputFrame_l(const Frame::Ptr &frame) {
if (frame->keyFrame() && !_latest_is_config_frame) {
insertConfigFrame(frame);
}
_latest_is_config_frame = false;
if(!frame->dropAble()){
_latest_is_config_frame = false;
}
ret = VideoTrack::inputFrame(frame);
break;
}

View File

@ -90,7 +90,7 @@ using H264FrameNoCacheAble = H264FrameHelper<FrameFromPtr>;
/**
* 264
*/
class H264Track : public VideoTrack{
class H264Track : public VideoTrack {
public:
using Ptr = std::shared_ptr<H264Track>;

53
src/Extension/JPEG.cpp Normal file
View File

@ -0,0 +1,53 @@
#include "JPEG.h"
#include "Rtsp/Rtsp.h"
#include "Util/util.h"
using namespace toolkit;
namespace mediakit {
bool JPEGTrack::inputFrame(const Frame::Ptr &frame) {
    // Until the track is ready we only gather metadata: first the image
    // resolution (from the SOF0 marker), then the frame rate (from the
    // interval between the first two frames).
    if (!ready()) {
        if (_height > 0 && _width > 0) {
            if (_tmp == 0) {
                // Remember the dts of the first frame.
                _tmp = frame->dts();
            } else if (frame->dts() > _tmp) {
                // Guard against a zero or negative interval (duplicate or
                // non-monotonic dts) which would yield inf/negative fps.
                _fps = 1000.0 / (frame->dts() - _tmp);
            }
        } else {
            getVideoResolution((uint8_t *)frame->data(), frame->size());
        }
        return false;
    }
    return VideoTrack::inputFrame(frame);
}
void JPEGTrack::getVideoResolution(const uint8_t *buf, int len) {
    // Scan the JPEG markers for the SOF0 segment, which carries the
    // image dimensions: marker(2) length(2) precision(1) height(2) width(2).
    for (int pos = 0; pos < len - 8; ++pos) {
        if (buf[pos] != 0xff || buf[pos + 1] != 0xC0 /*SOF0*/) {
            continue;
        }
        _height = (buf[pos + 5] << 8) | buf[pos + 6];
        _width = (buf[pos + 7] << 8) | buf[pos + 8];
        return;
    }
}
class JPEGSdp : public Sdp {
public:
JPEGSdp(int bitrate): Sdp(90000, Rtsp::PT_JPEG) {
_printer << "m=video 0 RTP/AVP " << (int)getPayloadType() << "\r\n";
if (bitrate) {
_printer << "b=AS:" << bitrate << "\r\n";
}
_printer << "a=control:trackID=" << (int)TrackVideo << "\r\n";
}
std::string getSdp() const { return _printer; }
CodecId getCodecId() const { return CodecJPEG; }
private:
_StrPrinter _printer;
};
Sdp::Ptr JPEGTrack::getSdp() {
    // Bitrate is advertised in kbit/s in the SDP "b=AS:" attribute.
    auto kbps = getBitRate() / 1024;
    return std::make_shared<JPEGSdp>(kbps);
}
} // namespace mediakit

69
src/Extension/JPEG.h Normal file
View File

@ -0,0 +1,69 @@
#ifndef ZLMEDIAKIT_JPEG_H
#define ZLMEDIAKIT_JPEG_H
#include "Frame.h"
#include "Track.h"
namespace mediakit {
/**
 * JPEG/MJPEG video track. Resolution and frame rate are learned from the
 * incoming stream (see inputFrame) before the track reports itself ready.
 */
class JPEGTrack : public VideoTrack {
public:
    using Ptr = std::shared_ptr<JPEGTrack>;

    CodecId getCodecId() const override { return CodecJPEG; }
    int getVideoHeight() const override { return _height; }
    int getVideoWidth() const override { return _width; }
    float getVideoFps() const override { return _fps; }
    // Ready once a frame rate has been measured.
    bool ready() override { return _fps > 0; }

    bool inputFrame(const Frame::Ptr &frame) override;

private:
    Sdp::Ptr getSdp() override;
    Track::Ptr clone() override { return std::make_shared<JPEGTrack>(*this); }
    void getVideoResolution(const uint8_t *buf, int len);

private:
    int _width = 0;
    int _height = 0;
    float _fps = 0;
    uint64_t _tmp = 0; // dts of the first frame, used to derive _fps
};
class JPEGFrame : public Frame {
public:
    /**
     * Construct a JPEG/MJPEG frame.
     * @param buffer      frame data
     * @param dts         decode timestamp in milliseconds
     * @param pix_type    pixel format type; 1 for AV_PIX_FMT_YUVJ422P (or AVCOL_RANGE_JPEG
     *                    with AV_PIX_FMT_YUV422P), 0 for AV_PIX_FMT_YUVJ420P (or
     *                    AVCOL_RANGE_JPEG with AV_PIX_FMT_YUV420P)
     * @param prefix_size size of the JFIF header prefix
     */
    JPEGFrame(toolkit::Buffer::Ptr buffer, uint64_t dts, uint8_t pix_type = 0, size_t prefix_size = 0) {
        _buffer = std::move(buffer);
        _dts = dts;
        _pix_type = pix_type;
        _prefix_size = prefix_size;
    }
    ~JPEGFrame() override = default;

    uint64_t dts() const override { return _dts; }
    size_t prefixSize() const override { return _prefix_size; }
    // Every JPEG image is independently decodable, so it is always a key frame.
    bool keyFrame() const override { return true; }
    bool configFrame() const override { return false; }
    CodecId getCodecId() const override { return CodecJPEG; }
    char *data() const override { return _buffer->data(); }
    size_t size() const override { return _buffer->size(); }
    // Pixel format hint as described in the constructor documentation.
    uint8_t pixType() const {return _pix_type; }

private:
    uint8_t _pix_type;
    size_t _prefix_size;
    uint64_t _dts;
    toolkit::Buffer::Ptr _buffer;
};
}//namespace mediakit
#endif //ZLMEDIAKIT_JPEG_H

836
src/Extension/JPEGRtp.cpp Normal file
View File

@ -0,0 +1,836 @@
#include "JPEGRtp.h"

#include <algorithm>
#include <cassert>
#include <cstring>

#include "JPEG.h"

using namespace std;
using namespace mediakit;
#define AV_WB24(p, d) \
do { \
((uint8_t *)(p))[2] = (d); \
((uint8_t *)(p))[1] = (d) >> 8; \
((uint8_t *)(p))[0] = (d) >> 16; \
} while (0)
#define AV_WB16(p, d) \
do { \
((uint8_t *)(p))[1] = (d); \
((uint8_t *)(p))[0] = (d) >> 8; \
} while (0)
#define AV_WB8(p, d) do { ((uint8_t*)(p))[0] = (d); } while(0)
/* JPEG marker codes */
enum JpegMarker {
/* start of frame */
SOF0 = 0xc0, /* baseline */
SOF1 = 0xc1, /* extended sequential, huffman */
SOF2 = 0xc2, /* progressive, huffman */
SOF3 = 0xc3, /* lossless, huffman */
SOF5 = 0xc5, /* differential sequential, huffman */
SOF6 = 0xc6, /* differential progressive, huffman */
SOF7 = 0xc7, /* differential lossless, huffman */
JPG = 0xc8, /* reserved for JPEG extension */
SOF9 = 0xc9, /* extended sequential, arithmetic */
SOF10 = 0xca, /* progressive, arithmetic */
SOF11 = 0xcb, /* lossless, arithmetic */
SOF13 = 0xcd, /* differential sequential, arithmetic */
SOF14 = 0xce, /* differential progressive, arithmetic */
SOF15 = 0xcf, /* differential lossless, arithmetic */
DHT = 0xc4, /* define huffman tables */
DAC = 0xcc, /* define arithmetic-coding conditioning */
/* restart with modulo 8 count "m" */
RST0 = 0xd0,
RST1 = 0xd1,
RST2 = 0xd2,
RST3 = 0xd3,
RST4 = 0xd4,
RST5 = 0xd5,
RST6 = 0xd6,
RST7 = 0xd7,
SOI = 0xd8, /* start of image */
EOI = 0xd9, /* end of image */
SOS = 0xda, /* start of scan */
DQT = 0xdb, /* define quantization tables */
DNL = 0xdc, /* define number of lines */
DRI = 0xdd, /* define restart interval */
DHP = 0xde, /* define hierarchical progression */
EXP = 0xdf, /* expand reference components */
APP0 = 0xe0,
APP1 = 0xe1,
APP2 = 0xe2,
APP3 = 0xe3,
APP4 = 0xe4,
APP5 = 0xe5,
APP6 = 0xe6,
APP7 = 0xe7,
APP8 = 0xe8,
APP9 = 0xe9,
APP10 = 0xea,
APP11 = 0xeb,
APP12 = 0xec,
APP13 = 0xed,
APP14 = 0xee,
APP15 = 0xef,
JPG0 = 0xf0,
JPG1 = 0xf1,
JPG2 = 0xf2,
JPG3 = 0xf3,
JPG4 = 0xf4,
JPG5 = 0xf5,
JPG6 = 0xf6,
SOF48 = 0xf7, ///< JPEG-LS
LSE = 0xf8, ///< JPEG-LS extension parameters
JPG9 = 0xf9,
JPG10 = 0xfa,
JPG11 = 0xfb,
JPG12 = 0xfc,
JPG13 = 0xfd,
COM = 0xfe, /* comment */
TEM = 0x01, /* temporary private use for arithmetic coding */
/* 0x02 -> 0xbf reserved */
};
// Minimal bounds-checked big-endian byte writer, modelled after FFmpeg's
// PutByteContext. Writes past the end set `eof` instead of overflowing.
typedef struct PutByteContext {
    uint8_t *buffer, *buffer_end, *buffer_start;
    int eof; // set once a write would overflow the buffer
} PutByteContext;

static void bytestream2_init_writer(PutByteContext *p, uint8_t *buf, int buf_size) {
    assert(buf_size >= 0);
    p->buffer = buf;
    p->buffer_start = buf;
    p->buffer_end = buf + buf_size;
    p->eof = 0;
}

static inline void bytestream2_put_byte(PutByteContext *p, uint8_t value) {
    if (!p->eof && (p->buffer_end - p->buffer >= 1)) {
        p->buffer[0] = value;
        p->buffer += 1;
    } else {
        p->eof = 1;
    }
}

static inline void bytestream2_put_be16(PutByteContext *p, uint16_t value) {
    if (!p->eof && (p->buffer_end - p->buffer >= 2)) {
        p->buffer[0] = value >> 8;
        p->buffer[1] = value & 0x00FF;
        p->buffer += 2;
    } else {
        p->eof = 1;
    }
}

static inline void bytestream2_put_be24(PutByteContext *p, uint32_t value) {
    // Fixed: a 24-bit write needs 3 bytes of space and must advance the
    // cursor by 3. The previous version checked and advanced only 2 bytes
    // and truncated the value through a uint16_t parameter, so the top
    // byte was always lost and the third byte overlapped the next write.
    if (!p->eof && (p->buffer_end - p->buffer >= 3)) {
        p->buffer[0] = (value >> 16) & 0xFF;
        p->buffer[1] = (value >> 8) & 0xFF;
        p->buffer[2] = value & 0xFF;
        p->buffer += 3;
    } else {
        p->eof = 1;
    }
}

// Copy as much of `src` as fits; returns the number of bytes copied and
// flags eof when the request was truncated.
static unsigned int bytestream2_put_buffer(PutByteContext *p, const uint8_t *src, unsigned int size) {
    if (p->eof) {
        return 0;
    }
    int size2 = std::min<int>(p->buffer_end - p->buffer, size);
    if (size2 != (int)size) {
        p->eof = 1;
    }
    memcpy(p->buffer, src, size2);
    p->buffer += size2;
    return size2;
}

// Number of bytes written so far.
static inline int bytestream2_tell_p(PutByteContext *p) {
    return (int) (p->buffer - p->buffer_start);
}
// Append raw bytes to the growing frame buffer.
static inline void avio_write(string &str, const void *ptr, size_t size) {
    str.append(static_cast<const char *>(ptr), size);
}
//////////////////////////////////////////////////////////////////////////////////////////////////
static const uint8_t default_quantizers[128] = {
/* luma table */
16, 11, 12, 14, 12, 10, 16, 14,
13, 14, 18, 17, 16, 19, 24, 40,
26, 24, 22, 22, 24, 49, 35, 37,
29, 40, 58, 51, 61, 60, 57, 51,
56, 55, 64, 72, 92, 78, 64, 68,
87, 69, 55, 56, 80, 109, 81, 87,
95, 98, 103, 104, 103, 62, 77, 113,
121, 112, 100, 120, 92, 101, 103, 99,
/* chroma table */
17, 18, 18, 24, 21, 24, 47, 26,
26, 47, 99, 66, 56, 66, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99
};
/* Set up the standard Huffman tables (cf. JPEG standard section K.3) */
/* IMPORTANT: these are only valid for 8-bit data precision! */
const uint8_t avpriv_mjpeg_bits_dc_luminance[17] =
{ /* 0-base */ 0, 0, 1, 5, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0};
const uint8_t avpriv_mjpeg_val_dc[12] =
{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
const uint8_t avpriv_mjpeg_bits_dc_chrominance[17] =
{ /* 0-base */ 0, 0, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0};
const uint8_t avpriv_mjpeg_bits_ac_luminance[17] =
{ /* 0-base */ 0, 0, 2, 1, 3, 3, 2, 4, 3, 5, 5, 4, 4, 0, 0, 1, 0x7d};
const uint8_t avpriv_mjpeg_val_ac_luminance[] =
{0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12,
0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08,
0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16,
0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39,
0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59,
0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79,
0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6,
0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4,
0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea,
0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
0xf9, 0xfa
};
const uint8_t avpriv_mjpeg_bits_ac_chrominance[17] =
{ /* 0-base */ 0, 0, 2, 1, 2, 4, 4, 3, 4, 7, 5, 4, 4, 0, 1, 2, 0x77};
const uint8_t avpriv_mjpeg_val_ac_chrominance[] =
{0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21,
0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91,
0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34,
0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38,
0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78,
0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96,
0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4,
0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2,
0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9,
0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
0xf9, 0xfa
};
// Emit the body of one DHT Huffman table: class/id byte, 16 bit-length
// counts, then the symbol values. Returns the number of bytes written.
static int jpeg_create_huffman_table(PutByteContext *p, int table_class,
                                     int table_id, const uint8_t *bits_table,
                                     const uint8_t *value_table) {
    bytestream2_put_byte(p, table_class << 4 | table_id);
    int n = 0;
    for (int i = 1; i <= 16; i++) {
        n += bits_table[i];
        bytestream2_put_byte(p, bits_table[i]);
    }
    for (int i = 0; i < n; i++) {
        bytestream2_put_byte(p, value_table[i]);
    }
    return n + 17;
}
// Write a two-byte JPEG marker: 0xFF followed by the marker code.
static void jpeg_put_marker(PutByteContext *pbc, int code) {
    bytestream2_put_byte(pbc, 0xff);
    bytestream2_put_byte(pbc, code);
}
/**
 * Rebuild the JFIF headers that RTP/JPEG (RFC 2435) strips from the wire:
 * SOI, APP0/JFIF, optional DRI, DQT, DHT (standard Huffman tables),
 * SOF0 and SOS.
 * @param buf       destination buffer for the header
 * @param size      capacity of buf in bytes
 * @param type      RTP/JPEG type field: 0 selects 4:2:0, non-zero 4:2:2 sampling
 * @param w         frame width in 8-pixel blocks
 * @param h         frame height in 8-pixel blocks
 * @param qtable    quantization tables, 64 bytes each in zig-zag order
 * @param nb_qtable number of quantization tables
 * @param dri       restart interval; 0 omits the DRI segment
 * @return length in bytes of the generated JPEG header
 */
static int jpeg_create_header(uint8_t *buf, int size, uint32_t type, uint32_t w,
                              uint32_t h, const uint8_t *qtable, int nb_qtable,
                              int dri) {
    PutByteContext pbc;
    uint8_t *dht_size_ptr;
    int dht_size = 0, i = 0;

    bytestream2_init_writer(&pbc, buf, size);

    /* Convert from blocks to pixels. */
    w <<= 3;
    h <<= 3;

    /* SOI */
    jpeg_put_marker(&pbc, SOI);

    /* JFIF header */
    jpeg_put_marker(&pbc, APP0);
    bytestream2_put_be16(&pbc, 16);
    bytestream2_put_buffer(&pbc, (const uint8_t *) "JFIF", 5);
    bytestream2_put_be16(&pbc, 0x0201);
    bytestream2_put_byte(&pbc, 0);
    bytestream2_put_be16(&pbc, 1);
    bytestream2_put_be16(&pbc, 1);
    bytestream2_put_byte(&pbc, 0);
    bytestream2_put_byte(&pbc, 0);

    if (dri) {
        jpeg_put_marker(&pbc, DRI);
        bytestream2_put_be16(&pbc, 4);
        bytestream2_put_be16(&pbc, dri);
    }

    /* DQT */
    jpeg_put_marker(&pbc, DQT);
    bytestream2_put_be16(&pbc, 2 + nb_qtable * (1 + 64));

    for (i = 0; i < nb_qtable; i++) {
        bytestream2_put_byte(&pbc, i);

        /* Each table is an array of 64 values given in zig-zag
         * order, identical to the format used in a JFIF DQT
         * marker segment. */
        bytestream2_put_buffer(&pbc, qtable + 64 * i, 64);
    }

    /* DHT: the table length field is back-patched below once the four
     * standard Huffman tables have been written. */
    jpeg_put_marker(&pbc, DHT);
    dht_size_ptr = pbc.buffer;
    bytestream2_put_be16(&pbc, 0);

    dht_size = 2;
    dht_size += jpeg_create_huffman_table(&pbc, 0, 0, avpriv_mjpeg_bits_dc_luminance,
                                          avpriv_mjpeg_val_dc);
    dht_size += jpeg_create_huffman_table(&pbc, 0, 1, avpriv_mjpeg_bits_dc_chrominance,
                                          avpriv_mjpeg_val_dc);
    dht_size += jpeg_create_huffman_table(&pbc, 1, 0, avpriv_mjpeg_bits_ac_luminance,
                                          avpriv_mjpeg_val_ac_luminance);
    dht_size += jpeg_create_huffman_table(&pbc, 1, 1, avpriv_mjpeg_bits_ac_chrominance,
                                          avpriv_mjpeg_val_ac_chrominance);
    AV_WB16(dht_size_ptr, dht_size);

    /* SOF0 */
    jpeg_put_marker(&pbc, SOF0);
    bytestream2_put_be16(&pbc, 17); /* size */
    bytestream2_put_byte(&pbc, 8); /* bits per component */
    bytestream2_put_be16(&pbc, h);
    bytestream2_put_be16(&pbc, w);
    bytestream2_put_byte(&pbc, 3); /* number of components */
    bytestream2_put_byte(&pbc, 1); /* component number */
    bytestream2_put_byte(&pbc, (2 << 4) | (type ? 2 : 1)); /* hsample/vsample */
    bytestream2_put_byte(&pbc, 0); /* matrix number */
    bytestream2_put_byte(&pbc, 2); /* component number */
    bytestream2_put_byte(&pbc, 1 << 4 | 1); /* hsample/vsample */
    bytestream2_put_byte(&pbc, nb_qtable == 2 ? 1 : 0); /* matrix number */
    bytestream2_put_byte(&pbc, 3); /* component number */
    bytestream2_put_byte(&pbc, 1 << 4 | 1); /* hsample/vsample */
    bytestream2_put_byte(&pbc, nb_qtable == 2 ? 1 : 0); /* matrix number */

    /* SOS */
    jpeg_put_marker(&pbc, SOS);
    bytestream2_put_be16(&pbc, 12);
    bytestream2_put_byte(&pbc, 3);
    bytestream2_put_byte(&pbc, 1);
    bytestream2_put_byte(&pbc, 0);
    bytestream2_put_byte(&pbc, 2);
    bytestream2_put_byte(&pbc, 17);
    bytestream2_put_byte(&pbc, 3);
    bytestream2_put_byte(&pbc, 17);
    bytestream2_put_byte(&pbc, 0);
    bytestream2_put_byte(&pbc, 63);
    bytestream2_put_byte(&pbc, 0);

    /* Return the length in bytes of the JPEG header. */
    return bytestream2_tell_p(&pbc);
}
// Clamp `a` to the inclusive range [amin, amax].
static inline int av_clip(int a, int amin, int amax) {
    if (a < amin) {
        return amin;
    }
    return a > amax ? amax : a;
}
// Derive the 64-byte luma + 64-byte chroma quantization tables from the
// RFC 2435 Q factor by scaling the default tables.
static void create_default_qtables(uint8_t *qtables, uint8_t q) {
    const int factor = av_clip(q, 1, 99);
    const uint16_t S = (q < 50) ? (5000 / factor) : (200 - factor * 2);

    for (int i = 0; i < 128; i++) {
        const int val = (default_quantizers[i] * S + 50) / 100;
        /* Limit the quantizers to 1 <= q <= 255. */
        qtables[i] = av_clip(val, 1, 255);
    }
}
#define AVERROR_INVALIDDATA -1
#define AVERROR_PATCHWELCOME -2
#define AVERROR_EAGAIN -3
#define RTP_FLAG_KEY 0x1 ///< RTP packet contains a keyframe
#define RTP_FLAG_MARKER 0x2 ///< RTP marker bit was set for this packet
#define av_log(ctx, level, ...) PrintD(__VA_ARGS__)
#ifndef AV_RB24
# define AV_RB24(x) \
((((const uint8_t*)(x))[0] << 16) | \
(((const uint8_t*)(x))[1] << 8) | \
((const uint8_t*)(x))[2])
#endif
#define AV_RB8(x) (((const uint8_t*)(x))[0])
#ifndef AV_RB16
# define AV_RB16(x) ((((const uint8_t*)(x))[0] << 8) | (((const uint8_t*)(x))[1] ))
#endif
/**
 * Parse one RTP/JPEG payload (RFC 2435) and reassemble the JPEG bitstream.
 * A fragment with offset 0 starts a new frame and triggers regeneration of
 * the JFIF headers (via jpeg_create_header), since RTP/JPEG strips them
 * from the wire. Subsequent fragments are appended at their offset.
 * @param ctx       opaque logging context (discarded by the av_log macro here)
 * @param jpeg      reassembly state shared across fragments of one frame
 * @param timestamp RTP timestamp of this packet
 * @param buf       RTP payload
 * @param len       RTP payload size in bytes
 * @param seq       RTP sequence number (currently unused)
 * @param flags     RTP_FLAG_* bits; RTP_FLAG_MARKER marks the last fragment
 * @param type      [out] RTP/JPEG type field (sampling mode)
 * @return 0 once a complete frame has been assembled into jpeg->frame,
 *         AVERROR_EAGAIN while more fragments are needed, or a negative
 *         error code on malformed input
 */
static int jpeg_parse_packet(void *ctx, PayloadContext *jpeg, uint32_t *timestamp, const uint8_t *buf, int len,
                             uint16_t seq, int flags, uint8_t *type) {
    uint8_t q = 0, width = 0, height = 0;
    const uint8_t *qtables = NULL;
    uint16_t qtable_len = 0;
    uint32_t off = 0;
    int ret = 0, dri = 0;

    if (len < 8) {
        av_log(ctx, AV_LOG_ERROR, "Too short RTP/JPEG packet.\n");
        return AVERROR_INVALIDDATA;
    }

    /* Parse the main JPEG header. */
    off = AV_RB24(buf + 1); /* fragment byte offset */
    *type = AV_RB8(buf + 4); /* id of jpeg decoder params */
    q = AV_RB8(buf + 5); /* quantization factor (or table id) */
    width = AV_RB8(buf + 6); /* frame width in 8 pixel blocks */
    height = AV_RB8(buf + 7); /* frame height in 8 pixel blocks */
    buf += 8;
    len -= 8;

    /* Type bit 6 indicates a trailing Restart Marker header. */
    if (*type & 0x40) {
        if (len < 4) {
            av_log(ctx, AV_LOG_ERROR, "Too short RTP/JPEG packet.\n");
            return AVERROR_INVALIDDATA;
        }
        dri = AV_RB16(buf);
        buf += 4;
        len -= 4;
        *type &= ~0x40;
    }
    if (*type > 1) {
        av_log(ctx, AV_LOG_ERROR, "RTP/JPEG type %d", (int) *type);
        return AVERROR_PATCHWELCOME;
    }

    /* Parse the quantization table header. */
    if (off == 0) {
        /* Start of JPEG data packet. */
        uint8_t new_qtables[128];
        uint8_t hdr[1024];

        if (q > 127) {
            /* q >= 128 means the tables are carried in-band in this packet. */
            uint8_t precision;

            if (len < 4) {
                av_log(ctx, AV_LOG_ERROR, "Too short RTP/JPEG packet.\n");
                return AVERROR_INVALIDDATA;
            }

            /* The first byte is reserved for future use. */
            precision = AV_RB8(buf + 1); /* size of coefficients */
            qtable_len = AV_RB16(buf + 2); /* length in bytes */
            buf += 4;
            len -= 4;

            if (precision) {
                av_log(ctx, AV_LOG_WARNING, "Only 8-bit precision is supported.\n");
            }

            if (qtable_len > 0) {
                if (len < qtable_len) {
                    av_log(ctx, AV_LOG_ERROR, "Too short RTP/JPEG packet.\n");
                    return AVERROR_INVALIDDATA;
                }
                qtables = buf;
                buf += qtable_len;
                len -= qtable_len;
                if (q < 255) {
                    /* Cache the tables so later frames with the same q can
                     * omit them (q == 255 tables must not be cached). */
                    if (jpeg->qtables_len[q - 128] &&
                        (jpeg->qtables_len[q - 128] != qtable_len ||
                         memcmp(qtables, &jpeg->qtables[q - 128][0], qtable_len))) {
                        av_log(ctx, AV_LOG_WARNING,
                               "Quantization tables for q=%d changed\n", q);
                    } else if (!jpeg->qtables_len[q - 128] && qtable_len <= 128) {
                        memcpy(&jpeg->qtables[q - 128][0], qtables,
                               qtable_len);
                        jpeg->qtables_len[q - 128] = qtable_len;
                    }
                }
            } else {
                /* No in-band tables: fall back to previously cached ones. */
                if (q == 255) {
                    av_log(ctx, AV_LOG_ERROR,
                           "Invalid RTP/JPEG packet. Quantization tables not found.\n");
                    return AVERROR_INVALIDDATA;
                }
                if (!jpeg->qtables_len[q - 128]) {
                    av_log(ctx, AV_LOG_ERROR,
                           "No quantization tables known for q=%d yet.\n", q);
                    return AVERROR_INVALIDDATA;
                }
                qtables = &jpeg->qtables[q - 128][0];
                qtable_len = jpeg->qtables_len[q - 128];
            }
        } else { /* q <= 127 */
            if (q == 0 || q > 99) {
                av_log(ctx, AV_LOG_ERROR, "Reserved q value %d\n", q);
                return AVERROR_INVALIDDATA;
            }
            create_default_qtables(new_qtables, q);
            qtables = new_qtables;
            qtable_len = sizeof(new_qtables);
        }

        /* Skip the current frame in case of the end packet
         * has been lost somewhere. */
        jpeg->frame.clear();
        jpeg->frame.reserve(1024 + len);
        jpeg->timestamp = *timestamp;

        /* Generate a frame and scan headers that can be prepended to the
         * RTP/JPEG data payload to produce a JPEG compressed image in
         * interchange format. */
        jpeg->hdr_size = jpeg_create_header(hdr, sizeof(hdr), *type, width,
                                            height, qtables,
                                            qtable_len / 64, dri);

        /* Copy JPEG header to frame buffer. */
        avio_write(jpeg->frame, hdr, jpeg->hdr_size);
    }

    if (jpeg->frame.empty()) {
        av_log(ctx, AV_LOG_ERROR,
               "Received packet without a start chunk; dropping frame.\n");
        return AVERROR_EAGAIN;
    }

    if (jpeg->timestamp != *timestamp) {
        /* Skip the current frame if timestamp is incorrect.
         * A start packet has been lost somewhere. */
        jpeg->frame.clear();
        av_log(ctx, AV_LOG_ERROR, "RTP timestamps don't match.\n");
        return AVERROR_INVALIDDATA;
    }

    if (off != jpeg->frame.size() - jpeg->hdr_size) {
        av_log(ctx, AV_LOG_ERROR,
               "Missing packets; dropping frame.\n");
        return AVERROR_EAGAIN;
    }

    /* Copy data to frame buffer. */
    avio_write(jpeg->frame, buf, len);

    if (flags & RTP_FLAG_MARKER) {
        /* End of JPEG data packet. */
        uint8_t buf[2] = {0xff, EOI};

        /* Put EOI marker. */
        avio_write(jpeg->frame, buf, sizeof(buf));
        return 0;
    }
    return AVERROR_EAGAIN;
}
//----------------------------------------------------------------------------------
// Generate tiny helpers (bytestream_put_be24 / _be16 / _byte) that write a
// big-endian value through a moving destination pointer.
#define DEF(type, name, bytes, write) \
static inline void bytestream_put_##name(uint8_t **b, const type value) { \
    write(*b, value); \
    (*b) += bytes; \
}

DEF(unsigned int, be24, 3, AV_WB24)
DEF(unsigned int, be16, 2, AV_WB16)
DEF(unsigned int, byte, 1, AV_WB8)

// Copy a raw buffer and advance the destination pointer.
static inline void bytestream_put_buffer(uint8_t **b, const uint8_t *src, unsigned int size) {
    memcpy(*b, src, size);
    (*b) += size;
}
void JPEGRtpEncoder::rtpSendJpeg(const uint8_t *buf, int size, uint64_t pts, uint8_t type)
{
const uint8_t *qtables[4] = { NULL };
int nb_qtables = 0;
uint8_t w, h;
uint8_t *p;
int off = 0; /* fragment offset of the current JPEG frame */
int len;
int i;
int default_huffman_tables = 0;
uint8_t *out = nullptr;
/* preparse the header for getting some info */
for (i = 0; i < size; i++) {
if (buf[i] != 0xff)
continue;
if (buf[i + 1] == DQT) {
int tables, j;
if (buf[i + 4] & 0xF0)
av_log(s1, AV_LOG_WARNING,
"Only 8-bit precision is supported.\n");
/* a quantization table is 64 bytes long */
tables = AV_RB16(&buf[i + 2]) / 65;
if (i + 5 + tables * 65 > size) {
av_log(s1, AV_LOG_ERROR, "Too short JPEG header. Aborted!\n");
return;
}
if (nb_qtables + tables > 4) {
av_log(s1, AV_LOG_ERROR, "Invalid number of quantisation tables\n");
return;
}
for (j = 0; j < tables; j++)
qtables[nb_qtables + j] = buf + i + 5 + j * 65;
nb_qtables += tables;
// 大致忽略DQT/qtable所占字节数提高搜寻速度
i += tables << 6;
} else if (buf[i + 1] == SOF0) {
if (buf[i + 14] != 17 || buf[i + 17] != 17) {
av_log(s1, AV_LOG_ERROR,
"Only 1x1 chroma blocks are supported. Aborted!\n");
return;
}
h = (buf[i + 5] * 256 + buf[i + 6]) / 8;
w = (buf[i + 7] * 256 + buf[i + 8]) / 8;
// 大致忽略SOF0所占字节数提高搜寻速度
i += 16;
} else if (buf[i + 1] == DHT) {
int dht_size = AV_RB16(&buf[i + 2]);
default_huffman_tables |= 1 << 4;
i += 3;
dht_size -= 2;
if (i + dht_size >= size)
continue;
while (dht_size > 0)
switch (buf[i + 1]) {
case 0x00:
if ( dht_size >= 29
&& !memcmp(buf + i + 2, avpriv_mjpeg_bits_dc_luminance + 1, 16)
&& !memcmp(buf + i + 18, avpriv_mjpeg_val_dc, 12)) {
default_huffman_tables |= 1;
i += 29;
dht_size -= 29;
} else {
i += dht_size;
dht_size = 0;
}
break;
case 0x01:
if ( dht_size >= 29
&& !memcmp(buf + i + 2, avpriv_mjpeg_bits_dc_chrominance + 1, 16)
&& !memcmp(buf + i + 18, avpriv_mjpeg_val_dc, 12)) {
default_huffman_tables |= 1 << 1;
i += 29;
dht_size -= 29;
} else {
i += dht_size;
dht_size = 0;
}
break;
case 0x10:
if ( dht_size >= 179
&& !memcmp(buf + i + 2, avpriv_mjpeg_bits_ac_luminance + 1, 16)
&& !memcmp(buf + i + 18, avpriv_mjpeg_val_ac_luminance, 162)) {
default_huffman_tables |= 1 << 2;
i += 179;
dht_size -= 179;
} else {
i += dht_size;
dht_size = 0;
}
break;
case 0x11:
if ( dht_size >= 179
&& !memcmp(buf + i + 2, avpriv_mjpeg_bits_ac_chrominance + 1, 16)
&& !memcmp(buf + i + 18, avpriv_mjpeg_val_ac_chrominance, 162)) {
default_huffman_tables |= 1 << 3;
i += 179;
dht_size -= 179;
} else {
i += dht_size;
dht_size = 0;
}
break;
default:
i += dht_size;
dht_size = 0;
continue;
}
} else if (buf[i + 1] == SOS) {
/* SOS is last marker in the header */
i += AV_RB16(&buf[i + 2]) + 2;
if (i > size) {
av_log(s1, AV_LOG_ERROR,
"Insufficient data. Aborted!\n");
return;
}
break;
}
}
if (default_huffman_tables && default_huffman_tables != 31) {
av_log(s1, AV_LOG_ERROR,
"RFC 2435 requires standard Huffman tables for jpeg\n");
return;
}
if (nb_qtables && nb_qtables != 2)
av_log(s1, AV_LOG_WARNING,
"RFC 2435 suggests two quantization tables, %d provided\n",
nb_qtables);
/* skip JPEG header */
buf += i;
size -= i;
for (i = size - 2; i >= 0; i--) {
if (buf[i] == 0xff && buf[i + 1] == EOI) {
/* Remove the EOI marker */
size = i;
break;
}
}
while (size > 0) {
int hdr_size = 8;
if (off == 0 && nb_qtables)
hdr_size += 4 + 64 * nb_qtables;
/* payload max in one packet */
len = MIN(size, (int)getMaxSize() - hdr_size);
/* marker bit is last packet in frame */
auto rtp_packet = makeRtp(getTrackType(), nullptr, len + hdr_size, size == len, pts);
p = rtp_packet->getPayload();
/* set main header */
bytestream_put_byte(&p, 0);
bytestream_put_be24(&p, off);
bytestream_put_byte(&p, type);
bytestream_put_byte(&p, 255);
bytestream_put_byte(&p, w);
bytestream_put_byte(&p, h);
if (off == 0 && nb_qtables) {
/* set quantization tables header */
bytestream_put_byte(&p, 0);
bytestream_put_byte(&p, 0);
bytestream_put_be16(&p, 64 * nb_qtables);
for (i = 0; i < nb_qtables; i++)
bytestream_put_buffer(&p, qtables[i], 64);
}
/* copy payload data */
memcpy(p, buf, len);
// output rtp packet
RtpCodec::inputRtp(std::move(rtp_packet), false);
buf += len;
size -= len;
off += len;
}
free(out);
}
////////////////////////////////////////////////////////////
JPEGRtpDecoder::JPEGRtpDecoder() {
    // Zero only the POD tail of the context; the leading std::string member
    // must never be touched by memset.
    _ctx.timestamp = 0;
    _ctx.hdr_size = 0;
    memset(_ctx.qtables, 0, sizeof(_ctx.qtables));
    memset(_ctx.qtables_len, 0, sizeof(_ctx.qtables_len));
}

CodecId JPEGRtpDecoder::getCodecId() const {
    return CodecJPEG;
}
/**
 * Feed one RTP packet; when jpeg_parse_packet reports a complete frame,
 * wrap the reassembled bitstream in a JPEGFrame and forward it.
 * Always returns false (key-frame position is not tracked for JPEG).
 */
bool JPEGRtpDecoder::inputRtp(const RtpPacket::Ptr &rtp, bool) {
    auto payload = rtp->getPayload();
    auto size = rtp->getPayloadSize();
    auto stamp = rtp->getStamp();
    auto seq = rtp->getSeq();
    auto marker = rtp->getHeader()->mark;
    if (size <= 0) {
        // no actual payload
        return false;
    }
    uint8_t type;
    if (0 == jpeg_parse_packet(nullptr, &_ctx, &stamp, payload, size, seq, marker ? RTP_FLAG_MARKER : 0, &type)) {
        auto buffer = std::make_shared<toolkit::BufferString>(std::move(_ctx.frame));
        // the generated JFIF header has a fixed length of 20 bytes
        auto frame = std::make_shared<JPEGFrame>(std::move(buffer), stamp / 90, type, 20);
        _ctx.frame.clear();
        RtpCodec::inputFrame(std::move(frame));
    }
    return false;
}
////////////////////////////////////////////////////////////////////////
// Configure the RTP packetizer parameters (ssrc, mtu, 90 kHz clock,
// payload type and RTSP interleaved channel).
JPEGRtpEncoder::JPEGRtpEncoder(
    uint32_t ssrc, uint32_t mtu, uint32_t sample_rate, uint8_t payload_type, uint8_t interleaved)
    : RtpInfo(ssrc, mtu, sample_rate, payload_type, interleaved) {}
bool JPEGRtpEncoder::inputFrame(const Frame::Ptr &frame) {
    // Skip any prefix bytes ahead of the JPEG bitstream.
    auto payload = (uint8_t *)frame->data() + frame->prefixSize();
    auto payload_len = frame->size() - frame->prefixSize();
    // Default pixel type is 1; a JPEGFrame carries the actual value
    // (see JPEGFrame::pixType for the 4:2:0 / 4:2:2 mapping).
    uint8_t pix_type = 1;
    if (auto jpeg = dynamic_pointer_cast<JPEGFrame>(frame)) {
        pix_type = jpeg->pixType();
    }
    rtpSendJpeg(payload, payload_len, frame->pts(), pix_type);
    return payload_len > 0;
}

59
src/Extension/JPEGRtp.h Normal file
View File

@ -0,0 +1,59 @@
#ifndef ZLMEDIAKIT_JPEGRTP_H
#define ZLMEDIAKIT_JPEGRTP_H
#include "Frame.h"
#include "Rtsp/RtpCodec.h"
namespace mediakit{
/**
 * RTP/JPEG specific private data (reassembly state, see RFC 2435).
 * All members now have default initializers so a default-constructed
 * context starts from a well-defined zero state.
 */
struct PayloadContext {
    std::string frame;             ///< current frame buffer
    uint32_t timestamp = 0;        ///< current frame timestamp
    int hdr_size = 0;              ///< size of the current frame header
    uint8_t qtables[128][128] {};  ///< cached quantization tables, indexed by q - 128
    uint8_t qtables_len[128] {};   ///< byte length of each cached table
};
/**
 * RTP/JPEG depacketizer (RFC 2435, decoder side).
 */
class JPEGRtpDecoder : public RtpCodec {
public:
    typedef std::shared_ptr <JPEGRtpDecoder> Ptr;

    JPEGRtpDecoder();

    ~JPEGRtpDecoder() override = default;

    /**
     * Return the codec ID (CodecJPEG).
     */
    CodecId getCodecId() const override;

    /**
     * Feed an RTP packet and decode it.
     * @param rtp RTP packet
     * @param key_pos ignored; the method always returns false
     */
    bool inputRtp(const RtpPacket::Ptr &rtp, bool key_pos = false) override;

private:
    struct PayloadContext _ctx; // per-frame reassembly state
};
/**
 * RTP/JPEG packetizer (RFC 2435, encoder side).
 */
class JPEGRtpEncoder : public JPEGRtpDecoder, public RtpInfo {
public:
    using Ptr = std::shared_ptr<JPEGRtpEncoder>;

    /**
     * @param ssrc RTP SSRC
     * @param mtu maximum RTP packet size in bytes
     * @param sample_rate RTP clock rate (90 kHz for video)
     * @param payload_type dynamic RTP payload type
     * @param interleaved RTSP interleaved channel id
     */
    JPEGRtpEncoder(uint32_t ssrc, uint32_t mtu = 1400, uint32_t sample_rate = 90000, uint8_t payload_type = 96, uint8_t interleaved = TrackVideo * 2);

    ~JPEGRtpEncoder() = default;

    bool inputFrame(const Frame::Ptr &frame) override;

private:
    // Split one JPEG image into RFC 2435 RTP packets.
    void rtpSendJpeg(const uint8_t *buf, int size, uint64_t pts, uint8_t type);
};
}//namespace mediakit
#endif //ZLMEDIAKIT_JPEGRTP_H

View File

@ -80,17 +80,17 @@ public:
/**
*
*/
virtual int getVideoHeight() const {return 0;};
virtual int getVideoHeight() const { return 0; }
/**
*
*/
virtual int getVideoWidth() const {return 0;};
virtual int getVideoWidth() const { return 0; }
/**
* fps
*/
virtual float getVideoFps() const {return 0;};
virtual float getVideoFps() const { return 0; }
};
/**

View File

@ -37,15 +37,7 @@ bool HlsParser::parse(const string &http_url, const string &m3u8) {
if ((_is_m3u8_inner || extinf_dur != 0) && line[0] != '#') {
segment.duration = extinf_dur;
if (line.find("http://") == 0 || line.find("https://") == 0) {
segment.url = line;
} else {
if (line.find("/") == 0) {
segment.url = http_url.substr(0, http_url.find("/", 8)) + line;
} else {
segment.url = http_url.substr(0, http_url.rfind("/") + 1) + line;
}
}
segment.url = Parser::merge_url(http_url, line);
if (!_is_m3u8_inner) {
//ts按照先后顺序排序
ts_map.emplace(index++, segment);

View File

@ -182,8 +182,8 @@ void HttpClient::onErr(const SockException &ex) {
ssize_t HttpClient::onRecvHeader(const char *data, size_t len) {
_parser.Parse(data);
if (_parser.Url() == "302" || _parser.Url() == "301") {
auto new_url = _parser["Location"];
if (_parser.Url() == "302" || _parser.Url() == "301" || _parser.Url() == "303") {
auto new_url = Parser::merge_url(_url, _parser["Location"]);
if (new_url.empty()) {
throw invalid_argument("未找到Location字段(跳转url)");
}
@ -206,7 +206,11 @@ ssize_t HttpClient::onRecvHeader(const char *data, size_t len) {
onResponseBody(data, len);
} else {
_total_body_size = _recved_body_size;
onResponseCompleted_l(SockException(Err_success, "success"));
if (_recved_body_size > 0) {
onResponseCompleted_l(SockException(Err_success, "success"));
}else{
onResponseCompleted_l(SockException(Err_other, "no body"));
}
}
});
//后续为源源不断的body

View File

@ -40,7 +40,12 @@ HlsMakerImp::HlsMakerImp(const string &m3u8_file,
}
HlsMakerImp::~HlsMakerImp() {
clearCache(false, true);
try {
// 可能hls注册时导致抛异常
clearCache(false, true);
} catch (std::exception &ex) {
WarnL << ex.what();
}
}
void HlsMakerImp::clearCache() {

View File

@ -15,6 +15,7 @@
#include "Extension/G711.h"
#include "Extension/H264.h"
#include "Extension/H265.h"
#include "Extension/JPEG.h"
#include "Common/config.h"
using namespace std;
@ -145,6 +146,7 @@ static uint8_t getObject(CodecId codecId) {
case CodecAAC : return MOV_OBJECT_AAC;
case CodecH264 : return MOV_OBJECT_H264;
case CodecH265 : return MOV_OBJECT_HEVC;
case CodecJPEG : return MOV_OBJECT_JPEG;
default : return 0;
}
}
@ -302,6 +304,28 @@ bool MP4MuxerInterface::addTrack(const Track::Ptr &track) {
break;
}
case CodecJPEG: {
auto jpeg_track = dynamic_pointer_cast<JPEGTrack>(track);
if (!jpeg_track) {
WarnL << "不是JPEG Track";
return false;
}
auto track_id = mp4_writer_add_video(_mov_writter.get(),
mp4_object,
jpeg_track->getVideoWidth(),
jpeg_track->getVideoHeight(),
nullptr,
0);
if (track_id < 0) {
WarnL << "添加JPEG Track失败:" << track_id;
return false;
}
_codec_to_trackid[track->getCodecId()].track_id = track_id;
_have_video = true;
break;
}
default: WarnL << "MP4录制不支持该编码格式:" << track->getCodecName(); return false;
}

View File

@ -13,14 +13,15 @@
#include "Extension/CommonRtp.h"
#include "Extension/Factory.h"
#include "Extension/G711.h"
#include "Extension/H264Rtp.h"
#include "Extension/H264.h"
#include "Extension/H265.h"
#include "Extension/Opus.h"
#include "Extension/JPEG.h"
#include "Http/HttpTSPlayer.h"
#include "Util/File.h"
#include "Common/config.h"
#include "Rtsp/RtpCodec.h"
#include "Rtsp/RtpReceiver.h"
#include "Rtsp/Rtsp.h"
using namespace std;
using namespace toolkit;
@ -76,10 +77,7 @@ bool GB28181Process::inputRtp(bool, const char *data, size_t data_len) {
GET_CONFIG(uint32_t, h264_pt, RtpProxy::kH264PT);
GET_CONFIG(uint32_t, h265_pt, RtpProxy::kH265PT);
GET_CONFIG(uint32_t, ps_pt, RtpProxy::kPSPT);
GET_CONFIG(uint32_t, ts_pt, RtpProxy::kTSPT);
GET_CONFIG(uint32_t, opus_pt, RtpProxy::kOpusPT);
GET_CONFIG(uint32_t, g711u_pt, RtpProxy::kG711UPT);
GET_CONFIG(uint32_t, g711a_pt, RtpProxy::kG711APT);
RtpHeader *header = (RtpHeader *)data;
auto pt = header->pt;
@ -89,53 +87,62 @@ bool GB28181Process::inputRtp(bool, const char *data, size_t data_len) {
// 防止pt类型太多导致内存溢出
throw std::invalid_argument("rtp pt类型不得超过2种!");
}
if (pt == opus_pt) {
// opus负载
ref = std::make_shared<RtpReceiverImp>(48000, [this](RtpPacket::Ptr rtp) { onRtpSorted(std::move(rtp)); });
auto track = std::make_shared<OpusTrack>();
_interface->addTrack(track);
_rtp_decoder[pt] = Factory::getRtpDecoderByTrack(track);
} else if (pt == h265_pt) {
// H265负载
ref = std::make_shared<RtpReceiverImp>(90000, [this](RtpPacket::Ptr rtp) { onRtpSorted(std::move(rtp)); });
auto track = std::make_shared<H265Track>();
_interface->addTrack(track);
_rtp_decoder[pt] = Factory::getRtpDecoderByTrack(track);
} else if (pt == h264_pt) {
// H264负载
ref = std::make_shared<RtpReceiverImp>(90000, [this](RtpPacket::Ptr rtp) { onRtpSorted(std::move(rtp)); });
auto track = std::make_shared<H264Track>();
_interface->addTrack(track);
_rtp_decoder[pt] = Factory::getRtpDecoderByTrack(track);
} else if (pt == g711u_pt || pt == g711a_pt) {
// CodecG711U
// CodecG711A
ref = std::make_shared<RtpReceiverImp>(8000, [this](RtpPacket::Ptr rtp) { onRtpSorted(std::move(rtp)); });
auto track = std::make_shared<G711Track>(pt == g711u_pt ? CodecG711U : CodecG711A, 8000, 1, 16);
_interface->addTrack(track);
_rtp_decoder[pt] = Factory::getRtpDecoderByTrack(track);
} else {
if (pt != ts_pt && pt != ps_pt) {
WarnL << "rtp payload type未识别(" << (int)pt << "),已按ts或ps负载处理";
switch (pt) {
case Rtsp::PT_PCMA:
case Rtsp::PT_PCMU: {
// CodecG711U or CodecG711A
ref = std::make_shared<RtpReceiverImp>(8000, [this](RtpPacket::Ptr rtp) { onRtpSorted(std::move(rtp)); });
auto track = std::make_shared<G711Track>(pt == Rtsp::PT_PCMU ? CodecG711U : CodecG711A, 8000, 1, 16);
_interface->addTrack(track);
_rtp_decoder[pt] = Factory::getRtpDecoderByTrack(track);
break;
}
ref = std::make_shared<RtpReceiverImp>(90000, [this](RtpPacket::Ptr rtp) { onRtpSorted(std::move(rtp)); });
// ts或ps负载
_rtp_decoder[pt] = std::make_shared<CommonRtpDecoder>(CodecInvalid, 32 * 1024);
// 设置dump目录
GET_CONFIG(string, dump_dir, RtpProxy::kDumpDir);
if (!dump_dir.empty()) {
auto save_path = File::absolutePath(_media_info._streamid + ".mp2", dump_dir);
_save_file_ps.reset(File::create_file(save_path.data(), "wb"), [](FILE *fp) {
if (fp) {
fclose(fp);
case Rtsp::PT_JPEG: {
// mjpeg
ref = std::make_shared<RtpReceiverImp>(90000, [this](RtpPacket::Ptr rtp) { onRtpSorted(std::move(rtp)); });
auto track = std::make_shared<JPEGTrack>();
_interface->addTrack(track);
_rtp_decoder[pt] = Factory::getRtpDecoderByTrack(track);
break;
}
default: {
if (pt == opus_pt) {
// opus负载
ref = std::make_shared<RtpReceiverImp>(48000, [this](RtpPacket::Ptr rtp) { onRtpSorted(std::move(rtp)); });
auto track = std::make_shared<OpusTrack>();
_interface->addTrack(track);
_rtp_decoder[pt] = Factory::getRtpDecoderByTrack(track);
} else if (pt == h265_pt) {
// H265负载
ref = std::make_shared<RtpReceiverImp>(90000, [this](RtpPacket::Ptr rtp) { onRtpSorted(std::move(rtp)); });
auto track = std::make_shared<H265Track>();
_interface->addTrack(track);
_rtp_decoder[pt] = Factory::getRtpDecoderByTrack(track);
} else if (pt == h264_pt) {
// H264负载
ref = std::make_shared<RtpReceiverImp>(90000, [this](RtpPacket::Ptr rtp) { onRtpSorted(std::move(rtp)); });
auto track = std::make_shared<H264Track>();
_interface->addTrack(track);
_rtp_decoder[pt] = Factory::getRtpDecoderByTrack(track);
} else {
if (pt != Rtsp::PT_MP2T && pt != ps_pt) {
WarnL << "rtp payload type未识别(" << (int)pt << "),已按ts或ps负载处理";
}
});
ref = std::make_shared<RtpReceiverImp>(90000, [this](RtpPacket::Ptr rtp) { onRtpSorted(std::move(rtp)); });
// ts或ps负载
_rtp_decoder[pt] = std::make_shared<CommonRtpDecoder>(CodecInvalid, 32 * 1024);
// 设置dump目录
GET_CONFIG(string, dump_dir, RtpProxy::kDumpDir);
if (!dump_dir.empty()) {
auto save_path = File::absolutePath(_media_info._streamid + ".mpeg", dump_dir);
_save_file_ps.reset(File::create_file(save_path.data(), "wb"), [](FILE *fp) {
if (fp) {
fclose(fp);
}
});
}
}
break;
}
}
// 设置frame回调

View File

@ -187,6 +187,10 @@ void RtpProcess::setStopCheckRtp(bool is_check){
}
}
void RtpProcess::setOnlyAudio(bool only_audio){
_only_audio = only_audio;
}
void RtpProcess::onDetach() {
if (_on_detach) {
_on_detach();
@ -247,6 +251,9 @@ void RtpProcess::emitOnPublish() {
strong_self->_media_info._app,
strong_self->_media_info._streamid,0.0f,
option);
if (strong_self->_only_audio) {
strong_self->_muxer->setOnlyAudio();
}
strong_self->_muxer->setMediaListener(strong_self);
strong_self->doCachedFunc();
InfoP(strong_self) << "允许RTP推流";

View File

@ -57,6 +57,12 @@ public:
*/
void setStopCheckRtp(bool is_check=false);
/**
* track
* inputRtp前调用此方法
*/
void setOnlyAudio(bool only_audio);
/**
* flush输出缓存
*/
@ -87,6 +93,7 @@ private:
void doCachedFunc();
private:
bool _only_audio = false;
uint64_t _dts = 0;
uint64_t _total_bytes = 0;
std::unique_ptr<sockaddr_storage> _addr;

View File

@ -10,6 +10,7 @@
#if defined(ENABLE_RTPPROXY)
#include "RtpSender.h"
#include "RtpSession.h"
#include "Rtsp/RtspSession.h"
#include "Thread/WorkThreadPool.h"
#include "Util/uv_errno.h"
@ -65,7 +66,8 @@ void RtpSender::startSend(const MediaSourceEvent::SendRtpArgs &args, const funct
is_wait = false;
}
// tcp服务器默认开启5秒
auto delay_task = _poller->doDelayTask(_args.tcp_passive_close_delay_ms, [tcp_listener, cb,is_wait]() mutable {
auto delay = _args.tcp_passive_close_delay_ms ? _args.tcp_passive_close_delay_ms : 5000;
auto delay_task = _poller->doDelayTask(delay, [tcp_listener, cb, is_wait]() mutable {
if (is_wait) {
cb(0, SockException(Err_timeout, "wait tcp connection timeout"));
}
@ -213,6 +215,25 @@ void RtpSender::onConnect(){
}
//连接建立成功事件
weak_ptr<RtpSender> weak_self = shared_from_this();
if (!_args.recv_stream_id.empty()) {
mINI ini;
ini[RtpSession::kStreamID] = _args.recv_stream_id;
_rtp_session = std::make_shared<RtpSession>(_socket_rtp);
_rtp_session->setParams(ini);
_socket_rtp->setOnRead([weak_self](const Buffer::Ptr &buf, struct sockaddr *addr, int addr_len) {
auto strong_self = weak_self.lock();
if (!strong_self) {
return;
}
try {
strong_self->_rtp_session->onRecv(buf);
} catch (std::exception &ex){
SockException err(toolkit::Err_shutdown, ex.what());
strong_self->_rtp_session->shutdown(err);
}
});
}
_socket_rtp->setOnErr([weak_self](const SockException &err) {
auto strong_self = weak_self.lock();
if (strong_self) {

View File

@ -19,6 +19,8 @@
namespace mediakit{
class RtpSession;
//rtp发送客户端支持发送GB28181协议
class RtpSender final : public MediaSinkInterface, public std::enable_shared_from_this<RtpSender>{
public:
@ -85,6 +87,7 @@ private:
std::shared_ptr<RtcpContext> _rtcp_context;
toolkit::Ticker _rtcp_send_ticker;
toolkit::Ticker _rtcp_recv_ticker;
std::shared_ptr<RtpSession> _rtp_session;
std::function<void(const toolkit::SockException &ex)> _on_close;
};

View File

@ -42,11 +42,12 @@ public:
}
}
void setRtpServerInfo(uint16_t local_port,RtpServer::TcpMode mode,bool re_use_port,uint32_t ssrc){
void setRtpServerInfo(uint16_t local_port,RtpServer::TcpMode mode,bool re_use_port,uint32_t ssrc, bool only_audio) {
_local_port = local_port;
_tcp_mode = mode;
_re_use_port = re_use_port;
_ssrc = ssrc;
_only_audio = only_audio;
}
void setOnDetach(function<void()> cb) {
@ -60,6 +61,7 @@ public:
void onRecvRtp(const Socket::Ptr &sock, const Buffer::Ptr &buf, struct sockaddr *addr) {
if (!_process) {
_process = RtpSelector::Instance().getProcess(_stream_id, true);
_process->setOnlyAudio(_only_audio);
_process->setOnDetach(std::move(_on_detach));
cancelDelayTask();
}
@ -137,6 +139,7 @@ private:
private:
bool _re_use_port = false;
bool _only_audio = false;
uint16_t _local_port = 0;
uint32_t _ssrc = 0;
RtpServer::TcpMode _tcp_mode = RtpServer::NONE;
@ -150,7 +153,7 @@ private:
EventPoller::DelayTask::Ptr _delay_task;
};
void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_mode, const char *local_ip, bool re_use_port, uint32_t ssrc) {
void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_mode, const char *local_ip, bool re_use_port, uint32_t ssrc, bool only_audio) {
//创建udp服务器
Socket::Ptr rtp_socket = Socket::createSocket(nullptr, true);
Socket::Ptr rtcp_socket = Socket::createSocket(nullptr, true);
@ -176,6 +179,7 @@ void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_
tcp_server = std::make_shared<TcpServer>(rtp_socket->getPoller());
(*tcp_server)[RtpSession::kStreamID] = stream_id;
(*tcp_server)[RtpSession::kSSRC] = ssrc;
(*tcp_server)[RtpSession::kOnlyAudio] = only_audio;
if (tcp_mode == PASSIVE) {
tcp_server->start<RtpSession>(rtp_socket->get_local_port(), local_ip);
} else if (stream_id.empty()) {
@ -191,13 +195,19 @@ void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_
//指定了流id那么一个端口一个流(不管是否包含多个ssrc的多个流绑定rtp源后会筛选掉ip端口不匹配的流)
helper = std::make_shared<RtcpHelper>(std::move(rtcp_socket), stream_id);
helper->startRtcp();
helper->setRtpServerInfo(local_port,tcp_mode,re_use_port,ssrc);
rtp_socket->setOnRead([rtp_socket, helper, ssrc](const Buffer::Ptr &buf, struct sockaddr *addr, int addr_len) {
helper->setRtpServerInfo(local_port, tcp_mode, re_use_port, ssrc, only_audio);
bool bind_peer_addr = false;
rtp_socket->setOnRead([rtp_socket, helper, ssrc, bind_peer_addr](const Buffer::Ptr &buf, struct sockaddr *addr, int addr_len) mutable {
RtpHeader *header = (RtpHeader *)buf->data();
auto rtp_ssrc = ntohl(header->ssrc);
if (ssrc && rtp_ssrc != ssrc) {
WarnL << "ssrc不匹配,rtp已丢弃:" << rtp_ssrc << " != " << ssrc;
} else {
if (!bind_peer_addr) {
//绑定对方ip+端口防止多个设备或一个设备多次推流从而日志报ssrc不匹配问题
bind_peer_addr = true;
rtp_socket->bindPeerAddr(addr, addr_len);
}
helper->onRecvRtp(rtp_socket, buf, addr);
}
});
@ -205,6 +215,7 @@ void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_
#if 1
//单端口多线程接收多个流根据ssrc区分流
udp_server = std::make_shared<UdpServer>(rtp_socket->getPoller());
(*udp_server)[RtpSession::kOnlyAudio] = only_audio;
udp_server->start<RtpSession>(rtp_socket->get_local_port(), local_ip);
rtp_socket = nullptr;
#else

View File

@ -44,7 +44,7 @@ public:
* @param ssrc ssrc
*/
void start(uint16_t local_port, const std::string &stream_id = "", TcpMode tcp_mode = PASSIVE,
const char *local_ip = "::", bool re_use_port = true, uint32_t ssrc = 0);
const char *local_ip = "::", bool re_use_port = true, uint32_t ssrc = 0, bool only_audio = false);
/**
* tcp服务(tcp主动模式)
@ -75,6 +75,7 @@ protected:
std::shared_ptr<RtcpHelper> _rtcp_helper;
std::function<void()> _on_cleanup;
bool _only_audio = false;
//用于tcp主动模式
TcpMode _tcp_mode = NONE;
};

View File

@ -22,10 +22,16 @@ namespace mediakit{
const string RtpSession::kStreamID = "stream_id";
const string RtpSession::kSSRC = "ssrc";
const string RtpSession::kOnlyAudio = "only_audio";
void RtpSession::attachServer(const Server &server) {
_stream_id = const_cast<Server &>(server)[kStreamID];
_ssrc = const_cast<Server &>(server)[kSSRC];
setParams(const_cast<Server &>(server));
}
void RtpSession::setParams(mINI &ini) {
_stream_id = ini[kStreamID];
_ssrc = ini[kSSRC];
_only_audio = ini[kOnlyAudio];
}
RtpSession::RtpSession(const Socket::Ptr &sock) : Session(sock) {
@ -97,6 +103,7 @@ void RtpSession::onRtpPacket(const char *data, size_t len) {
}
//tcp情况下一个tcp链接只可能是一路流不需要通过多个ssrc来区分所以不需要频繁getProcess
_process = RtpSelector::Instance().getProcess(_stream_id, true);
_process->setOnlyAudio(_only_audio);
_process->setDelegate(dynamic_pointer_cast<RtpSession>(shared_from_this()));
}
try {

View File

@ -24,12 +24,14 @@ class RtpSession : public toolkit::Session, public RtpSplitter, public MediaSour
public:
static const std::string kStreamID;
static const std::string kSSRC;
static const std::string kOnlyAudio;
RtpSession(const toolkit::Socket::Ptr &sock);
~RtpSession() override;
void onRecv(const toolkit::Buffer::Ptr &) override;
void onError(const toolkit::SockException &err) override;
void onManager() override;
void setParams(toolkit::mINI &ini);
void attachServer(const toolkit::Server &server) override;
protected:
@ -44,6 +46,7 @@ private:
bool _is_udp = false;
bool _search_rtp = false;
bool _search_rtp_finished = false;
bool _only_audio = false;
uint32_t _ssrc = 0;
toolkit::Ticker _ticker;
std::string _stream_id;

View File

@ -99,13 +99,13 @@ RtpMultiCaster::~RtpMultiCaster() {
DebugL;
}
RtpMultiCaster::RtpMultiCaster(SocketHelper &helper, const string &local_ip, const string &vhost, const string &app, const string &stream) {
RtpMultiCaster::RtpMultiCaster(SocketHelper &helper, const string &local_ip, const string &vhost, const string &app, const string &stream, uint32_t multicast_ip, uint16_t video_port, uint16_t audio_port) {
auto src = dynamic_pointer_cast<RtspMediaSource>(MediaSource::find(RTSP_SCHEMA, vhost, app, stream));
if (!src) {
auto err = StrPrinter << "未找到媒体源:" << vhost << " " << app << " " << stream << endl;
throw std::runtime_error(err);
}
_multicast_ip = MultiCastAddressMaker::Instance().obtain();
_multicast_ip = (multicast_ip) ? make_shared<uint32_t>(multicast_ip) : MultiCastAddressMaker::Instance().obtain();
if (!_multicast_ip) {
throw std::runtime_error("获取组播地址失败");
}
@ -113,7 +113,7 @@ RtpMultiCaster::RtpMultiCaster(SocketHelper &helper, const string &local_ip, con
for (auto i = 0; i < 2; ++i) {
//创建udp socket, 数组下标为TrackType
_udp_sock[i] = helper.createSocket();
if (!_udp_sock[i]->bindUdpSock(0, local_ip.data())) {
if (!_udp_sock[i]->bindUdpSock((i == TrackVideo) ? video_port : audio_port, local_ip.data())) {
auto err = StrPrinter << "绑定UDP端口失败:" << local_ip << endl;
throw std::runtime_error(err);
}
@ -171,11 +171,11 @@ string RtpMultiCaster::getMultiCasterIP() {
return SockUtil::inet_ntoa(addr);
}
RtpMultiCaster::Ptr RtpMultiCaster::get(SocketHelper &helper, const string &local_ip, const string &vhost, const string &app, const string &stream) {
static auto on_create = [](SocketHelper &helper, const string &local_ip, const string &vhost, const string &app, const string &stream){
RtpMultiCaster::Ptr RtpMultiCaster::get(SocketHelper &helper, const string &local_ip, const string &vhost, const string &app, const string &stream, uint32_t multicast_ip, uint16_t video_port, uint16_t audio_port) {
static auto on_create = [](SocketHelper &helper, const string &local_ip, const string &vhost, const string &app, const string &stream, uint32_t multicast_ip, uint16_t video_port, uint16_t audio_port){
try {
auto poller = helper.getPoller();
auto ret = RtpMultiCaster::Ptr(new RtpMultiCaster(helper, local_ip, vhost, app, stream), [poller](RtpMultiCaster *ptr) {
auto ret = RtpMultiCaster::Ptr(new RtpMultiCaster(helper, local_ip, vhost, app, stream, multicast_ip, video_port, audio_port), [poller](RtpMultiCaster *ptr) {
poller->async([ptr]() {
delete ptr;
});
@ -194,12 +194,12 @@ RtpMultiCaster::Ptr RtpMultiCaster::get(SocketHelper &helper, const string &loca
lock_guard<recursive_mutex> lck(g_mtx);
auto it = g_multi_caster_map.find(strKey);
if (it == g_multi_caster_map.end()) {
return on_create(helper, local_ip, vhost, app, stream);
return on_create(helper, local_ip, vhost, app, stream, multicast_ip, video_port, audio_port);
}
auto ret = it->second.lock();
if (!ret) {
g_multi_caster_map.erase(it);
return on_create(helper, local_ip, vhost, app, stream);
return on_create(helper, local_ip, vhost, app, stream, multicast_ip, video_port, audio_port);
}
return ret;
}

View File

@ -46,14 +46,14 @@ public:
~RtpMultiCaster();
static Ptr get(toolkit::SocketHelper &helper, const std::string &local_ip, const std::string &vhost, const std::string &app, const std::string &stream);
static Ptr get(toolkit::SocketHelper &helper, const std::string &local_ip, const std::string &vhost, const std::string &app, const std::string &stream, uint32_t multicast_ip = 0, uint16_t video_port = 0, uint16_t audio_port = 0);
void setDetachCB(void *listener,const onDetach &cb);
std::string getMultiCasterIP();
uint16_t getMultiCasterPort(TrackType trackType);
private:
RtpMultiCaster(toolkit::SocketHelper &helper, const std::string &local_ip, const std::string &vhost, const std::string &app, const std::string &stream);
RtpMultiCaster(toolkit::SocketHelper &helper, const std::string &local_ip, const std::string &vhost, const std::string &app, const std::string &stream, uint32_t multicast_ip, uint16_t video_port, uint16_t audio_port);
private:
std::recursive_mutex _mtx;

View File

@ -48,7 +48,7 @@ typedef enum {
XX(DVI4_22050, TrackAudio, 17, 22050, 1, CodecInvalid) \
XX(G729, TrackAudio, 18, 8000, 1, CodecInvalid) \
XX(CelB, TrackVideo, 25, 90000, 1, CodecInvalid) \
XX(JPEG, TrackVideo, 26, 90000, 1, CodecInvalid) \
XX(JPEG, TrackVideo, 26, 90000, 1, CodecJPEG) \
XX(nv, TrackVideo, 28, 90000, 1, CodecInvalid) \
XX(H261, TrackVideo, 31, 90000, 1, CodecInvalid) \
XX(MPV, TrackVideo, 32, 90000, 1, CodecInvalid) \

View File

@ -307,7 +307,7 @@ void RtspSession::handleReq_ANNOUNCE(const Parser &parser) {
void RtspSession::handleReq_RECORD(const Parser &parser){
if (_sdp_track.empty() || parser["Session"] != _sessionid) {
send_SessionNotFound();
throw SockException(Err_shutdown, _sdp_track.empty() ? "can not find any availabe track when record" : "session not found when record");
throw SockException(Err_shutdown, _sdp_track.empty() ? "can not find any available track when record" : "session not found when record");
}
_StrPrinter rtp_info;
@ -721,7 +721,7 @@ void RtspSession::handleReq_Setup(const Parser &parser) {
break;
case Rtsp::RTP_MULTICAST: {
if(!_multicaster){
_multicaster = RtpMultiCaster::get(*this, get_local_ip(), _media_info._vhost, _media_info._app, _media_info._streamid);
_multicaster = RtpMultiCaster::get(*this, get_local_ip(), _media_info._vhost, _media_info._app, _media_info._streamid, _multicast_ip, _multicast_video_port, _multicast_audio_port);
if (!_multicaster) {
send_NotAcceptable();
throw SockException(Err_shutdown, "can not get a available udp multicast socket");

View File

@ -72,6 +72,24 @@ protected:
//收到RTCP包回调
virtual void onRtcpPacket(int track_idx, SdpTrack::Ptr &track, const char *data, size_t len);
//回复客户端
virtual bool sendRtspResponse(const std::string &res_code, const StrCaseMap &header = StrCaseMap(), const std::string &sdp = "", const char *protocol = "RTSP/1.0");
protected:
//url解析后保存的相关信息
MediaInfo _media_info;
////////RTP over udp_multicast////////
//共享的rtp组播对象
RtpMultiCaster::Ptr _multicaster;
//Session号
std::string _sessionid;
uint32_t _multicast_ip = 0;
uint16_t _multicast_video_port = 0;
uint16_t _multicast_audio_port = 0;
private:
//处理options方法,获取服务器能力
void handleReq_Options(const Parser &parser);
@ -130,7 +148,6 @@ private:
void updateRtcpContext(const RtpPacket::Ptr &rtp);
//回复客户端
bool sendRtspResponse(const std::string &res_code, const std::initializer_list<std::string> &header, const std::string &sdp = "", const char *protocol = "RTSP/1.0");
bool sendRtspResponse(const std::string &res_code, const StrCaseMap &header = StrCaseMap(), const std::string &sdp = "", const char *protocol = "RTSP/1.0");
//设置socket标志
void setSocketFlags();
@ -149,8 +166,6 @@ private:
uint64_t _bytes_usage = 0;
//ContentBase
std::string _content_base;
//Session号
std::string _sessionid;
//记录是否需要rtsp专属鉴权防止重复触发事件
std::string _rtsp_realm;
//登录认证
@ -158,8 +173,6 @@ private:
//用于判断客户端是否超时
toolkit::Ticker _alive_ticker;
//url解析后保存的相关信息
MediaInfo _media_info;
//rtsp推流相关绑定的源
RtspMediaSourceImp::Ptr _push_src;
//推流器所有权
@ -180,9 +193,6 @@ private:
toolkit::Socket::Ptr _rtcp_socks[2];
//标记是否收到播放的udp打洞包,收到播放的udp打洞包后才能知道其外网udp端口号
std::unordered_set<int> _udp_connected_flags;
////////RTP over udp_multicast////////
//共享的rtp组播对象
RtpMultiCaster::Ptr _multicaster;
////////RTSP over HTTP ////////
//quicktime 请求rtsp会产生两次tcp连接
//一次发送 get 一次发送post需要通过x-sessioncookie关联起来

View File

@ -38,6 +38,7 @@ set(LINK_LIBRARIES ${SRTP_LIBRARIES})
set(COMPILE_DEFINITIONS)
set(INCLUDE_DIRECTORIES)
if(ENABLE_SCTP)
find_package(SCTP QUIET)
if(SCTP_FOUND)
message(STATUS "found library: ${SCTP_INCLUDE_DIRS} ${SCTP_LIBRARIES}")
@ -48,6 +49,7 @@ if(SCTP_FOUND)
list(APPEND LINK_LIBRARIES ${SCTP_LIBRARIES})
message(STATUS "WebRTC datachannel 功能已打开")
endif()
endif()
file(GLOB WEBRTC_SRC_LIST
${CMAKE_CURRENT_SOURCE_DIR}/*.cpp

View File

@ -33,15 +33,19 @@ WebRtcPlayer::WebRtcPlayer(const EventPoller::Ptr &poller,
bool preferred_tcp) : WebRtcTransportImp(poller,preferred_tcp) {
_media_info = info;
_play_src = src;
CHECK(_play_src);
CHECK(src);
}
void WebRtcPlayer::onStartWebRTC() {
CHECK(_play_src);
auto playSrc = _play_src.lock();
if(!playSrc){
onShutdown(SockException(Err_shutdown, "rtsp media source was shutdown"));
return ;
}
WebRtcTransportImp::onStartWebRTC();
if (canSendRtp()) {
_play_src->pause(false);
_reader = _play_src->getRing()->attach(getPoller(), true);
playSrc->pause(false);
_reader = playSrc->getRing()->attach(getPoller(), true);
weak_ptr<WebRtcPlayer> weak_self = static_pointer_cast<WebRtcPlayer>(shared_from_this());
weak_ptr<Session> weak_session = getSession();
_reader->setGetInfoCB([weak_session]() { return weak_session.lock(); });
@ -64,8 +68,6 @@ void WebRtcPlayer::onStartWebRTC() {
strong_self->onShutdown(SockException(Err_shutdown, "rtsp ring buffer detached"));
});
}
//使用完毕后,释放强引用,这样确保推流器断开后能及时注销媒体
_play_src = nullptr;
}
void WebRtcPlayer::onDestory() {
WebRtcTransportImp::onDestory();
@ -86,11 +88,14 @@ void WebRtcPlayer::onDestory() {
}
void WebRtcPlayer::onRtcConfigure(RtcConfigure &configure) const {
CHECK(_play_src);
auto playSrc = _play_src.lock();
if(!playSrc){
return ;
}
WebRtcTransportImp::onRtcConfigure(configure);
//这是播放
configure.audio.direction = configure.video.direction = RtpDirection::sendonly;
configure.setPlayRtspInfo(_play_src->getSdp());
configure.setPlayRtspInfo(playSrc->getSdp());
}
}// namespace mediakit

View File

@ -36,7 +36,7 @@ private:
//媒体相关元数据
MediaInfo _media_info;
//播放的rtsp源
RtspMediaSource::Ptr _play_src;
std::weak_ptr<RtspMediaSource> _play_src;
//播放rtsp源的reader对象
RtspMediaSource::RingType::RingReader::Ptr _reader;
};