Merge pull request #32 from ZLMediaKit/master

[pull] master from ZLMediaKit:master
This commit is contained in:
waken 2024-07-12 16:15:58 +08:00 committed by GitHub
commit 6552dffec3
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
120 changed files with 4075 additions and 2559 deletions

View File

@ -23,3 +23,37 @@ jobs:
- name: 编译
run: cd Android && ./gradlew build
- name: 设置环境变量
run: |
echo "BRANCH=$(echo ${GITHUB_REF#refs/heads/} | tr -s "/\?%*:|\"<>" "_")" >> $GITHUB_ENV
echo "BRANCH2=$(echo ${GITHUB_REF#refs/heads/} )" >> $GITHUB_ENV
echo "DATE=$(date +%Y-%m-%d)" >> $GITHUB_ENV
- name: 打包二进制
id: upload
uses: actions/upload-artifact@v4
with:
name: ${{ github.workflow }}_${{ env.BRANCH }}_${{ env.DATE }}
path: Android/app/build/outputs/apk/debug/*
if-no-files-found: error
retention-days: 90
- name: issue评论
if: github.event_name != 'pull_request' && github.ref != 'refs/heads/feature/test'
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
github.rest.issues.createComment({
issue_number: 483,
owner: context.repo.owner,
repo: context.repo.repo,
body: '- 下载地址: [${{ github.workflow }}_${{ env.BRANCH }}_${{ env.DATE }}](${{ steps.upload.outputs.artifact-url }})\n'
+ '- 分支: ${{ env.BRANCH2 }}\n'
+ '- git hash: ${{ github.sha }} \n'
+ '- 编译日期: ${{ env.DATE }}\n'
+ '- 编译记录: [${{ github.run_id }}](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})\n'
+ '- 开启特性: 未开启openssl/webrtc/datachannel等功能\n'
+ '- 打包ci名: ${{ github.workflow }}\n'
})

View File

@ -7,12 +7,6 @@ on:
- "feature/*"
- "release/*"
pull_request:
branches:
- "master"
- "feature/*"
- "release/*"
env:
# Use docker.io for Docker Hub if empty
REGISTRY: docker.io
@ -21,7 +15,7 @@ env:
jobs:
build:
runs-on: ubuntu-latest
runs-on: ubuntu-20.04
permissions:
contents: read
packages: write
@ -39,7 +33,6 @@ jobs:
# Install the cosign tool except on PR
# https://github.com/sigstore/cosign-installer
- name: Install cosign
if: github.event_name != 'pull_request'
uses: sigstore/cosign-installer@d572c9c13673d2e0a26fabf90b5748f36886883f
- name: Set up QEMU
@ -53,7 +46,6 @@ jobs:
# Login against a Docker registry except on PR
# https://github.com/docker/login-action
- name: Log into registry ${{ env.REGISTRY }}
if: github.event_name != 'pull_request'
uses: docker/login-action@28218f9b04b4f3f62068d7b6ce6ca5b26e35336c
with:
registry: ${{ env.REGISTRY }}
@ -71,6 +63,7 @@ jobs:
# Build and push Docker image with Buildx (don't push on PR)
# https://github.com/docker/build-push-action
- name: Build and push Docker image
if: github.event_name != 'pull_request' && github.ref != 'refs/heads/feature/test'
id: build-and-push
uses: docker/build-push-action@ac9327eae2b366085ac7f6a2d02df8aa8ead720a
with:

View File

@ -13,9 +13,6 @@ jobs:
- name: 下载submodule源码
run: mv -f .gitmodules_github .gitmodules && git submodule sync && git submodule update --init
- name: apt-get安装依赖库(非必选)
run: sudo apt-get update && sudo apt-get install -y cmake libssl-dev libsdl-dev libavcodec-dev libavutil-dev libswscale-dev libresample-dev libusrsctp-dev
- name: 下载 SRTP
uses: actions/checkout@v2
with:
@ -24,13 +21,115 @@ jobs:
ref: v2.3.0
path: 3rdpart/libsrtp
- name: 编译 SRTP
run: cd 3rdpart/libsrtp && ./configure --enable-openssl && make -j4 && sudo make install
- name: 下载 openssl
uses: actions/checkout@v2
with:
repository: openssl/openssl
fetch-depth: 1
ref: OpenSSL_1_1_1
path: 3rdpart/openssl
- name: 编译
run: mkdir -p linux_build && cd linux_build && cmake .. -DENABLE_WEBRTC=true -DENABLE_FFMPEG=true && make -j $(nproc)
- name: 运行MediaServer
run: pwd && cd release/linux/Debug && sudo ./MediaServer -d &
- name: 下载 usrsctp
uses: actions/checkout@v2
with:
repository: sctplab/usrsctp
fetch-depth: 1
ref: 0.9.5.0
path: 3rdpart/usrsctp
- name: 启动 Docker 容器, 在Docker 容器中执行脚本
run: |
docker pull centos:7
docker run -v $(pwd):/root -w /root --rm centos:7 sh -c "
#!/bin/bash
set -x
# Backup original CentOS-Base.repo file
cp /etc/yum.repos.d/CentOS-Base.repo /etc/yum.repos.d/CentOS-Base.repo.backup
# Define new repository configuration
cat <<EOF > /etc/yum.repos.d/CentOS-Base.repo
[base]
name=CentOS-7 - Base - mirrors.aliyun.com
baseurl=http://mirrors.aliyun.com/centos/7/os/x86_64/
gpgcheck=1
gpgkey=http://mirrors.aliyun.com/centos/RPM-GPG-KEY-CentOS-7
[updates]
name=CentOS-7 - Updates - mirrors.aliyun.com
baseurl=http://mirrors.aliyun.com/centos/7/updates/x86_64/
gpgcheck=1
gpgkey=http://mirrors.aliyun.com/centos/RPM-GPG-KEY-CentOS-7
EOF
# Clean yum cache and recreate it
yum clean all
yum makecache
echo \"CentOS 7 软件源已成功切换\"
yum install -y git wget gcc gcc-c++ make
mkdir -p /root/install
cd 3rdpart/openssl
./config no-shared --prefix=/root/install
make -j $(nproc)
make install
cd ../../
wget https://github.com/Kitware/CMake/releases/download/v3.29.5/cmake-3.29.5.tar.gz
tar -xf cmake-3.29.5.tar.gz
cd cmake-3.29.5
OPENSSL_ROOT_DIR=/root/install ./configure
make -j $(nproc)
make install
cd ..
cd 3rdpart/usrsctp
mkdir build
cd build
cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_POSITION_INDEPENDENT_CODE=ON ..
make -j $(nproc)
make install
cd ../../../
cd 3rdpart/libsrtp && ./configure --enable-openssl --with-openssl-dir=/root/install && make -j $(nproc) && make install
cd ../../
mkdir -p linux_build && cd linux_build && cmake .. -DOPENSSL_ROOT_DIR=/root/install -DCMAKE_BUILD_TYPE=Release && make -j $(nproc)
"
- name: 设置环境变量
run: |
echo "BRANCH=$(echo ${GITHUB_REF#refs/heads/} | tr -s "/\?%*:|\"<>" "_")" >> $GITHUB_ENV
echo "BRANCH2=$(echo ${GITHUB_REF#refs/heads/} )" >> $GITHUB_ENV
echo "DATE=$(date +%Y-%m-%d)" >> $GITHUB_ENV
- name: 打包二进制
id: upload
uses: actions/upload-artifact@v4
with:
name: ${{ github.workflow }}_${{ env.BRANCH }}_${{ env.DATE }}
path: release/*
if-no-files-found: error
retention-days: 90
- name: issue评论
if: github.event_name != 'pull_request' && github.ref != 'refs/heads/feature/test'
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
github.rest.issues.createComment({
issue_number: 483,
owner: context.repo.owner,
repo: context.repo.repo,
body: '- 下载地址: [${{ github.workflow }}_${{ env.BRANCH }}_${{ env.DATE }}](${{ steps.upload.outputs.artifact-url }})\n'
+ '- 分支: ${{ env.BRANCH2 }}\n'
+ '- git hash: ${{ github.sha }} \n'
+ '- 编译日期: ${{ env.DATE }}\n'
+ '- 编译记录: [${{ github.run_id }}](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})\n'
+ '- 打包ci名: ${{ github.workflow }}\n'
+ '- 开启特性: openssl/webrtc/datachannel\n'
+ '- 说明: 本二进制在centos7(x64)上编译,请确保您的机器系统不低于此版本\n'
})

View File

@ -13,27 +13,54 @@ jobs:
- name: 下载submodule源码
run: mv -f .gitmodules_github .gitmodules && git submodule sync && git submodule update --init
# - name: 安装brew
# run: ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
#
# - name: brew安装依赖库(非必选)
# run: brew update && brew install cmake openssl sdl2 ffmpeg
# - name: 下载 SRTP
# uses: actions/checkout@v2
# with:
# repository: cisco/libsrtp
# fetch-depth: 1
# ref: v2.3.0
# path: 3rdpart/libsrtp
#
# - name: 编译 SRTP
# run: cd 3rdpart/libsrtp && ./configure --enable-openssl && make -j4 && sudo make install
- name: 配置 vcpkg
uses: lukka/run-vcpkg@v7
with:
vcpkgDirectory: '${{github.workspace}}/vcpkg'
vcpkgTriplet: arm64-osx
# 2024.06.01
vcpkgGitCommitId: '47364fbc300756f64f7876b549d9422d5f3ec0d3'
vcpkgArguments: 'openssl libsrtp[openssl] usrsctp'
- name: 编译
run: mkdir -p build && cd build && cmake .. && make -j $(nproc)
uses: lukka/run-cmake@v3
with:
useVcpkgToolchainFile: true
buildDirectory: '${{github.workspace}}/build'
cmakeAppendedArgs: ''
cmakeBuildType: 'Release'
- name: 运行MediaServer
run: pwd && cd release/darwin/Debug && sudo ./MediaServer -d &
- name: 设置环境变量
run: |
echo "BRANCH=$(echo ${GITHUB_REF#refs/heads/} | tr -s "/\?%*:|\"<>" "_")" >> $GITHUB_ENV
echo "BRANCH2=$(echo ${GITHUB_REF#refs/heads/} )" >> $GITHUB_ENV
echo "DATE=$(date +%Y-%m-%d)" >> $GITHUB_ENV
- name: 打包二进制
id: upload
uses: actions/upload-artifact@v4
with:
name: ${{ github.workflow }}_${{ env.BRANCH }}_${{ env.DATE }}
path: release/*
if-no-files-found: error
retention-days: 90
- name: issue评论
if: github.event_name != 'pull_request' && github.ref != 'refs/heads/feature/test'
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
github.rest.issues.createComment({
issue_number: 483,
owner: context.repo.owner,
repo: context.repo.repo,
body: '- 下载地址: [${{ github.workflow }}_${{ env.BRANCH }}_${{ env.DATE }}](${{ steps.upload.outputs.artifact-url }})\n'
+ '- 分支: ${{ env.BRANCH2 }}\n'
+ '- git hash: ${{ github.sha }} \n'
+ '- 编译日期: ${{ env.DATE }}\n'
+ '- 编译记录: [${{ github.run_id }}](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})\n'
+ '- 打包ci名: ${{ github.workflow }}\n'
+ '- 开启特性: openssl/webrtc/datachannel\n'
+ '- 说明: 此二进制为arm64版本\n'
})

View File

@ -4,7 +4,7 @@ on: [pull_request]
jobs:
check:
runs-on: ubuntu-latest
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
with:

View File

@ -17,14 +17,52 @@ jobs:
with:
vcpkgDirectory: '${{github.workspace}}/vcpkg'
vcpkgTriplet: x64-windows-static
# 2021.05.12
vcpkgGitCommitId: '5568f110b509a9fd90711978a7cb76bae75bb092'
vcpkgArguments: 'openssl libsrtp'
# 2024.06.01
vcpkgGitCommitId: '47364fbc300756f64f7876b549d9422d5f3ec0d3'
vcpkgArguments: 'openssl libsrtp[openssl] usrsctp'
- name: 编译
uses: lukka/run-cmake@v3
with:
useVcpkgToolchainFile: true
buildDirectory: '${{github.workspace}}/build'
cmakeAppendedArgs: '-DCMAKE_ENABLE_WEBRTC:BOOL=ON'
cmakeBuildType: 'RelWithDebInfo'
cmakeAppendedArgs: ''
cmakeBuildType: 'Release'
- name: 设置环境变量
run: |
$dateString = Get-Date -Format "yyyy-MM-dd"
$branch = $env:GITHUB_REF -replace "refs/heads/", "" -replace "[\\/\\\?\%\*:\|\x22<>]", "_"
$branch2 = $env:GITHUB_REF -replace "refs/heads/", ""
echo "BRANCH=$branch" >> $env:GITHUB_ENV
echo "BRANCH2=$branch2" >> $env:GITHUB_ENV
echo "DATE=$dateString" >> $env:GITHUB_ENV
- name: 打包二进制
id: upload
uses: actions/upload-artifact@v4
with:
name: ${{ github.workflow }}_${{ env.BRANCH }}_${{ env.DATE }}
path: release/*
if-no-files-found: error
retention-days: 90
- name: issue评论
if: github.event_name != 'pull_request' && github.ref != 'refs/heads/feature/test'
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
github.rest.issues.createComment({
issue_number: 483,
owner: context.repo.owner,
repo: context.repo.repo,
body: '- 下载地址: [${{ github.workflow }}_${{ env.BRANCH }}_${{ env.DATE }}](${{ steps.upload.outputs.artifact-url }})\n'
+ '- 分支: ${{ env.BRANCH2 }}\n'
+ '- git hash: ${{ github.sha }} \n'
+ '- 编译日期: ${{ env.DATE }}\n'
+ '- 编译记录: [${{ github.run_id }}](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})\n'
+ '- 打包ci名: ${{ github.workflow }}\n'
+ '- 开启特性: openssl/webrtc/datachannel\n'
+ '- 说明: 此二进制为x64版本\n'
})

@ -1 +1 @@
Subproject commit 1e1a990783c6c09452419c0aaa6d72ce02d0202b
Subproject commit fb695d203421d906c473018022a736fa4a7a47e4

View File

@ -13,22 +13,18 @@
#include <stdio.h>
#ifndef NDEBUG
#ifdef assert
#undef assert
#endif//assert
#ifdef assert
#undef assert
#endif//assert
#ifdef __cplusplus
extern "C" {
#endif
extern void Assert_Throw(int failed, const char *exp, const char *func, const char *file, int line, const char *str);
#ifdef __cplusplus
}
#endif
#ifdef __cplusplus
extern "C" {
#endif
extern void Assert_Throw(int failed, const char *exp, const char *func, const char *file, int line, const char *str);
#ifdef __cplusplus
}
#endif
#define assert(exp) Assert_Throw(!(exp), #exp, __FUNCTION__, __FILE__, __LINE__, NULL)
#else
#define assert(e) ((void)0)
#endif//NDEBUG
#define assert(exp) Assert_Throw(!(exp), #exp, __FUNCTION__, __FILE__, __LINE__, NULL)
#endif //ZLMEDIAKIT_ASSERT_H

@ -1 +1 @@
Subproject commit 527c0f5117b489fda78fcd123d446370ddd9ec9a
Subproject commit cf83ebc62e65ae6f3b73bc5ebd06cb0b2da49fa5

View File

@ -448,10 +448,21 @@ endif()
if(WIN32)
update_cached_list(MK_LINK_LIBRARIES WS2_32 Iphlpapi shlwapi)
elseif(ANDROID)
update_cached_list(MK_LINK_LIBRARIES log)
elseif(NOT ANDROID OR IOS)
update_cached_list(MK_LINK_LIBRARIES pthread)
endif()
if(ENABLE_VIDEOSTACK)
if(ENABLE_FFMPEG AND ENABLE_X264)
message(STATUS "ENABLE_VIDEOSTACK defined")
update_cached_list(MK_COMPILE_DEFINITIONS ENABLE_VIDEOSTACK)
else()
message(WARNING "ENABLE_VIDEOSTACK requires ENABLE_FFMPEG and ENABLE_X264")
endif ()
endif ()
# ----------------------------------------------------------------------------
# Solution folders:
# ----------------------------------------------------------------------------

View File

@ -159,12 +159,15 @@
- 2、作为独立的流媒体服务器使用不想做c/c++开发的,可以参考 [restful api](https://github.com/ZLMediaKit/ZLMediaKit/wiki/MediaServer支持的HTTP-API) 和 [web hook](https://github.com/ZLMediaKit/ZLMediaKit/wiki/MediaServer支持的HTTP-HOOK-API ).
- 3、如果想做c/c++开发,添加业务逻辑增加功能,可以参考这里的[测试程序](https://github.com/ZLMediaKit/ZLMediaKit/tree/master/tests).
## 二进制文件下载
ZLMediaKit 采用 GitHub Actions 持续集成,自动编译、打包并上传编译产出包,请在[issue列表](https://github.com/ZLMediaKit/ZLMediaKit/issues/483)下载最新 SDK 库文件以及可执行文件。
## Docker 镜像
你可以从Docker Hub下载已经编译好的镜像并启动它
```bash
#此镜像为github持续集成自动编译推送,跟代码(master分支)保持最新状态
#此镜像为github action 持续集成自动编译推送,跟代码(master分支)保持最新状态
docker run -id -p 1935:1935 -p 8080:80 -p 8443:443 -p 8554:554 -p 10000:10000 -p 10000:10000/udp -p 8000:8000/udp -p 9000:9000/udp zlmediakit/zlmediakit:master
```
@ -188,6 +191,7 @@ bash build_docker_images.sh
- [jessibuca](https://github.com/langhuihui/jessibuca) 基于wasm支持H265的播放器
- [wsPlayer](https://github.com/v354412101/wsPlayer) 基于MSE的websocket-fmp4播放器
- [BXC_gb28181Player](https://github.com/any12345com/BXC_gb28181Player) C++开发的支持国标GB28181协议的视频流播放器
- [RTCPlayer](https://github.com/leo94666/RTCPlayer) 一个基于Android客户端的RTC播放器
- WEB管理网站
- [zlm_webassist](https://github.com/1002victor/zlm_webassist) 本项目配套的前后端分离web管理项目
@ -363,6 +367,7 @@ bash build_docker_images.sh
[jamesZHANG500](https://github.com/jamesZHANG500)
[weidelong](https://github.com/wdl1697454803)
[小强先生](https://github.com/linshangqiang)
[李之阳](https://github.com/leo94666)
同时感谢JetBrains对开源项目的支持,本项目使用CLion开发与调试

View File

@ -324,6 +324,10 @@ git submodule update --init
});
```
## Binary file download
ZLMediaKit uses GitHub Actions for continuous integration, automatically compiling, packaging and uploading the build output. Please download the latest SDK libraries and executables from the [issue list](https://github.com/ZLMediaKit/ZLMediaKit/issues/483).
## Docker Image
You can download the pre-compiled image from Docker Hub and start it:
@ -369,6 +373,8 @@ bash build_docker_images.sh
- [WebSocket-fmp4 player based on MSE](https://github.com/v354412101/wsPlayer)
- [Domestic webrtc sdk(metaRTC)](https://github.com/metartc/metaRTC)
- [GB28181 player implemented in C++](https://github.com/any12345com/BXC_gb28181Player)
- [Android RTCPlayer](https://github.com/leo94666/RTCPlayer)
## License
@ -521,6 +527,7 @@ Thanks to all those who have supported this project in various ways, including b
[jamesZHANG500](https://github.com/jamesZHANG500)
[weidelong](https://github.com/wdl1697454803)
[小强先生](https://github.com/linshangqiang)
[李之阳](https://github.com/leo94666)
Also thanks to JetBrains for their support of this open source project; we developed and debugged ZLMediaKit with CLion:

View File

@ -58,6 +58,8 @@ API_EXPORT const char *API_CALL mk_record_info_get_stream(const mk_record_info c
///////////////////////////////////////////Parser/////////////////////////////////////////////
//Parser对象的C映射
typedef struct mk_parser_t *mk_parser;
//Parser对象中Headers foreach回调
typedef void(API_CALL *on_mk_parser_header_cb)(void *user_data, const char *key, const char *val);
//Parser::Method(),获取命令字譬如GET/POST
API_EXPORT const char* API_CALL mk_parser_get_method(const mk_parser ctx);
//Parser::Url(),获取HTTP的访问url(不包括?后面的参数)
@ -72,6 +74,8 @@ API_EXPORT const char* API_CALL mk_parser_get_tail(const mk_parser ctx);
API_EXPORT const char* API_CALL mk_parser_get_header(const mk_parser ctx,const char *key);
//Parser::Content(),获取HTTP body
API_EXPORT const char* API_CALL mk_parser_get_content(const mk_parser ctx, size_t *length);
//循环获取所有header
API_EXPORT void API_CALL mk_parser_headers_for_each(const mk_parser ctx, on_mk_parser_header_cb cb, void *user_data);
///////////////////////////////////////////MediaInfo/////////////////////////////////////////////
//MediaInfo对象的C映射
@ -114,19 +118,24 @@ API_EXPORT int API_CALL mk_media_source_get_total_reader_count(const mk_media_so
API_EXPORT int API_CALL mk_media_source_get_track_count(const mk_media_source ctx);
// copy track reference by index from MediaSource, please use mk_track_unref to release it
API_EXPORT mk_track API_CALL mk_media_source_get_track(const mk_media_source ctx, int index);
// MediaSource::Track:loss
API_EXPORT float API_CALL mk_media_source_get_track_loss(const mk_media_source ctx, const mk_track track);
// MediaSource::broadcastMessage
API_EXPORT int API_CALL mk_media_source_broadcast_msg(const mk_media_source ctx, const char *msg, size_t len);
// MediaSource::getOriginUrl()
API_EXPORT const char* API_CALL mk_media_source_get_origin_url(const mk_media_source ctx);
// MediaSource::getOriginType()
API_EXPORT int API_CALL mk_media_source_get_origin_type(const mk_media_source ctx);
// MediaSource::getOriginTypeStr(), 使用后请用mk_free释放返回值
API_EXPORT const char *API_CALL mk_media_source_get_origin_type_str(const mk_media_source ctx);
// MediaSource::getCreateStamp()
API_EXPORT uint64_t API_CALL mk_media_source_get_create_stamp(const mk_media_source ctx);
// MediaSource::isRecording() 0:hls,1:MP4
API_EXPORT int API_CALL mk_media_source_is_recording(const mk_media_source ctx, int type);
// MediaSource::getBytesSpeed()
API_EXPORT int API_CALL mk_media_source_get_bytes_speed(const mk_media_source ctx);
// MediaSource::getAliveSecond()
API_EXPORT uint64_t API_CALL mk_media_source_get_alive_second(const mk_media_source ctx);
/**
* ZLMediaKit中被称作为MediaSource
* 3RtmpMediaSourceRtspMediaSourceHlsMediaSource

View File

@ -79,7 +79,7 @@ API_EXPORT void API_CALL mk_proxy_player_release(mk_proxy_player ctx);
/**
*
* @param ctx
* @param key , net_adapter/rtp_type/rtsp_user/rtsp_pwd/protocol_timeout_ms/media_timeout_ms/beat_interval_ms
* @param key , net_adapter/rtp_type/rtsp_user/rtsp_pwd/protocol_timeout_ms/media_timeout_ms/beat_interval_ms/rtsp_speed
* @param val ,string
*/
API_EXPORT void API_CALL mk_proxy_player_set_option(mk_proxy_player ctx, const char *key, const char *val);
@ -98,7 +98,9 @@ API_EXPORT void API_CALL mk_proxy_player_play(mk_proxy_player ctx, const char *u
* mk_proxy_player_release函数MediaSource.close()
* @param user_data mk_proxy_player_set_on_close函数设置
*/
typedef void(API_CALL *on_mk_proxy_player_close)(void *user_data, int err, const char *what, int sys_err);
typedef void(API_CALL *on_mk_proxy_player_cb)(void *user_data, int err, const char *what, int sys_err);
// 保持兼容
#define on_mk_proxy_player_close on_mk_proxy_player_cb
/**
* MediaSource.close()
@ -108,8 +110,17 @@ typedef void(API_CALL *on_mk_proxy_player_close)(void *user_data, int err, const
* @param cb
* @param user_data
*/
API_EXPORT void API_CALL mk_proxy_player_set_on_close(mk_proxy_player ctx, on_mk_proxy_player_close cb, void *user_data);
API_EXPORT void API_CALL mk_proxy_player_set_on_close2(mk_proxy_player ctx, on_mk_proxy_player_close cb, void *user_data, on_user_data_free user_data_free);
API_EXPORT void API_CALL mk_proxy_player_set_on_close(mk_proxy_player ctx, on_mk_proxy_player_cb cb, void *user_data);
API_EXPORT void API_CALL mk_proxy_player_set_on_close2(mk_proxy_player ctx, on_mk_proxy_player_cb cb, void *user_data, on_user_data_free user_data_free);
/**
*
* @param ctx
* @param cb
* @param user_data
* @param user_data_free
*/
API_EXPORT void API_CALL mk_proxy_player_set_on_play_result(mk_proxy_player ctx, on_mk_proxy_player_cb cb, void *user_data, on_user_data_free user_data_free);
/**
*

View File

@ -24,6 +24,7 @@ typedef struct mk_rtp_server_t *mk_rtp_server;
* @return
*/
API_EXPORT mk_rtp_server API_CALL mk_rtp_server_create(uint16_t port, int tcp_mode, const char *stream_id);
API_EXPORT mk_rtp_server API_CALL mk_rtp_server_create2(uint16_t port, int tcp_mode, const char *vhost, const char *app, const char *stream_id);
/**
* TCP

View File

@ -73,6 +73,21 @@ API_EXPORT const char* API_CALL mk_track_codec_name(mk_track track);
*/
API_EXPORT int API_CALL mk_track_bit_rate(mk_track track);
/**
* 1: 0
*/
API_EXPORT int API_CALL mk_track_ready(mk_track track);
/**
*
*/
API_EXPORT uint64_t API_CALL mk_track_frames(mk_track track);
/**
*
*/
API_EXPORT uint64_t API_CALL mk_track_duration(mk_track track);
/**
* frame输出事件
* @param track track对象
@ -114,6 +129,21 @@ API_EXPORT int API_CALL mk_track_video_height(mk_track track);
*/
API_EXPORT int API_CALL mk_track_video_fps(mk_track track);
/**
*
*/
API_EXPORT uint64_t API_CALL mk_track_video_key_frames(mk_track track);
/**
* GOP关键帧间隔
*/
API_EXPORT int API_CALL mk_track_video_gop_size(mk_track track);
/**
* ()
*/
API_EXPORT int API_CALL mk_track_video_gop_interval_ms(mk_track track);
/**
*
*/

View File

@ -125,6 +125,13 @@ API_EXPORT const char* API_CALL mk_parser_get_content(const mk_parser ctx, size_
}
return parser->content().c_str();
}
API_EXPORT void API_CALL mk_parser_headers_for_each(const mk_parser ctx, on_mk_parser_header_cb cb, void *user_data){
    // Walk every HTTP header held by the parser and invoke `cb` once per
    // key/value pair; `user_data` is forwarded to the callback unchanged.
    assert(ctx && cb);
    for (const auto &kv : ((Parser *)ctx)->getHeader()) {
        cb(user_data, kv.first.c_str(), kv.second.c_str());
    }
}
///////////////////////////////////////////MediaInfo/////////////////////////////////////////////
API_EXPORT const char* API_CALL mk_media_info_get_params(const mk_media_info ctx){
@ -218,6 +225,13 @@ API_EXPORT mk_track API_CALL mk_media_source_get_track(const mk_media_source ctx
return (mk_track) new Track::Ptr(std::move(tracks[index]));
}
// MediaSource::getLossRate(): interval packet-loss rate for the given track's type.
API_EXPORT float API_CALL mk_media_source_get_track_loss(const mk_media_source ctx, const mk_track track) {
assert(ctx);
MediaSource *src = (MediaSource *)ctx;
// An RTP push stream keeps only one loss-rate counter even though it may carry
// multiple tracks; if the interval loss rate is queried again within a short
// time, the second call returns -1.
return src->getLossRate((*((Track::Ptr *)track))->getTrackType());
}
API_EXPORT int API_CALL mk_media_source_broadcast_msg(const mk_media_source ctx, const char *msg, size_t len) {
assert(ctx && msg && len);
MediaSource *src = (MediaSource *)ctx;
@ -240,6 +254,12 @@ API_EXPORT int API_CALL mk_media_source_get_origin_type(const mk_media_source c
return static_cast<int>(src->getOriginType());
}
// MediaSource::getOriginTypeStr(): human-readable origin type.
// The returned string is heap-allocated via _strdup; per the header
// declaration, the caller must release it with mk_free.
API_EXPORT const char* API_CALL mk_media_source_get_origin_type_str(const mk_media_source ctx) {
assert(ctx);
MediaSource *src = (MediaSource *)ctx;
return _strdup(getOriginTypeString(src->getOriginType()).c_str());
}
API_EXPORT uint64_t API_CALL mk_media_source_get_create_stamp(const mk_media_source ctx) {
assert(ctx);
MediaSource *src = (MediaSource *)ctx;
@ -252,6 +272,19 @@ API_EXPORT int API_CALL mk_media_source_is_recording(const mk_media_source ctx,i
return src->isRecording((Recorder::type)type);
}
// MediaSource::getBytesSpeed(): current byte-rate counter of the source.
API_EXPORT int API_CALL mk_media_source_get_bytes_speed(const mk_media_source ctx) {
assert(ctx);
MediaSource *src = (MediaSource *)ctx;
return src->getBytesSpeed();
}
// MediaSource::getAliveSecond(): how long (seconds, per the name) the source has existed.
API_EXPORT uint64_t API_CALL mk_media_source_get_alive_second(const mk_media_source ctx) {
assert(ctx);
MediaSource *src = (MediaSource *)ctx;
return src->getAliveSecond();
}
API_EXPORT int API_CALL mk_media_source_close(const mk_media_source ctx,int force){
assert(ctx);
MediaSource *src = (MediaSource *)ctx;

View File

@ -84,6 +84,20 @@ API_EXPORT void API_CALL mk_proxy_player_set_on_close2(mk_proxy_player ctx, on_m
});
}
// Register a play-result callback on the proxy player (one-shot, via
// setPlayCallbackOnce). The callback receives the SockException's error code,
// message and custom code. `user_data` is wrapped in a shared_ptr whose
// deleter is `user_data_free` (no-op when null), so it is released once the
// lambdas capturing it are destroyed.
API_EXPORT void API_CALL mk_proxy_player_set_on_play_result(mk_proxy_player ctx, on_mk_proxy_player_close cb, void *user_data, on_user_data_free user_data_free) {
assert(ctx);
PlayerProxy::Ptr &obj = *((PlayerProxy::Ptr *)ctx);
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
obj->getPoller()->async([obj, cb, ptr]() {
// Switch to the player's poller thread before touching it.
obj->setPlayCallbackOnce([cb, ptr](const SockException &ex) {
if (cb) {
cb(ptr.get(), ex.getErrCode(), ex.what(), ex.getCustomCode());
}
});
});
}
API_EXPORT int API_CALL mk_proxy_player_total_reader_count(mk_proxy_player ctx){
assert(ctx);
PlayerProxy::Ptr &obj = *((PlayerProxy::Ptr *) ctx);

View File

@ -18,7 +18,13 @@ using namespace mediakit;
API_EXPORT mk_rtp_server API_CALL mk_rtp_server_create(uint16_t port, int tcp_mode, const char *stream_id) {
RtpServer::Ptr *server = new RtpServer::Ptr(new RtpServer);
(*server)->start(port, stream_id, (RtpServer::TcpMode)tcp_mode);
(*server)->start(port, MediaTuple { DEFAULT_VHOST, kRtpAppName, stream_id, "" }, (RtpServer::TcpMode)tcp_mode);
return (mk_rtp_server)server;
}
API_EXPORT mk_rtp_server API_CALL mk_rtp_server_create2(uint16_t port, int tcp_mode, const char *vhost, const char *app, const char *stream_id) {
RtpServer::Ptr *server = new RtpServer::Ptr(new RtpServer);
(*server)->start(port, MediaTuple { vhost, app, stream_id, "" }, (RtpServer::TcpMode)tcp_mode);
return (mk_rtp_server)server;
}
@ -56,7 +62,7 @@ API_EXPORT void API_CALL mk_rtp_server_set_on_detach2(mk_rtp_server ctx, on_mk_r
RtpServer::Ptr *server = (RtpServer::Ptr *) ctx;
if (cb) {
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
(*server)->setOnDetach([cb, ptr]() {
(*server)->setOnDetach([cb, ptr](const SockException &ex) {
cb(ptr.get());
});
} else {
@ -71,6 +77,11 @@ API_EXPORT mk_rtp_server API_CALL mk_rtp_server_create(uint16_t port, int enable
return nullptr;
}
API_EXPORT mk_rtp_server API_CALL mk_rtp_server_create2(uint16_t port, int tcp_mode, const char *vhost, const char *app, const char *stream_id) {
WarnL << "请打开ENABLE_RTPPROXY后再编译";
return nullptr;
}
API_EXPORT void API_CALL mk_rtp_server_release(mk_rtp_server ctx) {
WarnL << "请打开ENABLE_RTPPROXY后再编译";
}

View File

@ -109,6 +109,21 @@ API_EXPORT int API_CALL mk_track_bit_rate(mk_track track) {
return (*((Track::Ptr *) track))->getBitRate();
}
// Track::ready(): 1 when the track reports ready, 0 otherwise (bool -> int).
API_EXPORT int API_CALL mk_track_ready(mk_track track) {
assert(track);
return (*((Track::Ptr *)track))->ready();
}
// Track::getFrames()
API_EXPORT uint64_t API_CALL mk_track_frames(mk_track track) {
assert(track);
return (*((Track::Ptr *)track))->getFrames();
}
// Track::getDuration()
API_EXPORT uint64_t API_CALL mk_track_duration(mk_track track) {
assert(track);
return (*((Track::Ptr *)track))->getDuration();
}
API_EXPORT void *API_CALL mk_track_add_delegate(mk_track track, on_mk_frame_out cb, void *user_data) {
return mk_track_add_delegate2(track, cb, user_data, nullptr);
}
@ -167,6 +182,36 @@ API_EXPORT int API_CALL mk_track_video_fps(mk_track track) {
return 0;
}
// VideoTrack key-frame counter; returns 0 (with a warning) for non-video tracks.
API_EXPORT uint64_t API_CALL mk_track_video_key_frames(mk_track track) {
    assert(track);
    auto video = dynamic_pointer_cast<VideoTrack>((*((Track::Ptr *)track)));
    if (video) {
        // Bug fix: this previously returned getVideoFps() (copy-paste from
        // mk_track_video_fps). The sibling wrappers each call their matching
        // getter (gop_size -> getVideoGopSize, gop_interval_ms ->
        // getVideoGopInterval), so the key-frame count is getVideoKeyFrames().
        return video->getVideoKeyFrames();
    }
    WarnL << "not video track";
    return 0;
}
// VideoTrack::getVideoGopSize(): GOP key-frame interval in frames;
// returns 0 (with a warning) for non-video tracks.
API_EXPORT int API_CALL mk_track_video_gop_size(mk_track track) {
assert(track);
auto video = dynamic_pointer_cast<VideoTrack>((*((Track::Ptr *)track)));
if (video) {
return video->getVideoGopSize();
}
WarnL << "not video track";
return 0;
}
// VideoTrack::getVideoGopInterval(): GOP interval (milliseconds, per the name);
// returns 0 (with a warning) for non-video tracks.
API_EXPORT int API_CALL mk_track_video_gop_interval_ms(mk_track track) {
assert(track);
auto video = dynamic_pointer_cast<VideoTrack>((*((Track::Ptr *)track)));
if (video) {
return video->getVideoGopInterval();
}
WarnL << "not video track";
return 0;
}
API_EXPORT int API_CALL mk_track_audio_sample_rate(mk_track track) {
assert(track);
auto audio = dynamic_pointer_cast<AudioTrack>((*((Track::Ptr *) track)));

View File

@ -43,7 +43,7 @@ WORKDIR /opt/media/ZLMediaKit
# 3rdpart init
WORKDIR /opt/media/ZLMediaKit/3rdpart
RUN wget https://mirror.ghproxy.com/https://github.com/cisco/libsrtp/archive/v2.3.0.tar.gz -O libsrtp-2.3.0.tar.gz && \
RUN wget https://github.com/cisco/libsrtp/archive/v2.3.0.tar.gz -O libsrtp-2.3.0.tar.gz && \
tar xfv libsrtp-2.3.0.tar.gz && \
mv libsrtp-2.3.0 libsrtp && \
cd libsrtp && ./configure --enable-openssl && make -j $(nproc) && make install

View File

@ -55,62 +55,61 @@ void AACRtpDecoder::obtainFrame() {
bool AACRtpDecoder::inputRtp(const RtpPacket::Ptr &rtp, bool key_pos) {
auto payload_size = rtp->getPayloadSize();
if (payload_size <= 0) {
//无实际负载
// 无实际负载
return false;
}
auto stamp = rtp->getStampMS();
//rtp数据开始部分
// rtp数据开始部分
auto ptr = rtp->getPayload();
//rtp数据末尾
// rtp数据末尾
auto end = ptr + payload_size;
//首2字节表示Au-Header的个数单位bit所以除以16得到Au-Header个数
// 首2字节表示Au-Header的个数单位bit所以除以16得到Au-Header个数
auto au_header_count = ((ptr[0] << 8) | ptr[1]) >> 4;
if (!au_header_count) {
//问题issue: https://github.com/ZLMediaKit/ZLMediaKit/issues/1869
// 问题issue: https://github.com/ZLMediaKit/ZLMediaKit/issues/1869
WarnL << "invalid aac rtp au_header_count";
return false;
}
//记录au_header起始指针
// 记录au_header起始指针
auto au_header_ptr = ptr + 2;
ptr = au_header_ptr + au_header_count * 2;
ptr = au_header_ptr + au_header_count * 2;
if (end < ptr) {
//数据不够
// 数据不够
return false;
}
if (!_last_dts) {
//记录第一个时间戳
// 记录第一个时间戳
_last_dts = stamp;
}
//每个audio unit时间戳增量
// 每个audio unit时间戳增量
auto dts_inc = (stamp - _last_dts) / au_header_count;
if (dts_inc < 0 && dts_inc > 100) {
//时间戳增量异常,忽略
if (dts_inc < 0 || dts_inc > 100) {
// 时间戳增量异常,忽略
dts_inc = 0;
}
for (int i = 0; i < au_header_count; ++i) {
for (auto i = 0u; i < (size_t)au_header_count; ++i) {
// 之后的2字节是AU_HEADER,其中高13位表示一帧AAC负载的字节长度低3位无用
uint16_t size = ((au_header_ptr[0] << 8) | au_header_ptr[1]) >> 3;
if (ptr + size > end) {
//数据不够
auto size = ((au_header_ptr[0] << 8) | au_header_ptr[1]) >> 3;
auto len = std::min<int>(size, end - ptr);
if (len <= 0) {
break;
}
_frame->_buffer.append((char *)ptr, len);
ptr += len;
au_header_ptr += 2;
if (size) {
//设置aac数据
_frame->_buffer.assign((char *) ptr, size);
//设置当前audio unit时间戳
if (_frame->size() >= (size_t)size) {
// 设置当前audio unit时间戳
_frame->_dts = _last_dts + i * dts_inc;
ptr += size;
au_header_ptr += 2;
flushData();
}
}
//记录上次时间戳
// 记录上次时间戳
_last_dts = stamp;
return false;
}

View File

@ -40,7 +40,7 @@ bool G711RtpEncoder::inputFrame(const Frame::Ptr &frame) {
auto remain_size = len;
size_t max_size = 160 * _channels * _pkt_dur_ms / 20; // 20 ms per 160 byte
size_t n = 0;
bool mark = true;
bool mark = false;
while (remain_size >= max_size) {
assert(remain_size >= max_size);
const size_t rtp_size = max_size;

View File

@ -117,7 +117,7 @@ size_t prefixSize(const char *ptr, size_t len) {
H264Track::H264Track(const string &sps, const string &pps, int sps_prefix_len, int pps_prefix_len) {
_sps = sps.substr(sps_prefix_len);
_pps = pps.substr(pps_prefix_len);
update();
H264Track::update();
}
CodecId H264Track::getCodecId() const {
@ -238,6 +238,14 @@ bool H264Track::update() {
return getAVCInfo(_sps, _width, _height, _fps);
}
// Build the out-of-band config frames (SPS then PPS) for this H.264 track.
// Returns an empty vector when the track is not yet ready.
std::vector<Frame::Ptr> H264Track::getConfigFrames() const {
if (!ready()) {
return {};
}
return { createConfigFrame<H264Frame>(_sps, 0, getIndex()),
createConfigFrame<H264Frame>(_pps, 0, getIndex()) };
}
Track::Ptr H264Track::clone() const {
return std::make_shared<H264Track>(*this);
}
@ -284,23 +292,11 @@ bool H264Track::inputFrame_l(const Frame::Ptr &frame) {
void H264Track::insertConfigFrame(const Frame::Ptr &frame) {
if (!_sps.empty()) {
auto spsFrame = FrameImp::create<H264Frame>();
spsFrame->_prefix_size = 4;
spsFrame->_buffer.assign("\x00\x00\x00\x01", 4);
spsFrame->_buffer.append(_sps);
spsFrame->_dts = frame->dts();
spsFrame->setIndex(frame->getIndex());
VideoTrack::inputFrame(spsFrame);
VideoTrack::inputFrame(createConfigFrame<H264Frame>(_sps, frame->dts(), frame->getIndex()));
}
if (!_pps.empty()) {
auto ppsFrame = FrameImp::create<H264Frame>();
ppsFrame->_prefix_size = 4;
ppsFrame->_buffer.assign("\x00\x00\x00\x01", 4);
ppsFrame->_buffer.append(_pps);
ppsFrame->_dts = frame->dts();
ppsFrame->setIndex(frame->getIndex());
VideoTrack::inputFrame(ppsFrame);
VideoTrack::inputFrame(createConfigFrame<H264Frame>(_pps, frame->dts(), frame->getIndex()));
}
}

View File

@ -115,6 +115,7 @@ public:
toolkit::Buffer::Ptr getExtraData() const override;
void setExtraData(const uint8_t *data, size_t size) override;
bool update() override;
std::vector<Frame::Ptr> getConfigFrames() const override;
private:
Sdp::Ptr getSdp(uint8_t payload_type) const override;
@ -131,5 +132,17 @@ private:
std::string _pps;
};
template <typename FrameType>
Frame::Ptr createConfigFrame(const std::string &data, uint64_t dts, int index) {
auto frame = FrameImp::create<FrameType>();
frame->_prefix_size = 4;
frame->_buffer.assign("\x00\x00\x00\x01", 4);
frame->_buffer.append(data);
frame->_dts = dts;
frame->setIndex(index);
return frame;
}
}//namespace mediakit
#endif //ZLMEDIAKIT_H264_H

View File

@ -66,7 +66,7 @@ H265Track::H265Track(const string &vps,const string &sps, const string &pps,int
_vps = vps.substr(vps_prefix_len);
_sps = sps.substr(sps_prefix_len);
_pps = pps.substr(pps_prefix_len);
update();
H265Track::update();
}
CodecId H265Track::getCodecId() const {
@ -185,6 +185,15 @@ bool H265Track::update() {
return getHEVCInfo(_vps, _sps, _width, _height, _fps);
}
std::vector<Frame::Ptr> H265Track::getConfigFrames() const {
if (!ready()) {
return {};
}
return { createConfigFrame<H265Frame>(_vps, 0, getIndex()),
createConfigFrame<H265Frame>(_sps, 0, getIndex()),
createConfigFrame<H265Frame>(_pps, 0, getIndex()) };
}
Track::Ptr H265Track::clone() const {
return std::make_shared<H265Track>(*this);
}
@ -194,32 +203,13 @@ void H265Track::insertConfigFrame(const Frame::Ptr &frame) {
return;
}
if (!_vps.empty()) {
auto vpsFrame = FrameImp::create<H265Frame>();
vpsFrame->_prefix_size = 4;
vpsFrame->_buffer.assign("\x00\x00\x00\x01", 4);
vpsFrame->_buffer.append(_vps);
vpsFrame->_dts = frame->dts();
vpsFrame->setIndex(frame->getIndex());
VideoTrack::inputFrame(vpsFrame);
VideoTrack::inputFrame(createConfigFrame<H265Frame>(_vps, frame->dts(), frame->getIndex()));
}
if (!_sps.empty()) {
auto spsFrame = FrameImp::create<H265Frame>();
spsFrame->_prefix_size = 4;
spsFrame->_buffer.assign("\x00\x00\x00\x01", 4);
spsFrame->_buffer.append(_sps);
spsFrame->_dts = frame->dts();
spsFrame->setIndex(frame->getIndex());
VideoTrack::inputFrame(spsFrame);
VideoTrack::inputFrame(createConfigFrame<H265Frame>(_sps, frame->dts(), frame->getIndex()));
}
if (!_pps.empty()) {
auto ppsFrame = FrameImp::create<H265Frame>();
ppsFrame->_prefix_size = 4;
ppsFrame->_buffer.assign("\x00\x00\x00\x01", 4);
ppsFrame->_buffer.append(_pps);
ppsFrame->_dts = frame->dts();
ppsFrame->setIndex(frame->getIndex());
VideoTrack::inputFrame(ppsFrame);
VideoTrack::inputFrame(createConfigFrame<H265Frame>(_pps, frame->dts(), frame->getIndex()));
}
}

View File

@ -142,6 +142,7 @@ public:
toolkit::Buffer::Ptr getExtraData() const override;
void setExtraData(const uint8_t *data, size_t size) override;
bool update() override;
std::vector<Frame::Ptr> getConfigFrames() const override;
private:
Sdp::Ptr getSdp(uint8_t payload_type) const override;

View File

@ -306,13 +306,12 @@ static int getBits(void *pvHandle, int iN)
uint8_t u8Nbyte;
uint8_t u8Shift;
uint32_t u32Result = 0;
int iRet = 0;
uint32_t iRet = 0;
int iResoLen = 0;
if(NULL == ptPtr)
{
RPT(RPT_ERR, "NULL pointer");
iRet = -1;
goto exit;
}
@ -324,7 +323,6 @@ static int getBits(void *pvHandle, int iN)
iResoLen = getBitsLeft(ptPtr);
if(iResoLen < iN)
{
iRet = -1;
goto exit;
}

View File

@ -1476,6 +1476,16 @@
"value": "{{ZLMediaKit_secret}}",
"description": "api操作密钥(配置文件配置)"
},
{
"key": "vhost",
"value": "{{defaultVhost}}",
"description": "虚拟主机例如__defaultVhost__"
},
{
"key": "app",
"value": "rtp",
"description": "应用名,例如 rtp"
},
{
"key": "stream_id",
"value": "test",
@ -1517,6 +1527,16 @@
"value": "1",
"description": "tcp模式0时为不启用tcp监听1时为启用tcp监听2时为tcp主动连接模式"
},
{
"key": "vhost",
"value": "{{defaultVhost}}",
"description": "虚拟主机例如__defaultVhost__"
},
{
"key": "app",
"value": "rtp",
"description": "应用名,例如 rtp"
},
{
"key": "stream_id",
"value": "test",
@ -1582,6 +1602,16 @@
"value": "1",
"description": "tcp模式0时为不启用tcp监听1时为启用tcp监听"
},
{
"key": "vhost",
"value": "{{defaultVhost}}",
"description": "虚拟主机例如__defaultVhost__"
},
{
"key": "app",
"value": "rtp",
"description": "应用名,例如 rtp"
},
{
"key": "stream_id",
"value": "test",
@ -1635,6 +1665,16 @@
"value": "1",
"description": "tcp主动模式时服务端端口"
},
{
"key": "vhost",
"value": "{{defaultVhost}}",
"description": "虚拟主机例如__defaultVhost__"
},
{
"key": "app",
"value": "rtp",
"description": "应用名,例如 rtp"
},
{
"key": "stream_id",
"value": "test",
@ -1666,6 +1706,16 @@
"value": "{{ZLMediaKit_secret}}",
"description": "api操作密钥(配置文件配置)"
},
{
"key": "vhost",
"value": "{{defaultVhost}}",
"description": "虚拟主机例如__defaultVhost__"
},
{
"key": "app",
"value": "rtp",
"description": "应用名,例如 rtp"
},
{
"key": "stream_id",
"value": "test",
@ -1697,6 +1747,16 @@
"value": "{{ZLMediaKit_secret}}",
"description": "api操作密钥(配置文件配置)"
},
{
"key": "vhost",
"value": "{{defaultVhost}}",
"description": "虚拟主机例如__defaultVhost__"
},
{
"key": "app",
"value": "rtp",
"description": "应用名,例如 rtp"
},
{
"key": "stream_id",
"value": "test",
@ -1733,6 +1793,16 @@
"value": "{{ZLMediaKit_secret}}",
"description": "api操作密钥(配置文件配置)"
},
{
"key": "vhost",
"value": "{{defaultVhost}}",
"description": "虚拟主机例如__defaultVhost__"
},
{
"key": "app",
"value": "rtp",
"description": "应用名,例如 rtp"
},
{
"key": "stream_id",
"value": "test",
@ -1764,6 +1834,16 @@
"value": "{{ZLMediaKit_secret}}",
"description": "api操作密钥(配置文件配置)"
},
{
"key": "vhost",
"value": "{{defaultVhost}}",
"description": "虚拟主机例如__defaultVhost__"
},
{
"key": "app",
"value": "rtp",
"description": "应用名,例如 rtp"
},
{
"key": "stream_id",
"value": "test",

View File

@ -18,23 +18,15 @@
// ITU-R BT.709
#define RGB_TO_Y(R, G, B) (((47 * (R) + 157 * (G) + 16 * (B) + 128) >> 8) + 16)
#define RGB_TO_U(R, G, B) (((-26 * (R)-87 * (G) + 112 * (B) + 128) >> 8) + 128)
#define RGB_TO_V(R, G, B) (((112 * (R)-102 * (G)-10 * (B) + 128) >> 8) + 128)
#define RGB_TO_U(R, G, B) (((-26 * (R) - 87 * (G) + 112 * (B) + 128) >> 8) + 128)
#define RGB_TO_V(R, G, B) (((112 * (R) - 102 * (G) - 10 * (B) + 128) >> 8) + 128)
INSTANCE_IMP(VideoStackManager)
Param::~Param()
{
VideoStackManager::Instance().unrefChannel(
id, width, height, pixfmt);
}
Param::~Param() { VideoStackManager::Instance().unrefChannel(id, width, height, pixfmt); }
Channel::Channel(const std::string& id, int width, int height, AVPixelFormat pixfmt)
: _id(id)
, _width(width)
, _height(height)
, _pixfmt(pixfmt)
{
: _id(id), _width(width), _height(height), _pixfmt(pixfmt) {
_tmp = std::make_shared<mediakit::FFmpegFrame>();
_tmp->get()->width = _width;
@ -53,88 +45,72 @@ Channel::Channel(const std::string& id, int width, int height, AVPixelFormat pix
_tmp = _sws->inputFrame(frame);
}
void Channel::addParam(const std::weak_ptr<Param>& p)
{
void Channel::addParam(const std::weak_ptr<Param>& p) {
std::lock_guard<std::recursive_mutex> lock(_mx);
_params.push_back(p);
}
void Channel::onFrame(const mediakit::FFmpegFrame::Ptr& frame)
{
void Channel::onFrame(const mediakit::FFmpegFrame::Ptr& frame) {
std::weak_ptr<Channel> weakSelf = shared_from_this();
_poller = _poller ? _poller : toolkit::WorkThreadPool::Instance().getPoller();
_poller->async([weakSelf, frame]() {
auto self = weakSelf.lock();
if (!self) {
return;
}
if (!self) { return; }
self->_tmp = self->_sws->inputFrame(frame);
self->forEachParam([self](const Param::Ptr& p) { self->fillBuffer(p); });
});
}
void Channel::forEachParam(const std::function<void(const Param::Ptr&)>& func)
{
void Channel::forEachParam(const std::function<void(const Param::Ptr&)>& func) {
for (auto& wp : _params) {
if (auto sp = wp.lock()) {
func(sp);
}
if (auto sp = wp.lock()) { func(sp); }
}
}
void Channel::fillBuffer(const Param::Ptr& p)
{
if (auto buf = p->weak_buf.lock()) {
copyData(buf, p);
}
void Channel::fillBuffer(const Param::Ptr& p) {
if (auto buf = p->weak_buf.lock()) { copyData(buf, p); }
}
void Channel::copyData(const mediakit::FFmpegFrame::Ptr& buf, const Param::Ptr& p)
{
void Channel::copyData(const mediakit::FFmpegFrame::Ptr& buf, const Param::Ptr& p) {
switch (p->pixfmt) {
case AV_PIX_FMT_YUV420P: {
for (int i = 0; i < p->height; i++) {
memcpy(buf->get()->data[0] + buf->get()->linesize[0] * (i + p->posY) + p->posX,
_tmp->get()->data[0] + _tmp->get()->linesize[0] * i,
_tmp->get()->width);
}
//确保height为奇数时也能正确的复制到最后一行uv数据
for (int i = 0; i < (p->height + 1) / 2; i++) {
// U平面
memcpy(buf->get()->data[1] + buf->get()->linesize[1] * (i + p->posY / 2) + p->posX / 2,
_tmp->get()->data[1] + _tmp->get()->linesize[1] * i,
_tmp->get()->width / 2);
case AV_PIX_FMT_YUV420P: {
for (int i = 0; i < p->height; i++) {
memcpy(buf->get()->data[0] + buf->get()->linesize[0] * (i + p->posY) + p->posX,
_tmp->get()->data[0] + _tmp->get()->linesize[0] * i, _tmp->get()->width);
}
// 确保height为奇数时也能正确的复制到最后一行uv数据
for (int i = 0; i < (p->height + 1) / 2; i++) {
// U平面
memcpy(buf->get()->data[1] + buf->get()->linesize[1] * (i + p->posY / 2) +
p->posX / 2,
_tmp->get()->data[1] + _tmp->get()->linesize[1] * i, _tmp->get()->width / 2);
// V平面
memcpy(buf->get()->data[2] + buf->get()->linesize[2] * (i + p->posY / 2) + p->posX / 2,
_tmp->get()->data[2] + _tmp->get()->linesize[2] * i,
_tmp->get()->width / 2);
// V平面
memcpy(buf->get()->data[2] + buf->get()->linesize[2] * (i + p->posY / 2) +
p->posX / 2,
_tmp->get()->data[2] + _tmp->get()->linesize[2] * i, _tmp->get()->width / 2);
}
break;
}
case AV_PIX_FMT_NV12: {
// TODO: 待实现
break;
}
break;
}
case AV_PIX_FMT_NV12: {
//TODO: 待实现
break;
}
default:
WarnL << "No support pixformat: " << av_get_pix_fmt_name(p->pixfmt);
break;
default: WarnL << "No support pixformat: " << av_get_pix_fmt_name(p->pixfmt); break;
}
}
void StackPlayer::addChannel(const std::weak_ptr<Channel>& chn)
{
void StackPlayer::addChannel(const std::weak_ptr<Channel>& chn) {
std::lock_guard<std::recursive_mutex> lock(_mx);
_channels.push_back(chn);
}
void StackPlayer::play()
{
void StackPlayer::play() {
auto url = _url;
//创建拉流 解码对象
// 创建拉流 解码对象
_player = std::make_shared<mediakit::MediaPlayer>();
std::weak_ptr<mediakit::MediaPlayer> weakPlayer = _player;
@ -146,13 +122,9 @@ void StackPlayer::play()
_player->setOnPlayResult([weakPlayer, weakSelf, url](const toolkit::SockException& ex) mutable {
TraceL << "StackPlayer: " << url << " OnPlayResult: " << ex.what();
auto strongPlayer = weakPlayer.lock();
if (!strongPlayer) {
return;
}
if (!strongPlayer) { return; }
auto self = weakSelf.lock();
if (!self) {
return;
}
if (!self) { return; }
if (!ex) {
// 取消定时器
@ -164,19 +136,18 @@ void StackPlayer::play()
self->rePlay(url);
}
auto videoTrack = std::dynamic_pointer_cast<mediakit::VideoTrack>(strongPlayer->getTrack(mediakit::TrackVideo, false));
//auto audioTrack = std::dynamic_pointer_cast<mediakit::AudioTrack>(strongPlayer->getTrack(mediakit::TrackAudio, false));
auto videoTrack = std::dynamic_pointer_cast<mediakit::VideoTrack>(
strongPlayer->getTrack(mediakit::TrackVideo, false));
// auto audioTrack = std::dynamic_pointer_cast<mediakit::AudioTrack>(strongPlayer->getTrack(mediakit::TrackAudio, false));
if (videoTrack) {
//TODO:添加使用显卡还是cpu解码的判断逻辑
//auto decoder = std::make_shared<FFmpegDecoder>(videoTrack, 1, std::vector<std::string>{ "hevc_cuvid", "h264_cuvid"});
auto decoder = std::make_shared<mediakit::FFmpegDecoder>(videoTrack, 0, std::vector<std::string> { "h264", "hevc" });
// TODO:添加使用显卡还是cpu解码的判断逻辑
auto decoder = std::make_shared<mediakit::FFmpegDecoder>(
videoTrack, 0, std::vector<std::string>{"h264", "hevc"});
decoder->setOnDecode([weakSelf](const mediakit::FFmpegFrame::Ptr& frame) mutable {
auto self = weakSelf.lock();
if (!self) {
return;
}
if (!self) { return; }
self->onFrame(frame);
});
@ -190,14 +161,10 @@ void StackPlayer::play()
_player->setOnShutdown([weakPlayer, url, weakSelf](const toolkit::SockException& ex) {
TraceL << "StackPlayer: " << url << " OnShutdown: " << ex.what();
auto strongPlayer = weakPlayer.lock();
if (!strongPlayer) {
return;
}
if (!strongPlayer) { return; }
auto self = weakSelf.lock();
if (!self) {
return;
}
if (!self) { return; }
self->onDisconnect();
@ -207,18 +174,14 @@ void StackPlayer::play()
_player->play(url);
}
void StackPlayer::onFrame(const mediakit::FFmpegFrame::Ptr& frame)
{
void StackPlayer::onFrame(const mediakit::FFmpegFrame::Ptr& frame) {
std::lock_guard<std::recursive_mutex> lock(_mx);
for (auto& weak_chn : _channels) {
if (auto chn = weak_chn.lock()) {
chn->onFrame(frame);
}
if (auto chn = weak_chn.lock()) { chn->onFrame(frame); }
}
}
void StackPlayer::onDisconnect()
{
void StackPlayer::onDisconnect() {
std::lock_guard<std::recursive_mutex> lock(_mx);
for (auto& weak_chn : _channels) {
if (auto chn = weak_chn.lock()) {
@ -228,31 +191,22 @@ void StackPlayer::onDisconnect()
}
}
void StackPlayer::rePlay(const std::string& url)
{
void StackPlayer::rePlay(const std::string& url) {
_failedCount++;
auto delay = MAX(2 * 1000, MIN(_failedCount * 3 * 1000, 60 * 1000)); //步进延迟 重试间隔
auto delay = MAX(2 * 1000, MIN(_failedCount * 3 * 1000, 60 * 1000));// 步进延迟 重试间隔
std::weak_ptr<StackPlayer> weakSelf = shared_from_this();
_timer = std::make_shared<toolkit::Timer>(
delay / 1000.0f, [weakSelf, url]() {
auto self = weakSelf.lock();
if (!self) {
}
WarnL << "replay [" << self->_failedCount << "]:" << url;
self->_player->play(url);
return false;
},
nullptr);
_timer = std::make_shared<toolkit::Timer>(delay / 1000.0f, [weakSelf, url]() {
auto self = weakSelf.lock();
if (!self) {}
WarnL << "replay [" << self->_failedCount << "]:" << url;
self->_player->play(url);
return false;
}, nullptr);
}
VideoStack::VideoStack(const std::string& id, int width, int height, AVPixelFormat pixfmt, float fps, int bitRate)
: _id(id)
, _width(width)
, _height(height)
, _pixfmt(pixfmt)
, _fps(fps)
, _bitRate(bitRate)
{
VideoStack::VideoStack(const std::string& id, int width, int height, AVPixelFormat pixfmt,
float fps, int bitRate)
: _id(id), _width(width), _height(height), _pixfmt(pixfmt), _fps(fps), _bitRate(bitRate) {
_buffer = std::make_shared<mediakit::FFmpegFrame>();
@ -262,7 +216,8 @@ VideoStack::VideoStack(const std::string& id, int width, int height, AVPixelForm
av_frame_get_buffer(_buffer->get(), 32);
_dev = std::make_shared<mediakit::DevChannel>(mediakit::MediaTuple { DEFAULT_VHOST, "live", _id });
_dev = std::make_shared<mediakit::DevChannel>(
mediakit::MediaTuple{DEFAULT_VHOST, "live", _id, ""});
mediakit::VideoInfo info;
info.codecId = mediakit::CodecH264;
@ -272,34 +227,28 @@ VideoStack::VideoStack(const std::string& id, int width, int height, AVPixelForm
info.iBitRate = _bitRate;
_dev->initVideo(info);
//dev->initAudio(); //TODO:音频
// dev->initAudio(); //TODO:音频
_dev->addTrackCompleted();
_isExit = false;
}
VideoStack::~VideoStack()
{
VideoStack::~VideoStack() {
_isExit = true;
if (_thread.joinable()) {
_thread.join();
}
if (_thread.joinable()) { _thread.join(); }
}
void VideoStack::setParam(const Params& params)
{
void VideoStack::setParam(const Params& params) {
if (_params) {
for (auto& p : (*_params)) {
if (!p)
continue;
if (!p) continue;
p->weak_buf.reset();
}
}
initBgColor();
for (auto& p : (*params)) {
if (!p)
continue;
if (!p) continue;
p->weak_buf = _buffer;
if (auto chn = p->weak_chn.lock()) {
chn->addParam(p);
@ -309,14 +258,14 @@ void VideoStack::setParam(const Params& params)
_params = params;
}
void VideoStack::start()
{
void VideoStack::start() {
_thread = std::thread([&]() {
uint64_t pts = 0;
int frameInterval = 1000 / _fps;
auto lastEncTP = std::chrono::steady_clock::now();
while (!_isExit) {
if (std::chrono::steady_clock::now() - lastEncTP > std::chrono::milliseconds(frameInterval)) {
if (std::chrono::steady_clock::now() - lastEncTP >
std::chrono::milliseconds(frameInterval)) {
lastEncTP = std::chrono::steady_clock::now();
_dev->inputYUV((char**)_buffer->get()->data, _buffer->get()->linesize, pts);
@ -326,9 +275,8 @@ void VideoStack::start()
});
}
void VideoStack::initBgColor()
{
//填充底色
void VideoStack::initBgColor() {
// 填充底色
auto R = 20;
auto G = 20;
auto B = 20;
@ -342,27 +290,19 @@ void VideoStack::initBgColor()
memset(_buffer->get()->data[2], V, _buffer->get()->linesize[2] * _height / 2);
}
Channel::Ptr VideoStackManager::getChannel(const std::string& id,
int width,
int height,
AVPixelFormat pixfmt)
{
Channel::Ptr VideoStackManager::getChannel(const std::string& id, int width, int height,
AVPixelFormat pixfmt) {
std::lock_guard<std::recursive_mutex> lock(_mx);
auto key = id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt);
auto it = _channelMap.find(key);
if (it != _channelMap.end()) {
return it->second->acquire();
}
if (it != _channelMap.end()) { return it->second->acquire(); }
return createChannel(id, width, height, pixfmt);
}
void VideoStackManager::unrefChannel(const std::string& id,
int width,
int height,
AVPixelFormat pixfmt)
{
void VideoStackManager::unrefChannel(const std::string& id, int width, int height,
AVPixelFormat pixfmt) {
std::lock_guard<std::recursive_mutex> lock(_mx);
auto key = id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt);
@ -377,8 +317,7 @@ void VideoStackManager::unrefChannel(const std::string& id,
}
}
int VideoStackManager::startVideoStack(const Json::Value& json)
{
int VideoStackManager::startVideoStack(const Json::Value& json) {
std::string id;
int width, height;
@ -392,8 +331,7 @@ int VideoStackManager::startVideoStack(const Json::Value& json)
auto stack = std::make_shared<VideoStack>(id, width, height);
for (auto& p : (*params)) {
if (!p)
continue;
if (!p) continue;
p->weak_chn = getChannel(p->id, p->width, p->height, p->pixfmt);
}
@ -405,13 +343,13 @@ int VideoStackManager::startVideoStack(const Json::Value& json)
return 0;
}
int VideoStackManager::resetVideoStack(const Json::Value& json)
{
int VideoStackManager::resetVideoStack(const Json::Value& json) {
std::string id;
int width, height;
auto params = parseParams(json, id, width, height);
if (!params) {
ErrorL << "Videostack parse params failed!";
return -1;
}
@ -419,15 +357,12 @@ int VideoStackManager::resetVideoStack(const Json::Value& json)
{
std::lock_guard<std::recursive_mutex> lock(_mx);
auto it = _stackMap.find(id);
if (it == _stackMap.end()) {
return -2;
}
if (it == _stackMap.end()) { return -2; }
stack = it->second;
}
for (auto& p : (*params)) {
if (!p)
continue;
if (!p) continue;
p->weak_chn = getChannel(p->id, p->width, p->height, p->pixfmt);
}
@ -435,8 +370,7 @@ int VideoStackManager::resetVideoStack(const Json::Value& json)
return 0;
}
int VideoStackManager::stopVideoStack(const std::string& id)
{
int VideoStackManager::stopVideoStack(const std::string& id) {
std::lock_guard<std::recursive_mutex> lock(_mx);
auto it = _stackMap.find(id);
if (it != _stackMap.end()) {
@ -447,93 +381,90 @@ int VideoStackManager::stopVideoStack(const std::string& id)
return -1;
}
mediakit::FFmpegFrame::Ptr VideoStackManager::getBgImg()
{
return _bgImg;
}
mediakit::FFmpegFrame::Ptr VideoStackManager::getBgImg() { return _bgImg; }
Params VideoStackManager::parseParams(const Json::Value& json,
std::string& id,
int& width,
int& height)
{
try {
id = json["id"].asString();
width = json["width"].asInt();
height = json["height"].asInt();
int rows = json["row"].asInt(); //堆叠行数
int cols = json["col"].asInt(); //堆叠列数
float gapv = json["gapv"].asFloat(); //垂直间距
float gaph = json["gaph"].asFloat(); //水平间距
//单个间距
int gaphPix = static_cast<int>(round(width * gaph));
int gapvPix = static_cast<int>(round(height * gapv));
// 根据间距计算格子宽高
int gridWidth = cols > 1 ? (width - gaphPix * (cols - 1)) / cols : width;
int gridHeight = rows > 1 ? (height - gapvPix * (rows - 1)) / rows : height;
auto params = std::make_shared<std::vector<Param::Ptr>>(rows * cols);
for (int row = 0; row < rows; row++) {
for (int col = 0; col < cols; col++) {
std::string url = json["url"][row][col].asString();
auto param = std::make_shared<Param>();
param->posX = gridWidth * col + col * gaphPix;
param->posY = gridHeight * row + row * gapvPix;
param->width = gridWidth;
param->height = gridHeight;
param->id = url;
(*params)[row * cols + col] = param;
}
}
//判断是否需要合并格子 (焦点屏)
if (!json["span"].empty() && json.isMember("span")) {
for (const auto& subArray : json["span"]) {
if (!subArray.isArray() || subArray.size() != 2) {
throw Json::LogicError("Incorrect 'span' sub-array format in JSON");
}
std::array<int, 4> mergePos;
int index = 0;
for (const auto& innerArray : subArray) {
if (!innerArray.isArray() || innerArray.size() != 2) {
throw Json::LogicError("Incorrect 'span' inner-array format in JSON");
}
for (const auto& number : innerArray) {
if (index < mergePos.size()) {
mergePos[index++] = number.asInt();
}
}
}
for (int i = mergePos[0]; i <= mergePos[2]; i++) {
for (int j = mergePos[1]; j <= mergePos[3]; j++) {
if (i == mergePos[0] && j == mergePos[1]) {
(*params)[i * cols + j]->width = (mergePos[3] - mergePos[1] + 1) * gridWidth + (mergePos[3] - mergePos[1]) * gapvPix;
(*params)[i * cols + j]->height = (mergePos[2] - mergePos[0] + 1) * gridHeight + (mergePos[2] - mergePos[0]) * gaphPix;
} else {
(*params)[i * cols + j] = nullptr;
}
}
}
}
}
return params;
} catch (const std::exception& e) {
ErrorL << "Videostack parse params failed! " << e.what();
return nullptr;
template<typename T> T getJsonValue(const Json::Value& json, const std::string& key) {
if (!json.isMember(key)) {
throw Json::LogicError("VideoStack parseParams missing required field: " + key);
}
return json[key].as<T>();
}
bool VideoStackManager::loadBgImg(const std::string& path)
{
Params VideoStackManager::parseParams(const Json::Value& json, std::string& id, int& width,
int& height) {
id = getJsonValue<std::string>(json, "id");
width = getJsonValue<int>(json, "width");
height = getJsonValue<int>(json, "height");
int rows = getJsonValue<int>(json, "row");// 行数
int cols = getJsonValue<int>(json, "col");// 列数
float gapv = json["gapv"].asFloat();// 垂直间距
float gaph = json["gaph"].asFloat();// 水平间距
// 单个间距
int gaphPix = static_cast<int>(round(width * gaph));
int gapvPix = static_cast<int>(round(height * gapv));
// 根据间距计算格子宽高
int gridWidth = cols > 1 ? (width - gaphPix * (cols - 1)) / cols : width;
int gridHeight = rows > 1 ? (height - gapvPix * (rows - 1)) / rows : height;
auto params = std::make_shared<std::vector<Param::Ptr>>(rows * cols);
for (int row = 0; row < rows; row++) {
for (int col = 0; col < cols; col++) {
std::string url = json["url"][row][col].asString();
auto param = std::make_shared<Param>();
param->posX = gridWidth * col + col * gaphPix;
param->posY = gridHeight * row + row * gapvPix;
param->width = gridWidth;
param->height = gridHeight;
param->id = url;
(*params)[row * cols + col] = param;
}
}
// 判断是否需要合并格子 (焦点屏)
if (json.isMember("span") && json["span"].isArray() && json["span"].size() > 0) {
for (const auto& subArray : json["span"]) {
if (!subArray.isArray() || subArray.size() != 2) {
throw Json::LogicError("Incorrect 'span' sub-array format in JSON");
}
std::array<int, 4> mergePos;
unsigned int index = 0;
for (const auto& innerArray : subArray) {
if (!innerArray.isArray() || innerArray.size() != 2) {
throw Json::LogicError("Incorrect 'span' inner-array format in JSON");
}
for (const auto& number : innerArray) {
if (index < mergePos.size()) { mergePos[index++] = number.asInt(); }
}
}
for (int i = mergePos[0]; i <= mergePos[2]; i++) {
for (int j = mergePos[1]; j <= mergePos[3]; j++) {
if (i == mergePos[0] && j == mergePos[1]) {
(*params)[i * cols + j]->width =
(mergePos[3] - mergePos[1] + 1) * gridWidth +
(mergePos[3] - mergePos[1]) * gapvPix;
(*params)[i * cols + j]->height =
(mergePos[2] - mergePos[0] + 1) * gridHeight +
(mergePos[2] - mergePos[0]) * gaphPix;
} else {
(*params)[i * cols + j] = nullptr;
}
}
}
}
}
return params;
}
bool VideoStackManager::loadBgImg(const std::string& path) {
_bgImg = std::make_shared<mediakit::FFmpegFrame>();
_bgImg->get()->width = 1280;
@ -543,21 +474,21 @@ bool VideoStackManager::loadBgImg(const std::string& path)
av_frame_get_buffer(_bgImg->get(), 32);
std::ifstream file(path, std::ios::binary);
if (!file.is_open()) {
return false;
}
if (!file.is_open()) { return false; }
file.read((char*)_bgImg->get()->data[0], _bgImg->get()->linesize[0] * _bgImg->get()->height); // Y
file.read((char*)_bgImg->get()->data[1], _bgImg->get()->linesize[1] * _bgImg->get()->height / 2); // U
file.read((char*)_bgImg->get()->data[2], _bgImg->get()->linesize[2] * _bgImg->get()->height / 2); // V
file.read((char*)_bgImg->get()->data[0],
_bgImg->get()->linesize[0] * _bgImg->get()->height);// Y
file.read((char*)_bgImg->get()->data[1],
_bgImg->get()->linesize[1] * _bgImg->get()->height / 2);// U
file.read((char*)_bgImg->get()->data[2],
_bgImg->get()->linesize[2] * _bgImg->get()->height / 2);// V
return true;
}
Channel::Ptr VideoStackManager::createChannel(const std::string& id,
int width,
int height,
AVPixelFormat pixfmt)
{
void VideoStackManager::clear() { _stackMap.clear(); }
Channel::Ptr VideoStackManager::createChannel(const std::string& id, int width, int height,
AVPixelFormat pixfmt) {
std::lock_guard<std::recursive_mutex> lock(_mx);
StackPlayer::Ptr player;
@ -568,24 +499,24 @@ Channel::Ptr VideoStackManager::createChannel(const std::string& id,
player = createPlayer(id);
}
auto refChn = std::make_shared<RefWrapper<Channel::Ptr>>(std::make_shared<Channel>(id, width, height, pixfmt));
auto refChn = std::make_shared<RefWrapper<Channel::Ptr>>(
std::make_shared<Channel>(id, width, height, pixfmt));
auto chn = refChn->acquire();
player->addChannel(chn);
_channelMap[id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt)] = refChn;
_channelMap[id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt)] =
refChn;
return chn;
}
StackPlayer::Ptr VideoStackManager::createPlayer(const std::string& id)
{
StackPlayer::Ptr VideoStackManager::createPlayer(const std::string& id) {
std::lock_guard<std::recursive_mutex> lock(_mx);
auto refPlayer = std::make_shared<RefWrapper<StackPlayer::Ptr>>(std::make_shared<StackPlayer>(id));
auto refPlayer =
std::make_shared<RefWrapper<StackPlayer::Ptr>>(std::make_shared<StackPlayer>(id));
_playerMap[id] = refPlayer;
auto player = refPlayer->acquire();
if (!id.empty()) {
player->play();
}
if (!id.empty()) { player->play(); }
return player;
}

View File

@ -5,29 +5,23 @@
#include "Player/MediaPlayer.h"
#include "json/json.h"
#include <mutex>
template <typename T>
class RefWrapper {
public:
template<typename T> class RefWrapper {
public:
using Ptr = std::shared_ptr<RefWrapper<T>>;
template <typename... Args>
explicit RefWrapper(Args&&... args)
: _rc(0)
, _entity(std::forward<Args>(args)...)
{
}
template<typename... Args>
explicit RefWrapper(Args&&... args) : _rc(0), _entity(std::forward<Args>(args)...) {}
T acquire()
{
T acquire() {
++_rc;
return _entity;
}
bool dispose() { return --_rc <= 0; }
private:
T _entity;
private:
std::atomic<int> _rc;
T _entity;
};
class Channel;
@ -40,7 +34,7 @@ struct Param {
int width = 0;
int height = 0;
AVPixelFormat pixfmt = AV_PIX_FMT_YUV420P;
std::string id {};
std::string id{};
// runtime
std::weak_ptr<Channel> weak_chn;
@ -52,7 +46,7 @@ struct Param {
using Params = std::shared_ptr<std::vector<Param::Ptr>>;
class Channel : public std::enable_shared_from_this<Channel> {
public:
public:
using Ptr = std::shared_ptr<Channel>;
Channel(const std::string& id, int width, int height, AVPixelFormat pixfmt);
@ -63,12 +57,12 @@ class Channel : public std::enable_shared_from_this<Channel> {
void fillBuffer(const Param::Ptr& p);
protected:
protected:
void forEachParam(const std::function<void(const Param::Ptr&)>& func);
void copyData(const mediakit::FFmpegFrame::Ptr& buf, const Param::Ptr& p);
private:
private:
std::string _id;
int _width;
int _height;
@ -84,13 +78,10 @@ class Channel : public std::enable_shared_from_this<Channel> {
};
class StackPlayer : public std::enable_shared_from_this<StackPlayer> {
public:
public:
using Ptr = std::shared_ptr<StackPlayer>;
StackPlayer(const std::string& url)
: _url(url)
{
}
StackPlayer(const std::string& url) : _url(url) {}
void addChannel(const std::weak_ptr<Channel>& chn);
@ -100,14 +91,14 @@ class StackPlayer : public std::enable_shared_from_this<StackPlayer> {
void onDisconnect();
protected:
protected:
void rePlay(const std::string& url);
private:
private:
std::string _url;
mediakit::MediaPlayer::Ptr _player;
//用于断线重连
// 用于断线重连
toolkit::Timer::Ptr _timer;
int _failedCount = 0;
@ -116,15 +107,12 @@ class StackPlayer : public std::enable_shared_from_this<StackPlayer> {
};
class VideoStack {
public:
public:
using Ptr = std::shared_ptr<VideoStack>;
VideoStack(const std::string& url,
int width = 1920,
int height = 1080,
AVPixelFormat pixfmt = AV_PIX_FMT_YUV420P,
float fps = 25.0,
int bitRate = 2 * 1024 * 1024);
VideoStack(const std::string& url, int width = 1920, int height = 1080,
AVPixelFormat pixfmt = AV_PIX_FMT_YUV420P, float fps = 25.0,
int bitRate = 2 * 1024 * 1024);
~VideoStack();
@ -132,15 +120,15 @@ class VideoStack {
void start();
protected:
protected:
void initBgColor();
public:
public:
Params _params;
mediakit::FFmpegFrame::Ptr _buffer;
private:
private:
std::string _id;
int _width;
int _height;
@ -156,53 +144,47 @@ class VideoStack {
};
class VideoStackManager {
public:
static VideoStackManager& Instance();
Channel::Ptr getChannel(const std::string& id,
int width,
int height,
AVPixelFormat pixfmt);
void unrefChannel(const std::string& id,
int width,
int height,
AVPixelFormat pixfmt);
public:
// 创建拼接流
int startVideoStack(const Json::Value& json);
// 停止拼接流
int stopVideoStack(const std::string& id);
// 可以在不断流的情况下,修改拼接流的配置(实现切换拼接屏内容)
int resetVideoStack(const Json::Value& json);
int stopVideoStack(const std::string& id);
public:
static VideoStackManager& Instance();
Channel::Ptr getChannel(const std::string& id, int width, int height, AVPixelFormat pixfmt);
void unrefChannel(const std::string& id, int width, int height, AVPixelFormat pixfmt);
bool loadBgImg(const std::string& path);
void clear();
mediakit::FFmpegFrame::Ptr getBgImg();
protected:
Params parseParams(const Json::Value& json,
std::string& id,
int& width,
int& height);
protected:
Params parseParams(const Json::Value& json, std::string& id, int& width, int& height);
protected:
Channel::Ptr createChannel(const std::string& id,
int width,
int height,
AVPixelFormat pixfmt);
protected:
Channel::Ptr createChannel(const std::string& id, int width, int height, AVPixelFormat pixfmt);
StackPlayer::Ptr createPlayer(const std::string& id);
private:
private:
mediakit::FFmpegFrame::Ptr _bgImg;
private:
private:
std::recursive_mutex _mx;
std::unordered_map<std::string, VideoStack::Ptr> _stackMap;
std::unordered_map<std::string, RefWrapper<Channel::Ptr>::Ptr> _channelMap;
std::unordered_map<std::string, RefWrapper<StackPlayer::Ptr>::Ptr> _playerMap;
};
#endif

View File

@ -8,6 +8,7 @@
* may be found in the AUTHORS file in the root of the source tree.
*/
#include <exception>
#include <sys/stat.h>
#include <math.h>
#include <signal.h>
@ -45,7 +46,7 @@
#include "Http/HttpRequester.h"
#include "Player/PlayerProxy.h"
#include "Pusher/PusherProxy.h"
#include "Rtp/RtpSelector.h"
#include "Rtp/RtpProcess.h"
#include "Record/MP4Reader.h"
#if defined(ENABLE_RTPPROXY)
@ -476,18 +477,19 @@ Value makeMediaSourceJson(MediaSource &media){
}
#if defined(ENABLE_RTPPROXY)
uint16_t openRtpServer(uint16_t local_port, const string &stream_id, int tcp_mode, const string &local_ip, bool re_use_port, uint32_t ssrc, int only_track, bool multiplex) {
if (s_rtp_server.find(stream_id)) {
//为了防止RtpProcess所有权限混乱的问题不允许重复添加相同的stream_id
uint16_t openRtpServer(uint16_t local_port, const mediakit::MediaTuple &tuple, int tcp_mode, const string &local_ip, bool re_use_port, uint32_t ssrc, int only_track, bool multiplex) {
auto key = tuple.shortUrl();
if (s_rtp_server.find(key)) {
//为了防止RtpProcess所有权限混乱的问题不允许重复添加相同的key
return 0;
}
auto server = s_rtp_server.makeWithAction(stream_id, [&](RtpServer::Ptr server) {
server->start(local_port, stream_id, (RtpServer::TcpMode)tcp_mode, local_ip.c_str(), re_use_port, ssrc, only_track, multiplex);
auto server = s_rtp_server.makeWithAction(key, [&](RtpServer::Ptr server) {
server->start(local_port, tuple, (RtpServer::TcpMode)tcp_mode, local_ip.c_str(), re_use_port, ssrc, only_track, multiplex);
});
server->setOnDetach([stream_id]() {
server->setOnDetach([key](const SockException &ex) {
//设置rtp超时移除事件
s_rtp_server.erase(stream_id);
s_rtp_server.erase(key);
});
//回复json
@ -1198,8 +1200,16 @@ void installWebApi() {
api_regist("/index/api/getRtpInfo",[](API_ARGS_MAP){
CHECK_SECRET();
CHECK_ARGS("stream_id");
auto process = RtpSelector::Instance().getProcess(allArgs["stream_id"], false);
std::string vhost = DEFAULT_VHOST;
if (!allArgs["vhost"].empty()) {
vhost = allArgs["vhost"];
}
std::string app = kRtpAppName;
if (!allArgs["app"].empty()) {
app = allArgs["app"];
}
auto src = MediaSource::find(vhost, app, allArgs["stream_id"]);
auto process = src ? src->getRtpProcess() : nullptr;
if (!process) {
val["exist"] = false;
return;
@ -1211,7 +1221,16 @@ void installWebApi() {
api_regist("/index/api/openRtpServer",[](API_ARGS_MAP){
CHECK_SECRET();
CHECK_ARGS("port", "stream_id");
std::string vhost = DEFAULT_VHOST;
if (!allArgs["vhost"].empty()) {
vhost = allArgs["vhost"];
}
std::string app = kRtpAppName;
if (!allArgs["app"].empty()) {
app = allArgs["app"];
}
auto stream_id = allArgs["stream_id"];
auto tuple = MediaTuple { vhost, app, stream_id, "" };
auto tcp_mode = allArgs["tcp_mode"].as<int>();
if (allArgs["enable_tcp"].as<int>() && !tcp_mode) {
//兼容老版本请求新版本去除enable_tcp参数并新增tcp_mode参数
@ -1226,40 +1245,50 @@ void installWebApi() {
if (!allArgs["local_ip"].empty()) {
local_ip = allArgs["local_ip"];
}
auto port = openRtpServer(allArgs["port"], stream_id, tcp_mode, local_ip, allArgs["re_use_port"].as<bool>(),
auto port = openRtpServer(allArgs["port"], tuple, tcp_mode, local_ip, allArgs["re_use_port"].as<bool>(),
allArgs["ssrc"].as<uint32_t>(), only_track);
if (port == 0) {
throw InvalidArgsException("该stream_id已存在");
throw InvalidArgsException("This stream already exists");
}
//回复json
val["port"] = port;
});
api_regist("/index/api/openRtpServerMultiplex", [](API_ARGS_MAP) {
CHECK_SECRET();
CHECK_ARGS("port", "stream_id");
auto stream_id = allArgs["stream_id"];
auto tcp_mode = allArgs["tcp_mode"].as<int>();
if (allArgs["enable_tcp"].as<int>() && !tcp_mode) {
// 兼容老版本请求新版本去除enable_tcp参数并新增tcp_mode参数
tcp_mode = 1;
}
auto only_track = allArgs["only_track"].as<int>();
if (allArgs["only_audio"].as<bool>()) {
// 兼容老版本请求新版本去除only_audio参数并新增only_track参数
only_track = 1;
}
std::string local_ip = "::";
if (!allArgs["local_ip"].empty()) {
local_ip = allArgs["local_ip"];
}
auto port = openRtpServer(allArgs["port"], stream_id, tcp_mode, local_ip, true, 0, only_track,true);
if (port == 0) {
throw InvalidArgsException("该stream_id已存在");
}
// 回复json
val["port"] = port;
});
api_regist("/index/api/openRtpServerMultiplex", [](API_ARGS_MAP) {
CHECK_SECRET();
CHECK_ARGS("port", "stream_id");
std::string vhost = DEFAULT_VHOST;
if (!allArgs["vhost"].empty()) {
vhost = allArgs["vhost"];
}
std::string app = kRtpAppName;
if (!allArgs["app"].empty()) {
app = allArgs["app"];
}
auto stream_id = allArgs["stream_id"];
auto tuple = MediaTuple { vhost, app, stream_id, "" };
auto tcp_mode = allArgs["tcp_mode"].as<int>();
if (allArgs["enable_tcp"].as<int>() && !tcp_mode) {
// 兼容老版本请求新版本去除enable_tcp参数并新增tcp_mode参数
tcp_mode = 1;
}
auto only_track = allArgs["only_track"].as<int>();
if (allArgs["only_audio"].as<bool>()) {
// 兼容老版本请求新版本去除only_audio参数并新增only_track参数
only_track = 1;
}
std::string local_ip = "::";
if (!allArgs["local_ip"].empty()) {
local_ip = allArgs["local_ip"];
}
auto port = openRtpServer(allArgs["port"], tuple, tcp_mode, local_ip, true, 0, only_track, true);
if (port == 0) {
throw InvalidArgsException("This stream already exists");
}
// 回复json
val["port"] = port;
});
api_regist("/index/api/connectRtpServer", [](API_ARGS_MAP_ASYNC) {
CHECK_SECRET();
@ -1272,9 +1301,19 @@ void installWebApi() {
invoker(200, headerOut, val.toStyledString());
};
auto server = s_rtp_server.find(allArgs["stream_id"]);
std::string vhost = DEFAULT_VHOST;
if (!allArgs["vhost"].empty()) {
vhost = allArgs["vhost"];
}
std::string app = kRtpAppName;
if (!allArgs["app"].empty()) {
app = allArgs["app"];
}
auto stream_id = allArgs["stream_id"];
auto tuple = MediaTuple { vhost, app, stream_id, "" };
auto server = s_rtp_server.find(tuple.shortUrl());
if (!server) {
cb(SockException(Err_other, "未找到rtp服务"));
cb(SockException(Err_other, "can not find the stream"));
return;
}
server->connectToServer(allArgs["dst_url"], allArgs["dst_port"], cb);
@ -1284,7 +1323,17 @@ void installWebApi() {
CHECK_SECRET();
CHECK_ARGS("stream_id");
if(s_rtp_server.erase(allArgs["stream_id"]) == 0){
std::string vhost = DEFAULT_VHOST;
if (!allArgs["vhost"].empty()) {
vhost = allArgs["vhost"];
}
std::string app = kRtpAppName;
if (!allArgs["app"].empty()) {
app = allArgs["app"];
}
auto stream_id = allArgs["stream_id"];
auto tuple = MediaTuple { vhost, app, stream_id, "" };
if (s_rtp_server.erase(tuple.shortUrl()) == 0) {
val["hit"] = 0;
return;
}
@ -1295,7 +1344,17 @@ void installWebApi() {
CHECK_SECRET();
CHECK_ARGS("stream_id", "ssrc");
auto server = s_rtp_server.find(allArgs["stream_id"]);
std::string vhost = DEFAULT_VHOST;
if (!allArgs["vhost"].empty()) {
vhost = allArgs["vhost"];
}
std::string app = kRtpAppName;
if (!allArgs["app"].empty()) {
app = allArgs["app"];
}
auto stream_id = allArgs["stream_id"];
auto tuple = MediaTuple { vhost, app, stream_id, "" };
auto server = s_rtp_server.find(tuple.shortUrl());
if (!server) {
throw ApiRetException("RtpServer not found by stream_id", API::NotFound);
}
@ -1307,8 +1366,11 @@ void installWebApi() {
std::lock_guard<std::recursive_mutex> lck(s_rtp_server._mtx);
for (auto &pr : s_rtp_server._map) {
auto vec = split(pr.first, "/");
Value obj;
obj["stream_id"] = pr.first;
obj["vhost"] = vec[0];
obj["app"] = vec[1];
obj["stream_id"] = vec[2];
obj["port"] = pr.second->getPort();
val["data"].append(obj);
}
@ -1437,10 +1499,19 @@ void installWebApi() {
api_regist("/index/api/pauseRtpCheck", [](API_ARGS_MAP) {
CHECK_SECRET();
CHECK_ARGS("stream_id");
std::string vhost = DEFAULT_VHOST;
if (!allArgs["vhost"].empty()) {
vhost = allArgs["vhost"];
}
std::string app = kRtpAppName;
if (!allArgs["app"].empty()) {
app = allArgs["app"];
}
//只是暂停流的检查流媒体服务器做为流负载服务收流就转发RTSP/RTMP有自己暂停协议
auto rtp_process = RtpSelector::Instance().getProcess(allArgs["stream_id"], false);
if (rtp_process) {
rtp_process->setStopCheckRtp(true);
auto src = MediaSource::find(vhost, app, allArgs["stream_id"]);
auto process = src ? src->getRtpProcess() : nullptr;
if (process) {
process->setStopCheckRtp(true);
} else {
val["code"] = API::NotFound;
}
@ -1449,9 +1520,18 @@ void installWebApi() {
api_regist("/index/api/resumeRtpCheck", [](API_ARGS_MAP) {
CHECK_SECRET();
CHECK_ARGS("stream_id");
auto rtp_process = RtpSelector::Instance().getProcess(allArgs["stream_id"], false);
if (rtp_process) {
rtp_process->setStopCheckRtp(false);
std::string vhost = DEFAULT_VHOST;
if (!allArgs["vhost"].empty()) {
vhost = allArgs["vhost"];
}
std::string app = kRtpAppName;
if (!allArgs["app"].empty()) {
app = allArgs["app"];
}
auto src = MediaSource::find(vhost, app, allArgs["stream_id"]);
auto process = src ? src->getRtpProcess() : nullptr;
if (process) {
process->setStopCheckRtp(false);
} else {
val["code"] = API::NotFound;
}
@ -1869,6 +1949,7 @@ void installWebApi() {
});
#endif
#if ENABLE_MP4
api_regist("/index/api/loadMP4File", [](API_ARGS_MAP) {
CHECK_SECRET();
CHECK_ARGS("vhost", "app", "stream", "file_path");
@ -1887,6 +1968,7 @@ void installWebApi() {
// sample_ms设置为0从配置文件加载file_repeat可以指定如果配置文件也指定循环解复用那么强制开启
reader->startReadMP4(0, true, allArgs["file_repeat"]);
});
#endif
GET_CONFIG_FUNC(std::set<std::string>, download_roots, API::kDownloadRoot, [](const string &str) -> std::set<std::string> {
std::set<std::string> ret;
@ -1948,9 +2030,29 @@ void installWebApi() {
api_regist("/index/api/stack/start", [](API_ARGS_JSON_ASYNC) {
CHECK_SECRET();
auto ret = VideoStackManager::Instance().startVideoStack(allArgs.args);
val["code"] = ret;
val["msg"] = ret ? "failed" : "success";
int ret = 0;
try {
ret = VideoStackManager::Instance().startVideoStack(allArgs.args);
val["code"] = ret;
val["msg"] = ret ? "failed" : "success";
} catch (const std::exception &e) {
val["code"] = -1;
val["msg"] = e.what();
}
invoker(200, headerOut, val.toStyledString());
});
api_regist("/index/api/stack/reset", [](API_ARGS_JSON_ASYNC) {
CHECK_SECRET();
int ret = 0;
try {
auto ret = VideoStackManager::Instance().resetVideoStack(allArgs.args);
val["code"] = ret;
val["msg"] = ret ? "failed" : "success";
} catch (const std::exception &e) {
val["code"] = -1;
val["msg"] = e.what();
}
invoker(200, headerOut, val.toStyledString());
});
@ -1972,6 +2074,9 @@ void unInstallWebApi(){
#if defined(ENABLE_RTPPROXY)
s_rtp_server.clear();
#endif
#if defined(ENABLE_VIDEOSTACK) && defined(ENABLE_FFMPEG) && defined(ENABLE_X264)
VideoStackManager::Instance().clear();
#endif
NoticeCenter::Instance().delListener(&web_api_tag);
}

View File

@ -202,7 +202,7 @@ void installWebApi();
void unInstallWebApi();
#if defined(ENABLE_RTPPROXY)
uint16_t openRtpServer(uint16_t local_port, const std::string &stream_id, int tcp_mode, const std::string &local_ip, bool re_use_port, uint32_t ssrc, int only_track, bool multiplex=false);
uint16_t openRtpServer(uint16_t local_port, const mediakit::MediaTuple &tuple, int tcp_mode, const std::string &local_ip, bool re_use_port, uint32_t ssrc, int only_track, bool multiplex=false);
#endif
Json::Value makeMediaSourceJson(mediakit::MediaSource &media);

View File

@ -682,7 +682,9 @@ void installWebHook() {
ArgsType body;
body["local_port"] = local_port;
body["stream_id"] = stream_id;
body[VHOST_KEY] = tuple.vhost;
body["app"] = tuple.app;
body["stream_id"] = tuple.stream;
body["tcp_mode"] = tcp_mode;
body["re_use_port"] = re_use_port;
body["ssrc"] = ssrc;

View File

@ -321,6 +321,8 @@ int H264Encoder::inputData(char *yuv[3], int linesize[3], int64_t cts, H264Frame
_aFrames[i].iType = pNal.i_type;
_aFrames[i].iLength = pNal.i_payload;
_aFrames[i].pucData = pNal.p_payload;
_aFrames[i].dts = _pPicOut->i_dts;
_aFrames[i].pts = _pPicOut->i_pts;
}
*out_frame = _aFrames;
return iNal;

View File

@ -27,6 +27,9 @@ public:
int iType;
int iLength;
uint8_t *pucData;
int64_t dts;
int64_t pts;
} H264Frame;
H264Encoder();

View File

@ -39,7 +39,7 @@ bool DevChannel::inputYUV(char *yuv[3], int linesize[3], uint64_t cts) {
int frames = _pH264Enc->inputData(yuv, linesize, cts, &out_frames);
bool ret = false;
for (int i = 0; i < frames; i++) {
ret = inputH264((char *) out_frames[i].pucData, out_frames[i].iLength, cts) ? true : ret;
ret = inputH264((char *) out_frames[i].pucData, out_frames[i].iLength, out_frames[i].dts, out_frames[i].pts) ? true : ret;
}
return ret;
}

View File

@ -55,59 +55,13 @@ string getOriginTypeString(MediaOriginType type){
//////////////////////////////////////////////////////////////////////////////////////////////////////////////
ProtocolOption::ProtocolOption() {
GET_CONFIG(int, s_modify_stamp, Protocol::kModifyStamp);
GET_CONFIG(bool, s_enabel_audio, Protocol::kEnableAudio);
GET_CONFIG(bool, s_add_mute_audio, Protocol::kAddMuteAudio);
GET_CONFIG(bool, s_auto_close, Protocol::kAutoClose);
GET_CONFIG(uint32_t, s_continue_push_ms, Protocol::kContinuePushMS);
GET_CONFIG(uint32_t, s_paced_sender_ms, Protocol::kPacedSenderMS);
GET_CONFIG(bool, s_enable_hls, Protocol::kEnableHls);
GET_CONFIG(bool, s_enable_hls_fmp4, Protocol::kEnableHlsFmp4);
GET_CONFIG(bool, s_enable_mp4, Protocol::kEnableMP4);
GET_CONFIG(bool, s_enable_rtsp, Protocol::kEnableRtsp);
GET_CONFIG(bool, s_enable_rtmp, Protocol::kEnableRtmp);
GET_CONFIG(bool, s_enable_ts, Protocol::kEnableTS);
GET_CONFIG(bool, s_enable_fmp4, Protocol::kEnableFMP4);
GET_CONFIG(bool, s_hls_demand, Protocol::kHlsDemand);
GET_CONFIG(bool, s_rtsp_demand, Protocol::kRtspDemand);
GET_CONFIG(bool, s_rtmp_demand, Protocol::kRtmpDemand);
GET_CONFIG(bool, s_ts_demand, Protocol::kTSDemand);
GET_CONFIG(bool, s_fmp4_demand, Protocol::kFMP4Demand);
GET_CONFIG(bool, s_mp4_as_player, Protocol::kMP4AsPlayer);
GET_CONFIG(uint32_t, s_mp4_max_second, Protocol::kMP4MaxSecond);
GET_CONFIG(string, s_mp4_save_path, Protocol::kMP4SavePath);
GET_CONFIG(string, s_hls_save_path, Protocol::kHlsSavePath);
modify_stamp = s_modify_stamp;
enable_audio = s_enabel_audio;
add_mute_audio = s_add_mute_audio;
auto_close = s_auto_close;
continue_push_ms = s_continue_push_ms;
paced_sender_ms = s_paced_sender_ms;
enable_hls = s_enable_hls;
enable_hls_fmp4 = s_enable_hls_fmp4;
enable_mp4 = s_enable_mp4;
enable_rtsp = s_enable_rtsp;
enable_rtmp = s_enable_rtmp;
enable_ts = s_enable_ts;
enable_fmp4 = s_enable_fmp4;
hls_demand = s_hls_demand;
rtsp_demand = s_rtsp_demand;
rtmp_demand = s_rtmp_demand;
ts_demand = s_ts_demand;
fmp4_demand = s_fmp4_demand;
mp4_as_player = s_mp4_as_player;
mp4_max_second = s_mp4_max_second;
mp4_save_path = s_mp4_save_path;
hls_save_path = s_hls_save_path;
mINI ini;
auto &config = mINI::Instance();
static auto sz = strlen(Protocol::kFieldName);
for (auto it = config.lower_bound(Protocol::kFieldName); it != config.end() && start_with(it->first, Protocol::kFieldName); ++it) {
ini.emplace(it->first.substr(sz), it->second);
}
load(ini);
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////
@ -271,9 +225,14 @@ toolkit::EventPoller::Ptr MediaSource::getOwnerPoller() {
throw std::runtime_error(toolkit::demangle(typeid(*this).name()) + "::getOwnerPoller failed: " + getUrl());
}
std::shared_ptr<MultiMediaSourceMuxer> MediaSource::getMuxer() {
std::shared_ptr<MultiMediaSourceMuxer> MediaSource::getMuxer() const {
auto listener = _listener.lock();
return listener ? listener->getMuxer(*this) : nullptr;
return listener ? listener->getMuxer(const_cast<MediaSource&>(*this)) : nullptr;
}
std::shared_ptr<RtpProcess> MediaSource::getRtpProcess() const {
auto listener = _listener.lock();
return listener ? listener->getRtpProcess(const_cast<MediaSource&>(*this)) : nullptr;
}
void MediaSource::onReaderChanged(int size) {
@ -706,7 +665,7 @@ string MediaSourceEvent::getOriginUrl(MediaSource &sender) const {
MediaOriginType MediaSourceEventInterceptor::getOriginType(MediaSource &sender) const {
auto listener = _listener.lock();
if (!listener) {
return MediaOriginType::unknown;
return MediaSourceEvent::getOriginType(sender);
}
return listener->getOriginType(sender);
}
@ -726,7 +685,7 @@ string MediaSourceEventInterceptor::getOriginUrl(MediaSource &sender) const {
std::shared_ptr<SockInfo> MediaSourceEventInterceptor::getOriginSock(MediaSource &sender) const {
auto listener = _listener.lock();
if (!listener) {
return nullptr;
return MediaSourceEvent::getOriginSock(sender);
}
return listener->getOriginSock(sender);
}
@ -734,7 +693,7 @@ std::shared_ptr<SockInfo> MediaSourceEventInterceptor::getOriginSock(MediaSource
bool MediaSourceEventInterceptor::seekTo(MediaSource &sender, uint32_t stamp) {
auto listener = _listener.lock();
if (!listener) {
return false;
return MediaSourceEvent::seekTo(sender, stamp);
}
return listener->seekTo(sender, stamp);
}
@ -742,7 +701,7 @@ bool MediaSourceEventInterceptor::seekTo(MediaSource &sender, uint32_t stamp) {
bool MediaSourceEventInterceptor::pause(MediaSource &sender, bool pause) {
auto listener = _listener.lock();
if (!listener) {
return false;
return MediaSourceEvent::pause(sender, pause);
}
return listener->pause(sender, pause);
}
@ -750,7 +709,7 @@ bool MediaSourceEventInterceptor::pause(MediaSource &sender, bool pause) {
bool MediaSourceEventInterceptor::speed(MediaSource &sender, float speed) {
auto listener = _listener.lock();
if (!listener) {
return false;
return MediaSourceEvent::speed(sender, speed);
}
return listener->speed(sender, speed);
}
@ -758,7 +717,7 @@ bool MediaSourceEventInterceptor::speed(MediaSource &sender, float speed) {
bool MediaSourceEventInterceptor::close(MediaSource &sender) {
auto listener = _listener.lock();
if (!listener) {
return false;
return MediaSourceEvent::close(sender);
}
return listener->close(sender);
}
@ -766,7 +725,7 @@ bool MediaSourceEventInterceptor::close(MediaSource &sender) {
int MediaSourceEventInterceptor::totalReaderCount(MediaSource &sender) {
auto listener = _listener.lock();
if (!listener) {
return sender.readerCount();
return MediaSourceEvent::totalReaderCount(sender);
}
return listener->totalReaderCount(sender);
}
@ -774,44 +733,55 @@ int MediaSourceEventInterceptor::totalReaderCount(MediaSource &sender) {
void MediaSourceEventInterceptor::onReaderChanged(MediaSource &sender, int size) {
auto listener = _listener.lock();
if (!listener) {
MediaSourceEvent::onReaderChanged(sender, size);
} else {
listener->onReaderChanged(sender, size);
return MediaSourceEvent::onReaderChanged(sender, size);
}
listener->onReaderChanged(sender, size);
}
void MediaSourceEventInterceptor::onRegist(MediaSource &sender, bool regist) {
auto listener = _listener.lock();
if (listener) {
listener->onRegist(sender, regist);
if (!listener) {
return MediaSourceEvent::onRegist(sender, regist);
}
listener->onRegist(sender, regist);
}
float MediaSourceEventInterceptor::getLossRate(MediaSource &sender, TrackType type){
float MediaSourceEventInterceptor::getLossRate(MediaSource &sender, TrackType type) {
auto listener = _listener.lock();
if (listener) {
return listener->getLossRate(sender, type);
if (!listener) {
return MediaSourceEvent::getLossRate(sender, type);
}
return -1; //异常返回-1
return listener->getLossRate(sender, type);
}
toolkit::EventPoller::Ptr MediaSourceEventInterceptor::getOwnerPoller(MediaSource &sender) {
auto listener = _listener.lock();
if (listener) {
return listener->getOwnerPoller(sender);
if (!listener) {
return MediaSourceEvent::getOwnerPoller(sender);
}
throw std::runtime_error(toolkit::demangle(typeid(*this).name()) + "::getOwnerPoller failed");
return listener->getOwnerPoller(sender);
}
std::shared_ptr<MultiMediaSourceMuxer> MediaSourceEventInterceptor::getMuxer(MediaSource &sender) {
std::shared_ptr<MultiMediaSourceMuxer> MediaSourceEventInterceptor::getMuxer(MediaSource &sender) const {
auto listener = _listener.lock();
return listener ? listener->getMuxer(sender) : nullptr;
if (!listener) {
return MediaSourceEvent::getMuxer(sender);
}
return listener->getMuxer(sender);
}
std::shared_ptr<RtpProcess> MediaSourceEventInterceptor::getRtpProcess(MediaSource &sender) const {
auto listener = _listener.lock();
if (!listener) {
return MediaSourceEvent::getRtpProcess(sender);
}
return listener->getRtpProcess(sender);
}
bool MediaSourceEventInterceptor::setupRecord(MediaSource &sender, Recorder::type type, bool start, const string &custom_path, size_t max_second) {
auto listener = _listener.lock();
if (!listener) {
return false;
return MediaSourceEvent::setupRecord(sender, type, start, custom_path, max_second);
}
return listener->setupRecord(sender, type, start, custom_path, max_second);
}
@ -819,7 +789,7 @@ bool MediaSourceEventInterceptor::setupRecord(MediaSource &sender, Recorder::typ
bool MediaSourceEventInterceptor::isRecording(MediaSource &sender, Recorder::type type) {
auto listener = _listener.lock();
if (!listener) {
return false;
return MediaSourceEvent::isRecording(sender, type);
}
return listener->isRecording(sender, type);
}
@ -827,26 +797,25 @@ bool MediaSourceEventInterceptor::isRecording(MediaSource &sender, Recorder::typ
vector<Track::Ptr> MediaSourceEventInterceptor::getMediaTracks(MediaSource &sender, bool trackReady) const {
auto listener = _listener.lock();
if (!listener) {
return vector<Track::Ptr>();
return MediaSourceEvent::getMediaTracks(sender, trackReady);
}
return listener->getMediaTracks(sender, trackReady);
}
void MediaSourceEventInterceptor::startSendRtp(MediaSource &sender, const MediaSourceEvent::SendRtpArgs &args, const std::function<void(uint16_t, const toolkit::SockException &)> cb) {
auto listener = _listener.lock();
if (listener) {
listener->startSendRtp(sender, args, cb);
} else {
MediaSourceEvent::startSendRtp(sender, args, cb);
if (!listener) {
return MediaSourceEvent::startSendRtp(sender, args, cb);
}
listener->startSendRtp(sender, args, cb);
}
bool MediaSourceEventInterceptor::stopSendRtp(MediaSource &sender, const string &ssrc){
bool MediaSourceEventInterceptor::stopSendRtp(MediaSource &sender, const string &ssrc) {
auto listener = _listener.lock();
if (listener) {
return listener->stopSendRtp(sender, ssrc);
if (!listener) {
return MediaSourceEvent::stopSendRtp(sender, ssrc);
}
return false;
return listener->stopSendRtp(sender, ssrc);
}
void MediaSourceEventInterceptor::setDelegate(const std::weak_ptr<MediaSourceEvent> &listener) {
@ -856,7 +825,7 @@ void MediaSourceEventInterceptor::setDelegate(const std::weak_ptr<MediaSourceEve
_listener = listener;
}
std::shared_ptr<MediaSourceEvent> MediaSourceEventInterceptor::getDelegate() const{
std::shared_ptr<MediaSourceEvent> MediaSourceEventInterceptor::getDelegate() const {
return _listener.lock();
}

View File

@ -15,6 +15,7 @@
#include <atomic>
#include <memory>
#include <functional>
#include "Util/mini.h"
#include "Network/Socket.h"
#include "Extension/Track.h"
#include "Record/Recorder.h"
@ -41,6 +42,7 @@ enum class MediaOriginType : uint8_t {
std::string getOriginTypeString(MediaOriginType type);
class MediaSource;
class RtpProcess;
class MultiMediaSourceMuxer;
class MediaSourceEvent {
public:
@ -88,7 +90,9 @@ public:
// 获取所有track相关信息
virtual std::vector<Track::Ptr> getMediaTracks(MediaSource &sender, bool trackReady = true) const { return std::vector<Track::Ptr>(); };
// 获取MultiMediaSourceMuxer对象
virtual std::shared_ptr<MultiMediaSourceMuxer> getMuxer(MediaSource &sender) { return nullptr; }
virtual std::shared_ptr<MultiMediaSourceMuxer> getMuxer(MediaSource &sender) const { return nullptr; }
// 获取RtpProcess对象
virtual std::shared_ptr<RtpProcess> getRtpProcess(MediaSource &sender) const { return nullptr; }
class SendRtpArgs {
public:
@ -145,6 +149,14 @@ static void getArgsValue(const MAP &allArgs, const KEY &key, TYPE &value) {
}
}
template <typename KEY, typename TYPE>
static void getArgsValue(const toolkit::mINI &allArgs, const KEY &key, TYPE &value) {
auto it = allArgs.find(key);
if (it != allArgs.end()) {
value = (TYPE)it->second;
}
}
class ProtocolOption {
public:
ProtocolOption();
@ -278,7 +290,8 @@ public:
bool stopSendRtp(MediaSource &sender, const std::string &ssrc) override;
float getLossRate(MediaSource &sender, TrackType type) override;
toolkit::EventPoller::Ptr getOwnerPoller(MediaSource &sender) override;
std::shared_ptr<MultiMediaSourceMuxer> getMuxer(MediaSource &sender) override;
std::shared_ptr<MultiMediaSourceMuxer> getMuxer(MediaSource &sender) const override;
std::shared_ptr<RtpProcess> getRtpProcess(MediaSource &sender) const override;
private:
std::weak_ptr<MediaSourceEvent> _listener;
@ -395,7 +408,9 @@ public:
// 获取所在线程
toolkit::EventPoller::Ptr getOwnerPoller();
// 获取MultiMediaSourceMuxer对象
std::shared_ptr<MultiMediaSourceMuxer> getMuxer();
std::shared_ptr<MultiMediaSourceMuxer> getMuxer() const;
// 获取RtpProcess对象
std::shared_ptr<RtpProcess> getRtpProcess() const;
////////////////static方法查找或生成MediaSource////////////////

View File

@ -466,8 +466,8 @@ EventPoller::Ptr MultiMediaSourceMuxer::getOwnerPoller(MediaSource &sender) {
}
}
std::shared_ptr<MultiMediaSourceMuxer> MultiMediaSourceMuxer::getMuxer(MediaSource &sender) {
return shared_from_this();
std::shared_ptr<MultiMediaSourceMuxer> MultiMediaSourceMuxer::getMuxer(MediaSource &sender) const {
return const_cast<MultiMediaSourceMuxer*>(this)->shared_from_this();
}
bool MultiMediaSourceMuxer::onTrackReady(const Track::Ptr &track) {

View File

@ -127,7 +127,7 @@ public:
/**
*
*/
std::shared_ptr<MultiMediaSourceMuxer> getMuxer(MediaSource &sender) override;
std::shared_ptr<MultiMediaSourceMuxer> getMuxer(MediaSource &sender) const override;
const ProtocolOption &getOption() const;
const MediaTuple &getMediaTuple() const;

View File

@ -104,33 +104,32 @@ static onceToken token([]() {
} // namespace General
namespace Protocol {
#define PROTOCOL_FIELD "protocol."
const string kModifyStamp = PROTOCOL_FIELD "modify_stamp";
const string kEnableAudio = PROTOCOL_FIELD "enable_audio";
const string kAddMuteAudio = PROTOCOL_FIELD "add_mute_audio";
const string kAutoClose = PROTOCOL_FIELD "auto_close";
const string kContinuePushMS = PROTOCOL_FIELD "continue_push_ms";
const string kPacedSenderMS = PROTOCOL_FIELD "paced_sender_ms";
const string kModifyStamp = string(kFieldName) + "modify_stamp";
const string kEnableAudio = string(kFieldName) + "enable_audio";
const string kAddMuteAudio = string(kFieldName) + "add_mute_audio";
const string kAutoClose = string(kFieldName) + "auto_close";
const string kContinuePushMS = string(kFieldName) + "continue_push_ms";
const string kPacedSenderMS = string(kFieldName) + "paced_sender_ms";
const string kEnableHls = PROTOCOL_FIELD "enable_hls";
const string kEnableHlsFmp4 = PROTOCOL_FIELD "enable_hls_fmp4";
const string kEnableMP4 = PROTOCOL_FIELD "enable_mp4";
const string kEnableRtsp = PROTOCOL_FIELD "enable_rtsp";
const string kEnableRtmp = PROTOCOL_FIELD "enable_rtmp";
const string kEnableTS = PROTOCOL_FIELD "enable_ts";
const string kEnableFMP4 = PROTOCOL_FIELD "enable_fmp4";
const string kEnableHls = string(kFieldName) + "enable_hls";
const string kEnableHlsFmp4 = string(kFieldName) + "enable_hls_fmp4";
const string kEnableMP4 = string(kFieldName) + "enable_mp4";
const string kEnableRtsp = string(kFieldName) + "enable_rtsp";
const string kEnableRtmp = string(kFieldName) + "enable_rtmp";
const string kEnableTS = string(kFieldName) + "enable_ts";
const string kEnableFMP4 = string(kFieldName) + "enable_fmp4";
const string kMP4AsPlayer = PROTOCOL_FIELD "mp4_as_player";
const string kMP4MaxSecond = PROTOCOL_FIELD "mp4_max_second";
const string kMP4SavePath = PROTOCOL_FIELD "mp4_save_path";
const string kMP4AsPlayer = string(kFieldName) + "mp4_as_player";
const string kMP4MaxSecond = string(kFieldName) + "mp4_max_second";
const string kMP4SavePath = string(kFieldName) + "mp4_save_path";
const string kHlsSavePath = PROTOCOL_FIELD "hls_save_path";
const string kHlsSavePath = string(kFieldName) + "hls_save_path";
const string kHlsDemand = PROTOCOL_FIELD "hls_demand";
const string kRtspDemand = PROTOCOL_FIELD "rtsp_demand";
const string kRtmpDemand = PROTOCOL_FIELD "rtmp_demand";
const string kTSDemand = PROTOCOL_FIELD "ts_demand";
const string kFMP4Demand = PROTOCOL_FIELD "fmp4_demand";
const string kHlsDemand = string(kFieldName) + "hls_demand";
const string kRtspDemand = string(kFieldName) + "rtsp_demand";
const string kRtmpDemand = string(kFieldName) + "rtmp_demand";
const string kTSDemand = string(kFieldName) + "ts_demand";
const string kFMP4Demand = string(kFieldName) + "fmp4_demand";
static onceToken token([]() {
mINI::Instance()[kModifyStamp] = (int)ProtocolOption::kModifyStampRelative;
@ -375,6 +374,7 @@ const string kBenchmarkMode = "benchmark_mode";
const string kWaitTrackReady = "wait_track_ready";
const string kPlayTrack = "play_track";
const string kProxyUrl = "proxy_url";
const string kRtspSpeed = "rtsp_speed";
} // namespace Client
} // namespace mediakit

View File

@ -107,7 +107,7 @@ extern const std::string kBroadcastReloadConfig;
// rtp server 超时
extern const std::string kBroadcastRtpServerTimeout;
#define BroadcastRtpServerTimeoutArgs uint16_t &local_port, const string &stream_id,int &tcp_mode, bool &re_use_port, uint32_t &ssrc
#define BroadcastRtpServerTimeoutArgs uint16_t &local_port, const MediaTuple &tuple, int &tcp_mode, bool &re_use_port, uint32_t &ssrc
// rtc transport sctp 连接状态
extern const std::string kBroadcastRtcSctpConnecting;
@ -205,6 +205,7 @@ extern const std::string kBroadcastPlayerCountChanged;
} // namespace General
namespace Protocol {
static constexpr char kFieldName[] = "protocol.";
//时间戳修复这一路流标志位
extern const std::string kModifyStamp;
//转协议是否开启音频
@ -447,6 +448,8 @@ extern const std::string kWaitTrackReady;
extern const std::string kPlayTrack;
//设置代理url目前只支持http协议
extern const std::string kProxyUrl;
//设置开始rtsp倍速播放
extern const std::string kRtspSpeed;
} // namespace Client
} // namespace mediakit

View File

@ -109,6 +109,11 @@ public:
* fps
*/
virtual float getVideoFps() const { return 0; }
/**
* sps/pps
*/
virtual std::vector<Frame::Ptr> getConfigFrames() const { return std::vector<Frame::Ptr>{}; }
};
class VideoTrackImp : public VideoTrack {

View File

@ -41,7 +41,13 @@ public:
FMP4MediaSource(const MediaTuple& tuple,
int ring_size = FMP4_GOP_SIZE) : MediaSource(FMP4_SCHEMA, tuple), _ring_size(ring_size) {}
~FMP4MediaSource() override { flush(); }
~FMP4MediaSource() override {
try {
flush();
} catch (std::exception &ex) {
WarnL << ex.what();
}
}
/**
*

View File

@ -26,7 +26,13 @@ public:
_media_src = std::make_shared<FMP4MediaSource>(tuple);
}
~FMP4MediaSourceMuxer() override { MP4MuxerMemory::flush(); };
~FMP4MediaSourceMuxer() override {
try {
MP4MuxerMemory::flush();
} catch (std::exception &ex) {
WarnL << ex.what();
}
}
void setListener(const std::weak_ptr<MediaSourceEvent> &listener){
setDelegate(listener);

View File

@ -141,7 +141,9 @@ static std::shared_ptr<char> getSharedMmap(const string &file_path, int64_t &fil
return nullptr;
}
file_size = ::GetFileSize(hfile, NULL);
LARGE_INTEGER FileSize;
GetFileSizeEx(hfile, &FileSize); //GetFileSize函数的拓展可用于获取大于4G的文件大小
file_size = FileSize.QuadPart;
auto hmapping = ::CreateFileMapping(hfile, NULL, PAGE_READONLY, 0, 0, NULL);

View File

@ -24,7 +24,7 @@ namespace mediakit {
PlayerBase::Ptr PlayerBase::createPlayer(const EventPoller::Ptr &in_poller, const string &url_in) {
auto poller = in_poller ? in_poller : EventPollerPool::Instance().getPoller();
std::weak_ptr<EventPoller> weak_poller = poller;
static auto release_func = [weak_poller](PlayerBase *ptr) {
auto release_func = [weak_poller](PlayerBase *ptr) {
if (auto poller = weak_poller.lock()) {
poller->async([ptr]() {
onceToken token(nullptr, [&]() { delete ptr; });

View File

@ -84,7 +84,13 @@ public:
using Ptr = std::shared_ptr<HlsRecorder>;
template <typename ...ARGS>
HlsRecorder(ARGS && ...args) : HlsRecorderBase<MpegMuxer>(false, std::forward<ARGS>(args)...) {}
~HlsRecorder() override { this->flush(); }
~HlsRecorder() override {
try {
this->flush();
} catch (std::exception &ex) {
WarnL << ex.what();
}
}
private:
void onWrite(std::shared_ptr<toolkit::Buffer> buffer, uint64_t timestamp, bool key_pos) override {
@ -102,7 +108,13 @@ public:
using Ptr = std::shared_ptr<HlsFMP4Recorder>;
template <typename ...ARGS>
HlsFMP4Recorder(ARGS && ...args) : HlsRecorderBase<MP4MuxerMemory>(true, std::forward<ARGS>(args)...) {}
~HlsFMP4Recorder() override { this->flush(); }
~HlsFMP4Recorder() override {
try {
this->flush();
} catch (std::exception &ex) {
WarnL << ex.what();
}
}
void addTrackCompleted() override {
HlsRecorderBase<MP4MuxerMemory>::addTrackCompleted();

View File

@ -48,7 +48,13 @@ public:
*/
RtmpMediaSource(const MediaTuple& tuple, int ring_size = RTMP_GOP_SIZE): MediaSource(RTMP_SCHEMA, tuple), _ring_size(ring_size) {}
~RtmpMediaSource() override { flush(); }
~RtmpMediaSource() override {
try {
flush();
} catch (std::exception &ex) {
WarnL << ex.what();
}
}
/**
*

View File

@ -87,10 +87,9 @@ DecoderImp::DecoderImp(const Decoder::Ptr &decoder, MediaSinkInterface *sink){
void DecoderImp::onStream(int stream, int codecid, const void *extra, size_t bytes, int finish) {
// G711传统只支持 8000/1/16的规格FFmpeg貌似做了扩展但是这里不管它了
auto track = Factory::getTrackByCodecId(getCodecByMpegId(codecid), 8000, 1, 16);
if (!track) {
return;
if (track) {
onTrack(stream, std::move(track));
}
onTrack(stream, std::move(track));
// 防止未获取视频track提前complete导致忽略后续视频的问题用于兼容一些不太规范的ps流
if (finish && _have_video) {
_sink->addTrackCompleted();

View File

@ -11,26 +11,26 @@
#if defined(ENABLE_RTPPROXY)
#include "GB28181Process.h"
#include "RtpProcess.h"
#include "RtpSelector.h"
#include "Http/HttpTSPlayer.h"
#include "Util/File.h"
#include "Common/config.h"
using namespace std;
using namespace toolkit;
static constexpr char kRtpAppName[] = "rtp";
//在创建_muxer对象前(也就是推流鉴权成功前)需要先缓存frame这样可以防止丢包提高体验
//但是同时需要控制缓冲长度,防止内存溢出。200帧数据大概有10秒数据应该足矣等待鉴权hook返回
static constexpr size_t kMaxCachedFrame = 200;
//但是同时需要控制缓冲长度,防止内存溢出。最多缓存10秒数据应该足矣等待鉴权hook返回
static constexpr size_t kMaxCachedFrameMS = 10 * 1000;
namespace mediakit {
RtpProcess::RtpProcess(const string &stream_id) {
_media_info.schema = kRtpAppName;
_media_info.vhost = DEFAULT_VHOST;
_media_info.app = kRtpAppName;
_media_info.stream = stream_id;
RtpProcess::Ptr RtpProcess::createProcess(const MediaTuple &tuple) {
RtpProcess::Ptr ret(new RtpProcess(tuple));
ret->createTimer();
return ret;
}
RtpProcess::RtpProcess(const MediaTuple &tuple) {
static_cast<MediaTuple &>(_media_info) = tuple;
GET_CONFIG(string, dump_dir, RtpProxy::kDumpDir);
{
@ -75,6 +75,25 @@ RtpProcess::~RtpProcess() {
}
}
void RtpProcess::onManager() {
if (!alive()) {
onDetach(SockException(Err_timeout, "RtpProcess timeout"));
}
}
void RtpProcess::createTimer() {
//创建超时管理定时器
weak_ptr<RtpProcess> weakSelf = shared_from_this();
_timer = std::make_shared<Timer>(3.0f, [weakSelf] {
auto strongSelf = weakSelf.lock();
if (!strongSelf) {
return false;
}
strongSelf->onManager();
return true;
}, EventPollerPool::Instance().getPoller());
}
bool RtpProcess::inputRtp(bool is_udp, const Socket::Ptr &sock, const char *data, size_t len, const struct sockaddr *addr, uint64_t *dts_out) {
if (!isRtp(data, len)) {
WarnP(this) << "Not rtp packet";
@ -90,6 +109,7 @@ bool RtpProcess::inputRtp(bool is_udp, const Socket::Ptr &sock, const char *data
_addr.reset(new sockaddr_storage(*((sockaddr_storage *)addr)));
if (first) {
emitOnPublish();
_cache_ticker.resetTime();
}
}
@ -130,8 +150,8 @@ bool RtpProcess::inputFrame(const Frame::Ptr &frame) {
_last_frame_time.resetTime();
return _muxer->inputFrame(frame);
}
if (_cached_func.size() > kMaxCachedFrame) {
WarnL << "cached frame of track(" << frame->getCodecName() << ") is too much, now dropped, please check your on_publish hook url in config.ini file";
if (_cache_ticker.elapsedTime() > kMaxCachedFrameMS) {
WarnL << "Cached frame of stream(" << _media_info.stream << ") is too much, your on_publish hook responded too late!";
return false;
}
auto frame_cached = Frame::getCacheAbleFrame(frame);
@ -203,13 +223,14 @@ void RtpProcess::setOnlyTrack(OnlyTrack only_track) {
_only_track = only_track;
}
void RtpProcess::onDetach() {
void RtpProcess::onDetach(const SockException &ex) {
if (_on_detach) {
_on_detach();
WarnL << ex << ", stream_id: " << getIdentifier();
_on_detach(ex);
}
}
void RtpProcess::setOnDetach(function<void()> cb) {
void RtpProcess::setOnDetach(onDetachCB cb) {
_on_detach = std::move(cb);
}
@ -256,9 +277,6 @@ void RtpProcess::emitOnPublish() {
}
if (err.empty()) {
strong_self->_muxer = std::make_shared<MultiMediaSourceMuxer>(strong_self->_media_info, 0.0f, option);
if (!option.stream_replace.empty()) {
RtpSelector::Instance().addStreamReplace(strong_self->_media_info.stream, option.stream_replace);
}
switch (strong_self->_only_track) {
case kOnlyAudio: strong_self->_muxer->setOnlyAudio(); break;
case kOnlyVideo: strong_self->_muxer->enableAudio(false); break;
@ -294,6 +312,15 @@ std::shared_ptr<SockInfo> RtpProcess::getOriginSock(MediaSource &sender) const {
return const_cast<RtpProcess *>(this)->shared_from_this();
}
RtpProcess::Ptr RtpProcess::getRtpProcess(mediakit::MediaSource &sender) const {
return const_cast<RtpProcess *>(this)->shared_from_this();
}
bool RtpProcess::close(mediakit::MediaSource &sender) {
onDetach(SockException(Err_shutdown, "close media"));
return true;
}
toolkit::EventPoller::Ptr RtpProcess::getOwnerPoller(MediaSource &sender) {
if (_sock) {
return _sock->getPoller();

View File

@ -18,11 +18,14 @@
namespace mediakit {
class RtpProcess final : public RtcpContextForRecv, public toolkit::SockInfo, public MediaSinkInterface, public MediaSourceEventInterceptor, public std::enable_shared_from_this<RtpProcess>{
static constexpr char kRtpAppName[] = "rtp";
class RtpProcess final : public RtcpContextForRecv, public toolkit::SockInfo, public MediaSinkInterface, public MediaSourceEvent, public std::enable_shared_from_this<RtpProcess>{
public:
using Ptr = std::shared_ptr<RtpProcess>;
friend class RtpProcessHelper;
RtpProcess(const std::string &stream_id);
using onDetachCB = std::function<void(const toolkit::SockException &ex)>;
static Ptr createProcess(const MediaTuple &tuple);
~RtpProcess();
enum OnlyTrack { kAll = 0, kOnlyAudio = 1, kOnlyVideo = 2 };
@ -38,20 +41,16 @@ public:
*/
bool inputRtp(bool is_udp, const toolkit::Socket::Ptr &sock, const char *data, size_t len, const struct sockaddr *addr , uint64_t *dts_out = nullptr);
/**
*
*/
bool alive();
/**
* RtpSelector移除时触发
*/
void onDetach();
void onDetach(const toolkit::SockException &ex);
/**
* onDetach事件回调
*/
void setOnDetach(std::function<void()> cb);
void setOnDetach(onDetachCB cb);
/**
* onDetach事件回调,false检查RTP超时true停止
@ -88,10 +87,17 @@ protected:
std::shared_ptr<SockInfo> getOriginSock(MediaSource &sender) const override;
toolkit::EventPoller::Ptr getOwnerPoller(MediaSource &sender) override;
float getLossRate(MediaSource &sender, TrackType type) override;
Ptr getRtpProcess(mediakit::MediaSource &sender) const override;
bool close(mediakit::MediaSource &sender) override;
private:
RtpProcess(const MediaTuple &tuple);
void emitOnPublish();
void doCachedFunc();
bool alive();
void onManager();
void createTimer();
private:
OnlyTrack _only_track = kAll;
@ -102,14 +108,16 @@ private:
toolkit::Socket::Ptr _sock;
MediaInfo _media_info;
toolkit::Ticker _last_frame_time;
std::function<void()> _on_detach;
onDetachCB _on_detach;
std::shared_ptr<FILE> _save_file_rtp;
std::shared_ptr<FILE> _save_file_video;
ProcessInterface::Ptr _process;
MultiMediaSourceMuxer::Ptr _muxer;
std::atomic_bool _stop_rtp_check{false};
toolkit::Timer::Ptr _timer;
toolkit::Ticker _last_check_alive;
std::recursive_mutex _func_mtx;
toolkit::Ticker _cache_ticker;
std::deque<std::function<void()> > _cached_func;
};

View File

@ -1,168 +0,0 @@
/*
* Copyright (c) 2016-present The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/ZLMediaKit/ZLMediaKit).
*
* Use of this source code is governed by MIT-like license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#if defined(ENABLE_RTPPROXY)
#include <stddef.h>
#include "RtpSelector.h"
#include "RtpSplitter.h"
using namespace std;
using namespace toolkit;
namespace mediakit{
INSTANCE_IMP(RtpSelector);
void RtpSelector::clear(){
lock_guard<decltype(_mtx_map)> lck(_mtx_map);
_map_rtp_process.clear();
_map_stream_replace.clear();
}
bool RtpSelector::getSSRC(const char *data, size_t data_len, uint32_t &ssrc){
if (data_len < 12) {
return false;
}
uint32_t *ssrc_ptr = (uint32_t *) (data + 8);
ssrc = ntohl(*ssrc_ptr);
return true;
}
RtpProcess::Ptr RtpSelector::getProcess(const string &stream_id,bool makeNew) {
lock_guard<decltype(_mtx_map)> lck(_mtx_map);
string stream_id_origin = stream_id;
auto it_replace = _map_stream_replace.find(stream_id);
if (it_replace != _map_stream_replace.end()) {
stream_id_origin = it_replace->second;
}
auto it = _map_rtp_process.find(stream_id_origin);
if (it == _map_rtp_process.end() && !makeNew) {
return nullptr;
}
if (it != _map_rtp_process.end() && makeNew) {
//已经被其他线程持有了,不得再被持有,否则会存在线程安全的问题
throw ProcessExisted(StrPrinter << "RtpProcess(" << stream_id_origin << ") already existed");
}
RtpProcessHelper::Ptr &ref = _map_rtp_process[stream_id_origin];
if (!ref) {
ref = std::make_shared<RtpProcessHelper>(stream_id_origin, shared_from_this());
ref->attachEvent();
createTimer();
}
return ref->getProcess();
}
void RtpSelector::createTimer() {
if (!_timer) {
//创建超时管理定时器
weak_ptr<RtpSelector> weakSelf = shared_from_this();
_timer = std::make_shared<Timer>(3.0f, [weakSelf] {
auto strongSelf = weakSelf.lock();
if (!strongSelf) {
return false;
}
strongSelf->onManager();
return true;
}, EventPollerPool::Instance().getPoller());
}
}
void RtpSelector::delProcess(const string &stream_id,const RtpProcess *ptr) {
RtpProcess::Ptr process;
{
lock_guard<decltype(_mtx_map)> lck(_mtx_map);
auto it = _map_rtp_process.find(stream_id);
if (it == _map_rtp_process.end()) {
return;
}
if (it->second->getProcess().get() != ptr) {
return;
}
process = it->second->getProcess();
_map_rtp_process.erase(it);
delStreamReplace(stream_id);
}
process->onDetach();
}
void RtpSelector::addStreamReplace(const string &stream_id, const std::string &stream_replace) {
lock_guard<decltype(_mtx_map)> lck(_mtx_map);
_map_stream_replace[stream_replace] = stream_id;
}
void RtpSelector::delStreamReplace(const string &stream_id) {
for (auto it = _map_stream_replace.begin(); it != _map_stream_replace.end(); ++it) {
if (it->second == stream_id) {
_map_stream_replace.erase(it);
break;
}
}
}
void RtpSelector::onManager() {
List<RtpProcess::Ptr> clear_list;
{
lock_guard<decltype(_mtx_map)> lck(_mtx_map);
for (auto it = _map_rtp_process.begin(); it != _map_rtp_process.end();) {
if (it->second->getProcess()->alive()) {
++it;
continue;
}
WarnL << "RtpProcess timeout:" << it->first;
clear_list.emplace_back(it->second->getProcess());
delStreamReplace(it->first);
it = _map_rtp_process.erase(it);
}
}
clear_list.for_each([](const RtpProcess::Ptr &process) {
process->onDetach();
});
}
RtpProcessHelper::RtpProcessHelper(const string &stream_id, const weak_ptr<RtpSelector> &parent) {
_stream_id = stream_id;
_parent = parent;
_process = std::make_shared<RtpProcess>(stream_id);
}
RtpProcessHelper::~RtpProcessHelper() {
auto process = std::move(_process);
try {
// flush时确保线程安全
process->getOwnerPoller(MediaSource::NullMediaSource())->async([process]() { process->flush(); });
} catch (...) {
// 忽略getOwnerPoller可能抛出的异常
}
}
void RtpProcessHelper::attachEvent() {
//主要目的是close回调触发时能把对象从RtpSelector中删除
_process->setDelegate(shared_from_this());
}
bool RtpProcessHelper::close(MediaSource &sender) {
//此回调在其他线程触发
auto parent = _parent.lock();
if (!parent) {
return false;
}
parent->delProcess(_stream_id, _process.get());
WarnL << "close media: " << sender.getUrl();
return true;
}
RtpProcess::Ptr &RtpProcessHelper::getProcess() {
return _process;
}
}//namespace mediakit
#endif//defined(ENABLE_RTPPROXY)

View File

@ -1,89 +0,0 @@
/*
* Copyright (c) 2016-present The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/ZLMediaKit/ZLMediaKit).
*
* Use of this source code is governed by MIT-like license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#ifndef ZLMEDIAKIT_RTPSELECTOR_H
#define ZLMEDIAKIT_RTPSELECTOR_H
#if defined(ENABLE_RTPPROXY)
#include <stdint.h>
#include <mutex>
#include <unordered_map>
#include "RtpProcess.h"
#include "Common/MediaSource.h"
namespace mediakit{
class RtpSelector;
class RtpProcessHelper : public MediaSourceEvent , public std::enable_shared_from_this<RtpProcessHelper> {
public:
using Ptr = std::shared_ptr<RtpProcessHelper>;
RtpProcessHelper(const std::string &stream_id, const std::weak_ptr<RtpSelector > &parent);
~RtpProcessHelper();
void attachEvent();
RtpProcess::Ptr & getProcess();
protected:
// 通知其停止推流
bool close(MediaSource &sender) override;
private:
std::string _stream_id;
RtpProcess::Ptr _process;
std::weak_ptr<RtpSelector> _parent;
};
class RtpSelector : public std::enable_shared_from_this<RtpSelector>{
public:
class ProcessExisted : public std::runtime_error {
public:
template<typename ...T>
ProcessExisted(T && ...args) : std::runtime_error(std::forward<T>(args)...) {}
};
static bool getSSRC(const char *data,size_t data_len, uint32_t &ssrc);
static RtpSelector &Instance();
/**
*
*/
void clear();
/**
* rtp处理器
* @param stream_id id
* @param makeNew , true时
* @return rtp处理器
*/
RtpProcess::Ptr getProcess(const std::string &stream_id, bool makeNew);
/**
* rtp处理器
* @param stream_id id
* @param ptr rtp处理器指针
*/
void delProcess(const std::string &stream_id, const RtpProcess *ptr);
void addStreamReplace(const std::string &stream_id, const std::string &stream_replace);
private:
void onManager();
void createTimer();
void delStreamReplace(const std::string &stream_id);
private:
toolkit::Timer::Ptr _timer;
std::recursive_mutex _mtx_map;
std::unordered_map<std::string,RtpProcessHelper::Ptr> _map_rtp_process;
std::unordered_map<std::string,std::string> _map_stream_replace;
};
}//namespace mediakit
#endif//defined(ENABLE_RTPPROXY)
#endif //ZLMEDIAKIT_RTPSELECTOR_H

View File

@ -11,7 +11,7 @@
#if defined(ENABLE_RTPPROXY)
#include "Util/uv_errno.h"
#include "RtpServer.h"
#include "RtpSelector.h"
#include "RtpProcess.h"
#include "Rtcp/RtcpContext.h"
#include "Common/config.h"
@ -30,43 +30,39 @@ class RtcpHelper: public std::enable_shared_from_this<RtcpHelper> {
public:
using Ptr = std::shared_ptr<RtcpHelper>;
RtcpHelper(Socket::Ptr rtcp_sock, std::string stream_id) {
RtcpHelper(Socket::Ptr rtcp_sock, MediaTuple tuple) {
_rtcp_sock = std::move(rtcp_sock);
_stream_id = std::move(stream_id);
}
~RtcpHelper() {
if (_process) {
// 删除rtp处理器
RtpSelector::Instance().delProcess(_stream_id, _process.get());
}
_tuple = std::move(tuple);
}
void setRtpServerInfo(uint16_t local_port, RtpServer::TcpMode mode, bool re_use_port, uint32_t ssrc, int only_track) {
_local_port = local_port;
_tcp_mode = mode;
_re_use_port = re_use_port;
_ssrc = ssrc;
_only_track = only_track;
_process = RtpProcess::createProcess(_tuple);
_process->setOnlyTrack((RtpProcess::OnlyTrack)only_track);
_timeout_cb = [=]() mutable {
NOTICE_EMIT(BroadcastRtpServerTimeoutArgs, Broadcast::kBroadcastRtpServerTimeout, local_port, _tuple, (int)mode, re_use_port, ssrc);
};
weak_ptr<RtcpHelper> weak_self = shared_from_this();
_process->setOnDetach([weak_self](const SockException &ex) {
if (auto strong_self = weak_self.lock()) {
if (strong_self->_on_detach) {
strong_self->_on_detach(ex);
}
if (ex.getErrCode() == Err_timeout) {
strong_self->_timeout_cb();
}
}
});
}
void setOnDetach(function<void()> cb) {
if (_process) {
_process->setOnDetach(std::move(cb));
} else {
_on_detach = std::move(cb);
}
}
void setOnDetach(RtpProcess::onDetachCB cb) { _on_detach = std::move(cb); }
RtpProcess::Ptr getProcess() const { return _process; }
void onRecvRtp(const Socket::Ptr &sock, const Buffer::Ptr &buf, struct sockaddr *addr) {
if (!_process) {
_process = RtpSelector::Instance().getProcess(_stream_id, true);
_process->setOnlyTrack((RtpProcess::OnlyTrack)_only_track);
_process->setOnDetach(std::move(_on_detach));
cancelDelayTask();
}
_process->inputRtp(true, sock, buf->data(), buf->size(), addr);
// 统计rtp接受情况用于发送rr包
auto header = (RtpHeader *)buf->data();
sendRtcp(ntohl(header->ssrc), addr);
@ -92,37 +88,12 @@ public:
// 收到sr rtcp后驱动返回rr rtcp
strong_self->sendRtcp(strong_self->_ssrc, (struct sockaddr *)(strong_self->_rtcp_addr.get()));
});
GET_CONFIG(uint64_t, timeoutSec, RtpProxy::kTimeoutSec);
_delay_task = _rtcp_sock->getPoller()->doDelayTask(timeoutSec * 1000, [weak_self]() {
if (auto strong_self = weak_self.lock()) {
auto process = RtpSelector::Instance().getProcess(strong_self->_stream_id, false);
if (!process && strong_self->_on_detach) {
strong_self->_on_detach();
}
if(process && strong_self->_on_detach){// tcp 链接防止断开不删除rtpServer
process->setOnDetach(std::move(strong_self->_on_detach));
}
if (!process) { // process 未创建触发rtp server 超时事件
NOTICE_EMIT(BroadcastRtpServerTimeoutArgs, Broadcast::kBroadcastRtpServerTimeout, strong_self->_local_port, strong_self->_stream_id,
(int)strong_self->_tcp_mode, strong_self->_re_use_port, strong_self->_ssrc);
}
}
return 0;
});
}
void cancelDelayTask() {
if (_delay_task) {
_delay_task->cancel();
_delay_task = nullptr;
}
}
private:
void sendRtcp(uint32_t rtp_ssrc, struct sockaddr *addr) {
// 每5秒发送一次rtcp
if (_ticker.elapsedTime() < 5000 || !_process) {
if (_ticker.elapsedTime() < 5000) {
return;
}
_ticker.resetTime();
@ -141,25 +112,21 @@ private:
}
private:
bool _re_use_port = false;
int _only_track = 0;
uint16_t _local_port = 0;
uint32_t _ssrc = 0;
RtpServer::TcpMode _tcp_mode = RtpServer::NONE;
std::function<void()> _timeout_cb;
Ticker _ticker;
Socket::Ptr _rtcp_sock;
RtpProcess::Ptr _process;
std::string _stream_id;
function<void()> _on_detach;
MediaTuple _tuple;
RtpProcess::onDetachCB _on_detach;
std::shared_ptr<struct sockaddr_storage> _rtcp_addr;
EventPoller::DelayTask::Ptr _delay_task;
};
void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_mode, const char *local_ip, bool re_use_port, uint32_t ssrc, int only_track, bool multiplex) {
void RtpServer::start(uint16_t local_port, const MediaTuple &tuple, TcpMode tcp_mode, const char *local_ip, bool re_use_port, uint32_t ssrc, int only_track, bool multiplex) {
//创建udp服务器
Socket::Ptr rtp_socket = Socket::createSocket(nullptr, true);
Socket::Ptr rtcp_socket = Socket::createSocket(nullptr, true);
auto poller = EventPollerPool::Instance().getPoller();
Socket::Ptr rtp_socket = Socket::createSocket(poller, true);
Socket::Ptr rtcp_socket = Socket::createSocket(poller, true);
if (local_port == 0) {
//随机端口rtp端口采用偶数
auto pair = std::make_pair(rtp_socket, rtcp_socket);
@ -177,29 +144,13 @@ void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_
GET_CONFIG(int, udpRecvSocketBuffer, RtpProxy::kUdpRecvSocketBuffer);
SockUtil::setRecvBuf(rtp_socket->rawFD(), udpRecvSocketBuffer);
TcpServer::Ptr tcp_server;
_tcp_mode = tcp_mode;
if (tcp_mode == PASSIVE || tcp_mode == ACTIVE) {
//创建tcp服务器
tcp_server = std::make_shared<TcpServer>(rtp_socket->getPoller());
(*tcp_server)[RtpSession::kStreamID] = stream_id;
(*tcp_server)[RtpSession::kSSRC] = ssrc;
(*tcp_server)[RtpSession::kOnlyTrack] = only_track;
if (tcp_mode == PASSIVE) {
tcp_server->start<RtpSession>(local_port, local_ip);
} else if (stream_id.empty()) {
// tcp主动模式时只能一个端口一个流必须指定流id; 创建TcpServer对象也仅用于传参
throw std::runtime_error(StrPrinter << "tcp主动模式时必需指定流id");
}
}
//创建udp服务器
UdpServer::Ptr udp_server;
RtcpHelper::Ptr helper;
//增加了多路复用判断如果多路复用为true就走else逻辑同时保留了原来stream_id为空走else逻辑
if (!stream_id.empty() && !multiplex) {
if (!tuple.stream.empty() && !multiplex) {
//指定了流id那么一个端口一个流(不管是否包含多个ssrc的多个流绑定rtp源后会筛选掉ip端口不匹配的流)
helper = std::make_shared<RtcpHelper>(std::move(rtcp_socket), stream_id);
helper = std::make_shared<RtcpHelper>(std::move(rtcp_socket), tuple);
helper->startRtcp();
helper->setRtpServerInfo(local_port, tcp_mode, re_use_port, ssrc, only_track);
bool bind_peer_addr = false;
@ -222,14 +173,35 @@ void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_
});
} else {
//单端口多线程接收多个流根据ssrc区分流
udp_server = std::make_shared<UdpServer>(rtp_socket->getPoller());
udp_server = std::make_shared<UdpServer>();
(*udp_server)[RtpSession::kOnlyTrack] = only_track;
(*udp_server)[RtpSession::kUdpRecvBuffer] = udpRecvSocketBuffer;
udp_server->start<RtpSession>(local_port, local_ip);
rtp_socket = nullptr;
}
_on_cleanup = [rtp_socket, stream_id]() {
TcpServer::Ptr tcp_server;
if (tcp_mode == PASSIVE || tcp_mode == ACTIVE) {
auto processor = helper ? helper->getProcess() : nullptr;
// 如果共享同一个processor对象那么tcp server深圳为单线程模式确保线程安全
tcp_server = std::make_shared<TcpServer>(processor ? poller : nullptr);
(*tcp_server)[RtpSession::kVhost] = tuple.vhost;
(*tcp_server)[RtpSession::kApp] = tuple.app;
(*tcp_server)[RtpSession::kStreamID] = tuple.stream;
(*tcp_server)[RtpSession::kSSRC] = ssrc;
(*tcp_server)[RtpSession::kOnlyTrack] = only_track;
if (tcp_mode == PASSIVE) {
weak_ptr<RtpServer> weak_self = shared_from_this();
tcp_server->start<RtpSession>(local_port, local_ip, 1024, [weak_self, processor](std::shared_ptr<RtpSession> &session) {
session->setRtpProcess(processor);
});
} else if (tuple.stream.empty()) {
// tcp主动模式时只能一个端口一个流必须指定流id; 创建TcpServer对象也仅用于传参
throw std::runtime_error(StrPrinter << "tcp主动模式时必需指定流id");
}
}
_on_cleanup = [rtp_socket]() {
if (rtp_socket) {
//去除循环引用
rtp_socket->setOnRead(nullptr);
@ -240,9 +212,10 @@ void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_
_udp_server = udp_server;
_rtp_socket = rtp_socket;
_rtcp_helper = helper;
_tcp_mode = tcp_mode;
}
void RtpServer::setOnDetach(function<void()> cb) {
void RtpServer::setOnDetach(RtpProcess::onDetachCB cb) {
if (_rtcp_helper) {
_rtcp_helper->setOnDetach(std::move(cb));
}
@ -277,6 +250,7 @@ void RtpServer::connectToServer(const std::string &url, uint16_t port, const fun
void RtpServer::onConnect() {
auto rtp_session = std::make_shared<RtpSession>(_rtp_socket);
rtp_session->setRtpProcess(_rtcp_helper->getProcess());
rtp_session->attachServer(*_tcp_server);
_rtp_socket->setOnRead([rtp_session](const Buffer::Ptr &buf, struct sockaddr *addr, int addr_len) {
rtp_session->onRecv(buf);

View File

@ -43,7 +43,7 @@ public:
* @param ssrc ssrc
* @param multiplex
*/
void start(uint16_t local_port, const std::string &stream_id = "", TcpMode tcp_mode = PASSIVE,
void start(uint16_t local_port, const MediaTuple &tuple = MediaTuple{DEFAULT_VHOST, kRtpAppName, "", ""}, TcpMode tcp_mode = PASSIVE,
const char *local_ip = "::", bool re_use_port = true, uint32_t ssrc = 0, int only_track = 0, bool multiplex = false);
/**
@ -62,7 +62,7 @@ public:
/**
* RtpProcess onDetach事件回调
*/
void setOnDetach(std::function<void()> cb);
void setOnDetach(RtpProcess::onDetachCB cb);
/**
* ssrc

View File

@ -10,7 +10,7 @@
#if defined(ENABLE_RTPPROXY)
#include "RtpSession.h"
#include "RtpSelector.h"
#include "RtpProcess.h"
#include "Network/TcpServer.h"
#include "Rtsp/Rtsp.h"
#include "Rtsp/RtpReceiver.h"
@ -21,6 +21,8 @@ using namespace toolkit;
namespace mediakit{
const string RtpSession::kVhost = "vhost";
const string RtpSession::kApp = "app";
const string RtpSession::kStreamID = "stream_id";
const string RtpSession::kSSRC = "ssrc";
const string RtpSession::kOnlyTrack = "only_track";
@ -31,7 +33,9 @@ void RtpSession::attachServer(const Server &server) {
}
void RtpSession::setParams(mINI &ini) {
_stream_id = ini[kStreamID];
_tuple.vhost = ini[kVhost];
_tuple.app = ini[kApp];
_tuple.stream = ini[kStreamID];
_ssrc = ini[kSSRC];
_only_track = ini[kOnlyTrack];
int udp_socket_buffer = ini[kUdpRecvBuffer];
@ -60,28 +64,24 @@ void RtpSession::onRecv(const Buffer::Ptr &data) {
}
void RtpSession::onError(const SockException &err) {
WarnP(this) << _stream_id << " " << err;
if (_process) {
RtpSelector::Instance().delProcess(_stream_id, _process.get());
_process = nullptr;
if (_emit_detach) {
_process->onDetach(err);
}
WarnP(this) << _tuple.shortUrl() << " " << err;
}
void RtpSession::onManager() {
if (_process && !_process->alive()) {
shutdown(SockException(Err_timeout, "receive rtp timeout"));
}
if (!_process && _ticker.createdTime() > 10 * 1000) {
shutdown(SockException(Err_timeout, "illegal connection"));
}
}
void RtpSession::setRtpProcess(RtpProcess::Ptr process) {
_emit_detach = (bool)process;
_process = std::move(process);
}
void RtpSession::onRtpPacket(const char *data, size_t len) {
if (_delay_close) {
// 正在延时关闭中,忽略所有数据
return;
}
if (!isRtp(data, len)) {
// 忽略非rtp数据
WarnP(this) << "Not rtp packet";
@ -104,33 +104,31 @@ void RtpSession::onRtpPacket(const char *data, size_t len) {
return;
}
}
// 未设置ssrc时尝试获取ssrc
if (!_ssrc && !getSSRC(data, len, _ssrc)) {
return;
}
// 未指定流id就使用ssrc为流id
if (_tuple.stream.empty()) {
_tuple.stream = printSSRC(_ssrc);
}
if (!_process) {
//未设置ssrc时尝试获取ssrc
if (!_ssrc && !RtpSelector::getSSRC(data, len, _ssrc)) {
return;
}
if (_stream_id.empty()) {
//未指定流id就使用ssrc为流id
_stream_id = printSSRC(_ssrc);
}
try {
_process = RtpSelector::Instance().getProcess(_stream_id, true);
} catch (RtpSelector::ProcessExisted &ex) {
if (!_is_udp) {
// tcp情况下立即断开连接
throw;
}
// udp情况下延时断开连接(等待超时自动关闭)防止频繁创建销毁RtpSession对象
WarnP(this) << ex.what();
_delay_close = true;
return;
}
_process = RtpProcess::createProcess(_tuple);
_process->setOnlyTrack((RtpProcess::OnlyTrack)_only_track);
_process->setDelegate(static_pointer_cast<RtpSession>(shared_from_this()));
weak_ptr<RtpSession> weak_self = static_pointer_cast<RtpSession>(shared_from_this());
_process->setOnDetach([weak_self](const SockException &ex) {
if (auto strong_self = weak_self.lock()) {
strong_self->_process = nullptr;
strong_self->shutdown(ex);
}
});
}
try {
uint32_t rtp_ssrc = 0;
RtpSelector::getSSRC(data, len, rtp_ssrc);
getSSRC(data, len, rtp_ssrc);
if (rtp_ssrc != _ssrc) {
WarnP(this) << "ssrc mismatched, rtp dropped: " << rtp_ssrc << " != " << _ssrc;
return;
@ -143,26 +141,10 @@ void RtpSession::onRtpPacket(const char *data, size_t len) {
} else {
throw;
}
} catch (std::exception &ex) {
if (!_is_udp) {
// tcp情况下立即断开连接
throw;
}
// udp情况下延时断开连接(等待超时自动关闭)防止频繁创建销毁RtpSession对象
WarnP(this) << ex.what();
_delay_close = true;
return;
}
_ticker.resetTime();
}
bool RtpSession::close(MediaSource &sender) {
//此回调在其他线程触发
string err = StrPrinter << "close media: " << sender.getUrl();
safeShutdown(SockException(Err_shutdown, err));
return true;
}
static const char *findSSRC(const char *data, ssize_t len, uint32_t ssrc) {
// rtp前面必须预留两个字节的长度字段
for (ssize_t i = 2; i <= len - 4; ++i) {
@ -268,7 +250,7 @@ const char *RtpSession::searchByPsHeaderFlag(const char *data, size_t len) {
// TODO or Not ? 更新设置ssrc
uint32_t rtp_ssrc = 0;
RtpSelector::getSSRC(rtp_ptr + 2, len, rtp_ssrc);
getSSRC(rtp_ptr + 2, len, rtp_ssrc);
_ssrc = rtp_ssrc;
InfoL << "设置_ssrc为" << _ssrc;
// RtpServer::updateSSRC(uint32_t ssrc)

View File

@ -20,8 +20,10 @@
namespace mediakit{
class RtpSession : public toolkit::Session, public RtpSplitter, public MediaSourceEvent {
class RtpSession : public toolkit::Session, public RtpSplitter {
public:
static const std::string kVhost;
static const std::string kApp;
static const std::string kStreamID;
static const std::string kSSRC;
static const std::string kOnlyTrack;
@ -34,10 +36,9 @@ public:
void onManager() override;
void setParams(toolkit::mINI &ini);
void attachServer(const toolkit::Server &server) override;
void setRtpProcess(RtpProcess::Ptr process);
protected:
// 通知其停止推流
bool close(MediaSource &sender) override;
// 收到rtp回调
void onRtpPacket(const char *data, size_t len) override;
// RtpSplitter override
@ -48,14 +49,14 @@ protected:
const char *searchByPsHeaderFlag(const char *data, size_t len);
private:
bool _delay_close = false;
bool _is_udp = false;
bool _search_rtp = false;
bool _search_rtp_finished = false;
bool _emit_detach = false;
int _only_track = 0;
uint32_t _ssrc = 0;
toolkit::Ticker _ticker;
std::string _stream_id;
MediaTuple _tuple;
struct sockaddr_storage _addr;
RtpProcess::Ptr _process;
};

View File

@ -144,7 +144,16 @@ RtpMultiCaster::RtpMultiCaster(SocketHelper &helper, const string &local_ip, con
});
});
_rtp_reader->setDetachCB([this]() {
string strKey = StrPrinter << local_ip << " " << vhost << " " << app << " " << stream << endl;
_rtp_reader->setDetachCB([this, strKey]() {
{
lock_guard<recursive_mutex> lck(g_mtx);
auto it = g_multi_caster_map.find(strKey);
if (it != g_multi_caster_map.end()) {
g_multi_caster_map.erase(it);
}
}
unordered_map<void *, onDetach> _detach_map_copy;
{
lock_guard<recursive_mutex> lck(_mtx);

View File

@ -470,6 +470,15 @@ string printSSRC(uint32_t ui32Ssrc) {
return tmp;
}
bool getSSRC(const char *data, size_t data_len, uint32_t &ssrc) {
if (data_len < 12) {
return false;
}
uint32_t *ssrc_ptr = (uint32_t *)(data + 8);
ssrc = ntohl(*ssrc_ptr);
return true;
}
bool isRtp(const char *buf, size_t size) {
if (size < 2) {
return false;

View File

@ -317,6 +317,7 @@ toolkit::Buffer::Ptr makeRtpOverTcpPrefix(uint16_t size, uint8_t interleaved);
void makeSockPair(std::pair<toolkit::Socket::Ptr, toolkit::Socket::Ptr> &pair, const std::string &local_ip, bool re_use_port = false, bool is_udp = true);
// 十六进制方式打印ssrc
std::string printSSRC(uint32_t ui32Ssrc);
bool getSSRC(const char *data, size_t data_len, uint32_t &ssrc);
bool isRtp(const char *buf, size_t size);
bool isRtcp(const char *buf, size_t size);

View File

@ -44,7 +44,13 @@ public:
*/
RtspMediaSource(const MediaTuple& tuple, int ring_size = RTP_GOP_SIZE): MediaSource(RTSP_SCHEMA, tuple), _ring_size(ring_size) {}
~RtspMediaSource() override { flush(); }
~RtspMediaSource() override {
try {
flush();
} catch (std::exception &ex) {
WarnL << ex.what();
}
}
/**
*

View File

@ -29,7 +29,13 @@ public:
getRtpRing()->setDelegate(_media_src);
}
~RtspMediaSourceMuxer() override { RtspMuxer::flush(); }
~RtspMediaSourceMuxer() override {
try {
RtspMuxer::flush();
} catch (std::exception &ex) {
WarnL << ex.what();
}
}
void setListener(const std::weak_ptr<MediaSourceEvent> &listener){
setDelegate(listener);

View File

@ -88,6 +88,7 @@ void RtspPlayer::play(const string &strUrl) {
_rtp_type = (Rtsp::eRtpType)(int)(*this)[Client::kRtpType];
_beat_type = (*this)[Client::kRtspBeatType].as<int>();
_beat_interval_ms = (*this)[Client::kBeatIntervalMS].as<int>();
_speed = (*this)[Client::kRtspSpeed].as<float>();
DebugL << url._url << " " << (url._user.size() ? url._user : "null") << " " << (url._passwd.size() ? url._passwd : "null") << " " << _rtp_type;
weak_ptr<RtspPlayer> weakSelf = static_pointer_cast<RtspPlayer>(shared_from_this());
@ -256,17 +257,19 @@ void RtspPlayer::sendSetup(unsigned int track_idx) {
switch (_rtp_type) {
case Rtsp::RTP_TCP: {
sendRtspRequest(
"SETUP", control_url, { "Transport", StrPrinter << "RTP/AVP/TCP;unicast;interleaved=" << track->_type * 2 << "-" << track->_type * 2 + 1 });
"SETUP", control_url,
{ "Transport", StrPrinter << "RTP/AVP/TCP;unicast;interleaved=" << track->_type * 2 << "-" << track->_type * 2 + 1 << ";mode=play" });
} break;
case Rtsp::RTP_MULTICAST: {
sendRtspRequest("SETUP", control_url, { "Transport", "RTP/AVP;multicast" });
sendRtspRequest("SETUP", control_url, { "Transport", "RTP/AVP;multicast;mode=play" });
} break;
case Rtsp::RTP_UDP: {
createUdpSockIfNecessary(track_idx);
sendRtspRequest(
"SETUP", control_url,
{ "Transport",
StrPrinter << "RTP/AVP;unicast;client_port=" << _rtp_sock[track_idx]->get_local_port() << "-" << _rtcp_sock[track_idx]->get_local_port() });
StrPrinter << "RTP/AVP;unicast;client_port=" << _rtp_sock[track_idx]->get_local_port() << "-" << _rtcp_sock[track_idx]->get_local_port()
<< ";mode=play" });
} break;
default: break;
}
@ -387,7 +390,12 @@ void RtspPlayer::handleResSETUP(const Parser &parser, unsigned int track_idx) {
}
// 所有setup命令发送完毕
// 发送play命令
sendPause(type_play, 0);
if (_speed==0.0f) {
sendPause(type_play, 0);
} else {
sendPause(type_speed, 0);
}
}
void RtspPlayer::sendDescribe() {
@ -436,6 +444,9 @@ void RtspPlayer::sendPause(int type, uint32_t seekMS) {
case type_seek:
sendRtspRequest("PLAY", _control_url, { "Range", StrPrinter << "npt=" << setiosflags(ios::fixed) << setprecision(2) << seekMS / 1000.0 << "-" });
break;
case type_speed:
speed(_speed);
break;
default:
WarnL << "unknown type : " << type;
_on_response = nullptr;

View File

@ -120,6 +120,8 @@ private:
uint32_t _beat_interval_ms = 0;
std::string _play_url;
// rtsp开始倍速
float _speed= 0.0f;
std::vector<SdpTrack::Ptr> _sdp_track;
std::function<void(const Parser&)> _on_response;
//RTP端口,trackid idx 为数组下标

View File

@ -13,6 +13,7 @@
#include "Util/util.h"
#include "Util/logger.h"
#include "Common/macros.h"
#include "Rtsp/RtpReceiver.h"
using namespace std;
using namespace toolkit;
@ -59,7 +60,11 @@ const char *RtspSplitter::onSearchPacketTail_l(const char *data, size_t len) {
ssize_t RtspSplitter::onRecvHeader(const char *data, size_t len) {
if (_isRtpPacket) {
onRtpPacket(data, len);
try {
onRtpPacket(data, len);
} catch (RtpTrack::BadRtpException &ex) {
WarnL << ex.what();
}
return 0;
}
if (len == 4 && !memcmp(data, "\r\n\r\n", 4)) {

View File

@ -40,7 +40,13 @@ public:
TSMediaSource(const MediaTuple& tuple, int ring_size = TS_GOP_SIZE): MediaSource(TS_SCHEMA, tuple), _ring_size(ring_size) {}
~TSMediaSource() override { flush(); }
~TSMediaSource() override {
try {
flush();
} catch (std::exception &ex) {
WarnL << ex.what();
}
}
/**
*

View File

@ -20,7 +20,7 @@
#include "Thread/WorkThreadPool.h"
#include "Pusher/MediaPusher.h"
#include "Player/PlayerProxy.h"
#include "Record/MP4Reader.h"
using namespace std;
using namespace toolkit;
using namespace mediakit;
@ -52,7 +52,7 @@ public:
Option::ArgRequired,/*该选项后面必须跟值*/
nullptr,/*该选项默认值*/
true,/*该选项是否必须赋值如果没有默认值且为ArgRequired时用户必须提供该参数否则将抛异常*/
"拉流url,支持rtsp/rtmp/hls",/*该选项说明文字*/
"拉流url,支持rtsp/rtmp/hls/mp4文件",/*该选项说明文字*/
nullptr);
(*_parser) << Option('o',/*该选项简称,如果是\x00则说明无简称*/
@ -92,18 +92,16 @@ public:
Option::ArgRequired,/*该选项后面必须跟值*/
to_string((int) (Rtsp::RTP_TCP)).data(),/*该选项默认值*/
true,/*该选项是否必须赋值如果没有默认值且为ArgRequired时用户必须提供该参数否则将抛异常*/
"rtsp拉流和推流方式,支持tcp/udp:0/1",/*该选项说明文字*/
nullptr);
"rtsp拉流和推流方式,支持tcp/udp:0/1", /*该选项说明文字*/
nullptr);
}
~CMD_main() override {}
const char *description() const override {
return "主程序命令参数";
}
const char *description() const override { return "主程序命令参数"; }
};
//此程序用于推流性能测试
// 此程序用于推流性能测试
int main(int argc, char *argv[]) {
CMD_main cmd_main;
try {
@ -116,7 +114,7 @@ int main(int argc, char *argv[]) {
}
int threads = cmd_main["threads"];
LogLevel logLevel = (LogLevel) cmd_main["level"].as<int>();
LogLevel logLevel = (LogLevel)cmd_main["level"].as<int>();
logLevel = MIN(MAX(logLevel, LTrace), LError);
auto in_url = cmd_main["in"];
auto out_url = cmd_main["out"];
@ -129,6 +127,8 @@ int main(int argc, char *argv[]) {
cout << "推流协议只支持rtsp或rtmp" << endl;
return -1;
}
const std::string app = "app";
const std::string stream = "test";
//设置日志
Logger::Instance().add(std::make_shared<ConsoleChannel>("ConsoleChannel", logLevel));
@ -145,22 +145,39 @@ int main(int argc, char *argv[]) {
ProtocolOption option;
option.enable_hls = false;
option.enable_mp4 = false;
MediaSource::Ptr src = nullptr;
PlayerProxy::Ptr proxy = nullptr;;
//添加拉流代理
auto proxy = std::make_shared<PlayerProxy>(DEFAULT_VHOST, "app", "test", option);
//rtsp拉流代理方式
(*proxy)[Client::kRtpType] = rtp_type;
//开始拉流代理
proxy->play(in_url);
if (end_with(in_url, ".mp4")) {
// create MediaSource from mp4file
auto reader = std::make_shared<MP4Reader>(DEFAULT_VHOST, app, stream, in_url);
//mp4 repeat
reader->startReadMP4(0, true, true);
src = MediaSource::find(schema, DEFAULT_VHOST, app, stream, false);
if (!src) {
// mp4文件不存在
WarnL << "no such file or directory: " << in_url;
return -1;
}
} else {
//添加拉流代理
proxy = std::make_shared<PlayerProxy>(DEFAULT_VHOST, app, stream, option);
//rtsp拉流代理方式
(*proxy)[Client::kRtpType] = rtp_type;
//开始拉流代理
proxy->play(in_url);
auto get_src = [schema]() {
return MediaSource::find(schema, DEFAULT_VHOST, "app", "test", false);
}
auto get_src = [schema,app,stream]() {
return MediaSource::find(schema, DEFAULT_VHOST, app, stream, false);
};
//推流器map
recursive_mutex mtx;
unordered_map<void *, MediaPusher::Ptr> pusher_map;
auto add_pusher = [&](const MediaSource::Ptr &src, const string &rand_str, size_t index) {
auto pusher = std::make_shared<MediaPusher>(src);
auto tag = pusher.get();

View File

@ -17,7 +17,7 @@
#include "Rtsp/RtspSession.h"
#include "Rtmp/RtmpSession.h"
#include "Http/HttpSession.h"
#include "Rtp/RtpSelector.h"
#include "Rtp/RtpProcess.h"
using namespace std;
using namespace toolkit;
@ -42,7 +42,7 @@ static bool loadFile(const char *path, const EventPoller::Ptr &poller) {
memset(&addr, 0, sizeof(addr));
addr.ss_family = AF_INET;
auto sock = Socket::createSocket(poller);
auto process = RtpSelector::Instance().getProcess("test", true);
auto process = RtpProcess::createProcess(MediaTuple { DEFAULT_VHOST, kRtpAppName, "test", "" });
uint64_t stamp_last = 0;
auto total_size = std::make_shared<size_t>(0);
@ -89,7 +89,6 @@ static bool loadFile(const char *path, const EventPoller::Ptr &poller) {
auto ret = do_read();
if (!ret) {
WarnL << *total_size / 1024 << "KB";
RtpSelector::Instance().delProcess("test", process.get());
}
return ret;
});

View File

@ -383,7 +383,7 @@ namespace RTC
});
// Set ciphers.
ret = SSL_CTX_set_cipher_list(
sslCtx, "DEFAULT:!NULL:!aNULL:!SHA256:!SHA384:!aECDH:!AESGCM+AES256:!aPSK");
sslCtx, "DEFAULT:!NULL:!aNULL:!SHA256:!SHA384:!aECDH:!AESGCM+AES256:!aPSK:!RC4");
if (ret == 0)
{

View File

@ -20,6 +20,28 @@
namespace mediakit {
// RTC配置项目
namespace Rtc {
//~ nack接收端(rtp发送端)
// Nack缓存包最早时间间隔
extern const std::string kMaxNackMS;
// Nack包检查间隔(包数量)
extern const std::string kRtpCacheCheckInterval;
//~ nack发送端(rtp接收端)
// 最大保留的rtp丢包状态个数
extern const std::string kNackMaxSize;
// rtp丢包状态最长保留时间
extern const std::string kNackMaxMS;
// nack最多请求重传次数
extern const std::string kNackMaxCount;
// nack重传频率rtt的倍数
extern const std::string kNackIntervalRatio;
// nack包中rtp个数减小此值可以让nack包响应更灵敏
extern const std::string kNackRtpSize;
} // namespace Rtc
class NackList {
public:
void pushBack(RtpPacket::Ptr rtp);

View File

@ -9,7 +9,10 @@
*/
#include "WebRtcPlayer.h"
#include "Common/config.h"
#include "Extension/Factory.h"
#include "Util/base64.h"
using namespace std;
@ -32,6 +35,9 @@ WebRtcPlayer::WebRtcPlayer(const EventPoller::Ptr &poller,
_media_info = info;
_play_src = src;
CHECK(src);
GET_CONFIG(bool, direct_proxy, Rtsp::kDirectProxy);
_send_config_frames_once = direct_proxy;
}
void WebRtcPlayer::onStartWebRTC() {
@ -56,6 +62,13 @@ void WebRtcPlayer::onStartWebRTC() {
if (!strong_self) {
return;
}
if (strong_self->_send_config_frames_once && !pkt->empty()) {
const auto &first_rtp = pkt->front();
strong_self->sendConfigFrames(first_rtp->getSeq(), first_rtp->sample_rate, first_rtp->getStamp(), first_rtp->ntp_stamp);
strong_self->_send_config_frames_once = false;
}
size_t i = 0;
pkt->for_each([&](const RtpPacket::Ptr &rtp) {
//TraceL<<"send track type:"<<rtp->type<<" ts:"<<rtp->getStamp()<<" ntp:"<<rtp->ntp_stamp<<" size:"<<rtp->getPayloadSize()<<" i:"<<i;
@ -111,4 +124,41 @@ void WebRtcPlayer::onRtcConfigure(RtcConfigure &configure) const {
configure.setPlayRtspInfo(playSrc->getSdp());
}
void WebRtcPlayer::sendConfigFrames(uint32_t before_seq, uint32_t sample_rate, uint32_t timestamp, uint64_t ntp_timestamp) {
auto play_src = _play_src.lock();
if (!play_src) {
return;
}
SdpParser parser(play_src->getSdp());
auto video_sdp = parser.getTrack(TrackVideo);
if (!video_sdp) {
return;
}
auto video_track = dynamic_pointer_cast<VideoTrack>(Factory::getTrackBySdp(video_sdp));
if (!video_track) {
return;
}
auto frames = video_track->getConfigFrames();
if (frames.empty()) {
return;
}
auto encoder = mediakit::Factory::getRtpEncoderByCodecId(video_track->getCodecId(), 0);
if (!encoder) {
return;
}
GET_CONFIG(uint32_t, video_mtu, Rtp::kVideoMtuSize);
encoder->setRtpInfo(0, video_mtu, sample_rate, 0, 0, 0);
auto seq = before_seq - frames.size();
for (const auto &frame : frames) {
auto rtp = encoder->getRtpInfo().makeRtp(TrackVideo, frame->data() + frame->prefixSize(), frame->size() - frame->prefixSize(), false, 0);
auto header = rtp->getHeader();
header->seq = htons(seq++);
header->stamp = htonl(timestamp);
rtp->ntp_stamp = ntp_timestamp;
onSendRtp(rtp, false);
}
}
}// namespace mediakit

View File

@ -31,11 +31,17 @@ protected:
private:
WebRtcPlayer(const EventPoller::Ptr &poller, const RtspMediaSource::Ptr &src, const MediaInfo &info);
void sendConfigFrames(uint32_t before_seq, uint32_t sample_rate, uint32_t timestamp, uint64_t ntp_timestamp);
private:
//媒体相关元数据
MediaInfo _media_info;
//播放的rtsp源
std::weak_ptr<RtspMediaSource> _play_src;
// rtp 直接转发情况下通常会缺少 sps/pps, 在转发 rtp 前, 先发送一次相关帧信息, 部分情况下是可以播放的
bool _send_config_frames_once { false };
//播放rtsp源的reader对象
RtspMediaSource::RingType::RingReader::Ptr _reader;
};

View File

@ -13,6 +13,7 @@
#include "Util/base64.h"
#include "Network/sockutil.h"
#include "Common/config.h"
#include "Nack.h"
#include "RtpExt.h"
#include "Rtcp/Rtcp.h"
#include "Rtcp/RtcpFCI.h"
@ -57,9 +58,6 @@ const string kMinBitrate = RTC_FIELD "min_bitrate";
// 数据通道设置
const string kDataChannelEcho = RTC_FIELD "datachannel_echo";
// rtp丢包状态最长保留时间
const string kNackMaxMS = RTC_FIELD "nackMaxMS";
static onceToken token([]() {
mINI::Instance()[kTimeOutSec] = 15;
mINI::Instance()[kExternIP] = "";
@ -72,8 +70,6 @@ static onceToken token([]() {
mINI::Instance()[kMinBitrate] = 0;
mINI::Instance()[kDataChannelEcho] = true;
mINI::Instance()[kNackMaxMS] = 3 * 1000;
});
} // namespace RTC
@ -806,7 +802,8 @@ public:
setOnSorted(std::move(cb));
//设置jitter buffer参数
GET_CONFIG(uint32_t, nack_maxms, Rtc::kNackMaxMS);
RtpTrackImp::setParams(1024, nack_maxms, 512);
GET_CONFIG(uint32_t, nack_max_rtp, Rtc::kNackMaxSize);
RtpTrackImp::setParams(nack_max_rtp, nack_maxms, nack_max_rtp / 2);
_nack_ctx.setOnNack([this](const FCI_NACK &nack) { onNack(nack); });
}

View File

@ -1,16 +0,0 @@
*.iml
.gradle
/local.properties
/.idea/caches
/.idea/libraries
/.idea/modules.xml
/.idea/workspace.xml
/.idea/navEditor.xml
/.idea/assetWizardSettings.xml
.DS_Store
/build
/captures
.externalNativeBuild
.cxx
local.properties
/.idea/

Binary file not shown.

View File

@ -1,2 +0,0 @@
/build
.cxx

View File

@ -1,54 +0,0 @@
plugins {
id 'com.android.application'
id 'org.jetbrains.kotlin.android'
id 'kotlin-android-extensions'
id 'kotlin-kapt'
}
apply plugin: 'kotlin-android'
android {
compileSdk 32
defaultConfig {
applicationId "com.zlmediakit.webrtc"
minSdk 21
targetSdk 32
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
kotlinOptions {
jvmTarget = '1.8'
}
}
dependencies {
implementation 'androidx.core:core-ktx:1.7.0'
implementation 'androidx.appcompat:appcompat:1.5.1'
implementation 'com.google.android.material:material:1.6.1'
implementation 'androidx.constraintlayout:constraintlayout:2.1.4'
testImplementation 'junit:junit:4.13.2'
androidTestImplementation 'androidx.test.ext:junit:1.1.3'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0'
implementation 'com.google.code.gson:gson:2.8.9'
implementation("com.squareup.okhttp3:okhttp:4.10.0")
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
implementation 'org.webrtc:google-webrtc:1.0.32006'
}

View File

@ -1,21 +0,0 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

View File

@ -1,24 +0,0 @@
package com.zlmediakit.webrtc
import androidx.test.platform.app.InstrumentationRegistry
import androidx.test.ext.junit.runners.AndroidJUnit4
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.Assert.*
/**
* Instrumented test, which will execute on an Android device.
*
* See [testing documentation](http://d.android.com/tools/testing).
*/
@RunWith(AndroidJUnit4::class)
class ExampleInstrumentedTest {
@Test
fun useAppContext() {
// Context of the app under test.
val appContext = InstrumentationRegistry.getInstrumentation().targetContext
assertEquals("com.zlmediakit.webrtc", appContext.packageName)
}
}

View File

@ -1,46 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
package="com.zlmediakit.webrtc">
<uses-feature android:name="android.hardware.camera"/>
<uses-feature android:name="android.hardware.camera.autofocus"/>
<uses-feature
android:glEsVersion="0x00020000"
android:required="true"/>
<uses-permission android:name="android.permission.CAMERA"/>
<uses-permission android:name="android.permission.CHANGE_NETWORK_STATE"/>
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS"/>
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>
<uses-permission android:name="android.permission.CAPTURE_VIDEO_OUTPUT"/>
<uses-permission android:name="android.permission.READ_PHONE_STATE"/>
<application
android:allowBackup="true"
android:dataExtractionRules="@xml/data_extraction_rules"
android:fullBackupContent="@xml/backup_rules"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/Theme.AndroidWebRTC"
android:usesCleartextTraffic="true"
tools:targetApi="31">
<activity
android:name=".MainActivity"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

View File

@ -1,79 +0,0 @@
package com.zlmediakit.webrtc
import android.annotation.SuppressLint
import android.graphics.drawable.BitmapDrawable
import android.graphics.drawable.Drawable
import android.os.Bundle
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import kotlinx.android.synthetic.main.activity_main.*
import kotlinx.android.synthetic.main.activity_main.view.*
class MainActivity : AppCompatActivity() {
private var isSpeaker = true
@SuppressLint("SetTextI18n")
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
lifecycle.addObserver(web_rtc_sv)
//http://124.223.98.45/index/api/webrtc?app=live&stream=test&type=play
url.setText("http://124.223.98.45/index/api/webrtc?app=live&stream=test&type=play")
//http://192.168.1.17/index/api/webrtc?app=live&stream=test&type=play
btn_play.setOnClickListener {
web_rtc_sv?.setVideoPath(url.text.toString())
web_rtc_sv.start()
}
web_rtc_sv.setOnErrorListener { errorCode, errorMsg ->
runOnUiThread {
Toast.makeText(this, "errorCode:$errorCode,errorMsg:$errorMsg", Toast.LENGTH_SHORT)
.show()
}
}
btn_pause.setOnClickListener {
web_rtc_sv?.pause()
}
btn_resume.setOnClickListener {
web_rtc_sv?.resume()
}
btn_screenshot.setOnClickListener {
web_rtc_sv?.screenshot {
runOnUiThread {
iv_screen.setImageDrawable(BitmapDrawable(it))
}
}
}
btn_mute.setOnClickListener {
web_rtc_sv.mute(true)
}
selectAudio()
btn_speaker.setOnClickListener {
selectAudio()
}
}
fun selectAudio(){
if (isSpeaker){
btn_speaker.setText("扬声器")
web_rtc_sv.setSpeakerphoneOn(isSpeaker)
}else{
btn_speaker.setText("话筒")
web_rtc_sv.setSpeakerphoneOn(isSpeaker)
}
isSpeaker=!isSpeaker
}
}

View File

@ -1,439 +0,0 @@
package com.zlmediakit.webrtc
import android.content.Context
import android.graphics.Bitmap
import android.media.AudioManager
import android.util.AttributeSet
import android.util.Log
import android.view.LayoutInflater
import android.widget.RelativeLayout
import androidx.lifecycle.DefaultLifecycleObserver
import androidx.lifecycle.LifecycleOwner
import com.google.gson.Gson
import okhttp3.*
import okhttp3.MediaType.Companion.toMediaType
import okhttp3.MediaType.Companion.toMediaTypeOrNull
import org.webrtc.*
import org.webrtc.RendererCommon.ScalingType
import org.webrtc.audio.AudioDeviceModule
import org.webrtc.audio.JavaAudioDeviceModule
import java.io.IOException
import java.util.*
public class WebRTCSurfaceView(context: Context, attrs: AttributeSet?) :
RelativeLayout(context, attrs), DefaultLifecycleObserver, RendererCommon.RendererEvents {
private data class sdp(var sdp: String, var username: String, var password: String)
private data class SdpResponse(var code: Int, var id: String, var sdp: String, var type: String)
private enum class ErrorCode(val errorCode: Int) {
SUCCESS(0x00),
GET_REMOTE_SDP_ERROR(0x01);
}
companion object {
private val TAG = "WebRTCSurfaceView"
}
private var mContext: Context = context
private val eglBase: EglBase = EglBase.create()
private var mEGLBaseContext: EglBase.Context = eglBase.eglBaseContext
private lateinit var videoUrl: String;
private var mPeerConnectionFactory: PeerConnectionFactory? = null
private var mLocalMediaStream: MediaStream? = null
private var mLocalAudioTrack: AudioTrack? = null
private var mAudioSource: AudioSource? = null
private var mLocalSessionDescription: SessionDescription? = null
private var mRemoteSessionDescription: SessionDescription? = null
private var mLocalPeer: Peer? = null
private var mSurfaceViewRenderer: SurfaceViewRenderer
private lateinit var OnErrorListener: (errorCode: Int, errorMsg: String) -> Unit?
fun setOnErrorListener(listener: (errorCode: Int, errorMsg: String) -> Unit) {
this.OnErrorListener = listener
}
private lateinit var OnPreparedListener: () -> Unit?
fun setOnPreparedListener(listener: () -> Unit) {
this.OnPreparedListener = listener
}
private val audioManager: AudioManager
init {
val view = LayoutInflater.from(mContext).inflate(R.layout.layout_videoview, this)
mPeerConnectionFactory = createConnectionFactory()
mSurfaceViewRenderer = view.findViewById(R.id.surface_view_renderer)
mSurfaceViewRenderer.init(mEGLBaseContext, this)
mSurfaceViewRenderer.setScalingType(ScalingType.SCALE_ASPECT_FILL)
mSurfaceViewRenderer.setEnableHardwareScaler(true)
//创建媒体流
mLocalMediaStream = mPeerConnectionFactory?.createLocalMediaStream("ARDAMS")
//采集音频
mAudioSource = mPeerConnectionFactory?.createAudioSource(createAudioConstraints())
mLocalAudioTrack = mPeerConnectionFactory?.createAudioTrack("ARDAMSa0", mAudioSource)
//添加Tracks
mLocalMediaStream?.addTrack(mLocalAudioTrack)
audioManager = context.getSystemService(Context.AUDIO_SERVICE) as AudioManager
audioManager.isSpeakerphoneOn = false
}
private fun set(width: Int, height: Int) {
layoutParams.width = width
layoutParams.height = height
}
private fun createConnectionFactory(): PeerConnectionFactory? {
val options = PeerConnectionFactory.InitializationOptions.builder(mContext)
.setEnableInternalTracer(false)
.createInitializationOptions()
PeerConnectionFactory.initialize(options)
val videoEncoderFactory = DefaultVideoEncoderFactory(
mEGLBaseContext,
true,
true
)
val videoDecoderFactory = DefaultVideoDecoderFactory(mEGLBaseContext)
val audioDevice = createJavaAudioDevice()
val peerConnectionFactory = PeerConnectionFactory.builder()
.setAudioDeviceModule(audioDevice)
.setVideoEncoderFactory(videoEncoderFactory)
.setVideoDecoderFactory(videoDecoderFactory)
.createPeerConnectionFactory()
audioDevice.release()
return peerConnectionFactory
}
private fun createAudioConstraints(): MediaConstraints {
val audioConstraints = MediaConstraints()
audioConstraints.mandatory.add(
MediaConstraints.KeyValuePair(
"googEchoCancellation",
"true"
)
)
audioConstraints.mandatory.add(
MediaConstraints.KeyValuePair(
"googAutoGainControl",
"false"
)
)
audioConstraints.mandatory.add(
MediaConstraints.KeyValuePair(
"googHighpassFilter",
"true"
)
)
audioConstraints.mandatory.add(
MediaConstraints.KeyValuePair(
"googNoiseSuppression",
"true"
)
)
return audioConstraints
}
private fun offerOrAnswerConstraint(): MediaConstraints {
val mediaConstraints = MediaConstraints()
val keyValuePairs = java.util.ArrayList<MediaConstraints.KeyValuePair>()
keyValuePairs.add(MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"))
keyValuePairs.add(MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"))
mediaConstraints.mandatory.addAll(keyValuePairs)
return mediaConstraints
}
private fun createJavaAudioDevice(): AudioDeviceModule {
val audioTrackErrorCallback: JavaAudioDeviceModule.AudioTrackErrorCallback = object :
JavaAudioDeviceModule.AudioTrackErrorCallback {
override fun onWebRtcAudioTrackInitError(errorMessage: String) {
Log.i(TAG, "onWebRtcAudioTrackInitError ============> $errorMessage")
}
override fun onWebRtcAudioTrackStartError(
errorCode: JavaAudioDeviceModule.AudioTrackStartErrorCode, errorMessage: String
) {
Log.i(TAG, "onWebRtcAudioTrackStartError ============> $errorCode:$errorMessage")
}
override fun onWebRtcAudioTrackError(errorMessage: String) {
Log.i(TAG, "onWebRtcAudioTrackError ============> $errorMessage")
}
}
// Set audio track state callbacks.
val audioTrackStateCallback: JavaAudioDeviceModule.AudioTrackStateCallback = object :
JavaAudioDeviceModule.AudioTrackStateCallback {
override fun onWebRtcAudioTrackStart() {
Log.i(TAG, "onWebRtcAudioTrackStart ============>")
}
override fun onWebRtcAudioTrackStop() {
Log.i(TAG, "onWebRtcAudioTrackStop ============>")
}
}
return JavaAudioDeviceModule.builder(mContext)
.setUseHardwareAcousticEchoCanceler(true)
.setUseHardwareNoiseSuppressor(true)
.setAudioTrackErrorCallback(audioTrackErrorCallback)
.setAudioTrackStateCallback(audioTrackStateCallback)
.setUseStereoOutput(true) //立体声
.createAudioDeviceModule()
}
fun setVideoPath(url: String) {
videoUrl = url
}
fun start() {
mLocalPeer = Peer {
val okHttpClient = OkHttpClient.Builder().build()
val body = RequestBody.create("text/plain; charset=utf-8".toMediaType(), it!!)
val request: Request = Request.Builder()
.url(videoUrl)
.post(body)
.build()
val call: Call = okHttpClient.newCall(request)
call.enqueue(object : Callback {
override fun onFailure(call: Call, e: IOException) {
Log.i(TAG, "onFailure")
OnErrorListener?.invoke(
ErrorCode.GET_REMOTE_SDP_ERROR.errorCode,
e.message.toString()
)
}
override fun onResponse(call: Call, response: Response) {
val body = response.body?.string()
val sdpResponse = Gson().fromJson(body, SdpResponse::class.java)
try {
mRemoteSessionDescription = SessionDescription(
SessionDescription.Type.fromCanonicalForm("answer"),
sdpResponse.sdp
)
Log.i(
TAG,
"RemoteSdpObserver onCreateSuccess:[SessionDescription[type=${mRemoteSessionDescription?.type?.name},description=${mRemoteSessionDescription?.description}]]"
)
mLocalPeer?.setRemoteDescription(mRemoteSessionDescription!!)
} catch (e: Exception) {
Log.i(TAG, e.toString())
OnErrorListener.invoke(
ErrorCode.GET_REMOTE_SDP_ERROR.errorCode,
e.localizedMessage
)
}
}
})
}
}
fun pause() {
mSurfaceViewRenderer.pauseVideo()
//mSurfaceViewRenderer.disableFpsReduction()
}
fun resume() {
mSurfaceViewRenderer.setFpsReduction(15f)
}
fun screenshot(listener: (bitmap: Bitmap) -> Unit) {
mSurfaceViewRenderer.addFrameListener({
listener.invoke(it)
}, 1f)
}
fun setSpeakerphoneOn(on: Boolean) {
audioManager.isSpeakerphoneOn = on
}
fun mute(on:Boolean) {
audioManager.isMicrophoneMute=on
}
override fun onDestroy(owner: LifecycleOwner) {
super.onDestroy(owner)
mSurfaceViewRenderer.release()
mLocalPeer?.mPeerConnection?.dispose()
mAudioSource?.dispose()
mPeerConnectionFactory?.dispose()
}
override fun onMeasure(widthMeasureSpec: Int, heightMeasureSpec: Int) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec)
}
inner class Peer(var sdp: (String?) -> Unit = {}) : PeerConnection.Observer, SdpObserver {
var mPeerConnection: PeerConnection? = null
init {
mPeerConnection = createPeerConnection()
mPeerConnection?.createOffer(this, offerOrAnswerConstraint())
}
//初始化 RTCPeerConnection 连接管道
private fun createPeerConnection(): PeerConnection? {
if (mPeerConnectionFactory == null) {
mPeerConnectionFactory = createConnectionFactory()
}
// 管道连接抽象类实现方法
val ICEServers = LinkedList<PeerConnection.IceServer>()
val rtcConfig = PeerConnection.RTCConfiguration(ICEServers)
//修改模式 PlanB无法使用仅接收音视频的配置
//rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.PLAN_B
return mPeerConnectionFactory?.createPeerConnection(rtcConfig, this)
}
fun setRemoteDescription(sdp: SessionDescription) {
mPeerConnection?.setRemoteDescription(this, sdp)
}
override fun onCreateSuccess(sessionDescription: SessionDescription?) {
mPeerConnection?.setLocalDescription(this, sessionDescription)
mPeerConnection?.addStream(mLocalMediaStream)
sdp.invoke(sessionDescription?.description)
}
override fun onSetSuccess() {
}
override fun onCreateFailure(p0: String?) {
}
override fun onSetFailure(p0: String?) {
}
override fun onSignalingChange(signalingState: PeerConnection.SignalingState?) {
Log.i(TAG, "onSignalingChange ============> " + signalingState.toString())
}
override fun onIceConnectionChange(iceConnectionState: PeerConnection.IceConnectionState?) {
Log.i(TAG, "onIceConnectionChange ============> " + iceConnectionState.toString())
}
override fun onIceConnectionReceivingChange(p0: Boolean) {
Log.i(TAG, "onIceConnectionReceivingChange ============> $p0")
}
override fun onIceGatheringChange(iceGatheringState: PeerConnection.IceGatheringState?) {
Log.i(TAG, "onIceGatheringChange ============> ${iceGatheringState.toString()}")
}
override fun onIceCandidate(iceCandidate: IceCandidate?) {
Log.i(TAG, "onIceCandidate ============> ${iceCandidate.toString()}")
}
override fun onIceCandidatesRemoved(p0: Array<out IceCandidate>?) {
Log.i(TAG, "onIceCandidatesRemoved ============> ${p0.toString()}")
}
override fun onAddStream(mediaStream: MediaStream?) {
Log.i(TAG, "onAddStream ============> ${mediaStream?.toString()}")
if (mediaStream?.videoTracks?.isEmpty() != true) {
val remoteVideoTrack = mediaStream?.videoTracks?.get(0)
remoteVideoTrack?.setEnabled(true)
remoteVideoTrack?.addSink(mSurfaceViewRenderer)
}
if (mediaStream?.audioTracks?.isEmpty() != true) {
val remoteAudioTrack = mediaStream?.audioTracks?.get(0)
remoteAudioTrack?.setEnabled(true)
remoteAudioTrack?.setVolume(1.0)
}
}
override fun onRemoveStream(mediaStream: MediaStream?) {
Log.i(TAG, "onRemoveStream ============> ${mediaStream.toString()}")
}
override fun onDataChannel(dataChannel: DataChannel?) {
Log.i(TAG, "onDataChannel ============> ${dataChannel.toString()}")
}
override fun onRenegotiationNeeded() {
Log.i(TAG, "onRenegotiationNeeded ============>")
}
override fun onAddTrack(rtpReceiver: RtpReceiver?, p1: Array<out MediaStream>?) {
Log.i(TAG, "onAddTrack ============>" + rtpReceiver?.track())
Log.i(TAG, "onAddTrack ============>" + p1?.size)
}
}
override fun onFirstFrameRendered() {
Log.i(TAG, "onFirstFrameRendered ============>")
}
override fun onFrameResolutionChanged(frameWidth: Int, frameHeight: Int, rotation: Int) {
Log.i(TAG, "onFrameResolutionChanged ============> $frameWidth:$frameHeight:$rotation")
//set(frameWidth,frameHeight)
}
}

View File

@ -1,30 +0,0 @@
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
<aapt:attr name="android:fillColor">
<gradient
android:endX="85.84757"
android:endY="92.4963"
android:startX="42.9492"
android:startY="49.59793"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>

View File

@ -1,170 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillColor="#3DDC84"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
</vector>

View File

@ -1,93 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Demo player screen for MainActivity: a WebRTC video surface on top,
     a URL input below it, then two rows of control buttons, and an
     image view pinned to the bottom that receives screenshot output
     (presumably set from MainActivity — confirm against the activity code). -->
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<!-- Fixed-height (200dp) video rendering surface anchored to the top. -->
<com.zlmediakit.webrtc.WebRTCSurfaceView
android:id="@+id/web_rtc_sv"
android:layout_width="match_parent"
android:layout_height="200dp"
app:layout_constraintTop_toTopOf="parent" />
<!-- Stream URL input, directly under the video surface. Starts empty. -->
<androidx.appcompat.widget.AppCompatEditText
android:id="@+id/url"
android:layout_width="match_parent"
android:layout_height="wrap_content"
app:layout_constraintTop_toBottomOf="@+id/web_rtc_sv"
android:text=""/>
<!-- Row 1 of playback controls: play / pause / resume / speaker / mute.
     Button labels are hard-coded Chinese strings (not in strings.xml). -->
<LinearLayout
android:id="@+id/ll"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:layout_marginTop="30dp"
app:layout_constraintTop_toBottomOf="@+id/url">
<androidx.appcompat.widget.AppCompatButton
android:id="@+id/btn_play"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="播放" />
<androidx.appcompat.widget.AppCompatButton
android:id="@+id/btn_pause"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="暂停" />
<androidx.appcompat.widget.AppCompatButton
android:id="@+id/btn_resume"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="恢复" />
<androidx.appcompat.widget.AppCompatButton
android:id="@+id/btn_speaker"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="扬声器" />
<androidx.appcompat.widget.AppCompatButton
android:id="@+id/btn_mute"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="静音" />
</LinearLayout>
<!-- Row 2 of controls: screenshot and screen-record. -->
<LinearLayout
android:id="@+id/ll2"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:layout_marginTop="10dp"
app:layout_constraintTop_toBottomOf="@+id/ll">
<androidx.appcompat.widget.AppCompatButton
android:id="@+id/btn_screenshot"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="截图" />
<androidx.appcompat.widget.AppCompatButton
android:id="@+id/btn_screen_record"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="录制" />
</LinearLayout>
<!-- Bottom-anchored preview image; only the bottom constraint is set,
     so the horizontal-position lint warning is suppressed explicitly. -->
<androidx.appcompat.widget.AppCompatImageView
android:id="@+id/iv_screen"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
app:layout_constraintBottom_toBottomOf="parent"
tools:ignore="MissingConstraints" />
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@ -1,13 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Minimal container layout hosting a single org.webrtc.SurfaceViewRenderer
     (the GL video sink used by WebRTC). The renderer keeps its intrinsic
     size (wrap_content) inside a full-screen RelativeLayout.
     Fix: dropped the xmlns:tools and xmlns:app declarations — neither
     prefix is used anywhere in this file (Android Lint: UnusedNamespace).
     Removing unused namespace declarations does not change the inflated
     view hierarchy. -->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<org.webrtc.SurfaceViewRenderer
android:id="@+id/surface_view_renderer"
android:layout_width="wrap_content"
android:layout_height="wrap_content"/>
</RelativeLayout>

View File

@ -1,5 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Adaptive launcher icon (API 26+): two-layer icon composed of a
     background and foreground drawable; the launcher masks/animates the
     layers itself. -->
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

View File

@ -1,5 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Round-icon variant of the adaptive launcher icon; byte-identical layer
     references to the regular variant, kept as a separate resource because
     the manifest can point android:roundIcon at it. -->
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

Some files were not shown because too many files have changed in this diff Show More