Merge branch 'master' of https://gitee.com/xia-chu/ZLMediaKit into feature/transcode2

This commit is contained in:
cqm 2024-12-02 18:12:57 +08:00
commit 06ba39a832
82 changed files with 1704 additions and 567 deletions

@ -1 +1 @@
Subproject commit ac6ae2d76cb7463243ade44e6aa75a552e82e5c9
Subproject commit 9e319b70557f3ece0372a0d05f31a6560292556a

@ -1 +1 @@
Subproject commit cf83ebc62e65ae6f3b73bc5ebd06cb0b2da49fa5
Subproject commit 0658496d5fc7d238f41e10ea4d0a10113a8eed84

View File

@ -193,7 +193,7 @@ if(UNIX)
"-fPIC"
"-Wall;-Wextra"
"-Wno-unused-function;-Wno-unused-parameter;-Wno-unused-variable;-Wno-deprecated-declarations"
"-Wno-error=extra;-Wno-error=missing-field-initializers;-Wno-error=type-limits")
"-Wno-error=extra;-Wno-error=missing-field-initializers;-Wno-error=type-limits;-Wno-comment")
if("${CMAKE_BUILD_TYPE}" STREQUAL "Debug")
set(COMPILE_OPTIONS_DEFAULT ${COMPILE_OPTIONS_DEFAULT} "-g3")

View File

@ -37,6 +37,7 @@
- [谁在使用zlmediakit?](https://github.com/ZLMediaKit/ZLMediaKit/issues/511)
- 全面支持ipv6网络
- 支持多轨道模式(一个流中多个视频/音频)
- 全协议支持H264/H265/AAC/G711/OPUS部分支持VP8/VP9/AV1/JPEG/MP3/H266/ADPCM/SVAC/G722/G723/G729
## 项目定位

View File

@ -12,6 +12,7 @@
#define MK_RECORDER_API_H_
#include "mk_common.h"
#include "mk_util.h"
#ifdef __cplusplus
extern "C" {
@ -124,6 +125,18 @@ API_EXPORT int API_CALL mk_recorder_start(int type, const char *vhost, const cha
*/
API_EXPORT int API_CALL mk_recorder_stop(int type, const char *vhost, const char *app, const char *stream);
/**
* mp4列表
* @param vhost
* @param app app
* @param stream id
* @param file_path
* @param file_repeat
* @param ini
*/
API_EXPORT void API_CALL mk_load_mp4_file(const char *vhost, const char *app, const char *stream, const char *file_path, int file_repeat);
API_EXPORT void API_CALL mk_load_mp4_file2(const char *vhost, const char *app, const char *stream, const char *file_path, int file_repeat, mk_ini ini);
#ifdef __cplusplus
}
#endif

View File

@ -11,6 +11,7 @@
#include "mk_recorder.h"
#include "Rtmp/FlvMuxer.h"
#include "Record/Recorder.h"
#include "Record/MP4Reader.h"
using namespace std;
using namespace toolkit;
@ -83,3 +84,36 @@ API_EXPORT int API_CALL mk_recorder_stop(int type, const char *vhost, const char
assert(vhost && app && stream);
return stopRecord((Recorder::type)type,vhost,app,stream);
}
API_EXPORT void API_CALL mk_load_mp4_file(const char *vhost, const char *app, const char *stream, const char *file_path, int file_repeat) {
mINI ini;
mk_load_mp4_file2(vhost, app, stream, file_path, file_repeat, (mk_ini)&ini);
}
API_EXPORT void API_CALL mk_load_mp4_file2(const char *vhost, const char *app, const char *stream, const char *file_path, int file_repeat, mk_ini ini) {
#if ENABLE_MP4
assert(vhost && app && stream && file_path && ini);
ProtocolOption option(*((mINI *)ini));
// mp4支持多track [AUTO-TRANSLATED:b9688762]
// mp4 supports multiple tracks
option.max_track = 16;
// 默认解复用mp4不生成mp4 [AUTO-TRANSLATED:11f2dcee]
// By default, demultiplexing mp4 does not generate mp4
option.enable_mp4 = false;
// 但是如果参数明确指定开启mp4, 那么也允许之 [AUTO-TRANSLATED:b143a9e3]
// But if the parameter explicitly specifies to enable mp4, then it is also allowed
// 强制无人观看时自动关闭 [AUTO-TRANSLATED:f7c85948]
// Force automatic shutdown when no one is watching
option.auto_close = true;
MediaTuple tuple = { vhost, app, stream, "" };
auto reader = std::make_shared<MP4Reader>(tuple, file_path, option);
// sample_ms设置为0从配置文件加载file_repeat可以指定如果配置文件也指定循环解复用那么强制开启 [AUTO-TRANSLATED:23e826b4]
// sample_ms is set to 0, loaded from the configuration file; file_repeat can be specified, if the configuration file also specifies loop demultiplexing,
// then force it to be enabled
reader->startReadMP4(0, true, file_repeat);
#else
WarnL << "MP4-related features are disabled. Please enable the ENABLE_MP4 macro and recompile.";
#endif
}

View File

@ -196,7 +196,7 @@ API_EXPORT void API_CALL mk_get_statistic(on_mk_get_statistic_cb func, void *use
#ifdef ENABLE_MEM_DEBUG
auto bytes = getTotalMemUsage();
val["memory.memUsage"] = bytes;
val["memory.memUsageMB"] = (int)(bytes / 1024 / 1024);
val["memory.memUsageMB"] = (int)(bytes >> 20);
val["memory.memBlock"] = getTotalMemBlock();
static auto block_type_size = getBlockTypeSize();
{
@ -240,7 +240,7 @@ API_EXPORT void API_CALL mk_get_statistic(on_mk_get_statistic_cb func, void *use
#ifdef ENABLE_MEM_DEBUG
auto bytes = getThisThreadMemUsage();
val["memUsage"] = bytes;
val["memUsageMB"] = bytes / 1024 / 1024;
val["memUsageMB"] = bytes >> 20;
val["memBlock"] = getThisThreadMemBlock();
{
int i = 0;

23
cmake/FindPCAP.cmake Normal file
View File

@ -0,0 +1,23 @@
# - Try to find libpcap
#
# Once done this will define
# PCAP_FOUND - System has libpcap
# PCAP_INCLUDE_DIRS - The libpcap include directories
# PCAP_LIBRARIES - The libpcap library
# Find libpcap
FIND_PATH(
PCAP_INCLUDE_DIRS
NAMES pcap.h
)
FIND_LIBRARY(
PCAP_LIBRARIES
NAMES pcap
)
message(STATUS "PCAP LIBRARIES: " ${PCAP_LIBRARIES})
message(STATUS "PCAP INCLUDE DIRS: " ${PCAP_INCLUDE_DIRS})
INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(PCAP DEFAULT_MSG PCAP_LIBRARIES PCAP_INCLUDE_DIRS)

View File

@ -151,8 +151,8 @@ listen_ip=::
fileBufSize=65536
#hls最大切片时间
segDur=2
#m3u8索引中,hls保留切片个数(实际保留切片个数大2~3个)
#如果设置为0则不删除切片,而是保存为点播
#m3u8索引中,hls保留切片个数(实际保留切片个数+segRetain个)
#如果设置为0则不删除切片且m3u8文件全量记录切片列表
segNum=3
#HLS切片延迟个数大于0将生成hls_delay.m3u8文件0则不生成
segDelay=0
@ -162,10 +162,8 @@ segRetain=5
broadcastRecordTs=0
#直播hls文件删除延时单位秒issue: #913
deleteDelaySec=10
#是否保留hls文件此功能部分等效于segNum=0的情况
#不同的是这个保留不会在m3u8文件中体现
#0为不保留不起作用
#1为保留则不删除hls文件如果开启此功能注意磁盘大小或者定期手动清理hls文件
#此选项开启后m3u8文件还是表现为直播但是切片文件会被全部保留为点播用
#segDur设置为0或segKeep设置为1的情况下每个切片文件夹下会生成一个vod.m3u8文件用于点播该时间段的录像
segKeep=0
#如果设置为1则第一个切片长度强制设置为1个GOP。当GOP小于segDur可以提高首屏速度
fastRegister=0

View File

@ -25,13 +25,8 @@ RUN apt-get update && \
ca-certificates \
tzdata \
libssl-dev \
libmysqlclient-dev \
libx264-dev \
libfaac-dev \
gcc \
g++ \
libavcodec-dev libavutil-dev libswscale-dev libresample-dev \
libsdl-dev libusrsctp-dev \
gdb && \
apt-get autoremove -y && \
apt-get clean -y && \
@ -69,13 +64,9 @@ RUN apt-get update && \
tzdata \
curl \
libssl-dev \
libx264-dev \
libfaac-dev \
ffmpeg \
gcc \
g++ \
libavcodec-dev libavutil-dev libswscale-dev libresample-dev \
libsdl-dev libusrsctp-dev \
gdb && \
apt-get autoremove -y && \
apt-get clean -y && \

View File

@ -388,11 +388,7 @@ Track::Ptr AACTrack::clone() const {
}
Sdp::Ptr AACTrack::getSdp(uint8_t payload_type) const {
if (!ready()) {
WarnL << getCodecName() << " Track未准备好";
return nullptr;
}
return std::make_shared<AACSdp>(getExtraData()->toString(), payload_type, getAudioSampleRate(), getAudioChannel(), getBitRate() / 1024);
return std::make_shared<AACSdp>(getExtraData()->toString(), payload_type, getAudioSampleRate(), getAudioChannel(), getBitRate() >> 10);
}
namespace {

View File

@ -19,70 +19,14 @@ using namespace toolkit;
namespace mediakit {
/**
* G711类型SDP
* G711 type SDP
* [AUTO-TRANSLATED:ea72d60a]
*/
class G711Sdp : public Sdp {
public:
/**
* G711采样率固定为8000
* @param codecId G711A G711U
* @param payload_type rtp payload type
* @param sample_rate
* @param channels
* @param bitrate
* G711 sampling rate is fixed at 8000
* @param codecId G711A G711U
* @param payload_type rtp payload type
* @param sample_rate audio sampling rate
* @param channels number of channels
* @param bitrate bitrate
* [AUTO-TRANSLATED:5ea4b771]
*/
G711Sdp(CodecId codecId, int payload_type, int sample_rate, int channels, int bitrate)
: Sdp(sample_rate, payload_type) {
_printer << "m=audio 0 RTP/AVP " << payload_type << "\r\n";
if (bitrate) {
_printer << "b=AS:" << bitrate << "\r\n";
}
_printer << "a=rtpmap:" << payload_type << " " << getCodecName(codecId) << "/" << sample_rate << "/" << channels << "\r\n";
}
string getSdp() const override {
return _printer;
}
private:
_StrPrinter _printer;
};
Track::Ptr G711Track::clone() const {
return std::make_shared<G711Track>(*this);
}
Sdp::Ptr G711Track::getSdp(uint8_t payload_type) const {
if (!ready()) {
WarnL << getCodecName() << " Track未准备好";
return nullptr;
}
const auto codec = getCodecId();
const auto sample_rate = getAudioSampleRate();
const auto audio_channel = getAudioChannel();
const auto bitrate = getBitRate() >> 10;
if (sample_rate == 8000 && audio_channel == 1) {
// https://datatracker.ietf.org/doc/html/rfc3551#section-6
payload_type = (codec == CodecG711U) ? Rtsp::PT_PCMU : Rtsp::PT_PCMA;
}
return std::make_shared<G711Sdp>(codec, payload_type, sample_rate, audio_channel, bitrate);
return std::make_shared<DefaultSdp>(payload_type, *this);
}
namespace {
CodecId getCodecA() {
@ -94,7 +38,7 @@ CodecId getCodecU() {
}
Track::Ptr getTrackByCodecId_l(CodecId codec, int sample_rate, int channels, int sample_bit) {
return (sample_rate && channels && sample_bit) ? std::make_shared<G711Track>(codec, sample_rate, channels, sample_bit) : nullptr;
return std::make_shared<G711Track>(codec, sample_rate, 1, 16);
}
Track::Ptr getTrackByCodecIdA(int sample_rate, int channels, int sample_bit) {
@ -119,7 +63,7 @@ Track::Ptr getTrackBySdpU(const SdpTrack::Ptr &track) {
RtpCodec::Ptr getRtpEncoderByCodecId_l(CodecId codec, uint8_t pt) {
if (pt == Rtsp::PT_PCMA || pt == Rtsp::PT_PCMU) {
return std::make_shared<G711RtpEncoder>(codec, 1);
return std::make_shared<G711RtpEncoder>(8000, 1);
}
return std::make_shared<CommonRtpEncoder>();
}

View File

@ -2,10 +2,10 @@
namespace mediakit {
G711RtpEncoder::G711RtpEncoder(CodecId codec, uint32_t channels){
_cache_frame = FrameImp::create();
_cache_frame->_codec_id = codec;
G711RtpEncoder::G711RtpEncoder(int sample_rate, int channels, int sample_bit) {
_sample_rate = sample_rate;
_channels = channels;
_sample_bit = sample_bit;
}
void G711RtpEncoder::setOpt(int opt, const toolkit::Any &param) {
@ -24,36 +24,24 @@ void G711RtpEncoder::setOpt(int opt, const toolkit::Any &param) {
}
bool G711RtpEncoder::inputFrame(const Frame::Ptr &frame) {
auto dur = (_cache_frame->size() - _cache_frame->prefixSize()) / (8 * _channels);
auto next_pts = _cache_frame->pts() + dur;
if (next_pts == 0) {
_cache_frame->_pts = frame->pts();
} else {
if ((next_pts + _pkt_dur_ms) < frame->pts()) { // 有丢包超过20ms
_cache_frame->_pts = frame->pts() - dur;
}
}
_cache_frame->_buffer.append(frame->data() + frame->prefixSize(), frame->size() - frame->prefixSize());
auto ptr = frame->data() + frame->prefixSize();
auto size = frame->size() - frame->prefixSize();
_buffer.append(ptr, size);
_in_size += size;
_in_pts = frame->pts();
auto stamp = _cache_frame->pts();
auto ptr = _cache_frame->data() + _cache_frame->prefixSize();
auto len = _cache_frame->size() - _cache_frame->prefixSize();
auto remain_size = len;
size_t max_size = 160 * _channels * _pkt_dur_ms / 20; // 20 ms per 160 byte
size_t n = 0;
bool mark = false;
while (remain_size >= max_size) {
assert(remain_size >= max_size);
const size_t rtp_size = max_size;
n++;
stamp += _pkt_dur_ms;
RtpCodec::inputRtp(getRtpInfo().makeRtp(TrackAudio, ptr, rtp_size, mark, stamp), false);
ptr += rtp_size;
remain_size -= rtp_size;
if (!_pkt_bytes) {
// G711压缩率固定是2倍
_pkt_bytes = _pkt_dur_ms * _channels * (_sample_bit / 8) * _sample_rate / 1000 / 2;
}
_cache_frame->_buffer.erase(0, n * max_size);
_cache_frame->_pts += (uint64_t)_pkt_dur_ms * n;
return len > 0;
while (_buffer.size() >= _pkt_bytes) {
_out_size += _pkt_bytes;
auto pts = _in_pts - (_in_size - _out_size) * (_pkt_dur_ms / (float)_pkt_bytes);
RtpCodec::inputRtp(getRtpInfo().makeRtp(TrackAudio, _buffer.data(), _pkt_bytes, false, pts), false);
_buffer.erase(0, _pkt_bytes);
}
return true;
}
} // namespace mediakit

View File

@ -29,15 +29,17 @@ public:
/**
*
* @param codec
* @param sample_rate
* @param channels
* @param sample_bit
* Constructor
* @param codec Encoding type
* @param sample_rate audio sample rate
* @param channels Number of channels
* @param sample_bit audio sample bits
* [AUTO-TRANSLATED:dbbd593e]
*/
G711RtpEncoder(CodecId codec, uint32_t channels);
G711RtpEncoder(int sample_rate = 8000, int channels = 1, int sample_bit = 16);
/**
* rtp
@ -51,9 +53,16 @@ public:
void setOpt(int opt, const toolkit::Any &param) override;
private:
uint32_t _channels = 1;
int _channels;
int _sample_rate;
int _sample_bit;
uint32_t _pkt_dur_ms = 20;
FrameImp::Ptr _cache_frame;
uint32_t _pkt_bytes = 0;
uint64_t _in_size = 0;
uint64_t _out_size = 0;
int64_t _in_pts = 0;
toolkit::BufferLikeString _buffer;
};
}//namespace mediakit

View File

@ -357,11 +357,7 @@ private:
};
Sdp::Ptr H264Track::getSdp(uint8_t payload_type) const {
if (!ready()) {
WarnL << getCodecName() << " Track未准备好";
return nullptr;
}
return std::make_shared<H264Sdp>(_sps, _pps, payload_type, getBitRate() / 1024);
return std::make_shared<H264Sdp>(_sps, _pps, payload_type, getBitRate() >> 10);
}
namespace {

View File

@ -263,11 +263,7 @@ private:
};
Sdp::Ptr H265Track::getSdp(uint8_t payload_type) const {
if (!ready()) {
WarnL << getCodecName() << " Track未准备好";
return nullptr;
}
return std::make_shared<H265Sdp>(_vps, _sps, _pps, payload_type, getBitRate() / 1024);
return std::make_shared<H265Sdp>(_vps, _sps, _pps, payload_type, getBitRate() >> 10);
}
namespace {

View File

@ -31,26 +31,10 @@ void JPEGTrack::getVideoResolution(const uint8_t *buf, int len) {
}
}
class JPEGSdp : public Sdp {
public:
JPEGSdp(int bitrate) : Sdp(90000, Rtsp::PT_JPEG) {
_printer << "m=video 0 RTP/AVP " << (int)getPayloadType() << "\r\n";
if (bitrate) {
_printer << "b=AS:" << bitrate << "\r\n";
}
}
std::string getSdp() const { return _printer; }
private:
_StrPrinter _printer;
};
Sdp::Ptr JPEGTrack::getSdp(uint8_t) const {
return std::make_shared<JPEGSdp>(getBitRate() / 1024);
Sdp::Ptr JPEGTrack::getSdp(uint8_t pt) const {
return std::make_shared<DefaultSdp>(pt, *this);
}
namespace {
CodecId getCodec() {

View File

@ -18,50 +18,8 @@ using namespace toolkit;
namespace mediakit {
/**
* L16类型SDP
* L16 type SDP
* [AUTO-TRANSLATED:11b1196d]
*/
class L16Sdp : public Sdp {
public:
/**
* L16采样位数固定为16位
* @param payload_type rtp payload type
* @param channels
* @param sample_rate
* @param bitrate
* L16 sampling bit width is fixed to 16 bits
* @param payload_type rtp payload type
* @param channels number of channels
* @param sample_rate audio sampling rate
* @param bitrate bitrate
* [AUTO-TRANSLATED:7a08a400]
*/
L16Sdp(int payload_type, int sample_rate, int channels, int bitrate) : Sdp(sample_rate, payload_type) {
_printer << "m=audio 0 RTP/AVP " << payload_type << "\r\n";
if (bitrate) {
_printer << "b=AS:" << bitrate << "\r\n";
}
_printer << "a=rtpmap:" << payload_type << " " << getCodecName(CodecL16) << "/" << sample_rate << "/" << channels << "\r\n";
}
string getSdp() const override { return _printer; }
private:
_StrPrinter _printer;
};
Sdp::Ptr L16Track::getSdp(uint8_t payload_type) const {
WarnL << "Enter L16Track::getSdp function";
if (!ready()) {
WarnL << getCodecName() << " Track未准备好";
return nullptr;
}
return std::make_shared<L16Sdp>(payload_type, getAudioSampleRate(), getAudioChannel(), getBitRate() / 1024);
return std::make_shared<DefaultSdp>(payload_type, *this);
}
Track::Ptr L16Track::clone() const {

View File

@ -18,51 +18,9 @@ using namespace toolkit;
namespace mediakit {
/**
* Opus类型SDP
* Opus type SDP
* [AUTO-TRANSLATED:6c0a72ed]
*/
class OpusSdp : public Sdp {
public:
/**
* opus sdp
* @param payload_type rtp payload type
* @param sample_rate
* @param channels
* @param bitrate
* Construct opus sdp
* @param payload_type rtp payload type
* @param sample_rate audio sample rate
* @param channels number of channels
* @param bitrate bitrate
* [AUTO-TRANSLATED:40713e9d]
*/
OpusSdp(int payload_type, int sample_rate, int channels, int bitrate) : Sdp(sample_rate, payload_type) {
_printer << "m=audio 0 RTP/AVP " << payload_type << "\r\n";
if (bitrate) {
_printer << "b=AS:" << bitrate << "\r\n";
}
_printer << "a=rtpmap:" << payload_type << " " << getCodecName(CodecOpus) << "/" << sample_rate << "/" << channels << "\r\n";
}
string getSdp() const override {
return _printer;
}
private:
_StrPrinter _printer;
};
Sdp::Ptr OpusTrack::getSdp(uint8_t payload_type) const {
if (!ready()) {
WarnL << getCodecName() << " Track未准备好";
return nullptr;
}
return std::make_shared<OpusSdp>(payload_type, getAudioSampleRate(), getAudioChannel(), getBitRate() / 1024);
return std::make_shared<DefaultSdp>(payload_type, *this);
}
namespace {

View File

@ -700,6 +700,58 @@
},
"response": []
},
{
"name": "获取拉流代理列表(listStreamProxy)",
"request": {
"method": "GET",
"header": [],
"url": {
"raw": "{{ZLMediaKit_URL}}/index/api/listStreamProxy?secret={{ZLMediaKit_secret}}",
"host": [
"{{ZLMediaKit_URL}}"
],
"path": [
"index",
"api",
"listStreamProxy"
],
"query": [
{
"key": "secret",
"value": "{{ZLMediaKit_secret}}",
"description": "api操作密钥(配置文件配置)"
}
]
}
},
"response": []
},
{
"name": "获取推流代理列表(listStreamPusherProxy)",
"request": {
"method": "GET",
"header": [],
"url": {
"raw": "{{ZLMediaKit_URL}}/index/api/listStreamPusherProxy?secret={{ZLMediaKit_secret}}",
"host": [
"{{ZLMediaKit_URL}}"
],
"path": [
"index",
"api",
"listStreamPusherProxy"
],
"query": [
{
"key": "secret",
"value": "{{ZLMediaKit_secret}}",
"description": "api操作密钥(配置文件配置)"
}
]
}
},
"response": []
},
{
"name": "添加rtsp/rtmp推流(addStreamPusherProxy)",
"request": {
@ -800,6 +852,32 @@
},
"response": []
},
{
"name": "获取FFmpeg拉流代理列表(listFFmpegSource)",
"request": {
"method": "GET",
"header": [],
"url": {
"raw": "{{ZLMediaKit_URL}}/index/api/listFFmpegSource?secret={{ZLMediaKit_secret}}",
"host": [
"{{ZLMediaKit_URL}}"
],
"path": [
"index",
"api",
"listFFmpegSource"
],
"query": [
{
"key": "secret",
"value": "{{ZLMediaKit_secret}}",
"description": "api操作密钥(配置文件配置)"
}
]
}
},
"response": []
},
{
"name": "添加FFmpeg拉流代理(addFFmpegSource)",
"request": {
@ -2083,6 +2161,81 @@
},
"response": []
},
{
"name": "开始双向对讲(startSendRtpTalk)",
"request": {
"method": "GET",
"header": [],
"url": {
"raw": "{{ZLMediaKit_URL}}/index/api/startSendRtpTalk?secret={{ZLMediaKit_secret}}&vhost={{defaultVhost}}&app=live&stream=obs&ssrc=1&recv_stream_id=",
"host": [
"{{ZLMediaKit_URL}}"
],
"path": [
"index",
"api",
"startSendRtpTalk"
],
"query": [
{
"key": "secret",
"value": "{{ZLMediaKit_secret}}",
"description": "api操作密钥(配置文件配置)"
},
{
"key": "vhost",
"value": "{{defaultVhost}}",
"description": "虚拟主机例如__defaultVhost__"
},
{
"key": "app",
"value": "rtp",
"description": "应用名,例如 rtp"
},
{
"key": "stream",
"value": "rtc",
"description": "流id例如webrtc推流上来的流id"
},
{
"key": "ssrc",
"value": "1",
"description": "rtp推流出去的ssrc"
},
{
"key": "recv_stream_id",
"value": "",
"description": "对方rtp推流上来的流id我们将通过这个链接回复他rtp流请注意两个流的app和vhost需一致"
},
{
"key": "from_mp4",
"value": "0",
"description": "是否推送本地MP4录像该参数非必选参数",
"disabled": true
},
{
"key": "type",
"value": "1",
"description": "0(ES流)、1(PS流)、2(TS流)默认1(PS流);该参数非必选参数",
"disabled": true
},
{
"key": "pt",
"value": "96",
"description": "rtp payload type默认96该参数非必选参数",
"disabled": true
},
{
"key": "only_audio",
"value": "1",
"description": "rtp es方式打包时是否只打包音频该参数非必选参数",
"disabled": true
}
]
}
},
"response": []
},
{
"name": "停止 发送rtp(stopSendRtp)",
"request": {

View File

@ -103,6 +103,7 @@ void FFmpegSource::play(const string &ffmpeg_cmd_key, const string &src_url, con
snprintf(cmd, sizeof(cmd), ffmpeg_cmd.data(), File::absolutePath("", ffmpeg_bin).data(), src_url.data(), dst_url.data());
auto log_file = ffmpeg_log.empty() ? "" : File::absolutePath("", ffmpeg_log);
_process.run(cmd, log_file);
_cmd = cmd;
InfoL << cmd;
if (is_local_ip(_media_info.host)) {

View File

@ -77,6 +77,12 @@ public:
*/
void play(const std::string &ffmpeg_cmd_key, const std::string &src_url, const std::string &dst_url, int timeout_ms, const onPlay &cb);
const std::string& getSrcUrl() const { return _src_url; }
const std::string& getDstUrl() const { return _dst_url; }
const std::string& getCmd() const { return _cmd; }
const std::string& getCmdKey() const { return _ffmpeg_cmd_key; }
const mediakit::MediaInfo& getMediaInfo() const { return _media_info; }
/**
*
* @param enable_hls hls直播或录制
@ -115,6 +121,7 @@ private:
std::string _src_url;
std::string _dst_url;
std::string _ffmpeg_cmd_key;
std::string _cmd;
std::function<void()> _onClose;
toolkit::Ticker _replay_ticker;
};

View File

@ -23,7 +23,11 @@
INSTANCE_IMP(VideoStackManager)
Param::~Param() { VideoStackManager::Instance().unrefChannel(id, width, height, pixfmt); }
Param::~Param() {
auto strongChn= weak_chn.lock();
if (!strongChn) { return; }
VideoStackManager::Instance().unrefChannel(id, width, height, pixfmt);
}
Channel::Channel(const std::string& id, int width, int height, AVPixelFormat pixfmt)
: _id(id), _width(width), _height(height), _pixfmt(pixfmt) {
@ -278,7 +282,9 @@ void VideoStack::start() {
_dev->inputYUV((char**)_buffer->get()->data, _buffer->get()->linesize, pts);
pts += frameInterval;
}
} else {
std::this_thread::sleep_for(std::chrono::milliseconds(1));
}
}
});
}

View File

@ -346,6 +346,15 @@ public:
return it->second;
}
void for_each(const std::function<void(const std::string&, const Pointer&)>& cb) {
std::lock_guard<std::recursive_mutex> lck(_mtx);
auto it = _map.begin();
while (it != _map.end()) {
cb(it->first, it->second);
it++;
}
}
template<class ..._Args>
Pointer make(const std::string &key, _Args&& ...__args) {
// assert(!find(key));
@ -409,6 +418,29 @@ void dumpMediaTuple(const MediaTuple &tuple, Json::Value& item) {
item["params"] = tuple.params;
}
Value ToJson(const PusherProxy::Ptr& p) {
Value item;
item["url"] = p->getUrl();
item["status"] = p->getStatus();
item["liveSecs"] = p->getLiveSecs();
item["rePublishCount"] = p->getRePublishCount();
if (auto src = p->getSrc()) {
dumpMediaTuple(src->getMediaTuple(), item["src"]);
}
return item;
}
Value ToJson(const PlayerProxy::Ptr& p) {
Value item;
item["url"] = p->getUrl();
item["status"] = p->getStatus();
item["liveSecs"] = p->getLiveSecs();
item["rePullCount"] = p->getRePullCount();
item["totalReaderCount"] = p->totalReaderCount();
dumpMediaTuple(p->getMediaTuple(), item["src"]);
return item;
}
Value makeMediaSourceJson(MediaSource &media){
Value item;
item["schema"] = media.getSchema();
@ -538,7 +570,7 @@ void getStatisticJson(const function<void(Value &val)> &cb) {
#ifdef ENABLE_MEM_DEBUG
auto bytes = getTotalMemUsage();
val["totalMemUsage"] = (Json::UInt64) bytes;
val["totalMemUsageMB"] = (int) (bytes / 1024 / 1024);
val["totalMemUsageMB"] = (int) (bytes >> 20);
val["totalMemBlock"] = (Json::UInt64) getTotalMemBlock();
static auto block_type_size = getBlockTypeSize();
{
@ -572,7 +604,7 @@ void getStatisticJson(const function<void(Value &val)> &cb) {
auto bytes = getThisThreadMemUsage();
val["threadName"] = getThreadName();
val["threadMemUsage"] = (Json::UInt64) bytes;
val["threadMemUsageMB"] = (Json::UInt64) (bytes / 1024 / 1024);
val["threadMemUsageMB"] = (Json::UInt64) (bytes >> 20);
val["threadMemBlock"] = (Json::UInt64) getThisThreadMemBlock();
{
int i = 0;
@ -1173,7 +1205,22 @@ void installWebApi() {
CHECK_ARGS("key");
val["data"]["flag"] = s_pusher_proxy.erase(allArgs["key"]) == 1;
});
api_regist("/index/api/listStreamPusherProxy", [](API_ARGS_MAP) {
CHECK_SECRET();
s_pusher_proxy.for_each([&val](const std::string& key, const PusherProxy::Ptr& p) {
Json::Value item = ToJson(p);
item["key"] = key;
val["data"].append(item);
});
});
api_regist("/index/api/listStreamProxy", [](API_ARGS_MAP) {
CHECK_SECRET();
s_player_proxy.for_each([&val](const std::string& key, const PlayerProxy::Ptr& p) {
Json::Value item = ToJson(p);
item["key"] = key;
val["data"].append(item);
});
});
// 动态添加rtsp/rtmp拉流代理 [AUTO-TRANSLATED:2616537c]
// Dynamically add rtsp/rtmp pull stream proxy
// 测试url http://127.0.0.1/index/api/addStreamProxy?vhost=__defaultVhost__&app=proxy&enable_rtsp=1&enable_rtmp=1&stream=0&url=rtmp://127.0.0.1/live/obs [AUTO-TRANSLATED:71ddce15]
@ -1286,7 +1333,18 @@ void installWebApi() {
CHECK_ARGS("key");
val["data"]["flag"] = s_ffmpeg_src.erase(allArgs["key"]) == 1;
});
api_regist("/index/api/listFFmpegSource", [](API_ARGS_MAP) {
CHECK_SECRET();
s_ffmpeg_src.for_each([&val](const std::string& key, const FFmpegSource::Ptr& src) {
Json::Value item;
item["src_url"] = src->getSrcUrl();
item["dst_url"] = src->getDstUrl();
item["cmd"] = src->getCmd();
item["ffmpeg_cmd_key"] = src->getCmdKey();
item["key"] = key;
val["data"].append(item);
});
});
// 新增http api下载可执行程序文件接口 [AUTO-TRANSLATED:d6e44e84]
// Add a new http api to download executable files
// 测试url http://127.0.0.1/index/api/downloadBin [AUTO-TRANSLATED:9525e834]
@ -1477,7 +1535,11 @@ void installWebApi() {
obj["vhost"] = vec[0];
obj["app"] = vec[1];
obj["stream_id"] = vec[2];
obj["port"] = pr.second->getPort();
auto& rtps = pr.second;
obj["port"] = rtps->getPort();
obj["ssrc"] = rtps->getSSRC();
obj["tcp_mode"] = rtps->getTcpMode();
obj["only_track"] = rtps->getOnlyTrack();
val["data"].append(obj);
}
});
@ -1544,6 +1606,41 @@ void installWebApi() {
start_send_rtp(true, API_ARGS_VALUE, invoker);
});
api_regist("/index/api/startSendRtpTalk",[](API_ARGS_MAP_ASYNC){
CHECK_SECRET();
CHECK_ARGS("vhost", "app", "stream", "ssrc", "recv_stream_id");
auto src = MediaSource::find(allArgs["vhost"], allArgs["app"], allArgs["stream"], allArgs["from_mp4"].as<int>());
if (!src) {
throw ApiRetException("can not find the source stream", API::NotFound);
}
MediaSourceEvent::SendRtpArgs args;
args.con_type = mediakit::MediaSourceEvent::SendRtpArgs::kVoiceTalk;
args.ssrc = allArgs["ssrc"];
args.pt = allArgs["pt"].empty() ? 96 : allArgs["pt"].as<int>();
args.data_type = allArgs["type"].empty() ? MediaSourceEvent::SendRtpArgs::kRtpPS : (MediaSourceEvent::SendRtpArgs::DataType)(allArgs["type"].as<int>());
args.only_audio = allArgs["only_audio"].as<bool>();
args.recv_stream_id = allArgs["recv_stream_id"];
args.recv_stream_app = allArgs["app"];
args.recv_stream_vhost = allArgs["vhost"];
src->getOwnerPoller()->async([=]() mutable {
try {
src->startSendRtp(args, [val, headerOut, invoker](uint16_t local_port, const SockException &ex) mutable {
if (ex) {
val["code"] = API::OtherFailed;
val["msg"] = ex.what();
}
val["local_port"] = local_port;
invoker(200, headerOut, val.toStyledString());
});
} catch (std::exception &ex) {
val["code"] = API::Exception;
val["msg"] = ex.what();
invoker(200, headerOut, val.toStyledString());
}
});
});
api_regist("/index/api/listRtpSender",[](API_ARGS_MAP_ASYNC){
CHECK_SECRET();
CHECK_ARGS("vhost", "app", "stream");
@ -1741,9 +1838,7 @@ void installWebApi() {
throw ApiRetException("can not find pusher", API::NotFound);
}
val["data"]["status"] = pusher->getStatus();
val["data"]["liveSecs"] = pusher->getLiveSecs();
val["data"]["rePublishCount"] = pusher->getRePublishCount();
val["data"] = ToJson(pusher);
invoker(200, headerOut, val.toStyledString());
});
@ -1755,9 +1850,7 @@ void installWebApi() {
throw ApiRetException("can not find the proxy", API::NotFound);
}
val["data"]["status"] = proxy->getStatus();
val["data"]["liveSecs"] = proxy->getLiveSecs();
val["data"]["rePullCount"] = proxy->getRePullCount();
val["data"] = ToJson(proxy);
invoker(200, headerOut, val.toStyledString());
});

View File

@ -215,6 +215,9 @@ public:
// Global variable, used in WebApi to save configuration files
string g_ini_file;
// 加载ssl证书函数对象
std::function<void()> g_reload_certificates;
int start_main(int argc,char *argv[]) {
{
CMD_main cmd_main;
@ -284,19 +287,24 @@ int start_main(int argc,char *argv[]) {
if (!File::is_dir(ssl_file)) {
// 不是文件夹,加载证书,证书包含公钥和私钥 [AUTO-TRANSLATED:5d3a5e49]
// Not a folder, load certificate, certificate contains public key and private key
SSL_Initor::Instance().loadCertificate(ssl_file.data());
g_reload_certificates = [ssl_file] () {
SSL_Initor::Instance().loadCertificate(ssl_file.data());
};
} else {
// 加载文件夹下的所有证书 [AUTO-TRANSLATED:0e1f9b20]
// Load all certificates under the folder
File::scanDir(ssl_file,[](const string &path, bool isDir){
if (!isDir) {
// 最后的一个证书会当做默认证书(客户端ssl握手时未指定主机) [AUTO-TRANSLATED:b242685c]
// The last certificate will be used as the default certificate (client ssl handshake does not specify the host)
SSL_Initor::Instance().loadCertificate(path.data());
}
return true;
});
g_reload_certificates = [ssl_file]() {
File::scanDir(ssl_file, [](const string &path, bool isDir) {
if (!isDir) {
// 最后的一个证书会当做默认证书(客户端ssl握手时未指定主机) [AUTO-TRANSLATED:b242685c]
// The last certificate will be used as the default certificate (client ssl handshake does not specify the host)
SSL_Initor::Instance().loadCertificate(path.data());
}
return true;
});
};
}
g_reload_certificates();
std::string listen_ip = mINI::Instance()[General::kListenIP];
uint16_t shellPort = mINI::Instance()[Shell::kPort];
@ -465,7 +473,10 @@ int start_main(int argc,char *argv[]) {
});
#if !defined(_WIN32)
signal(SIGHUP, [](int) { mediakit::loadIniConfig(g_ini_file.data()); });
signal(SIGHUP, [](int) {
mediakit::loadIniConfig(g_ini_file.data());
g_reload_certificates();
});
#endif
sem.wait();
}

View File

@ -124,7 +124,8 @@ public:
kTcpActive = 0, // tcp主动模式tcp客户端主动连接对方并发送rtp
kUdpActive = 1, // udp主动模式主动发送数据给对方
kTcpPassive = 2, // tcp被动模式tcp服务器等待对方连接并回复rtp
kUdpPassive = 3 // udp被动方式等待对方发送nat打洞包然后回复rtp至打洞包源地址
kUdpPassive = 3, // udp被动方式等待对方发送nat打洞包然后回复rtp至打洞包源地址
kVoiceTalk = 4, // 语音对讲模式对方必须想推流上来通过他的推流链路再回复rtp数据
};
// rtp类型 [AUTO-TRANSLATED:acca40ab]

View File

@ -131,12 +131,17 @@ private:
std::list<std::pair<uint64_t, Frame::Ptr>> _cache;
};
static std::shared_ptr<MediaSinkInterface> makeRecorder(MediaSource &sender, const vector<Track::Ptr> &tracks, Recorder::type type, const ProtocolOption &option){
auto recorder = Recorder::createRecorder(type, sender.getMediaTuple(), option);
for (auto &track : tracks) {
std::shared_ptr<MediaSinkInterface> MultiMediaSourceMuxer::makeRecorder(MediaSource &sender, Recorder::type type) {
auto recorder = Recorder::createRecorder(type, sender.getMediaTuple(), _option);
for (auto &track : getTracks()) {
recorder->addTrack(track);
}
recorder->addTrackCompleted();
if (_ring) {
_ring->flushGop([&](const Frame::Ptr &frame) {
recorder->inputFrame(frame);
});
}
return recorder;
}
@ -325,7 +330,7 @@ bool MultiMediaSourceMuxer::setupRecord(MediaSource &sender, Recorder::type type
// 开始录制 [AUTO-TRANSLATED:36d99250]
// Start recording
_option.hls_save_path = custom_path;
auto hls = dynamic_pointer_cast<HlsRecorder>(makeRecorder(sender, getTracks(), type, _option));
auto hls = dynamic_pointer_cast<HlsRecorder>(makeRecorder(sender, type));
if (hls) {
// 设置HlsMediaSource的事件监听器 [AUTO-TRANSLATED:69990c92]
// Set the event listener for HlsMediaSource
@ -345,7 +350,7 @@ bool MultiMediaSourceMuxer::setupRecord(MediaSource &sender, Recorder::type type
// Start recording
_option.mp4_save_path = custom_path;
_option.mp4_max_second = max_second;
_mp4 = makeRecorder(sender, getTracks(), type, _option);
_mp4 = makeRecorder(sender, type);
} else if (!start && _mp4) {
// 停止录制 [AUTO-TRANSLATED:3dee9292]
// Stop recording
@ -358,7 +363,7 @@ bool MultiMediaSourceMuxer::setupRecord(MediaSource &sender, Recorder::type type
// 开始录制 [AUTO-TRANSLATED:36d99250]
// Start recording
_option.hls_save_path = custom_path;
auto hls = dynamic_pointer_cast<HlsFMP4Recorder>(makeRecorder(sender, getTracks(), type, _option));
auto hls = dynamic_pointer_cast<HlsFMP4Recorder>(makeRecorder(sender, type));
if (hls) {
// 设置HlsMediaSource的事件监听器 [AUTO-TRANSLATED:69990c92]
// Set the event listener for HlsMediaSource
@ -374,7 +379,7 @@ bool MultiMediaSourceMuxer::setupRecord(MediaSource &sender, Recorder::type type
}
case Recorder::type_fmp4: {
if (start && !_fmp4) {
auto fmp4 = dynamic_pointer_cast<FMP4MediaSourceMuxer>(makeRecorder(sender, getTracks(), type, _option));
auto fmp4 = dynamic_pointer_cast<FMP4MediaSourceMuxer>(makeRecorder(sender, type));
if (fmp4) {
fmp4->setListener(shared_from_this());
}
@ -386,7 +391,7 @@ bool MultiMediaSourceMuxer::setupRecord(MediaSource &sender, Recorder::type type
}
case Recorder::type_ts: {
if (start && !_ts) {
auto ts = dynamic_pointer_cast<TSMediaSourceMuxer>(makeRecorder(sender, getTracks(), type, _option));
auto ts = dynamic_pointer_cast<TSMediaSourceMuxer>(makeRecorder(sender, type));
if (ts) {
ts->setListener(shared_from_this());
}

View File

@ -232,6 +232,7 @@ protected:
private:
void createGopCacheIfNeed();
std::shared_ptr<MediaSinkInterface> makeRecorder(MediaSource &sender, Recorder::type type);
private:
bool _is_enable = false;

View File

@ -27,6 +27,12 @@ DeltaStamp::DeltaStamp() {
_max_delta = 300;
}
// Restore the delta tracker to its initial state so the next incoming
// stamp is treated as the start of a fresh sequence.
void DeltaStamp::reset() {
_last_stamp = 0;
_relative_stamp = 0;
// 1 is the default forced increment used when a stamp jump/rollback is rejected
_last_delta = 1;
}
int64_t DeltaStamp::relativeStamp(int64_t stamp, bool enable_rollback) {
_relative_stamp += deltaStamp(stamp, enable_rollback);
return _relative_stamp;
@ -55,8 +61,9 @@ int64_t DeltaStamp::deltaStamp(int64_t stamp, bool enable_rollback) {
// In the live broadcast case, the timestamp increment must not be greater than MAX_DELTA_STAMP, otherwise the relative timestamp is forced to add 1
if (ret > _max_delta) {
needSync();
return 1;
return _last_delta;
}
_last_delta = ret;
return ret;
}
@ -67,7 +74,7 @@ int64_t DeltaStamp::deltaStamp(int64_t stamp, bool enable_rollback) {
// 不允许回退或者回退太多了, 强制时间戳加1 [AUTO-TRANSLATED:152f5ffa]
// Not allowed to retreat or retreat too much, force the timestamp to add 1
needSync();
return 1;
return _last_delta;
}
return ret;
}
@ -93,6 +100,14 @@ void Stamp::enableRollback(bool flag) {
_enable_rollback = flag;
}
// Reset both the base delta tracker and this class's own dts/pts bookkeeping,
// so timestamp revision restarts from zero.
void Stamp::reset() {
DeltaStamp::reset();
_relative_stamp = 0;
_last_dts_in = 0;
_last_dts_out = 0;
_last_pts_out = 0;
}
// 限制dts回退 [AUTO-TRANSLATED:6bc53b31]
// Limit dts retreat
void Stamp::revise(int64_t dts, int64_t pts, int64_t &dts_out, int64_t &pts_out, bool modifyStamp) {

View File

@ -42,11 +42,15 @@ public:
// Set the maximum allowed rollback or jump amplitude
void setMaxDelta(size_t max_delta);
// 重置
void reset();
protected:
virtual void needSync() {}
protected:
int _max_delta;
int _last_delta = 1;
int64_t _last_stamp = 0;
int64_t _relative_stamp = 0;
};
@ -123,6 +127,11 @@ public:
*/
void enableRollback(bool flag);
/**
*
*/
void reset();
private:
// 主要实现音视频时间戳同步功能 [AUTO-TRANSLATED:45863fce]
// Mainly implements audio and video timestamp synchronization function

View File

@ -10,6 +10,8 @@
#include "Factory.h"
#include "Rtmp/Rtmp.h"
#include "CommonRtmp.h"
#include "CommonRtp.h"
#include "Common/config.h"
using namespace std;
@ -19,15 +21,6 @@ namespace mediakit {
static std::unordered_map<int, const CodecPlugin *> s_plugins;
extern CodecPlugin h264_plugin;
extern CodecPlugin h265_plugin;
extern CodecPlugin jpeg_plugin;
extern CodecPlugin aac_plugin;
extern CodecPlugin opus_plugin;
extern CodecPlugin g711a_plugin;
extern CodecPlugin g711u_plugin;
extern CodecPlugin l16_plugin;
REGISTER_CODEC(h264_plugin);
REGISTER_CODEC(h265_plugin);
REGISTER_CODEC(jpeg_plugin);
@ -51,8 +44,7 @@ Track::Ptr Factory::getTrackBySdp(const SdpTrack::Ptr &track) {
}
auto it = s_plugins.find(codec);
if (it == s_plugins.end()) {
WarnL << "Unsupported codec: " << track->getName();
return nullptr;
return getTrackByCodecId(codec, track->_samplerate, track->_channel);
}
return it->second->getTrackBySdp(track);
}
@ -69,8 +61,8 @@ Track::Ptr Factory::getTrackByAbstractTrack(const Track::Ptr &track) {
RtpCodec::Ptr Factory::getRtpEncoderByCodecId(CodecId codec, uint8_t pt) {
auto it = s_plugins.find(codec);
if (it == s_plugins.end()) {
WarnL << "Unsupported codec: " << getCodecName(codec);
return nullptr;
WarnL << "Unsupported codec: " << getCodecName(codec) << ", use CommonRtpEncoder";
return std::make_shared<CommonRtpEncoder>();
}
return it->second->getRtpEncoderByCodecId(pt);
}
@ -78,8 +70,8 @@ RtpCodec::Ptr Factory::getRtpEncoderByCodecId(CodecId codec, uint8_t pt) {
RtpCodec::Ptr Factory::getRtpDecoderByCodecId(CodecId codec) {
auto it = s_plugins.find(codec);
if (it == s_plugins.end()) {
WarnL << "Unsupported codec: " << getCodecName(codec);
return nullptr;
WarnL << "Unsupported codec: " << getCodecName(codec) << ", use CommonRtpDecoder";
return std::make_shared<CommonRtpDecoder>(codec, 10 * 1024);
}
return it->second->getRtpDecoderByCodecId();
}
@ -87,7 +79,7 @@ RtpCodec::Ptr Factory::getRtpDecoderByCodecId(CodecId codec) {
// ///////////////////////////rtmp相关/////////////////////////////////////////// [AUTO-TRANSLATED:da9645df]
// ///////////////////////////rtmp related///////////////////////////////////////////
static CodecId getVideoCodecIdByAmf(const AMFValue &val){
static CodecId getVideoCodecIdByAmf(const AMFValue &val) {
if (val.type() == AMF_STRING) {
auto str = val.as_string();
if (str == "avc1") {
@ -117,15 +109,25 @@ static CodecId getVideoCodecIdByAmf(const AMFValue &val){
Track::Ptr Factory::getTrackByCodecId(CodecId codec, int sample_rate, int channels, int sample_bit) {
auto it = s_plugins.find(codec);
if (it == s_plugins.end()) {
WarnL << "Unsupported codec: " << getCodecName(codec);
return nullptr;
auto type = mediakit::getTrackType(codec);
switch (type) {
case TrackAudio: {
WarnL << "Unsupported codec: " << getCodecName(codec) << ", use default audio track";
return std::make_shared<AudioTrackImp>(codec, sample_rate, channels, sample_bit);
}
case TrackVideo: {
WarnL << "Unsupported codec: " << getCodecName(codec) << ", use default video track";
return std::make_shared<VideoTrackImp>(codec, 0, 0, 0);
}
default: WarnL << "Unsupported codec: " << getCodecName(codec); return nullptr;
}
}
return it->second->getTrackByCodecId(sample_rate, channels, sample_bit);
}
Track::Ptr Factory::getVideoTrackByAmf(const AMFValue &amf) {
CodecId codecId = getVideoCodecIdByAmf(amf);
if(codecId == CodecInvalid){
if (codecId == CodecInvalid) {
return nullptr;
}
return getTrackByCodecId(codecId);
@ -144,18 +146,19 @@ static CodecId getAudioCodecIdByAmf(const AMFValue &val) {
if (val.type() != AMF_NULL) {
auto type_id = (RtmpAudioCodec)val.as_integer();
switch (type_id) {
case RtmpAudioCodec::aac : return CodecAAC;
case RtmpAudioCodec::g711a : return CodecG711A;
case RtmpAudioCodec::g711u : return CodecG711U;
case RtmpAudioCodec::opus : return CodecOpus;
default : WarnL << "Unsupported codec: " << (int)type_id; return CodecInvalid;
case RtmpAudioCodec::aac: return CodecAAC;
case RtmpAudioCodec::mp3: return CodecMP3;
case RtmpAudioCodec::adpcm: return CodecADPCM;
case RtmpAudioCodec::g711a: return CodecG711A;
case RtmpAudioCodec::g711u: return CodecG711U;
case RtmpAudioCodec::opus: return CodecOpus;
default: WarnL << "Unsupported codec: " << (int)type_id; return CodecInvalid;
}
}
return CodecInvalid;
}
Track::Ptr Factory::getAudioTrackByAmf(const AMFValue& amf, int sample_rate, int channels, int sample_bit){
Track::Ptr Factory::getAudioTrackByAmf(const AMFValue &amf, int sample_rate, int channels, int sample_bit) {
CodecId codecId = getAudioCodecIdByAmf(amf);
if (codecId == CodecInvalid) {
return nullptr;
@ -166,8 +169,8 @@ Track::Ptr Factory::getAudioTrackByAmf(const AMFValue& amf, int sample_rate, int
RtmpCodec::Ptr Factory::getRtmpDecoderByTrack(const Track::Ptr &track) {
auto it = s_plugins.find(track->getCodecId());
if (it == s_plugins.end()) {
WarnL << "Unsupported codec: " << track->getCodecName();
return nullptr;
WarnL << "Unsupported codec: " << track->getCodecName() << ", use CommonRtmpDecoder";
return std::make_shared<CommonRtmpDecoder>(track);
}
return it->second->getRtmpDecoderByTrack(track);
}
@ -175,8 +178,9 @@ RtmpCodec::Ptr Factory::getRtmpDecoderByTrack(const Track::Ptr &track) {
RtmpCodec::Ptr Factory::getRtmpEncoderByTrack(const Track::Ptr &track) {
auto it = s_plugins.find(track->getCodecId());
if (it == s_plugins.end()) {
WarnL << "Unsupported codec: " << track->getCodecName();
return nullptr;
auto amf = Factory::getAmfByCodecId(track->getCodecId());
WarnL << "Unsupported codec: " << track->getCodecName() << (amf ? ", use CommonRtmpEncoder" : "");
return amf ? std::make_shared<CommonRtmpEncoder>(track) : nullptr;
}
return it->second->getRtmpEncoderByTrack(track);
}
@ -190,6 +194,8 @@ AMFValue Factory::getAmfByCodecId(CodecId codecId) {
case CodecG711A: return AMFValue((int)RtmpAudioCodec::g711a);
case CodecG711U: return AMFValue((int)RtmpAudioCodec::g711u);
case CodecOpus: return AMFValue((int)RtmpAudioCodec::opus);
case CodecADPCM: return AMFValue((int)RtmpAudioCodec::adpcm);
case CodecMP3: return AMFValue((int)RtmpAudioCodec::mp3);
case CodecAV1: return AMFValue((int)RtmpVideoCodec::fourcc_av1);
case CodecVP9: return AMFValue((int)RtmpVideoCodec::fourcc_vp9);
default: return AMFValue(AMF_NULL);
@ -208,11 +214,10 @@ Frame::Ptr Factory::getFrameFromPtr(CodecId codec, const char *data, size_t byte
Frame::Ptr Factory::getFrameFromBuffer(CodecId codec, Buffer::Ptr data, uint64_t dts, uint64_t pts) {
auto frame = Factory::getFrameFromPtr(codec, data->data(), data->size(), dts, pts);
if(!frame){
if (!frame) {
return nullptr;
}
return std::make_shared<FrameCacheAble>(frame, false, std::move(data));
}
}//namespace mediakit
} // namespace mediakit

View File

@ -23,6 +23,7 @@
#define REGISTER_STATIC_VAR(var_name, line) REGISTER_STATIC_VAR_INNER(var_name, line)
#define REGISTER_CODEC(plugin) \
extern CodecPlugin plugin; \
static toolkit::onceToken REGISTER_STATIC_VAR(s_token, __LINE__) ([]() { \
Factory::registerPlugin(plugin); \
});
@ -64,7 +65,7 @@ public:
* [AUTO-TRANSLATED:397b982e]
*/
static Track::Ptr getTrackByCodecId(CodecId codecId, int sample_rate = 0, int channels = 0, int sample_bit = 0);
static Track::Ptr getTrackByCodecId(CodecId codecId, int sample_rate = 0, int channels = 1, int sample_bit = 16);
// //////////////////////////////rtsp相关////////////////////////////////// [AUTO-TRANSLATED:884055ec]
// //////////////////////////////rtsp相关//////////////////////////////////

View File

@ -38,15 +38,24 @@ Frame::Ptr Frame::getCacheAbleFrame(const Frame::Ptr &frame){
return std::make_shared<FrameCacheAble>(frame);
}
FrameStamp::FrameStamp(Frame::Ptr frame, Stamp &stamp, int modify_stamp)
{
FrameStamp::FrameStamp(Frame::Ptr frame) {
setIndex(frame->getIndex());
_frame = std::move(frame);
}
FrameStamp::FrameStamp(Frame::Ptr frame, Stamp &stamp, int modify_stamp)
: FrameStamp(std::move(frame)) {
// kModifyStampSystem时采用系统时间戳kModifyStampRelative采用相对时间戳 [AUTO-TRANSLATED:54dd5685]
// When using kModifyStampSystem, the system timestamp is used, and when using kModifyStampRelative, the relative timestamp is used.
stamp.revise(_frame->dts(), _frame->pts(), _dts, _pts, modify_stamp == ProtocolOption::kModifyStampSystem);
}
void FrameStamp::setStamp(int64_t dts, int64_t pts) {
_dts = dts;
_pts = pts;
}
TrackType getTrackType(CodecId codecId) {
switch (codecId) {
#define XX(name, type, value, str, mpeg_id, mp4_id) case name : return type;
@ -252,7 +261,7 @@ static bool isNeedMerge(CodecId codec){
bool FrameMerger::inputFrame(const Frame::Ptr &frame, onOutput cb, BufferLikeString *buffer) {
if (frame && !isNeedMerge(frame->getCodecId())) {
cb(frame->dts(), frame->pts(), frame, true);
cb(frame->dts(), frame->pts(), Frame::getCacheAbleFrame(frame), true);
return true;
}
if (willFlush(frame)) {

View File

@ -43,7 +43,18 @@ typedef enum {
XX(CodecVP8, TrackVideo, 7, "VP8", PSI_STREAM_VP8, MOV_OBJECT_VP8) \
XX(CodecVP9, TrackVideo, 8, "VP9", PSI_STREAM_VP9, MOV_OBJECT_VP9) \
XX(CodecAV1, TrackVideo, 9, "AV1", PSI_STREAM_AV1, MOV_OBJECT_AV1) \
XX(CodecJPEG, TrackVideo, 10, "JPEG", PSI_STREAM_JPEG_2000, MOV_OBJECT_JPEG)
XX(CodecJPEG, TrackVideo, 10, "JPEG", PSI_STREAM_JPEG_2000, MOV_OBJECT_JPEG) \
XX(CodecH266, TrackVideo, 11, "H266", PSI_STREAM_H266, MOV_OBJECT_H266) \
XX(CodecTS, TrackVideo, 12, "MP2T", PSI_STREAM_RESERVED, MOV_OBJECT_NONE) \
XX(CodecPS, TrackVideo, 13, "MPEG", PSI_STREAM_RESERVED, MOV_OBJECT_NONE) \
XX(CodecMP3, TrackAudio, 14, "MP3", PSI_STREAM_MP3, MOV_OBJECT_MP3) \
XX(CodecADPCM, TrackAudio, 15, "ADPCM", PSI_STREAM_RESERVED, MOV_OBJECT_NONE) \
XX(CodecSVACV, TrackVideo, 16, "SVACV", PSI_STREAM_VIDEO_SVAC, MOV_OBJECT_NONE) \
XX(CodecSVACA, TrackAudio, 17, "SVACA", PSI_STREAM_AUDIO_SVAC, MOV_OBJECT_NONE) \
XX(CodecG722, TrackAudio, 18, "G722", PSI_STREAM_AUDIO_G722, MOV_OBJECT_NONE) \
XX(CodecG723, TrackAudio, 19, "G723", PSI_STREAM_AUDIO_G723, MOV_OBJECT_NONE) \
XX(CodecG728, TrackAudio, 20, "G728", PSI_STREAM_RESERVED, MOV_OBJECT_NONE) \
XX(CodecG729, TrackAudio, 21, "G729", PSI_STREAM_AUDIO_G729, MOV_OBJECT_NONE)
typedef enum {
CodecInvalid = -1,
@ -524,6 +535,7 @@ private:
class FrameStamp : public Frame {
public:
using Ptr = std::shared_ptr<FrameStamp>;
FrameStamp(Frame::Ptr frame);
FrameStamp(Frame::Ptr frame, Stamp &stamp, int modify_stamp);
~FrameStamp() override {}
@ -538,6 +550,7 @@ public:
char *data() const override { return _frame->data(); }
size_t size() const override { return _frame->size(); }
CodecId getCodecId() const override { return _frame->getCodecId(); }
void setStamp(int64_t dts, int64_t pts);
private:
int64_t _dts;

26
src/Extension/Track.cpp Normal file
View File

@ -0,0 +1,26 @@
/*
* Copyright (c) 2016-present The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/ZLMediaKit/ZLMediaKit).
*
* Use of this source code is governed by MIT-like license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#include "Track.h"
#include "Util/util.h"
using namespace std;
using namespace toolkit;
namespace mediakit {
// Build a generic SDP description for this audio track with the given RTP payload type.
Sdp::Ptr AudioTrackImp::getSdp(uint8_t payload_type) const {
return std::make_shared<DefaultSdp>(payload_type, *this);
}
// Build a generic SDP description for this video track with the given RTP payload type.
Sdp::Ptr VideoTrackImp::getSdp(uint8_t payload_type) const {
return std::make_shared<DefaultSdp>(payload_type, *this);
}
} // namespace mediakit

View File

@ -201,7 +201,7 @@ public:
bool ready() const override { return true; }
Track::Ptr clone() const override { return std::make_shared<VideoTrackImp>(*this); }
Sdp::Ptr getSdp(uint8_t payload_type) const override { return nullptr; }
Sdp::Ptr getSdp(uint8_t payload_type) const override;
CodecId getCodecId() const override { return _codec_id; }
private:
@ -298,7 +298,7 @@ public:
* [AUTO-TRANSLATED:9af5a0a4]
*/
int getAudioSampleRate() const override{
return _sample_rate;
return _sample_rate ? _sample_rate : RtpPayload::getClockRateByCodec(_codecid);
}
/**
@ -308,7 +308,7 @@ public:
* [AUTO-TRANSLATED:5fedc65d]
*/
int getAudioSampleBit() const override{
return _sample_bit;
return _sample_bit ? _sample_bit : 16;
}
/**
@ -318,11 +318,11 @@ public:
* [AUTO-TRANSLATED:2613b317]
*/
int getAudioChannel() const override{
return _channels;
return _channels ? _channels : 1;
}
Track::Ptr clone() const override { return std::make_shared<AudioTrackImp>(*this); }
Sdp::Ptr getSdp(uint8_t payload_type) const override { return nullptr; }
Sdp::Ptr getSdp(uint8_t payload_type) const override;
private:
CodecId _codecid;

View File

@ -284,9 +284,9 @@ static bool makeFolderMenu(const string &httpPath, const string &strFullPath, st
} else if (fileSize < 1024 * 1024) {
ss << fixed << setprecision(2) << " (" << fileSize / 1024.0 << "KB)";
} else if (fileSize < 1024 * 1024 * 1024) {
ss << fixed << setprecision(2) << " (" << fileSize / 1024 / 1024.0 << "MB)";
ss << fixed << setprecision(2) << " (" << (fileSize >> 10) / 1024.0 << "MB)";
} else {
ss << fixed << setprecision(2) << " (" << fileSize / 1024 / 1024 / 1024.0 << "GB)";
ss << fixed << setprecision(2) << " (" << (fileSize >> 20) / 1024.0 << "GB)";
}
ss << "</a></li>\r\n";
}

View File

@ -139,6 +139,10 @@ public:
// Using this only makes sense after a successful connection to the server
TranslationInfo getTranslationInfo();
const std::string& getUrl() const { return _pull_url; }
const MediaTuple& getMediaTuple() const { return _tuple; }
const ProtocolOption& getOption() const { return _option; }
private:
// MediaSourceEvent override
bool close(MediaSource &sender) override;

View File

@ -46,6 +46,7 @@ void MediaPusher::publish(const string &url) {
_delegate->setOnPublished(_on_publish);
_delegate->mINI::operator=(*this);
_delegate->publish(url);
_url = url;
}
EventPoller::Ptr MediaPusher::getPoller(){

View File

@ -33,11 +33,13 @@ public:
void publish(const std::string &url) override;
toolkit::EventPoller::Ptr getPoller();
void setOnCreateSocket(toolkit::Socket::onCreateSocket cb);
std::shared_ptr<MediaSource> getSrc() { return _src.lock(); }
const std::string& getUrl() const { return _url; }
private:
std::weak_ptr<MediaSource> _src;
toolkit::EventPoller::Ptr _poller;
toolkit::Socket::onCreateSocket _on_create_socket;
std::string _url;
};
} /* namespace mediakit */

View File

@ -19,7 +19,6 @@ PusherProxy::PusherProxy(const MediaSource::Ptr &src, int retry_count, const Eve
: MediaPusher(src, poller) {
_retry_count = retry_count;
_on_close = [](const SockException &) {};
_weak_src = src;
_live_secs = 0;
_live_status = 1;
_republish_count = 0;
@ -52,7 +51,7 @@ void PusherProxy::publish(const string &dst_url) {
strong_self->_on_publish = nullptr;
}
auto src = strong_self->_weak_src.lock();
auto src = strong_self->getSrc();
if (!err) {
// 推流成功 [AUTO-TRANSLATED:28ce6e56]
// Stream successfully pushed
@ -87,7 +86,7 @@ void PusherProxy::publish(const string &dst_url) {
TraceL << " live secs " << strong_self->_live_secs;
}
auto src = strong_self->_weak_src.lock();
auto src = strong_self->getSrc();
// 推流异常中断,延时重试播放 [AUTO-TRANSLATED:e69e5a05]
// Stream abnormally interrupted, retry playing with delay
if (src && (*failed_cnt < strong_self->_retry_count || strong_self->_retry_count < 0)) {

View File

@ -77,7 +77,6 @@ private:
std::atomic<int> _live_status;
std::atomic<uint64_t> _live_secs;
std::atomic<uint64_t> _republish_count;
std::weak_ptr<MediaSource> _weak_src;
std::function<void(const toolkit::SockException &ex)> _on_close;
std::function<void(const toolkit::SockException &ex)> _on_publish;
};

View File

@ -122,9 +122,9 @@ void HlsMaker::inputData(const char *data, size_t len, uint64_t timestamp, bool
void HlsMaker::delOldSegment() {
GET_CONFIG(uint32_t, segDelay, Hls::kSegmentDelay);
if (_seg_number == 0) {
// 如果设置为保留0个切片则认为是保存为点播 [AUTO-TRANSLATED:5bf20108]
// If set to keep 0 slices, it is considered to be saved as on-demand
if (_seg_number == 0 || _seg_keep) {
// 如果设置为保留0个切片则认为是保存为点播;或者设置为一直保存,就不删除 [AUTO-TRANSLATED:5bf20108]
// If set to keep 0 or all slices, it is considered to be saved as on-demand
return;
}
// 在hls m3u8索引文件中,我们保存的切片个数跟_seg_number相关设置一致 [AUTO-TRANSLATED:b14b5b98]
@ -132,11 +132,6 @@ void HlsMaker::delOldSegment() {
if (_file_index > _seg_number + segDelay) {
_seg_dur_list.pop_front();
}
// 如果设置为一直保存,就不删除 [AUTO-TRANSLATED:7c622e24]
// If set to always save, it will not be deleted
if (_seg_keep) {
return;
}
GET_CONFIG(uint32_t, segRetain, Hls::kSegmentRetain);
// 但是实际保存的切片个数比m3u8所述多若干个,这样做的目的是防止播放器在切片删除前能下载完毕 [AUTO-TRANSLATED:1688f857]
// However, the actual number of slices saved is a few more than what is stated in the m3u8, this is done to prevent the player from downloading the slices before they are deleted

View File

@ -9,6 +9,7 @@
*/
#include <ctime>
#include <iomanip>
#include <sys/stat.h>
#include "HlsMakerImp.h"
#include "Util/util.h"
@ -49,6 +50,10 @@ HlsMakerImp::~HlsMakerImp() {
} catch (std::exception &ex) {
WarnL << ex.what();
}
if (!isLive() || isKeep()) {
saveCurrentDir();
}
}
void HlsMakerImp::clearCache() {
@ -99,17 +104,66 @@ void HlsMakerImp::clearCache(bool immediately, bool eof) {
_segment_file_paths.clear();
}
/** Write this directory's init.mp4 file and its vod m3u8 index file **/
void HlsMakerImp::saveCurrentDir() {
    if (_current_dir.empty() || _current_dir_seg_list.empty()) {
        return;
    }
    if (isFmp4()) {
        // Persist the cached fmp4 init segment next to the slices of this directory
        File::saveFile(_current_dir_init_file, _path_prefix + "/" + _current_dir + "init.mp4");
    }
    // EXT-X-TARGETDURATION must cover the longest segment (seconds, rounded up)
    int maxSegmentDuration = 0;
    for (auto &tp : _current_dir_seg_list) {
        int dur = std::get<0>(tp);
        if (dur > maxSegmentDuration) {
            maxSegmentDuration = dur;
        }
    }
    string index_str;
    index_str.reserve(2048);
    index_str += "#EXTM3U\n";
    index_str += (isFmp4() ? "#EXT-X-VERSION:7\n" : "#EXT-X-VERSION:4\n");
    index_str += "#EXT-X-ALLOW-CACHE:YES\n";
    index_str += "#EXT-X-TARGETDURATION:" + std::to_string((maxSegmentDuration + 999) / 1000) + "\n";
    index_str += "#EXT-X-MEDIA-SEQUENCE:0\n";
    if (isFmp4()) {
        index_str += "#EXT-X-MAP:URI=\"init.mp4\"\n";
    }
    stringstream ss;
    for (auto &t : _current_dir_seg_list) {
        // std::fixed is required here: with the default float format, setprecision(3)
        // means 3 *significant* digits, which truncates the fractional duration of any
        // segment longer than 9.99 seconds (e.g. 100.5s would be written as "100")
        ss << "#EXTINF:" << std::fixed << std::setprecision(3) << std::get<0>(t) / 1000.0 << ",\n" << std::get<1>(t) << "\n";
    }
    _current_dir_seg_list.clear();
    index_str += ss.str();
    index_str += "#EXT-X-ENDLIST\n";
    /** Write this directory's m3u8 index file **/
    File::saveFile(index_str, _path_prefix + "/" + _current_dir + (isFmp4() ? "vod.fmp4.m3u8" : "vod.m3u8"));
}
string HlsMakerImp::onOpenSegment(uint64_t index) {
string segment_name, segment_path;
{
auto strDate = getTimeStr("%Y-%m-%d");
auto strHour = getTimeStr("%H");
auto strTime = getTimeStr("%M-%S");
segment_name = StrPrinter << strDate + "/" + strHour + "/" + strTime << "_" << index << (isFmp4() ? ".mp4" : ".ts");
auto current_dir = strDate + "/" + strHour + "/";
segment_name = current_dir + strTime + "_" + std::to_string(index) + (isFmp4() ? ".mp4" : ".ts");
segment_path = _path_prefix + "/" + segment_name;
if (isLive()) {
// 直播
_segment_file_paths.emplace(index, segment_path);
}
if (!isLive() || isKeep()) {
// 目录将发生变更保留ts切片时每个目录都生成一个m3u8文件
if (!_current_dir.empty() && current_dir != _current_dir) {
saveCurrentDir();
}
_current_dir = std::move(current_dir);
}
}
_file = makeFile(segment_path, true);
@ -139,13 +193,14 @@ void HlsMakerImp::onDelSegment(uint64_t index) {
}
void HlsMakerImp::onWriteInitSegment(const char *data, size_t len) {
if (!isLive() || isKeep()) {
_current_dir_init_file.assign(data, len);
}
string init_seg_path = _path_prefix + "/init.mp4";
_file = makeFile(init_seg_path);
if (_file) {
fwrite(data, len, 1, _file.get());
auto file = makeFile(init_seg_path);
if (file) {
fwrite(data, len, 1, file.get());
_path_init = std::move(init_seg_path);
_file = nullptr;
} else {
WarnL << "Create file failed," << init_seg_path << " " << get_uv_errmsg();
}
@ -178,7 +233,9 @@ void HlsMakerImp::onFlushLastSegment(uint64_t duration_ms) {
// 关闭并flush文件到磁盘 [AUTO-TRANSLATED:9798ec4d]
// Close and flush file to disk
_file = nullptr;
if (!isLive() || isKeep()) {
_current_dir_seg_list.emplace_back(duration_ms, _info.file_name.erase(0, _current_dir.size()));
}
GET_CONFIG(bool, broadcastRecordTs, Hls::kBroadcastRecordTs);
if (broadcastRecordTs) {
_info.time_len = duration_ms / 1000.0f;

View File

@ -63,6 +63,7 @@ protected:
private:
std::shared_ptr<FILE> makeFile(const std::string &file,bool setbuf = false);
void clearCache(bool immediately, bool eof);
void saveCurrentDir();
private:
int _buf_size;
@ -71,12 +72,15 @@ private:
std::string _path_hls_delay;
std::string _path_init;
std::string _path_prefix;
std::string _current_dir;
std::string _current_dir_init_file;
RecordInfo _info;
std::shared_ptr<FILE> _file;
std::shared_ptr<char> _file_buf;
HlsMediaSource::Ptr _media_src;
toolkit::EventPoller::Ptr _poller;
std::map<uint64_t/*index*/,std::string/*file_path*/> _segment_file_paths;
std::deque<std::tuple<int,std::string> > _current_dir_seg_list;
};
}//namespace mediakit

View File

@ -9,7 +9,10 @@
*/
#ifdef ENABLE_MP4
#include <algorithm>
#include "MP4Demuxer.h"
#include "Util/File.h"
#include "Util/logger.h"
#include "Extension/Factory.h"
@ -189,5 +192,90 @@ uint64_t MP4Demuxer::getDurationMS() const {
return _duration_ms;
}
/////////////////////////////////////////////////////////////////////////////////
// Open one or more mp4 files and expose them as a single concatenated timeline.
// files_string may be: a directory (all contained files, sorted by name), or a
// ';'-separated list of file paths.
void MultiMP4Demuxer::openMP4(const string &files_string) {
std::vector<std::string> files;
if (File::is_dir(files_string)) {
File::scanDir(files_string, [&](const string &path, bool is_dir) {
if (!is_dir) {
files.emplace_back(path);
}
return true;
});
// Play directory entries in lexical order
std::sort(files.begin(), files.end());
} else {
files = split(files_string, ";");
}
// _demuxers is keyed by each file's start offset (ms) on the merged timeline.
// NOTE(review): a file whose getDurationMS() is 0 would reuse the previous key
// and be dropped by emplace — confirm inputs never contain empty mp4 files.
uint64_t duration_ms = 0;
for (auto &file : files) {
auto demuxer = std::make_shared<MP4Demuxer>();
demuxer->openMP4(file);
_demuxers.emplace(duration_ms, demuxer);
duration_ms += demuxer->getDurationMS();
}
CHECK(!_demuxers.empty());
_it = _demuxers.begin();
// Track layout is taken from the first file; frames from later files are fed
// into clones of these tracks.
for (auto &track : _it->second->getTracks(false)) {
_tracks.emplace(track->getIndex(), track->clone());
}
}
// Total playback duration in milliseconds: the start offset of the last file
// on the merged timeline plus that file's own duration.
uint64_t MultiMP4Demuxer::getDurationMS() const {
    if (_demuxers.empty()) {
        return 0;
    }
    auto &last = *_demuxers.rbegin();
    return last.first + last.second->getDurationMS();
}
// Release every opened demuxer and all cached track state.
void MultiMP4Demuxer::closeMP4() {
_demuxers.clear();
// After clear() the cursor may only point at end()
_it = _demuxers.end();
_tracks.clear();
}
// Seek on the concatenated timeline.
// @param stamp_ms target position in milliseconds relative to the start of the first file
// @return the position actually seeked to, or -1 if stamp_ms is at/beyond the total duration
int64_t MultiMP4Demuxer::seekTo(int64_t stamp_ms) {
if (stamp_ms >= (int64_t)getDurationMS()) {
return -1;
}
// Map keys are file start offsets; upper_bound/prev selects the file whose
// range [start, start + duration) contains stamp_ms.
_it = std::prev(_demuxers.upper_bound(stamp_ms));
return _it->first + _it->second->seekTo(stamp_ms - _it->first);
}
// Read the next frame from the current file, rebasing its dts/pts onto the
// merged timeline; automatically advances to the next file on EOF.
// @param keyFrame output flag, set when the returned frame is a key frame
// @param eof output flag, set only when the last file has been fully consumed
// @return the frame read, or nullptr at the end of the last file
Frame::Ptr MultiMP4Demuxer::readFrame(bool &keyFrame, bool &eof) {
for (;;) {
auto ret = _it->second->readFrame(keyFrame, eof);
if (ret) {
auto it = _tracks.find(ret->getIndex());
if (it != _tracks.end()) {
// Shift the frame's timestamps by this file's start offset on the merged timeline
auto ret2 = std::make_shared<FrameStamp>(ret);
ret2->setStamp(_it->first + ret->dts(), _it->first + ret->pts());
ret = std::move(ret2);
it->second->inputFrame(ret);
}
}
if (eof && _it != _demuxers.end()) {
// Switch to the next file
// NOTE(review): if the underlying demuxer ever returned a frame together with
// eof, that frame would be discarded here — confirm it always returns nullptr at eof.
if (++_it == _demuxers.end()) {
// Already at the last file
eof = true;
return nullptr;
}
// Play the next file from its beginning
_it->second->seekTo(0);
continue;
}
return ret;
}
}
std::vector<Track::Ptr> MultiMP4Demuxer::getTracks(bool trackReady) const {
std::vector<Track::Ptr> ret;
for (auto &pr : _tracks) {
if (!trackReady || pr.second->ready()) {
ret.emplace_back(pr.second);
}
}
return ret;
}
}//namespace mediakit
#endif// ENABLE_MP4

View File

@ -11,9 +11,12 @@
#ifndef ZLMEDIAKIT_MP4DEMUXER_H
#define ZLMEDIAKIT_MP4DEMUXER_H
#ifdef ENABLE_MP4
#include <map>
#include "MP4.h"
#include "Extension/Track.h"
#include "Util/ResourcePool.h"
namespace mediakit {
class MP4Demuxer : public TrackSource {
@ -103,6 +106,56 @@ private:
toolkit::ResourcePool<toolkit::BufferRaw> _buffer_pool;
};
// Demuxer that opens several mp4 files and presents them as one concatenated mp4.
class MultiMP4Demuxer : public TrackSource {
public:
using Ptr = std::shared_ptr<MultiMP4Demuxer>;
~MultiMP4Demuxer() override = default;
/**
 * Open multiple mp4 files and treat them as a single concatenated mp4.
 * @param file an mp4 file path, a ';'-separated list of mp4 files, or a folder containing mp4 files
 */
void openMP4(const std::string &file);
/**
 * @brief Close all opened mp4 files
 */
void closeMP4();
/**
 * Seek on the concatenated timeline
 * @param stamp_ms target position in milliseconds
 * @return the position actually seeked to, negative on failure
 */
int64_t seekTo(int64_t stamp_ms);
/**
 * Read the next frame
 * @param keyFrame output flag, set when the frame is a key frame
 * @param eof output flag, set when the end of the last file is reached
 * @return the frame read, or nullptr at end of stream
 */
Frame::Ptr readFrame(bool &keyFrame, bool &eof);
/**
 * Get all Track info
 * @param trackReady whether to return only tracks in the ready state
 * @return all Track info
 */
std::vector<Track::Ptr> getTracks(bool trackReady) const override;
/**
 * Get the total duration
 * @return total duration in milliseconds
 */
uint64_t getDurationMS() const;
private:
// Cloned tracks of the first file, keyed by track index
std::map<int, Track::Ptr> _tracks;
// Cursor pointing at the file currently being read
std::map<uint64_t, MP4Demuxer::Ptr>::iterator _it;
// Per-file demuxers keyed by start offset (ms) on the merged timeline
std::map<uint64_t, MP4Demuxer::Ptr> _demuxers;
};
}//namespace mediakit
#endif//ENABLE_MP4

View File

@ -247,7 +247,8 @@ bool MP4MuxerMemory::inputFrame(const Frame::Ptr &frame) {
_key_frame = false;
}
if (frame->keyFrame()) {
// only audio all frame is key frame
if (frame->keyFrame() || !haveVideo()) {
_key_frame = true;
}
if (frame->getTrackType() == TrackVideo || !haveVideo()) {

View File

@ -54,7 +54,7 @@ void MP4Reader::setup(const MediaTuple &tuple, const std::string &file_path, con
_file_path = File::absolutePath(_file_path, recordPath);
}
_demuxer = std::make_shared<MP4Demuxer>();
_demuxer = std::make_shared<MultiMP4Demuxer>();
_demuxer->openMP4(_file_path);
if (tuple.stream.empty()) {
@ -164,7 +164,7 @@ void MP4Reader::startReadMP4(uint64_t sample_ms, bool ref_self, bool file_repeat
_file_repeat = file_repeat;
}
const MP4Demuxer::Ptr &MP4Reader::getDemuxer() const {
const MultiMP4Demuxer::Ptr &MP4Reader::getDemuxer() const {
return _demuxer;
}

View File

@ -68,7 +68,7 @@ public:
* [AUTO-TRANSLATED:4f0dfc29]
*/
const MP4Demuxer::Ptr& getDemuxer() const;
const MultiMP4Demuxer::Ptr& getDemuxer() const;
private:
//MediaSourceEvent override
@ -100,7 +100,7 @@ private:
std::recursive_mutex _mtx;
toolkit::Ticker _seek_ticker;
toolkit::Timer::Ptr _timer;
MP4Demuxer::Ptr _demuxer;
MultiMP4Demuxer::Ptr _demuxer;
MultiMediaSourceMuxer::Ptr _muxer;
toolkit::EventPoller::Ptr _poller;
};

View File

@ -123,11 +123,15 @@ bool MP4Recorder::inputFrame(const Frame::Ptr &frame) {
if (!(_have_video && frame->getTrackType() == TrackAudio)) {
// 如果有视频且输入的是音频,那么应该忽略切片逻辑 [AUTO-TRANSLATED:fbb15d93]
// If there is video and the input is audio, then the slice logic should be ignored
if (_last_dts == 0 || _last_dts > frame->dts()) {
if (_last_dts == 0) {
// first frame assign dts
_last_dts = frame->dts();
} else if (_last_dts > frame->dts()) {
// b帧情况下dts时间戳可能回退 [AUTO-TRANSLATED:1de38f77]
// In the case of b-frames, the dts timestamp may regress
_last_dts = MAX(frame->dts(), _last_dts);
_last_dts = MIN(frame->dts(), _last_dts);
}
auto duration = 5u; // 默认至少一帧5ms
if (frame->dts() > 0 && frame->dts() > _last_dts) {
duration = MAX(duration, frame->dts() - _last_dts);

View File

@ -209,8 +209,13 @@ void RtcpHeader::net2Host(size_t len) {
RtcpXRDLRR *dlrr = (RtcpXRDLRR *)this;
dlrr->net2Host(len);
TraceL << dlrr->dumpString();
} else if (xr->bt == 42){
//当有浏览器将屏幕推流到服务器时会发生这个, 暂时没发现什么作用,先解析出来,不做处理
RtcpXRTargetBitrate* tb = (RtcpXRTargetBitrate *)this;
tb->net2Host(len);
//TraceL << tb->dumpString();
} else {
throw std::runtime_error(StrPrinter << "rtcp xr bt " << xr->bt << " not support");
throw std::runtime_error(StrPrinter << "rtcp xr bt " << (int)xr->bt << " not support");
}
break;
}
@ -796,6 +801,71 @@ std::shared_ptr<RtcpXRDLRR> RtcpXRDLRR::create(size_t item_count) {
return std::shared_ptr<RtcpXRDLRR>(ptr, [](RtcpXRDLRR *ptr) { delete[](char *) ptr; });
}
////////////////////////////////////
// Render one target-bitrate item as human readable text.
// Only meaningful after net2Host() has converted the fields to host byte order.
string RtcpXRTargetBitrateItem::dumpString() const {
_StrPrinter printer;
printer << "Spatial Layer :" << spatial_layer << "\r\n";
printer << "Temporal Layer :" << temporal_layer << "\r\n";
printer << "Target Bitrate: " << target_bitrate << "\r\n";
return std::move(printer);
}
// Convert the item from network to host byte order.
// The wire item is one 32-bit word: S(4) | T(4) | target_bitrate(24); after
// byte-swapping, the low 8 bits are shifted out so only the 24-bit bitrate remains.
// NOTE(review): this relies on the bitfield layout matching the wire layout —
// confirm behavior on big-endian targets.
void RtcpXRTargetBitrateItem::net2Host() {
target_bitrate = ntohl(target_bitrate) >> 8;
}
std::vector<RtcpXRTargetBitrateItem *> RtcpXRTargetBitrate::getItemList() {
auto count = block_length;
RtcpXRTargetBitrateItem *ptr = &items;
vector<RtcpXRTargetBitrateItem *> ret;
for (int i = 0; i < (int)count; ++i) {
ret.emplace_back(ptr);
++ptr;
}
return ret;
}
// Render the whole XR target-bitrate block as human readable text.
// Only meaningful after net2Host() has converted the fields to host byte order.
string RtcpXRTargetBitrate::dumpString() const {
_StrPrinter printer;
printer << RtcpHeader::dumpHeader();
printer << "ssrc :" << ssrc << "\r\n";
printer << "bt :" << (int)bt << "\r\n";
printer << "block_length : " << block_length << "\r\n";
// getItemList() is non-const; cast away constness for read-only dumping
auto items_list = ((RtcpXRTargetBitrate *)this)->getItemList();
auto i = 0;
for (auto &item : items_list) {
printer << "---- item:" << i++ << " ----\r\n";
printer << item->dumpString();
}
return std::move(printer);
}
// Convert the block and all of its trailing items from network to host byte order.
// @param size total byte count of the packet, validated against the minimum header size
void RtcpXRTargetBitrate::net2Host(size_t size) {
    static const size_t kMinSize = sizeof(RtcpHeader);
    CHECK_MIN_SIZE(size, kMinSize);
    ssrc = ntohl(ssrc);
    block_length = ntohs(block_length);
    auto count = block_length;
    // The item pointer must be hoisted out of the loop: re-declaring it inside the
    // loop body converted the FIRST item `count` times (corrupting its bitrate by
    // repeatedly applying ntohl + shift) and never touched the remaining items.
    RtcpXRTargetBitrateItem *ptr = &items;
    for (int i = 0; i < (int)count; ++i) {
        ptr->net2Host();
        ++ptr;
    }
}
// Allocate an XR target-bitrate packet with room for item_count items and pre-fill
// the RtcpHeader part (network byte order); payload fields are left to the caller.
// @param item_count number of RtcpXRTargetBitrateItem slots to allocate
// @return the packet, freed via delete[] on the underlying char buffer
std::shared_ptr<RtcpXRTargetBitrate> RtcpXRTargetBitrate::create(size_t item_count) {
// One item is already embedded in the struct, so subtract it before adding item_count items
auto real_size = sizeof(RtcpXRTargetBitrate) - sizeof(RtcpXRTargetBitrateItem) + item_count * sizeof(RtcpXRTargetBitrateItem);
auto bytes = alignSize(real_size);
auto ptr = (RtcpXRTargetBitrate *)new char[bytes];
setupHeader(ptr, RtcpType::RTCP_XR, 0, bytes);
setupPadding(ptr, bytes - real_size);
return std::shared_ptr<RtcpXRTargetBitrate>(ptr, [](RtcpXRTargetBitrate *ptr) { delete[](char *) ptr; });
}
#if 0
#include "Util/onceToken.h"

View File

@ -772,7 +772,6 @@ private:
/**
*
* @param size
*/
void net2Host();
};
@ -814,6 +813,105 @@ private:
};
// RFC 4585: Feedback format.
//
// Common packet format:
//
// 0 1 2 3
// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// | BT=42 | reserved | block length |
// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
//
// Target bitrate item (repeat as many times as necessary).
//
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// | S | T | Target Bitrate |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// : ... :
//
// Spatial Layer (S): 4 bits
// Indicates which spatial layer this bitrate concerns.
//
// Temporal Layer (T): 4 bits
// Indicates which temporal layer this bitrate concerns.
//
// Target Bitrate: 24 bits
// The encoder target bitrate for this layer, in kbps.
//
// As an example of how S and T are intended to be used, VP8 simulcast will
// use a separate TargetBitrate message per stream, since they are transmitted
// on separate SSRCs, with temporal layers grouped by stream.
// If VP9 SVC is used, there will be only one SSRC, so each spatial and
// temporal layer combo used shall be specified in the TargetBitrate packet.
// One (spatial, temporal) layer entry of an RTCP XR TargetBitrate block (BT=42, WebRTC extension).
class RtcpXRTargetBitrateItem {
public:
    friend class RtcpXRTargetBitrate;
#if __BYTE_ORDER == __BIG_ENDIAN
    // Indicates which spatial layer this bitrate concerns.
    uint32_t spatial_layer : 4;
    // Indicates which temporal layer this bitrate concerns.
    uint32_t temporal_layer : 4;
#else
    // Indicates which temporal layer this bitrate concerns.
    uint32_t temporal_layer : 4;
    // Indicates which spatial layer this bitrate concerns.
    uint32_t spatial_layer : 4;
#endif
    // The encoder target bitrate for this layer, in kbps.
    uint32_t target_bitrate : 24;

private:
    /**
     * Dump the item as a human-readable string.
     * Only valid after net2Host() has converted the fields to host byte order.
     */
    std::string dumpString() const;

    /**
     * Convert the fields from network byte order to host byte order.
     */
    void net2Host();
};
// RTCP XR TargetBitrate packet: header followed by a variable number of bitrate items.
class RtcpXRTargetBitrate : public RtcpHeader {
public:
    friend class RtcpHeader;
    uint32_t ssrc;
    // Block type; 42 identifies a TargetBitrate block.
    uint8_t bt;
    uint8_t reserved;
    // Block length in 32-bit words; equals the number of items (each item is one word).
    uint16_t block_length;
    // First item; further items follow contiguously in the same allocation.
    RtcpXRTargetBitrateItem items;

    /**
     * Create an RtcpXRTargetBitrate packet with the RtcpHeader part pre-filled (network byte order).
     * @param item_count number of RtcpXRTargetBitrateItem entries
     * @return the RtcpXRTargetBitrate packet
     */
    static std::shared_ptr<RtcpXRTargetBitrate> create(size_t item_count);

    /**
     * Get the list of RtcpXRTargetBitrateItem pointers.
     * Only valid after net2Host() has converted the packet to host byte order.
     */
    std::vector<RtcpXRTargetBitrateItem *> getItemList();

private:
    /**
     * Dump the packet as a human-readable string.
     * Only valid after net2Host() has converted the packet to host byte order.
     */
    std::string dumpString() const;

    /**
     * Convert the packet from network byte order to host byte order.
     * @param size total size of the packet in bytes
     */
    void net2Host(size_t size);
};
#pragma pack(pop)
} // namespace mediakit

View File

@ -34,14 +34,14 @@ VideoMeta::VideoMeta(const VideoTrack::Ptr &video) {
_metadata.set("framerate", video->getVideoFps());
}
if (video->getBitRate()) {
_metadata.set("videodatarate", video->getBitRate() / 1024);
_metadata.set("videodatarate", video->getBitRate() >> 10);
}
_metadata.set("videocodecid", Factory::getAmfByCodecId(video->getCodecId()));
}
AudioMeta::AudioMeta(const AudioTrack::Ptr &audio) {
if (audio->getBitRate()) {
_metadata.set("audiodatarate", audio->getBitRate() / 1024);
_metadata.set("audiodatarate", audio->getBitRate() >> 10);
}
if (audio->getAudioSampleRate() > 0) {
_metadata.set("audiosamplerate", audio->getAudioSampleRate());
@ -68,29 +68,23 @@ uint8_t getAudioRtmpFlags(const Track::Ptr &track) {
auto iChannel = audioTrack->getAudioChannel();
auto iSampleBit = audioTrack->getAudioSampleBit();
uint8_t flvAudioType;
auto amf = Factory::getAmfByCodecId(track->getCodecId());
if (!amf) {
WarnL << "该编码格式不支持转换为RTMP: " << track->getCodecName();
return 0;
}
uint8_t flvAudioType = amf.as_integer();
switch (track->getCodecId()) {
case CodecG711A: flvAudioType = (uint8_t)RtmpAudioCodec::g711a; break;
case CodecG711U: flvAudioType = (uint8_t)RtmpAudioCodec::g711u; break;
case CodecAAC:
case CodecOpus: {
flvAudioType = (uint8_t)RtmpAudioCodec::opus;
// opus不通过flags获取音频相关信息 [AUTO-TRANSLATED:0ddf328b]
// opus does not get audio information through flags
// opus/aac不通过flags获取音频相关信息 [AUTO-TRANSLATED:0ddf328b]
// opus/aac does not get audio information through flags
iSampleRate = 44100;
iSampleBit = 16;
iChannel = 2;
break;
}
case CodecAAC: {
flvAudioType = (uint8_t)RtmpAudioCodec::aac;
// aac不通过flags获取音频相关信息 [AUTO-TRANSLATED:63ac5081]
// aac does not get audio information through flags
iSampleRate = 44100;
iSampleBit = 16;
iChannel = 2;
break;
}
default: WarnL << "该编码格式不支持转换为RTMP: " << track->getCodecName(); return 0;
default: break;
}
uint8_t flvSampleRate;

View File

@ -371,6 +371,8 @@ enum class RtmpAudioCodec : uint8_t {
14 = MP3 8 kHz
15 = Device-specific sound
*/
adpcm = 1,
mp3 = 2,
g711a = 7,
g711u = 8,
aac = 10,

View File

@ -90,9 +90,12 @@ void DecoderImp::onStream(int stream, int codecid, const void *extra, size_t byt
}
// G711传统只支持 8000/1/16的规格FFmpeg貌似做了扩展但是这里不管它了 [AUTO-TRANSLATED:851813f7]
// G711 traditionally only supports the 8000/1/16 specification. FFmpeg seems to have extended it, but we'll ignore that here.
auto track = Factory::getTrackByCodecId(getCodecByMpegId(codecid), 8000, 1, 16);
if (track) {
onTrack(stream, std::move(track));
auto codec = getCodecByMpegId(codecid);
if (codec != CodecInvalid) {
auto track = Factory::getTrackByCodecId(codec);
if (track) {
onTrack(stream, std::move(track));
}
}
// 防止未获取视频track提前complete导致忽略后续视频的问题用于兼容一些不太规范的ps流 [AUTO-TRANSLATED:d6b349b5]
// Prevent the problem of ignoring subsequent video due to premature completion of the video track before it is obtained. This is used to be compatible with some non-standard PS streams.
@ -113,7 +116,7 @@ void DecoderImp::onDecode(int stream, int codecid, int flags, int64_t pts, int64
}
auto &ref = _tracks[stream];
if (!ref.first) {
onTrack(stream, Factory::getTrackByCodecId(codec, 8000, 1, 16));
onTrack(stream, Factory::getTrackByCodecId(codec));
}
if (!ref.first) {
WarnL << "Unsupported codec :" << getCodecName(codec);

View File

@ -83,79 +83,76 @@ bool GB28181Process::inputRtp(bool, const char *data, size_t data_len) {
// Prevent too many pt types from causing memory overflow
WarnL << "Rtp payload type more than 2 types: " << _rtp_receiver.size();
}
switch (pt) {
case Rtsp::PT_PCMA:
case Rtsp::PT_PCMU: {
// CodecG711U or CodecG711A
ref = std::make_shared<RtpReceiverImp>(8000, [this](RtpPacket::Ptr rtp) { onRtpSorted(std::move(rtp)); });
auto track = Factory::getTrackByCodecId(pt == Rtsp::PT_PCMU ? CodecG711U : CodecG711A, 8000, 1, 16);
CHECK(track);
track->setIndex(pt);
_interface->addTrack(track);
_rtp_decoder[pt] = Factory::getRtpDecoderByCodecId(track->getCodecId());
break;
}
case Rtsp::PT_JPEG: {
// mjpeg
ref = std::make_shared<RtpReceiverImp>(90000, [this](RtpPacket::Ptr rtp) { onRtpSorted(std::move(rtp)); });
auto track = Factory::getTrackByCodecId(CodecJPEG);
CHECK(track);
track->setIndex(pt);
_interface->addTrack(track);
_rtp_decoder[pt] = Factory::getRtpDecoderByCodecId(track->getCodecId());
break;
}
default: {
if (pt == opus_pt) {
// opus负载 [AUTO-TRANSLATED:defa6a8d]
// opus payload
ref = std::make_shared<RtpReceiverImp>(48000, [this](RtpPacket::Ptr rtp) { onRtpSorted(std::move(rtp)); });
auto track = Factory::getTrackByCodecId(CodecOpus);
do {
if (pt < 96) {
auto codec = RtpPayload::getCodecId(pt);
if (codec != CodecInvalid && codec != CodecTS) {
auto sample_rate = RtpPayload::getClockRate(pt);
auto channels = RtpPayload::getAudioChannel(pt);
ref = std::make_shared<RtpReceiverImp>(sample_rate, [this](RtpPacket::Ptr rtp) { onRtpSorted(std::move(rtp)); });
auto track = Factory::getTrackByCodecId(codec, sample_rate, channels);
CHECK(track);
track->setIndex(pt);
_interface->addTrack(track);
_rtp_decoder[pt] = Factory::getRtpDecoderByCodecId(track->getCodecId());
} else if (pt == h265_pt) {
// H265负载 [AUTO-TRANSLATED:61fbcf7f]
// H265 payload
ref = std::make_shared<RtpReceiverImp>(90000, [this](RtpPacket::Ptr rtp) { onRtpSorted(std::move(rtp)); });
auto track = Factory::getTrackByCodecId(CodecH265);
CHECK(track);
track->setIndex(pt);
_interface->addTrack(track);
_rtp_decoder[pt] = Factory::getRtpDecoderByCodecId(track->getCodecId());
} else if (pt == h264_pt) {
// H264负载 [AUTO-TRANSLATED:6f3fbb0d]
// H264 payload
ref = std::make_shared<RtpReceiverImp>(90000, [this](RtpPacket::Ptr rtp) { onRtpSorted(std::move(rtp)); });
auto track = Factory::getTrackByCodecId(CodecH264);
CHECK(track);
track->setIndex(pt);
_interface->addTrack(track);
_rtp_decoder[pt] = Factory::getRtpDecoderByCodecId(track->getCodecId());
} else {
if (pt != Rtsp::PT_MP2T && pt != ps_pt) {
WarnL << "Unknown rtp payload type(" << (int)pt << "), decode it as mpeg-ps or mpeg-ts";
}
ref = std::make_shared<RtpReceiverImp>(90000, [this](RtpPacket::Ptr rtp) { onRtpSorted(std::move(rtp)); });
// ts或ps负载 [AUTO-TRANSLATED:3ca31480]
// ts or ps payload
_rtp_decoder[pt] = std::make_shared<CommonRtpDecoder>(CodecInvalid, 32 * 1024);
// 设置dump目录 [AUTO-TRANSLATED:23c88ace]
// Set dump directory
GET_CONFIG(string, dump_dir, RtpProxy::kDumpDir);
if (!dump_dir.empty()) {
auto save_path = File::absolutePath(_media_info.stream + ".mpeg", dump_dir);
_save_file_ps.reset(File::create_file(save_path.data(), "wb"), [](FILE *fp) {
if (fp) {
fclose(fp);
}
});
}
break;
}
}
if (pt == opus_pt) {
// opus负载 [AUTO-TRANSLATED:defa6a8d]
// opus payload
ref = std::make_shared<RtpReceiverImp>(48000, [this](RtpPacket::Ptr rtp) { onRtpSorted(std::move(rtp)); });
auto track = Factory::getTrackByCodecId(CodecOpus);
CHECK(track);
track->setIndex(pt);
_interface->addTrack(track);
_rtp_decoder[pt] = Factory::getRtpDecoderByCodecId(track->getCodecId());
break;
}
}
if (pt == h265_pt) {
// H265负载 [AUTO-TRANSLATED:61fbcf7f]
// H265 payload
ref = std::make_shared<RtpReceiverImp>(90000, [this](RtpPacket::Ptr rtp) { onRtpSorted(std::move(rtp)); });
auto track = Factory::getTrackByCodecId(CodecH265);
CHECK(track);
track->setIndex(pt);
_interface->addTrack(track);
_rtp_decoder[pt] = Factory::getRtpDecoderByCodecId(track->getCodecId());
break;
}
if (pt == h264_pt) {
// H264负载 [AUTO-TRANSLATED:6f3fbb0d]
// H264 payload
ref = std::make_shared<RtpReceiverImp>(90000, [this](RtpPacket::Ptr rtp) { onRtpSorted(std::move(rtp)); });
auto track = Factory::getTrackByCodecId(CodecH264);
CHECK(track);
track->setIndex(pt);
_interface->addTrack(track);
_rtp_decoder[pt] = Factory::getRtpDecoderByCodecId(track->getCodecId());
break;
}
if (pt != Rtsp::PT_MP2T && pt != ps_pt) {
WarnL << "Unknown rtp payload type(" << (int)pt << "), decode it as mpeg-ps or mpeg-ts";
}
ref = std::make_shared<RtpReceiverImp>(90000, [this](RtpPacket::Ptr rtp) { onRtpSorted(std::move(rtp)); });
// ts或ps负载 [AUTO-TRANSLATED:3ca31480]
// ts or ps payload
_rtp_decoder[pt] = std::make_shared<CommonRtpDecoder>(CodecInvalid, 32 * 1024);
// 设置dump目录 [AUTO-TRANSLATED:23c88ace]
// Set dump directory
GET_CONFIG(string, dump_dir, RtpProxy::kDumpDir);
if (!dump_dir.empty()) {
auto save_path = File::absolutePath(_media_info.stream + ".mpeg", dump_dir);
_save_file_ps.reset(File::create_file(save_path.data(), "wb"), [](FILE *fp) {
if (fp) {
fclose(fp);
}
});
}
} while (false);
// 设置frame回调 [AUTO-TRANSLATED:dec7590f]
// Set frame callback
_rtp_decoder[pt]->addDelegate([this, pt](const Frame::Ptr &frame) {
@ -169,11 +166,16 @@ bool GB28181Process::inputRtp(bool, const char *data, size_t data_len) {
}
void GB28181Process::onRtpDecode(const Frame::Ptr &frame) {
if (frame->getCodecId() != CodecInvalid) {
// 这里不是ps或ts [AUTO-TRANSLATED:6f79ac69]
// This is not ps or ts
_interface->inputFrame(frame);
return;
switch (frame->getCodecId()) {
case CodecInvalid:
case CodecTS:
case CodecPS: break;
default:
// 这里不是ps或ts [AUTO-TRANSLATED:6f79ac69]
// This is not ps or ts
_interface->inputFrame(frame);
return;
}
// 这是TS或PS [AUTO-TRANSLATED:55782860]

View File

@ -346,5 +346,9 @@ float RtpProcess::getLossRate(MediaSource &sender, TrackType type) {
return getLostInterval() * 100 / expected;
}
// Accessor for the socket this rtp stream is received on (non-owning view of _sock).
const toolkit::Socket::Ptr& RtpProcess::getSock() const {
    return _sock;
}
}//namespace mediakit
#endif//defined(ENABLE_RTPPROXY)

View File

@ -102,6 +102,8 @@ public:
uint16_t get_peer_port() override;
std::string getIdentifier() const override;
const toolkit::Socket::Ptr& getSock() const;
protected:
bool inputFrame(const Frame::Ptr &frame) override;
bool addTrack(const Track::Ptr & track) override;

View File

@ -190,6 +190,25 @@ void RtpSender::startSend(const MediaSourceEvent::SendRtpArgs &args, const funct
}
}, delay_ms / 1000.0, "::", args.src_port);
InfoL << "start tcp active send rtp to: " << args.dst_url << ":" << args.dst_port;
} else if (args.con_type == MediaSourceEvent::SendRtpArgs::kVoiceTalk) {
auto src = MediaSource::find(args.recv_stream_vhost, args.recv_stream_app, args.recv_stream_id);
if (!src) {
cb(0, SockException(Err_other, "can not find the target stream"));
return;
}
auto processor = src->getRtpProcess();
if (!processor) {
cb(0, SockException(Err_other, "get rtp processor from target stream failed"));
return;
}
auto sock = processor->getSock();
if (!sock) {
cb(0, SockException(Err_other, "get sock from rtp processor failed"));
return;
}
_socket_rtp = std::move(sock);
onConnect();
cb(_socket_rtp->get_local_port(), SockException());
} else {
CHECK(0, "invalid con type");
}
@ -249,48 +268,51 @@ void RtpSender::onConnect() {
// 加大发送缓存,防止udp丢包之类的问题 [AUTO-TRANSLATED:6e1cb40a]
// Increase the send buffer to prevent problems such as UDP packet loss
SockUtil::setSendBuf(_socket_rtp->rawFD(), 4 * 1024 * 1024);
if (_args.con_type == MediaSourceEvent::SendRtpArgs::kTcpActive || _args.con_type == MediaSourceEvent::SendRtpArgs::kTcpPassive) {
// 关闭tcp no_delay并开启MSG_MORE, 提高发送性能 [AUTO-TRANSLATED:c0f4e378]
// Close TCP no_delay and enable MSG_MORE to improve sending performance
SockUtil::setNoDelay(_socket_rtp->rawFD(), false);
_socket_rtp->setSendFlags(SOCKET_DEFAULE_FLAGS | FLAG_MORE);
} else if (_args.udp_rtcp_timeout) {
createRtcpSocket();
}
// 连接建立成功事件 [AUTO-TRANSLATED:ac279c86]
// Connection established successfully event
weak_ptr<RtpSender> weak_self = shared_from_this();
if (!_args.recv_stream_id.empty()) {
mINI ini;
ini[RtpSession::kStreamID] = _args.recv_stream_id;
// 强制同步接收流和发送流的app和vhost [AUTO-TRANSLATED:134c9663]
// Force synchronization of the app and vhost of the receive stream and send stream
ini[RtpSession::kApp] = _args.recv_stream_app;
ini[RtpSession::kVhost] = _args.recv_stream_vhost;
_rtp_session = std::make_shared<RtpSession>(_socket_rtp);
_rtp_session->setParams(ini);
_socket_rtp->setOnRead([weak_self](const Buffer::Ptr &buf, struct sockaddr *addr, int addr_len) {
if (_args.con_type != MediaSourceEvent::SendRtpArgs::kVoiceTalk) {
if (_args.con_type == MediaSourceEvent::SendRtpArgs::kTcpActive || _args.con_type == MediaSourceEvent::SendRtpArgs::kTcpPassive) {
// 关闭tcp no_delay并开启MSG_MORE, 提高发送性能 [AUTO-TRANSLATED:c0f4e378]
// Close TCP no_delay and enable MSG_MORE to improve sending performance
SockUtil::setNoDelay(_socket_rtp->rawFD(), false);
_socket_rtp->setSendFlags(SOCKET_DEFAULE_FLAGS | FLAG_MORE);
} else if (_args.udp_rtcp_timeout) {
createRtcpSocket();
}
// 连接建立成功事件 [AUTO-TRANSLATED:ac279c86]
// Connection established successfully event
weak_ptr<RtpSender> weak_self = shared_from_this();
if (!_args.recv_stream_id.empty()) {
mINI ini;
ini[RtpSession::kStreamID] = _args.recv_stream_id;
// 强制同步接收流和发送流的app和vhost [AUTO-TRANSLATED:134c9663]
// Force synchronization of the app and vhost of the receive stream and send stream
ini[RtpSession::kApp] = _args.recv_stream_app;
ini[RtpSession::kVhost] = _args.recv_stream_vhost;
_rtp_session = std::make_shared<RtpSession>(_socket_rtp);
_rtp_session->setParams(ini);
_socket_rtp->setOnRead([weak_self](const Buffer::Ptr &buf, struct sockaddr *addr, int addr_len) {
auto strong_self = weak_self.lock();
if (!strong_self) {
return;
}
try {
strong_self->_rtp_session->onRecv(buf);
} catch (std::exception &ex) {
SockException err(toolkit::Err_shutdown, ex.what());
strong_self->_rtp_session->shutdown(err);
}
});
} else {
_socket_rtp->setOnRead(nullptr);
}
_socket_rtp->setOnErr([weak_self](const SockException &err) {
auto strong_self = weak_self.lock();
if (!strong_self) {
return;
}
try {
strong_self->_rtp_session->onRecv(buf);
} catch (std::exception &ex) {
SockException err(toolkit::Err_shutdown, ex.what());
strong_self->_rtp_session->shutdown(err);
if (strong_self) {
strong_self->onErr(err);
}
});
} else {
_socket_rtp->setOnRead(nullptr);
}
_socket_rtp->setOnErr([weak_self](const SockException &err) {
auto strong_self = weak_self.lock();
if (strong_self) {
strong_self->onErr(err);
}
});
InfoL << "startSend rtp success: " << _socket_rtp->get_peer_ip() << ":" << _socket_rtp->get_peer_port() << ", data_type: " << _args.data_type << ", con_type: " << _args.con_type;
}
@ -378,28 +400,51 @@ void RtpSender::onFlushRtpList(shared_ptr<List<Buffer::Ptr>> rtp_list) {
return;
}
size_t i = 0;
auto size = rtp_list->size();
rtp_list->for_each([&](Buffer::Ptr &packet) {
switch (_args.con_type) {
case MediaSourceEvent::SendRtpArgs::kUdpActive:
case MediaSourceEvent::SendRtpArgs::kUdpPassive: {
onSendRtpUdp(packet, i == 0);
// udp模式rtp over tcp前4个字节可以忽略 [AUTO-TRANSLATED:5d648f4b]
// UDP mode, the first 4 bytes of rtp over tcp can be ignored
_socket_rtp->send(std::make_shared<BufferRtp>(std::move(packet), RtpPacket::kRtpTcpHeaderSize), nullptr, 0, ++i == size);
break;
auto send_func = [this](const shared_ptr<List<Buffer::Ptr>> &rtp_list) {
size_t i = 0;
auto size = rtp_list->size();
rtp_list->for_each([&](Buffer::Ptr &packet) {
switch (_args.con_type) {
case MediaSourceEvent::SendRtpArgs::kUdpActive:
case MediaSourceEvent::SendRtpArgs::kUdpPassive: {
onSendRtpUdp(packet, i == 0);
// udp模式rtp over tcp前4个字节可以忽略 [AUTO-TRANSLATED:5d648f4b]
// UDP mode, the first 4 bytes of rtp over tcp can be ignored
_socket_rtp->send(std::make_shared<BufferRtp>(std::move(packet), RtpPacket::kRtpTcpHeaderSize), nullptr, 0, ++i == size);
break;
}
case MediaSourceEvent::SendRtpArgs::kTcpActive:
case MediaSourceEvent::SendRtpArgs::kTcpPassive: {
// tcp模式, rtp over tcp前2个字节可以忽略,只保留后续rtp长度的2个字节 [AUTO-TRANSLATED:a3bc338a]
// TCP mode, the first 2 bytes of rtp over tcp can be ignored, only the subsequent 2 bytes of rtp length are retained
_socket_rtp->send(std::make_shared<BufferRtp>(std::move(packet), 2), nullptr, 0, ++i == size);
break;
}
case MediaSourceEvent::SendRtpArgs::kVoiceTalk: {
auto type = _socket_rtp->alive() ? _socket_rtp->sockType() : SockNum::Sock_Invalid;
if (type == SockNum::Sock_UDP) {
_socket_rtp->send(std::make_shared<BufferRtp>(std::move(packet), RtpPacket::kRtpTcpHeaderSize), nullptr, 0, ++i == size);
} else if (type == SockNum::Sock_TCP) {
_socket_rtp->send(std::make_shared<BufferRtp>(std::move(packet), 2), nullptr, 0, ++i == size);
} else {
onErr(SockException(Err_other, "dst socket disconnected"));
}
break;
}
default: CHECK(0);
}
case MediaSourceEvent::SendRtpArgs::kTcpActive:
case MediaSourceEvent::SendRtpArgs::kTcpPassive: {
// tcp模式, rtp over tcp前2个字节可以忽略,只保留后续rtp长度的2个字节 [AUTO-TRANSLATED:a3bc338a]
// TCP mode, the first 2 bytes of rtp over tcp can be ignored, only the subsequent 2 bytes of rtp length are retained
_socket_rtp->send(std::make_shared<BufferRtp>(std::move(packet), 2), nullptr, 0, ++i == size);
break;
});
};
if (_args.con_type != MediaSourceEvent::SendRtpArgs::kVoiceTalk) {
weak_ptr<RtpSender> weak_self = shared_from_this();
_socket_rtp->getPoller()->async([weak_self, rtp_list, send_func]() {
if (auto strong_self = weak_self.lock()) {
send_func(rtp_list);
}
default: CHECK(0);
}
});
});
} else {
send_func(rtp_list);
}
}
void RtpSender::onErr(const SockException &ex) {

View File

@ -295,5 +295,15 @@ void RtpServer::updateSSRC(uint32_t ssrc) {
}
}
// Return the ssrc bound to this rtp server.
// Prefers the explicitly stored ssrc; otherwise falls back to the tcp server's
// RtpSession::kSSRC option (presumably convertible to uint32_t — confirm mINI semantics);
// returns 0 when no ssrc is known.
uint32_t RtpServer::getSSRC() const {
    if (_ssrc) {
        return *_ssrc;
    }
    if (_tcp_server) {
        return (*_tcp_server)[RtpSession::kSSRC];
    }
    return 0;
}
}//namespace mediakit
#endif//defined(ENABLE_RTPPROXY)

View File

@ -97,6 +97,9 @@ public:
*/
void updateSSRC(uint32_t ssrc);
uint32_t getSSRC() const;
int getOnlyTrack() const { return _only_track; }
TcpMode getTcpMode() const { return _tcp_mode; }
private:
// tcp主动模式连接服务器成功回调 [AUTO-TRANSLATED:0775844e]
// tcp active mode connection server success callback

View File

@ -127,8 +127,7 @@ void RtpSession::onRtpPacket(const char *data, size_t len) {
weak_ptr<RtpSession> weak_self = static_pointer_cast<RtpSession>(shared_from_this());
_process->setOnDetach([weak_self](const SockException &ex) {
if (auto strong_self = weak_self.lock()) {
strong_self->_process = nullptr;
strong_self->shutdown(ex);
strong_self->safeShutdown(ex);
}
});
}

View File

@ -12,9 +12,10 @@
#include <cinttypes>
#include <random>
#include "Rtsp.h"
#include "Network/Socket.h"
#include "Common/Parser.h"
#include "Common/config.h"
#include "Network/Socket.h"
#include "Extension/Track.h"
#include "Extension/Factory.h"
using namespace std;
@ -24,50 +25,84 @@ namespace mediakit {
int RtpPayload::getClockRate(int pt) {
switch (pt) {
#define SWITCH_CASE(name, type, value, clock_rate, channel, codec_id) \
#define XX(name, type, value, clock_rate, channel, codec_id) \
case value: return clock_rate;
RTP_PT_MAP(SWITCH_CASE)
#undef SWITCH_CASE
RTP_PT_MAP(XX)
#undef XX
default: return 90000;
}
}
// Look up the default RTP clock rate for a codec from the static payload-type
// table (RTP_PT_MAP); falls back to 90000 for codecs not in the table.
int RtpPayload::getClockRateByCodec(CodecId codec) {
#define XX(name, type, value, clock_rate, channel, codec_id) { codec_id, clock_rate },
    // Built once on first use by expanding RTP_PT_MAP into codec -> clock-rate pairs.
    static map<CodecId, int> s_map = { RTP_PT_MAP(XX) };
#undef XX
    auto it = s_map.find(codec);
    if (it == s_map.end()) {
        WarnL << "Unsupported codec: " << getCodecName(codec);
        return 90000;
    }
    return it->second;
}
// Map a track to its static RTP payload type (RFC 3551 table via RTP_PT_MAP).
// Returns -1 when the codec has no static PT, or when an audio track's sample
// rate / channel count does not match the static entry (callers then assign a
// dynamic PT >= 96).
int RtpPayload::getPayloadType(const Track &track) {
#define XX(name, type, value, clock_rate, channel, codec_id) { codec_id, info { clock_rate, channel, value } },
    struct info {
        int clock_rate;
        int channels;
        int pt;
    };
    // Built once on first use: codec -> (clock rate, channels, static pt).
    static map<CodecId, info> s_map = { RTP_PT_MAP(XX) };
#undef XX
    auto it = s_map.find(track.getCodecId());
    if (it == s_map.end()) {
        return -1;
    }
    if (track.getTrackType() == TrackAudio) {
        // A static audio PT is only usable if the track matches its fixed rate/channels.
        if (static_cast<const AudioTrack &>(track).getAudioSampleRate() != it->second.clock_rate
            || static_cast<const AudioTrack &>(track).getAudioChannel() != it->second.channels) {
            return -1;
        }
    }
    return it->second.pt;
}
TrackType RtpPayload::getTrackType(int pt) {
switch (pt) {
#define SWITCH_CASE(name, type, value, clock_rate, channel, codec_id) \
#define XX(name, type, value, clock_rate, channel, codec_id) \
case value: return type;
RTP_PT_MAP(SWITCH_CASE)
#undef SWITCH_CASE
RTP_PT_MAP(XX)
#undef XX
default: return TrackInvalid;
}
}
int RtpPayload::getAudioChannel(int pt) {
switch (pt) {
#define SWITCH_CASE(name, type, value, clock_rate, channel, codec_id) \
#define XX(name, type, value, clock_rate, channel, codec_id) \
case value: return channel;
RTP_PT_MAP(SWITCH_CASE)
#undef SWITCH_CASE
RTP_PT_MAP(XX)
#undef XX
default: return 1;
}
}
const char *RtpPayload::getName(int pt) {
switch (pt) {
#define SWITCH_CASE(name, type, value, clock_rate, channel, codec_id) \
#define XX(name, type, value, clock_rate, channel, codec_id) \
case value: return #name;
RTP_PT_MAP(SWITCH_CASE)
#undef SWITCH_CASE
RTP_PT_MAP(XX)
#undef XX
default: return "unknown payload type";
}
}
CodecId RtpPayload::getCodecId(int pt) {
switch (pt) {
#define SWITCH_CASE(name, type, value, clock_rate, channel, codec_id) \
#define XX(name, type, value, clock_rate, channel, codec_id) \
case value: return codec_id;
RTP_PT_MAP(SWITCH_CASE)
#undef SWITCH_CASE
RTP_PT_MAP(XX)
#undef XX
default: return CodecInvalid;
}
}
@ -91,13 +126,7 @@ static void getAttrSdp(const multimap<string, string> &attr, _StrPrinter &printe
}
string SdpTrack::getName() const {
switch (_pt) {
#define SWITCH_CASE(name, type, value, clock_rate, channel, codec_id) \
case value: return #name;
RTP_PT_MAP(SWITCH_CASE)
#undef SWITCH_CASE
default: return _codec;
}
return RtpPayload::getName(_pt);
}
string SdpTrack::getControlUrl(const string &base_url) const {
@ -715,6 +744,23 @@ TitleSdp::TitleSdp(float dur_sec, const std::map<std::string, std::string> &head
_printer << "a=control:*\r\n";
}
/**
 * Build a generic SDP media description for a track that needs no codec-specific
 * fmtp attributes: an "m=" line, an optional "b=AS:" bandwidth line, and an
 * "a=rtpmap:" line for dynamic payload types.
 * @param payload_type rtp payload type; static PTs (<96) need no rtpmap line (RFC 3551)
 * @param track the media track being described
 */
DefaultSdp::DefaultSdp(int payload_type, const Track &track)
    // Fix: video always uses the standard 90000 Hz RTP clock (was mistyped as 9000,
    // which would emit a wrong "a=rtpmap:.../9000" line); audio uses its sample rate.
    : Sdp(track.getTrackType() == TrackVideo ? 90000 : static_cast<const AudioTrack &>(track).getAudioSampleRate(), payload_type) {
    _printer << "m=" << track.getTrackTypeStr() << " 0 RTP/AVP " << payload_type << "\r\n";
    // Advertise bandwidth in kbps when the track knows its bitrate.
    auto bitrate = track.getBitRate() >> 10;
    if (bitrate) {
        _printer << "b=AS:" << bitrate << "\r\n";
    }
    if (payload_type < 96) {
        // Static payload types are implicitly defined; no rtpmap line required.
        return;
    }
    _printer << "a=rtpmap:" << payload_type << " " << track.getCodecName() << "/" << getSampleRate();
    if (track.getTrackType() == TrackAudio) {
        _printer << "/" << static_cast<const AudioTrack &>(track).getAudioChannel();
    }
    _printer << "\r\n";
}
} // namespace mediakit
namespace toolkit {

View File

@ -11,16 +11,18 @@
#ifndef RTSP_RTSP_H_
#define RTSP_RTSP_H_
#include "Common/macros.h"
#include "Extension/Frame.h"
#include "Network/Socket.h"
#include <memory>
#include <string.h>
#include <string>
#include <memory>
#include <unordered_map>
#include "Network/Socket.h"
#include "Common/macros.h"
#include "Extension/Frame.h"
namespace mediakit {
class Track;
namespace Rtsp {
typedef enum {
RTP_Invalid = -1,
@ -32,27 +34,27 @@ typedef enum {
#define RTP_PT_MAP(XX) \
XX(PCMU, TrackAudio, 0, 8000, 1, CodecG711U) \
XX(GSM, TrackAudio, 3, 8000, 1, CodecInvalid) \
XX(G723, TrackAudio, 4, 8000, 1, CodecInvalid) \
XX(G723, TrackAudio, 4, 8000, 1, CodecG723) \
XX(DVI4_8000, TrackAudio, 5, 8000, 1, CodecInvalid) \
XX(DVI4_16000, TrackAudio, 6, 16000, 1, CodecInvalid) \
XX(LPC, TrackAudio, 7, 8000, 1, CodecInvalid) \
XX(PCMA, TrackAudio, 8, 8000, 1, CodecG711A) \
XX(G722, TrackAudio, 9, 8000, 1, CodecInvalid) \
XX(G722, TrackAudio, 9, 16000, 1, CodecG722) \
XX(L16_Stereo, TrackAudio, 10, 44100, 2, CodecInvalid) \
XX(L16_Mono, TrackAudio, 11, 44100, 1, CodecInvalid) \
XX(QCELP, TrackAudio, 12, 8000, 1, CodecInvalid) \
XX(CN, TrackAudio, 13, 8000, 1, CodecInvalid) \
XX(MPA, TrackAudio, 14, 90000, 1, CodecInvalid) \
XX(G728, TrackAudio, 15, 8000, 1, CodecInvalid) \
XX(MP3, TrackAudio, 14, 44100, 2, CodecMP3) \
XX(G728, TrackAudio, 15, 8000, 1, CodecG728) \
XX(DVI4_11025, TrackAudio, 16, 11025, 1, CodecInvalid) \
XX(DVI4_22050, TrackAudio, 17, 22050, 1, CodecInvalid) \
XX(G729, TrackAudio, 18, 8000, 1, CodecInvalid) \
XX(G729, TrackAudio, 18, 8000, 1, CodecG729) \
XX(CelB, TrackVideo, 25, 90000, 1, CodecInvalid) \
XX(JPEG, TrackVideo, 26, 90000, 1, CodecJPEG) \
XX(nv, TrackVideo, 28, 90000, 1, CodecInvalid) \
XX(H261, TrackVideo, 31, 90000, 1, CodecInvalid) \
XX(MPV, TrackVideo, 32, 90000, 1, CodecInvalid) \
XX(MP2T, TrackVideo, 33, 90000, 1, CodecInvalid) \
XX(MP2T, TrackVideo, 33, 90000, 1, CodecTS) \
XX(H263, TrackVideo, 34, 90000, 1, CodecInvalid)
typedef enum {
@ -213,10 +215,12 @@ private:
class RtpPayload {
public:
static int getClockRate(int pt);
static int getClockRateByCodec(CodecId codec);
static TrackType getTrackType(int pt);
static int getAudioChannel(int pt);
static const char *getName(int pt);
static CodecId getCodecId(int pt);
static int getPayloadType(const Track &track);
private:
RtpPayload() = delete;
@ -243,8 +247,8 @@ public:
public:
int _pt = 0xff;
int _channel;
int _samplerate;
int _channel = 0;
int _samplerate = 0;
TrackType _type;
std::string _codec;
std::string _fmtp;
@ -340,6 +344,15 @@ private:
uint32_t _sample_rate;
};
class DefaultSdp : public Sdp {
public:
DefaultSdp(int payload_type, const Track &track);
std::string getSdp() const override { return _printer; }
private:
toolkit::_StrPrinter _printer;
};
/**
* sdp中除音视频外的其他描述部分
* Other description part in sdp except audio and video

View File

@ -54,7 +54,9 @@ void RtspMediaSource::onWrite(RtpPacket::Ptr rtp, bool keyPos) {
assert(rtp->type >= 0 && rtp->type < TrackMax);
auto &track = _tracks[rtp->type];
auto stamp = rtp->getStampMS();
if (track) {
bool is_video = rtp->type == TrackVideo;
// 音频总是更新,视频在关键包时更新
if (track && ((keyPos && _have_video && is_video) || (!is_video))) {
track->_seq = rtp->getSeq();
track->_time_stamp = rtp->getStamp() * uint64_t(1000) / rtp->sample_rate;
track->_ssrc = rtp->getSSRC();
@ -77,7 +79,7 @@ void RtspMediaSource::onWrite(RtpPacket::Ptr rtp, bool keyPos) {
regist();
}
}
bool is_video = rtp->type == TrackVideo;
PacketCache<RtpPacket>::inputPacket(stamp, is_video, std::move(rtp), keyPos);
}

View File

@ -23,11 +23,10 @@ void RtspMuxer::onRtp(RtpPacket::Ptr in, bool is_key) {
if (ref.rtp_stamp != in->getHeader()->stamp) {
// rtp时间戳变化才计算ntp节省cpu资源 [AUTO-TRANSLATED:729d54f2]
// Only calculate NTP when the RTP timestamp changes, saving CPU resources
int64_t stamp_ms = in->getStamp() * uint64_t(1000) / in->sample_rate;
int64_t stamp_ms_inc;
// 求rtp时间戳增量 [AUTO-TRANSLATED:f6ba022f]
// Get the RTP timestamp increment
ref.stamp.revise(stamp_ms, stamp_ms, stamp_ms_inc, stamp_ms_inc);
ref.stamp.revise(in->ntp_stamp, in->ntp_stamp, stamp_ms_inc, stamp_ms_inc);
ref.rtp_stamp = in->getHeader()->stamp;
ref.ntp_stamp = stamp_ms_inc + _ntp_stamp_start;
}
@ -67,14 +66,19 @@ bool RtspMuxer::addTrack(const Track::Ptr &track) {
WarnL << "Already add a track kind of: " << track->getTrackTypeStr() << ", ignore track: " << track->getCodecName();
return false;
}
if (!track->ready()) {
WarnL << track->getCodecName() << " unready!";
return false;
}
auto &ref = _tracks[track->getIndex()];
auto &encoder = ref.encoder;
CHECK(!encoder);
auto pt = RtpPayload::getPayloadType(*track);
// payload type 96以后则为动态pt [AUTO-TRANSLATED:812ac0a2]
// Payload type 96 and above is dynamic PT
Sdp::Ptr sdp = track->getSdp(96 + _index);
Sdp::Ptr sdp = track->getSdp(pt == -1 ? 96 + _index : pt);
if (!sdp) {
WarnL << "Unsupported codec: " << track->getCodecName();
return false;

View File

@ -833,7 +833,7 @@ void RtspSession::handleReq_Play(const Parser &parser) {
rtp_info << "url=" << track->getControlUrl(_content_base) << ";"
<< "seq=" << track->_seq << ";"
<< "rtptime=" << (int) (track->_time_stamp * (track->_samplerate / 1000)) << ",";
<< "rtptime=" << (int64_t)(track->_time_stamp) * (int64_t)(track->_samplerate/ 1000) << ",";
}
rtp_info.pop_back();
@ -1136,7 +1136,7 @@ int RtspSession::getTrackIndexByTrackType(TrackType type) {
int RtspSession::getTrackIndexByControlUrl(const string &control_url) {
for (size_t i = 0; i < _sdp_track.size(); ++i) {
if (control_url == _sdp_track[i]->getControlUrl(_content_base)) {
if (control_url.find(_sdp_track[i]->getControlUrl(_content_base)) == 0) {
return i;
}
}

View File

@ -292,8 +292,8 @@ bool HandshakePacket::loadFromData(uint8_t *buf, size_t len) {
syn_cookie = loadUint32(ptr);
ptr += 4;
memcpy(peer_ip_addr, ptr, sizeof(peer_ip_addr) * sizeof(peer_ip_addr[0]));
ptr += sizeof(peer_ip_addr) * sizeof(peer_ip_addr[0]);
memcpy(peer_ip_addr, ptr, sizeof(peer_ip_addr));
ptr += sizeof(peer_ip_addr);
if (encryption_field != NO_ENCRYPTION) {
ErrorL << "not support encryption " << encryption_field;
@ -400,8 +400,8 @@ bool HandshakePacket::storeToData() {
storeUint32(ptr, syn_cookie);
ptr += 4;
memcpy(ptr, peer_ip_addr, sizeof(peer_ip_addr) * sizeof(peer_ip_addr[0]));
ptr += sizeof(peer_ip_addr) * sizeof(peer_ip_addr[0]);
memcpy(ptr, peer_ip_addr, sizeof(peer_ip_addr));
ptr += sizeof(peer_ip_addr);
if (encryption_field != NO_ENCRYPTION) {
ErrorL << "not support encryption " << encryption_field;
@ -433,7 +433,7 @@ uint32_t HandshakePacket::getSynCookie(uint8_t *buf, size_t len) {
}
void HandshakePacket::assignPeerIP(struct sockaddr_storage *addr) {
memset(peer_ip_addr, 0, sizeof(peer_ip_addr) * sizeof(peer_ip_addr[0]));
memset(peer_ip_addr, 0, sizeof(peer_ip_addr));
if (addr->ss_family == AF_INET) {
struct sockaddr_in *ipv4 = (struct sockaddr_in *)addr;
// 抓包 奇怪好像是小头端??? [AUTO-TRANSLATED:40eb164c]
@ -446,7 +446,7 @@ void HandshakePacket::assignPeerIP(struct sockaddr_storage *addr) {
storeUint32LE(peer_ip_addr, addr4.s_addr);
} else {
const sockaddr_in6 *ipv6 = (struct sockaddr_in6 *)addr;
memcpy(peer_ip_addr, ipv6->sin6_addr.s6_addr, sizeof(peer_ip_addr) * sizeof(peer_ip_addr[0]));
memcpy(peer_ip_addr, ipv6->sin6_addr.s6_addr, sizeof(peer_ip_addr));
}
}
}

View File

@ -206,7 +206,7 @@ void SrtTransport::handleHandshakeInduction(HandshakePacket &pkt, struct sockadd
res->srt_socket_id = _peer_socket_id;
res->syn_cookie = HandshakePacket::generateSynCookie(addr, _start_timestamp);
_sync_cookie = res->syn_cookie;
memcpy(res->peer_ip_addr, pkt.peer_ip_addr, sizeof(pkt.peer_ip_addr) * sizeof(pkt.peer_ip_addr[0]));
memcpy(res->peer_ip_addr, pkt.peer_ip_addr, sizeof(pkt.peer_ip_addr));
_handleshake_res = res;
res->storeToData();

View File

@ -23,9 +23,18 @@
aux_source_directory(. TEST_SRC_LIST)
find_package(PCAP QUIET)
foreach(TEST_SRC ${TEST_SRC_LIST})
get_filename_component(TEST_EXE_NAME ${TEST_SRC} NAME_WE)
if(NOT PCAP_FOUND)
# message(WARNING "PCAP 未找到")
if("${TEST_EXE_NAME}" MATCHES "test_rtp_pcap")
continue()
endif()
endif()
if(NOT TARGET ZLMediaKit::WebRTC)
# WebRTC
if("${TEST_EXE_NAME}" MATCHES "test_rtcp_nack")
@ -53,8 +62,14 @@ foreach(TEST_SRC ${TEST_SRC_LIST})
endif()
if(CMAKE_SYSTEM_NAME MATCHES "Linux")
target_link_libraries(${TEST_EXE_NAME} -Wl,--start-group ${MK_LINK_LIBRARIES} -Wl,--end-group)
else()
target_link_libraries(${TEST_EXE_NAME} ${MK_LINK_LIBRARIES})
endif()
endforeach()
if(TARGET test_rtp_pcap)
target_include_directories(test_rtp_pcap SYSTEM PRIVATE ${PCAP_INCLUDE_DIRS})
target_link_libraries(test_rtp_pcap ${PCAP_LIBRARIES})
endif()

View File

@ -7,7 +7,7 @@
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#ifdef ENABLE_MP4
#include <signal.h>
#include <atomic>
#include <iostream>
@ -261,3 +261,4 @@ int main(int argc, char *argv[]) {
return 0;
}
#endif

View File

@ -84,7 +84,7 @@ static bool loadFile(const char *path){
}
timeStamp_last = timeStamp;
}
WarnL << total_size / 1024 << "KB";
WarnL << (total_size >> 10) << "KB";
fclose(fp);
return true;
}

119
tests/test_ps.cpp Normal file
View File

@ -0,0 +1,119 @@
#include "Common/config.h"
#include "Http/HttpSession.h"
#include "Network/TcpServer.h"
#include "Rtmp/RtmpSession.h"
#include "Rtp/Decoder.h"
#include "Rtp/RtpProcess.h"
#include "Rtsp/RtspSession.h"
#include "Util/File.h"
#include "Util/MD5.h"
#include "Util/SSLBox.h"
#include "Util/logger.h"
#include "Util/util.h"
#include <iostream>
#include <map>
using namespace std;
using namespace toolkit;
using namespace mediakit;
static semaphore sem;
class PsProcess : public MediaSinkInterface, public std::enable_shared_from_this<PsProcess> {
public:
using Ptr = std::shared_ptr<PsProcess>;
PsProcess() {
MediaTuple media_info;
media_info.vhost = DEFAULT_VHOST;
media_info.app = "rtp";
media_info.stream = "000001";
_muxer = std::make_shared<MultiMediaSourceMuxer>(media_info, 0.0f, ProtocolOption());
}
~PsProcess() {
}
bool inputFrame(const Frame::Ptr &frame) override {
if (_muxer) {
_muxer->inputFrame(frame);
int64_t diff = frame->dts() - timeStamp_last;
if (diff > 0 && diff < 500) {
usleep(diff * 1000);
} else {
usleep(1 * 1000);
}
timeStamp_last = frame->dts();
}
return true;
}
bool addTrack(const Track::Ptr &track) override {
if (_muxer) {
return _muxer->addTrack(track);
}
return true;
}
void addTrackCompleted() override {
if (_muxer) {
_muxer->addTrackCompleted();
}
}
void resetTracks() override {
}
virtual void flush() override {}
private:
MultiMediaSourceMuxer::Ptr _muxer;
uint64_t timeStamp = 0;
uint64_t timeStamp_last = 0;
};
// Read the whole PS file at `path` into memory and push it through a PS
// decoder feeding a PsProcess sink.  The `poller` parameter is unused but
// kept for signature parity with the caller in main().
// Returns false when the file cannot be opened or read.
static bool loadFile(const char *path, const EventPoller::Ptr &poller) {
    FILE *fp = fopen(path, "rb");
    if (!fp) {
        WarnL << "open file failed:" << path;
        return false;
    }
    fseek(fp, 0, SEEK_END);
    long lSize = ftell(fp);
    rewind(fp);
    if (lSize <= 0) {
        WarnL << "empty or unseekable file:" << path;
        fclose(fp);
        return false;
    }
    uint8_t *text = (uint8_t *)malloc(lSize);
    if (!text) {
        WarnL << "alloc failed, size:" << lSize;
        fclose(fp);
        return false;
    }
    size_t readed = fread(text, sizeof(char), (size_t)lSize, fp);
    fclose(fp);
    if (readed != (size_t)lSize) {
        WarnL << "read file failed:" << path;
        free(text);
        return false;
    }
    PsProcess::Ptr ps_process = std::make_shared<PsProcess>();
    DecoderImp::Ptr ps_decoder = DecoderImp::createDecoder(DecoderImp::decoder_ps, ps_process.get());
    if (ps_decoder) {
        ps_decoder->input(text, lSize);
    }
    WarnL << (lSize >> 10) << "KB";
    free(text); // was leaked before: the malloc'd buffer was never released
    return true;
}
int main(int argc, char *argv[]) {
// 设置日志
Logger::Instance().add(std::make_shared<ConsoleChannel>("ConsoleChannel"));
// 启动异步日志线程
Logger::Instance().setWriter(std::make_shared<AsyncLogWriter>());
loadIniConfig((exeDir() + "config.ini").data());
TcpServer::Ptr rtspSrv(new TcpServer());
TcpServer::Ptr rtmpSrv(new TcpServer());
TcpServer::Ptr httpSrv(new TcpServer());
rtspSrv->start<RtspSession>(554); // 默认554
rtmpSrv->start<RtmpSession>(1935); // 默认1935
httpSrv->start<HttpSession>(81); // 默认80
if (argc == 2) {
auto poller = EventPollerPool::Instance().getPoller();
poller->async_first([poller, argv]() {
loadFile(argv[1], poller);
sem.post();
});
sem.wait();
sleep(1);
} else
ErrorL << "parameter error.";
return 0;
}

View File

@ -7,7 +7,7 @@
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#ifdef ENABLE_MP4
#include <signal.h>
#include <iostream>
#include "Util/logger.h"
@ -141,3 +141,4 @@ int main(int argc, char *argv[]) {
// return domain("/Users/xiongziliang/Downloads/mp4/Quantum.mp4", "rtsp://127.0.0.1/live/rtsp_push");
return domain(argv[1], argv[2]);
}
#endif

View File

@ -89,7 +89,7 @@ static bool loadFile(const char *path, const EventPoller::Ptr &poller) {
poller->doDelayTask(1, [do_read, total_size, process]() mutable {
auto ret = do_read();
if (!ret) {
WarnL << *total_size / 1024 << "KB";
WarnL << (*total_size >> 10) << "KB";
}
return ret;
});

246
tests/test_rtp_pcap.cpp Normal file
View File

@ -0,0 +1,246 @@
#include "Common/config.h"
#include "Http/HttpSession.h"
#include "Network/TcpServer.h"
#include "Rtmp/RtmpSession.h"
#include "Rtp/RtpProcess.h"
#include "Rtsp/RtspSession.h"
#include "Util/logger.h"
#include "Util/util.h"
#include <iostream>
#include <map>
#include <pcap.h>
using namespace std;
using namespace toolkit;
using namespace mediakit;
/* Ethernet frame header as laid out in this capture. */
struct sniff_ethernet {
#define ETHER_ADDR_LEN 6
    u_char ether_dhost[ETHER_ADDR_LEN]; /* destination host address */
    u_char ether_shost[ETHER_ADDR_LEN]; /* source host address */
    u_short ether_unused; /* NOTE(review): a standard ethernet header is 14 bytes (6+6+2);
                             this extra 16-bit field implies a non-standard or cooked-capture
                             link layer — confirm against the pcap's datalink type. */
    u_short ether_type; /* IPv4:0x0800; IPv6:0x86DD; ARP:0x0806; RARP:0x8035 */
};
#define ETHERTYPE_IPV4 (0x0800)
#define ETHERTYPE_IPV6 (0x86DD)
#define ETHERTYPE_ARP (0x0806)
#define ETHERTYPE_RARP (0x8035)
/* IPv4 packet header.
 * Fixed: the BIG_ENDIAN branch was nested inside the LITTLE_ENDIAN one, so
 * on a big-endian host the struct declared no ip_hl/ip_v fields at all. */
struct sniff_ip {
#if BYTE_ORDER == LITTLE_ENDIAN
    u_int ip_hl : 4, /* header length, in 32-bit words */
        ip_v : 4;    /* version */
#else                /* BIG_ENDIAN: the two nibbles swap within the first byte */
    u_int ip_v : 4,  /* version */
        ip_hl : 4;   /* header length, in 32-bit words */
#endif
    u_char ip_tos;  /* type of service */
    u_short ip_len; /* total length */
    u_short ip_id;  /* identification */
    u_char ip_flag; /* NOTE(review): standard IPv4 packs flags+offset into one
                       16-bit field; this u_char split only suits the fields
                       the tool actually reads (none of these two). */
    u_char ip_off;  /* fragment offset */
#define IP_RF 0x8000      /* reserved fragment flag */
#define IP_DF 0x4000      /* don't fragment flag */
#define IP_MF 0x2000      /* more fragments flag */
#define IP_OFFMASK 0x1fff /* mask for fragmenting bits */
    u_char ip_ttl;                 /* time to live */
    u_char ip_p;                   /* protocol: 1 ICMP; 2 IGMP; 4 IP; 6 TCP; 17 UDP; 89 OSPF */
    u_short ip_sum;                /* checksum */
    struct in_addr ip_src, ip_dst; /* source and destination address */
};
#define IPTYPE_ICMP (1)
#define IPTYPE_IGMP (2)
#define IPTYPE_IP (4)
#define IPTYPE_TCP (6)
#define IPTYPE_UDP (17)
#define IPTYPE_OSPF (89)
typedef u_int tcp_seq;

/* TCP packet header.
 * Fixed: TH_FLAGS had lost its '|' operators ("TH_FINTH_SYN..."), which made
 * any expansion of the macro a compile error. */
struct sniff_tcp {
    u_short th_sport; /* source port (network byte order) */
    u_short th_dport; /* destination port (network byte order) */
    tcp_seq th_seq;   /* sequence number */
    tcp_seq th_ack;   /* acknowledgement number */
#if BYTE_ORDER == LITTLE_ENDIAN
    u_int th_x2 : 4, /* unused */
        th_off : 4;  /* data offset, in 32-bit words */
#endif
#if BYTE_ORDER == BIG_ENDIAN
    u_int th_off : 4, /* data offset, in 32-bit words */
        th_x2 : 4;    /* unused */
#endif
    u_char th_flags;
#define TH_FIN 0x01
#define TH_SYN 0x02
#define TH_RST 0x04
#define TH_PUSH 0x08
#define TH_ACK 0x10
#define TH_URG 0x20
#define TH_ECE 0x40
#define TH_CWR 0x80
#define TH_FLAGS (TH_FIN | TH_SYN | TH_RST | TH_ACK | TH_URG | TH_ECE | TH_CWR)
    u_short th_win; /* TCP sliding window */
    u_short th_sum; /* header checksum */
    u_short th_urp; /* urgent pointer */
};
/* UDP header (8 bytes). All fields are in network byte order. */
struct sniff_udp {
    uint16_t sport;      /* source port */
    uint16_t dport;      /* destination port */
    uint16_t udp_length; /* length of header + payload */
    uint16_t udp_sum;    /* checksum */
};
// Per-stream replay state, keyed in rtp_streams_map by a hash of the UDP
// 4-tuple (see loadFile).
struct rtp_stream {
    uint64_t stamp = 0;      // presumably updated by RtpProcess::inputRtp via out-pointer — verify
    uint64_t stamp_last = 0; // previous value of stamp, used to pace the replay
    std::shared_ptr<RtpProcess> rtp_process; // lazily created on the stream's first packet
    Socket::Ptr sock;                        // dummy socket handed to inputRtp
    struct sockaddr_storage addr;            // dummy peer address (zeroed, AF_INET)
};
static semaphore sem; // signals main() when the pcap replay has finished
unordered_map<uint32_t, rtp_stream> rtp_streams_map;
#if defined(ENABLE_RTPPROXY)
// Feed one RTP packet into the proxy for the given stream id, creating the
// RtpProcess lazily on the first packet, and pace the replay close to real
// time using the advance of the rtp timestamp reported by inputRtp.
// `size` is an in/out parameter: it is reset to 0 once the packet has been
// consumed (or dropped on error).
void processRtp(uint32_t stream_id, const char *rtp, int &size, bool is_udp, const EventPoller::Ptr &poller) {
    rtp_stream &stream = rtp_streams_map[stream_id];
    if (!stream.rtp_process) {
        auto process = RtpProcess::createProcess(MediaTuple{DEFAULT_VHOST, kRtpAppName, to_string(stream_id), ""});
        stream.rtp_process = process;
        // Dummy peer address: the packets come from a capture file, not a socket.
        struct sockaddr_storage addr;
        memset(&addr, 0, sizeof(addr));
        addr.ss_family = AF_INET;
        auto sock = Socket::createSocket(poller);
        stream.sock = sock;
        stream.addr = addr;
    }
    try {
        stream.rtp_process->inputRtp(is_udp, stream.sock, rtp, size, (struct sockaddr *)&stream.addr, &stream.stamp);
    } catch (const std::exception &ex) {
        WarnL << "Input rtp failed: " << ex.what();
        return;
    }
    // Sleep for the timestamp delta when it is in (0, 500) ms, otherwise 1 ms.
    auto diff = static_cast<int64_t>(stream.stamp - stream.stamp_last);
    if (diff > 0 && diff < 500) {
        usleep(diff * 1000);
    } else {
        usleep(1 * 1000);
    }
    stream.stamp_last = stream.stamp;
    // (removed dead store `rtp = nullptr;` — `rtp` is a by-value pointer)
    size = 0;
}
#endif // #if defined(ENABLE_RTPPROXY)
// Replay every UDP packet of the capture at `path` into the RTP proxy,
// pacing on the decoded rtp timestamps.  Returns false only when the file
// cannot be opened; a finished or truncated capture still returns true.
static bool loadFile(const char *path, const EventPoller::Ptr &poller) {
    char errbuf[PCAP_ERRBUF_SIZE] = {'\0'};
    // The deleter posts `sem` so main() wakes up on every exit path.
    // NOTE(review): main() posts `sem` again after loadFile() returns, so the
    // semaphore is posted twice per run — harmless here, but confirm intended.
    std::shared_ptr<pcap_t> handle(pcap_open_offline(path, errbuf), [](pcap_t *handle) {
        sem.post();
        if (handle) {
            pcap_close(handle);
        }
    });
    if (!handle) {
        WarnL << "open file failed:" << path << "error: " << errbuf;
        return false;
    }
    auto total_size = std::make_shared<size_t>(0); // NOTE(review): never updated or read below
    struct pcap_pkthdr header = {0};
    while (true) {
        const u_char *pkt_buff = pcap_next(handle.get(), &header);
        if (!pkt_buff) {
            PrintE("pcapng read over.");
            break;
        }
        struct sniff_ethernet *ethernet = (struct sniff_ethernet *)pkt_buff;
        int eth_len = sizeof(struct sniff_ethernet);  // ethernet header length
        int ip_len = sizeof(struct sniff_ip);         // ip header length (recomputed from ip_hl below)
        int tcp_len = sizeof(struct sniff_tcp);       // tcp header length
        int udp_headr_len = sizeof(struct sniff_udp); // udp header length
        /* network layer: parse the IP header */
        if (ntohs(ethernet->ether_type) == ETHERTYPE_IPV4) { // IPV4
            struct sniff_ip *ip = (struct sniff_ip *)(pkt_buff + eth_len);
            ip_len = (ip->ip_hl & 0x0f) * 4; // actual ip header length in bytes
            unsigned char *saddr = (unsigned char *)&ip->ip_src.s_addr; // byte views of the addresses (network order)
            unsigned char *daddr = (unsigned char *)&ip->ip_dst.s_addr;
            /* transport layer: TCP / UDP / ICMP */
            if (ip->ip_p == IPTYPE_TCP) { // TCP
                PrintI("ip->proto:TCP "); // which transport protocol is in use
                struct sniff_tcp *tcp = (struct sniff_tcp *)(pkt_buff + eth_len + ip_len);
                // NOTE(review): ports are printed without ntohs(), so the values
                // shown are byte-swapped on little-endian hosts.
                PrintI("tcp_sport = %u ", tcp->th_sport);
                PrintI("tcp_dport = %u ", tcp->th_dport);
                // NOTE(review): this dump stops at the first zero byte and is not
                // bounded by the captured length — it can read past the packet
                // buffer; acceptable only because this is a debug tool.
                for (int i = 0; *(pkt_buff + eth_len + ip_len + tcp_len + i) != '\0'; i++) {
                    PrintI("%02x ", *(pkt_buff + eth_len + ip_len + tcp_len + i));
                }
            } else if (ip->ip_p == IPTYPE_UDP) { // UDP
                // PrintI("ip->proto:UDP "); // which transport protocol is in use
                struct sniff_udp *udp = (struct sniff_udp *)(pkt_buff + eth_len + ip_len);
                auto udp_pack_len = ntohs(udp->udp_length);
                uint32_t src_ip = ntohl(ip->ip_src.s_addr);
                uint32_t dst_ip = ntohl(ip->ip_dst.s_addr);
                uint16_t src_port = ntohs(udp->sport);
                uint16_t dst_port = ntohs(udp->dport);
                // Cheap 4-tuple hash used as the stream key.
                // NOTE(review): the shifts discard most address bits, so distinct
                // flows can collide in rtp_streams_map — fine for test captures.
                uint32_t stream_id = (src_ip << 16) + src_port + (dst_ip << 4) + dst_port;
                const char *rtp = reinterpret_cast<const char *>(pkt_buff + eth_len + ip_len + udp_headr_len);
                auto rtp_len = udp_pack_len - udp_headr_len;
#if defined(ENABLE_RTPPROXY)
                processRtp(stream_id, rtp, rtp_len, true, poller);
#endif // #if defined(ENABLE_RTPPROXY)
            } else if (ip->ip_p == IPTYPE_ICMP) { // ICMP
                PrintI("ip->proto:CCMP "); // which transport protocol is in use
            } else {
                PrintI("未识别的传输层协议"); // unrecognized transport-layer protocol
            }
        } else if (ntohs(ethernet->ether_type) == ETHERTYPE_IPV6) { // IPV6
            PrintI("It's IPv6! ");
        } else {
            PrintI("既不是IPV4也不是IPV6 "); // neither IPv4 nor IPv6
        }
    }
    return true;
}
int main(int argc, char *argv[]) {
// 设置日志
Logger::Instance().add(std::make_shared<ConsoleChannel>("ConsoleChannel"));
// 启动异步日志线程
Logger::Instance().setWriter(std::make_shared<AsyncLogWriter>());
loadIniConfig((exeDir() + "config.ini").data());
TcpServer::Ptr rtspSrv(new TcpServer());
TcpServer::Ptr rtmpSrv(new TcpServer());
TcpServer::Ptr httpSrv(new TcpServer());
rtspSrv->start<RtspSession>(554); // 默认554
rtmpSrv->start<RtmpSession>(1935); // 默认1935
httpSrv->start<HttpSession>(81); // 默认80
if (argc == 2) {
auto poller = EventPollerPool::Instance().getPoller();
poller->async_first([poller, argv]() {
loadFile(argv[1], poller);
sem.post();
});
sem.wait();
sleep(1);
} else {
ErrorL << "parameter error.";
}
return 0;
}

View File

@ -15,8 +15,8 @@ var ZLMRTCClient = (function (exports) {
CAPTURE_STREAM_FAILED: 'CAPTURE_STREAM_FAILED'
};
const VERSION$1 = '1.1.0';
const BUILD_DATE = 'Thu Jun 20 2024 16:15:41 GMT+0800 (China Standard Time)';
const VERSION$1 = '1.1.1';
const BUILD_DATE = 'Tue Nov 19 2024 20:10:15 GMT+0800 (China Standard Time)';
// Copyright (C) <2018> Intel Corporation
//
@ -9090,6 +9090,12 @@ var ZLMRTCClient = (function (exports) {
if (this.options.useCamera) {
if (this.options.videoEnable) videoConstraints = new VideoTrackConstraints(VideoSourceInfo.CAMERA);
if (this.options.audioEnable) audioConstraints = new AudioTrackConstraints(AudioSourceInfo.MIC);
if (typeof videoConstraints == 'object' && this.options.videoId != '') {
videoConstraints.deviceId = this.options.videoId;
}
if (typeof audioConstraints == 'object' && this.options.audioId != '') {
audioConstraints.deviceId = this.options.audioId;
}
} else {
if (this.options.videoEnable) {
videoConstraints = new VideoTrackConstraints(VideoSourceInfo.SCREENCAST);
@ -9099,17 +9105,14 @@ var ZLMRTCClient = (function (exports) {
// error shared display media not only audio
error(this.TAG, 'error paramter');
}
if (typeof audioConstraints == 'object' && this.options.audioId != '') {
audioConstraints.deviceId = this.options.audioId;
}
}
}
if (this.options.resolution.w != 0 && this.options.resolution.h != 0 && typeof videoConstraints == 'object') {
videoConstraints.resolution = new Resolution(this.options.resolution.w, this.options.resolution.h);
}
if (typeof videoConstraints == 'object' && this.options.videoId != '') {
videoConstraints.deviceId = this.options.videoId;
}
if (typeof audioConstraints == 'object' && this.options.audioId != '') {
audioConstraints.deviceId = this.options.audioId;
}
MediaStreamFactory.createMediaStream(new StreamConstraints(audioConstraints, videoConstraints)).then(stream => {
this._localStream = stream;
this.dispatch(Events$1.WEBRTC_ON_LOCAL_STREAM, stream);

File diff suppressed because one or more lines are too long

@ -1 +1 @@
Subproject commit b02d2a4c1abf95db45e50bb77d789defa0fcc4b7
Subproject commit 6689195ac89462d40accd88f13dfde58902da57b

View File

@ -15,8 +15,8 @@ var ZLMRTCClient = (function (exports) {
CAPTURE_STREAM_FAILED: 'CAPTURE_STREAM_FAILED'
};
const VERSION$1 = '1.1.0';
const BUILD_DATE = 'Thu Jun 20 2024 16:15:41 GMT+0800 (China Standard Time)';
const VERSION$1 = '1.1.1';
const BUILD_DATE = 'Tue Nov 19 2024 20:10:15 GMT+0800 (China Standard Time)';
// Copyright (C) <2018> Intel Corporation
//
@ -9090,6 +9090,12 @@ var ZLMRTCClient = (function (exports) {
if (this.options.useCamera) {
if (this.options.videoEnable) videoConstraints = new VideoTrackConstraints(VideoSourceInfo.CAMERA);
if (this.options.audioEnable) audioConstraints = new AudioTrackConstraints(AudioSourceInfo.MIC);
if (typeof videoConstraints == 'object' && this.options.videoId != '') {
videoConstraints.deviceId = this.options.videoId;
}
if (typeof audioConstraints == 'object' && this.options.audioId != '') {
audioConstraints.deviceId = this.options.audioId;
}
} else {
if (this.options.videoEnable) {
videoConstraints = new VideoTrackConstraints(VideoSourceInfo.SCREENCAST);
@ -9099,17 +9105,14 @@ var ZLMRTCClient = (function (exports) {
// error shared display media not only audio
error(this.TAG, 'error paramter');
}
if (typeof audioConstraints == 'object' && this.options.audioId != '') {
audioConstraints.deviceId = this.options.audioId;
}
}
}
if (this.options.resolution.w != 0 && this.options.resolution.h != 0 && typeof videoConstraints == 'object') {
videoConstraints.resolution = new Resolution(this.options.resolution.w, this.options.resolution.h);
}
if (typeof videoConstraints == 'object' && this.options.videoId != '') {
videoConstraints.deviceId = this.options.videoId;
}
if (typeof audioConstraints == 'object' && this.options.audioId != '') {
audioConstraints.deviceId = this.options.audioId;
}
MediaStreamFactory.createMediaStream(new StreamConstraints(audioConstraints, videoConstraints)).then(stream => {
this._localStream = stream;
this.dispatch(Events$1.WEBRTC_ON_LOCAL_STREAM, stream);

File diff suppressed because one or more lines are too long