Optimize the code related to decoding and screenshot (#4106)

Co-authored-by: xia-chu <771730766@qq.com>
This commit is contained in:
PioLing 2025-01-19 20:18:32 +08:00 committed by GitHub
parent 83a622fa25
commit a8507d9ecc
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
7 changed files with 168 additions and 32 deletions

View File

@ -1552,6 +1552,12 @@
"key": "expire_sec",
"value": "1",
"description": "截图的过期时间,该时间内产生的截图都会作为缓存返回"
},
{
"key": "async",
"value": "0",
"disabled": true,
        "description": "是否采用zlm内置播放器、解码器api异步截图,开启后截图速度提升,但兼容性降低"
}
]
}

View File

@ -349,7 +349,57 @@ void FFmpegSource::onGetMediaSource(const MediaSource::Ptr &src) {
}
}
void FFmpegSnap::makeSnap(const string &play_url, const string &save_path, float timeout_sec, const onSnap &cb) {
#if defined(ENABLE_FFMPEG)
#include "Player/MediaPlayer.h"
#include "Codec/Transcode.h"
static void makeSnapAsync(const string &play_url, const string &save_path, float timeout_sec, const FFmpegSnap::onSnap &cb) {
    // Take a snapshot with the built-in player + FFmpeg decoder instead of
    // spawning an external ffmpeg process: play the url, decode the first
    // video frame, save it to save_path, and report the result via cb.
    struct Holder {
        MediaPlayer::Ptr player;
    };
    auto holder = std::make_shared<Holder>();
    holder->player = std::make_shared<MediaPlayer>();
    (*holder->player)[mediakit::Client::kTimeoutMS] = timeout_sec * 1000;
    holder->player->setOnPlayResult([holder, save_path, cb, timeout_sec](const SockException &ex) mutable {
        // Drop the outer reference when this callback returns; new_holder below
        // keeps the player alive while we wait for the first decoded frame.
        onceToken token(nullptr, [&]() { holder->player = nullptr; });
        auto video = ex ? nullptr : dynamic_pointer_cast<VideoTrack>(holder->player->getTrack(TrackVideo, false));
        if (!video) {
            cb(false, ex ? ex.what() : "none video track");
            return;
        }
        auto decoder = std::make_shared<FFmpegDecoder>(video);
        auto new_holder = std::make_shared<Holder>(*holder);
        // One-shot flag shared by the decode callback and the timeout timer so
        // the caller gets exactly one notification on every path.
        auto finished = std::make_shared<bool>(false);
        auto timer = EventPollerPool::Instance().getPoller()->doDelayTask(1000 * timeout_sec, [new_holder, finished, cb]() {
            // 防止解码失败导致播放器无法释放
            // Prevent the player from leaking when decoding never yields a frame.
            // Also notify the caller: previously cb was never invoked on this
            // path, leaving the requester waiting forever.
            if (!*finished) {
                *finished = true;
                cb(false, "take snapshot timeout");
            }
            new_holder->player = nullptr;
            return 0;
        });
        // NOTE(review): the timer and the decode callback may run on different
        // pollers; the unsynchronized flag mirrors the original design's benign
        // race on player release — confirm if strict once-semantics are needed.
        decoder->setOnDecode([save_path, new_holder, cb, finished, timer](const FFmpegFrame::Ptr &frame) mutable {
            if (*finished) {
                return;
            }
            *finished = true;
            // Release the player and cancel the timeout once the first frame arrives.
            onceToken token(nullptr, [&]() { new_holder->player = nullptr; timer->cancel(); });
            auto ret = FFmpegUtils::saveFrame(frame, save_path.data());
            cb(std::get<0>(ret), std::get<1>(ret));
        });
        // Feed the track's raw frames into the decoder.
        video->addDelegate([decoder](const Frame::Ptr &frame) { return decoder->inputFrame(frame, false, true); });
    });
    holder->player->play(play_url);
}
#endif
void FFmpegSnap::makeSnap(bool async, const string &play_url, const string &save_path, float timeout_sec, const onSnap &cb) {
#if defined(ENABLE_FFMPEG)
if (async) {
makeSnapAsync(play_url, save_path, timeout_sec, cb);
return;
}
#endif
GET_CONFIG(string, ffmpeg_bin, FFmpeg::kBin);
GET_CONFIG(string, ffmpeg_snap, FFmpeg::kSnap);
GET_CONFIG(string, ffmpeg_log, FFmpeg::kLog);

View File

@ -26,17 +26,20 @@ namespace FFmpeg {
class FFmpegSnap {
public:
using onSnap = std::function<void(bool success, const std::string &err_msg)>;
// / 创建截图 [AUTO-TRANSLATED:6d334c49]
// / Create a screenshot
// / \param play_url 播放url地址只要FFmpeg支持即可 [AUTO-TRANSLATED:609d4de4]
// / \param play_url The playback URL address, as long as FFmpeg supports it
// / \param save_path 截图jpeg文件保存路径 [AUTO-TRANSLATED:0fc0ac0d]
// / \param save_path The path to save the screenshot JPEG file
// / \param timeout_sec 生成截图超时时间(防止阻塞太久) [AUTO-TRANSLATED:0dcc0095]
// / \param timeout_sec Timeout for generating the screenshot (to prevent blocking for too long)
// / \param cb 生成截图成功与否回调 [AUTO-TRANSLATED:5b4b93c9]
// / \param cb Callback for whether the screenshot was generated successfully
static void makeSnap(const std::string &play_url, const std::string &save_path, float timeout_sec, const onSnap &cb);
    /**
     * [AUTO-TRANSLATED:6d334c49]
     * Create a screenshot
     * @param async whether to take the snapshot asynchronously (false: use the ffmpeg command line; true: use the built-in zlm player/decoder api — faster, but limited to pull protocols the zlm player supports)
     * @param play_url the playback URL address; anything FFmpeg supports [AUTO-TRANSLATED:609d4de4]
     * @param save_path path to save the screenshot JPEG file [AUTO-TRANSLATED:0fc0ac0d]
     * @param timeout_sec timeout for generating the screenshot (to prevent blocking for too long) [AUTO-TRANSLATED:0dcc0095]
     * @param cb callback reporting whether the screenshot was generated successfully [AUTO-TRANSLATED:5b4b93c9]
     */
static void makeSnap(bool async, const std::string &play_url, const std::string &save_path, float timeout_sec, const onSnap &cb);
private:
FFmpegSnap() = delete;

View File

@ -151,10 +151,10 @@ void StackPlayer::play() {
// auto audioTrack = std::dynamic_pointer_cast<mediakit::AudioTrack>(strongPlayer->getTrack(mediakit::TrackAudio, false));
if (videoTrack) {
// 如果每次不同 可以加个时间戳 time(NULL);
// TODO:添加使用显卡还是cpu解码的判断逻辑 [AUTO-TRANSLATED:44bef37a]
// TODO: Add logic to determine whether to use GPU or CPU decoding
auto decoder = std::make_shared<mediakit::FFmpegDecoder>(
videoTrack, 0, std::vector<std::string>{"h264", "hevc"});
auto decoder = std::make_shared<mediakit::FFmpegDecoder>(videoTrack, 0, std::vector<std::string> { "h264", "hevc" });
decoder->setOnDecode([weakSelf](const mediakit::FFmpegFrame::Ptr& frame) mutable {
auto self = weakSelf.lock();

View File

@ -2047,7 +2047,7 @@ void installWebApi() {
// 启动FFmpeg进程开始截图生成临时文件截图成功后替换为正式文件 [AUTO-TRANSLATED:7d589e3f]
// Start the FFmpeg process, start taking screenshots, generate temporary files, replace them with formal files after successful screenshots
auto new_snap_tmp = new_snap + ".tmp";
FFmpegSnap::makeSnap(allArgs["url"], new_snap_tmp, allArgs["timeout_sec"], [invoker, allArgs, new_snap, new_snap_tmp](bool success, const string &err_msg) {
FFmpegSnap::makeSnap(allArgs["async"], allArgs["url"], new_snap_tmp, allArgs["timeout_sec"], [invoker, allArgs, new_snap, new_snap_tmp](bool success, const string &err_msg) {
if (!success) {
// 生成截图失败,可能残留空文件 [AUTO-TRANSLATED:c96a4468]
// Screenshot generation failed, there may be residual empty files

View File

@ -16,6 +16,7 @@
#include "Util/uv_errno.h"
#include "Transcode.h"
#include "Common/config.h"
#define MAX_DELAY_SECOND 3
using namespace std;
@ -29,13 +30,8 @@ static string ffmpeg_err(int errnum) {
return errbuf;
}
std::shared_ptr<AVPacket> alloc_av_packet() {
auto pkt = std::shared_ptr<AVPacket>(av_packet_alloc(), [](AVPacket *pkt) {
av_packet_free(&pkt);
});
pkt->data = NULL; // packet data will be allocated by the encoder
pkt->size = 0;
return pkt;
std::unique_ptr<AVPacket, void (*)(AVPacket *)> alloc_av_packet() {
return std::unique_ptr<AVPacket, void (*)(AVPacket *)>(av_packet_alloc(), [](AVPacket *pkt) { av_packet_free(&pkt); });
}
//////////////////////////////////////////////////////////////////////////////////////////
@ -317,6 +313,7 @@ static inline const AVCodec *getCodecByName(const std::vector<std::string> &code
FFmpegDecoder::FFmpegDecoder(const Track::Ptr &track, int thread_num, const std::vector<std::string> &codec_name) {
setupFFmpeg();
_frame_pool.setSize(AV_NUM_DATA_POINTERS);
const AVCodec *codec = nullptr;
const AVCodec *codec_default = nullptr;
if (!codec_name.empty()) {
@ -414,6 +411,7 @@ FFmpegDecoder::FFmpegDecoder(const Track::Ptr &track, int thread_num, const std:
if (track->getTrackType() == TrackVideo) {
_context->width = static_pointer_cast<VideoTrack>(track)->getVideoWidth();
_context->height = static_pointer_cast<VideoTrack>(track)->getVideoHeight();
InfoL << "media source :" << _context->width << " X " << _context->height;
}
switch (track->getCodecId()) {
@ -485,7 +483,7 @@ FFmpegDecoder::~FFmpegDecoder() {
void FFmpegDecoder::flush() {
while (true) {
auto out_frame = std::make_shared<FFmpegFrame>();
auto out_frame = _frame_pool.obtain2();
auto ret = avcodec_receive_frame(_context.get(), out_frame->get());
if (ret == AVERROR(EAGAIN)) {
avcodec_send_packet(_context.get(), nullptr);
@ -532,7 +530,7 @@ bool FFmpegDecoder::inputFrame(const Frame::Ptr &frame, bool live, bool async, b
inputFrame_l(frame_cache, live, enable_merge);
// 此处模拟解码太慢导致的主动丢帧 [AUTO-TRANSLATED:fc8bea8a]
// Here simulates decoding too slow, resulting in active frame dropping
//usleep(100 * 1000);
// usleep(100 * 1000);
});
}
@ -540,7 +538,7 @@ bool FFmpegDecoder::decodeFrame(const char *data, size_t size, uint64_t dts, uin
TimeTicker2(30, TraceL);
auto pkt = alloc_av_packet();
pkt->data = (uint8_t *) data;
pkt->data = (uint8_t *)data;
pkt->size = size;
pkt->dts = dts;
pkt->pts = pts;
@ -556,8 +554,8 @@ bool FFmpegDecoder::decodeFrame(const char *data, size_t size, uint64_t dts, uin
return false;
}
while (true) {
auto out_frame = std::make_shared<FFmpegFrame>();
for (;;) {
auto out_frame = _frame_pool.obtain2();
ret = avcodec_receive_frame(_context.get(), out_frame->get());
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
break;
@ -600,6 +598,8 @@ FFmpegSwr::FFmpegSwr(AVSampleFormat output, int channel, int channel_layout, int
_target_channels = channel;
_target_channel_layout = channel_layout;
_target_samplerate = samplerate;
_swr_frame_pool.setSize(AV_NUM_DATA_POINTERS);
}
#endif
@ -644,7 +644,7 @@ FFmpegFrame::Ptr FFmpegSwr::inputFrame(const FFmpegFrame::Ptr &frame) {
<< av_get_sample_fmt_name(_target_format);
}
if (_ctx) {
auto out = std::make_shared<FFmpegFrame>();
auto out = _swr_frame_pool.obtain2();
out->get()->format = _target_format;
#if LIBAVCODEC_VERSION_INT >= FF_CODEC_VER_7_1
@ -676,6 +676,8 @@ FFmpegSws::FFmpegSws(AVPixelFormat output, int width, int height) {
_target_format = output;
_target_width = width;
_target_height = height;
_sws_frame_pool.setSize(AV_NUM_DATA_POINTERS);
}
FFmpegSws::~FFmpegSws() {
@ -706,7 +708,7 @@ FFmpegFrame::Ptr FFmpegSws::inputFrame(const FFmpegFrame::Ptr &frame, int &ret,
// Do not convert format
return frame;
}
if (_ctx && (_src_width != frame->get()->width || _src_height != frame->get()->height || _src_format != (enum AVPixelFormat) frame->get()->format)) {
if (_ctx && (_src_width != frame->get()->width || _src_height != frame->get()->height || _src_format != (enum AVPixelFormat)frame->get()->format)) {
// 输入分辨率发生变化了 [AUTO-TRANSLATED:0e4ea2e8]
// Input resolution has changed
sws_freeContext(_ctx);
@ -720,7 +722,7 @@ FFmpegFrame::Ptr FFmpegSws::inputFrame(const FFmpegFrame::Ptr &frame, int &ret,
InfoL << "sws_getContext:" << av_get_pix_fmt_name((enum AVPixelFormat) frame->get()->format) << " -> " << av_get_pix_fmt_name(_target_format);
}
if (_ctx) {
auto out = std::make_shared<FFmpegFrame>();
auto out = _sws_frame_pool.obtain2();
if (!out->get()->data[0]) {
if (data) {
av_image_fill_arrays(out->get()->data, out->get()->linesize, data, _target_format, target_width, target_height, 32);
@ -743,5 +745,64 @@ FFmpegFrame::Ptr FFmpegSws::inputFrame(const FFmpegFrame::Ptr &frame, int &ret,
return nullptr;
}
} //namespace mediakit
#endif//ENABLE_FFMPEG
std::tuple<bool, std::string> FFmpegUtils::saveFrame(const FFmpegFrame::Ptr &frame, const char *filename, AVPixelFormat fmt) {
    // Encode a single decoded frame to disk: MJPEG when fmt is
    // AV_PIX_FMT_YUVJ420P, PNG otherwise. Returns {success, error message}.
    _StrPrinter ss;
    const AVCodec *jpeg_codec = avcodec_find_encoder(fmt == AV_PIX_FMT_YUVJ420P ? AV_CODEC_ID_MJPEG : AV_CODEC_ID_PNG);
    std::unique_ptr<AVCodecContext, void (*)(AVCodecContext *)> jpeg_codec_ctx(
        jpeg_codec ? avcodec_alloc_context3(jpeg_codec) : nullptr, [](AVCodecContext *ctx) { avcodec_free_context(&ctx); });
    if (!jpeg_codec_ctx) {
        ss << "Could not allocate JPEG/PNG codec context";
        DebugL << ss;
        return { false, ss };
    }
    jpeg_codec_ctx->width = frame->get()->width;
    jpeg_codec_ctx->height = frame->get()->height;
    jpeg_codec_ctx->pix_fmt = fmt;
    jpeg_codec_ctx->time_base = { 1, 1 };
    auto ret = avcodec_open2(jpeg_codec_ctx.get(), jpeg_codec, NULL);
    if (ret < 0) {
        ss << "Could not open JPEG codec, " << ffmpeg_err(ret);
        DebugL << ss;
        return { false, ss };
    }
    // Convert the frame to the encoder's pixel format (0x0 keeps the source resolution).
    FFmpegSws sws(fmt, 0, 0);
    auto new_frame = sws.inputFrame(frame);
    if (!new_frame) {
        // Fix: previously this appended ffmpeg_err(ret) while ret still held
        // avcodec_open2's success code, producing a misleading message.
        ss << "Could not scale the frame";
        DebugL << ss;
        return { false, ss };
    }
    auto pkt = alloc_av_packet();
    ret = avcodec_send_frame(jpeg_codec_ctx.get(), new_frame->get());
    if (ret < 0) {
        ss << "Error sending a frame for encoding," << ffmpeg_err(ret);
        DebugL << ss;
        return { false, ss };
    }
    // Enter draining mode so the encoder flushes any buffered packets;
    // otherwise avcodec_receive_packet may return EAGAIN with no output.
    avcodec_send_frame(jpeg_codec_ctx.get(), nullptr);
    std::unique_ptr<FILE, void (*)(FILE *)> tmp_save_file_jpg(File::create_file(filename, "wb"), [](FILE *fp) {
        if (fp) {
            fclose(fp);
        }
    });
    if (!tmp_save_file_jpg) {
        ss << "Could not open the file " << filename;
        DebugL << ss;
        return { false, ss };
    }
    size_t bytes_written = 0;
    while (avcodec_receive_packet(jpeg_codec_ctx.get(), pkt.get()) == 0) {
        bytes_written += fwrite(pkt.get()->data, 1, pkt.get()->size, tmp_save_file_jpg.get());
    }
    if (!bytes_written) {
        // The encoder produced no packets (or fwrite failed) — previously this
        // still returned success, leaving an empty image file behind.
        ss << "Could not encode the frame to " << filename;
        DebugL << ss;
        return { false, ss };
    }
    DebugL << "Screenshot successful: " << filename;
    return { true, "" };
}
} // namespace mediakit
#endif // ENABLE_FFMPEG

View File

@ -75,6 +75,8 @@ private:
int _target_samplerate;
AVSampleFormat _target_format;
SwrContext *_ctx = nullptr;
toolkit::ResourcePool<FFmpegFrame> _swr_frame_pool;
};
class TaskManager {
@ -134,6 +136,7 @@ private:
onDec _cb;
std::shared_ptr<AVCodecContext> _context;
FrameMerger _merger{FrameMerger::h264_prefix};
toolkit::ResourcePool<FFmpegFrame> _frame_pool;
};
class FFmpegSws {
@ -156,6 +159,19 @@ private:
SwsContext *_ctx = nullptr;
AVPixelFormat _src_format = AV_PIX_FMT_NONE;
AVPixelFormat _target_format = AV_PIX_FMT_NONE;
toolkit::ResourcePool<FFmpegFrame> _sws_frame_pool;
};
class FFmpegUtils {
public:
    /**
     * Save a decoded frame to disk as a jpeg or png image.
     * @param frame decoded frame to save
     * @param filename output image file path
     * @param fmt pixel format — jpg: AV_PIX_FMT_YUVJ420P, png: AV_PIX_FMT_RGB24
     * @return tuple of {success flag, error message}
     */
    static std::tuple<bool, std::string> saveFrame(const FFmpegFrame::Ptr &frame, const char *filename, AVPixelFormat fmt = AV_PIX_FMT_YUVJ420P);
};
}//namespace mediakit