一、获得远端裸数据
1、获得h264数据
1)、远端编码后视频数据监测器
/**
 * @locale zh
 * @type callback
 * @region 视频管理
 * @brief 远端编码后视频数据监测器。<br>
 * 注意:该回调由 SDK 内部线程(而非 UI 线程)同步触发,请勿在回调中执行耗时操作或直接操作 UI,否则可能导致 app 崩溃。
 */
/**
 * @locale en
 * @type callback
 * @region video management
 * @brief Observer for remote encoded video data.<br>
 * Note: This callback is invoked synchronously on an internal SDK thread (not the UI thread). Do not perform time-consuming work or touch the UI directly inside it, or the app may crash.
 */
class IRemoteEncodedVideoFrameObserver {
public:
    /**
     * @locale zh
     * @hidden constructor/destructor
     * @brief 析构函数
     */
    /**
     * @locale en
     * @hidden constructor/destructor
     * @brief Destructor
     */
    virtual ~IRemoteEncodedVideoFrameObserver() = default;
    /**
     * @locale zh
     * @type callback
     * @region 视频数据回调
     * @brief 调用 registerRemoteEncodedVideoFrameObserver{@link #IRTCVideo#registerRemoteEncodedVideoFrameObserver} 完成注册后,SDK 每监测到一帧远端编码后视频数据,就会触发一次该回调
     * @param stream_info 收到的远端流信息,参看 RemoteStreamKey{@link #RemoteStreamKey}
     * @param video_stream 收到的远端视频帧信息,参看 IEncodedVideoFrame{@link #IEncodedVideoFrame}
     */
    /**
     * @locale en
     * @type callback
     * @region video data callback
     * @brief After calling registerRemoteEncodedVideoFrameObserver{@link #IRTCVideo#registerRemoteEncodedVideoFrameObserver}, this callback fires each time the SDK detects remote encoded video data
     * @param stream_info Information about the received remote stream. See RemoteStreamKey{@link #RemoteStreamKey}
     * @param video_stream Information about the received remote video frame. See IEncodedVideoFrame{@link #IEncodedVideoFrame}
     */
    virtual void onRemoteEncodedVideoFrame(const RemoteStreamKey& stream_info, const IEncodedVideoFrame& video_stream) = 0;
};
2)、IRemoteEncodedVideoFrameObserver 派生
class ByteRTCEventHandler : public QObject,
public bytertc::IRTCVideoEventHandler,
public bytertc::IAudioEffectPlayerEventHandler,
public bytertc::IMixedStreamObserver,
public bytertc::IMediaPlayerEventHandler,
public bytertc::IRemoteEncodedVideoFrameObserver,
public bytertc::IVideoSink
virtual void onRemoteEncodedVideoFrame(const bytertc::RemoteStreamKey& stream_info, const bytertc::IEncodedVideoFrame& video_stream) override;
// Receives remote encoded (not yet decoded) video frames after this handler has
// been registered via registerRemoteEncodedVideoFrameObserver.
// NOTE: invoked synchronously on an internal SDK thread (see the observer docs) —
// keep the work here cheap and never touch the UI directly.
void ByteRTCEventHandler::onRemoteEncodedVideoFrame(const bytertc::RemoteStreamKey& stream_info, const bytertc::IEncodedVideoFrame& video_stream) {
    // Intentionally empty stub: consume/decode/forward the encoded frame here as needed.
}
std::unique_ptr<ByteRTCEventHandler> m_handler;
3)、registerRemoteEncodedVideoFrameObserver
/**
 * @locale zh
 * @type api
 * @region 视频管理
 * @brief 注册远端编码后视频数据回调。 <br>
 * 完成注册后,当 SDK 监测到远端编码后视频帧时,会触发 onRemoteEncodedVideoFrame{@link #IRemoteEncodedVideoFrameObserver#onRemoteEncodedVideoFrame} 回调
 * @param observer 远端编码后视频数据监测器,参看 IRemoteEncodedVideoFrameObserver{@link #IRemoteEncodedVideoFrameObserver}
 * @return
 * + 0: 调用成功。<br>
 * + < 0 : 调用失败。查看 ReturnStatus{@link #ReturnStatus} 获得更多错误说明
 * @note
 * + 更多自定义解码功能说明参看 [自定义视频编解码](https://www.volcengine.com/docs/6348/82921#%E8%87%AA%E5%AE%9A%E4%B9%89%E8%A7%86%E9%A2%91%E8%A7%A3%E7%A0%81)。<br>
 * + 该方法适用于手动订阅,并且进房前后均可调用,建议在进房前调用。 <br>
 * + 引擎销毁前需取消注册,调用该方法将参数设置为 nullptr 即可。
 */
/**
 * @locale en
 * @type api
 * @region video management
 * @brief Registers the callback for remote encoded video data. <br>
 * After registration, when the SDK detects a remote encoded video frame, it will trigger the onRemoteEncodedVideoFrame{@link #IRemoteEncodedVideoFrameObserver#onRemoteEncodedVideoFrame} callback
 * @param observer Remote encoded video data monitor. See IRemoteEncodedVideoFrameObserver{@link #IRemoteEncodedVideoFrameObserver}
 * @return
 * + 0: Success.<br>
 * + < 0 : Fail. See ReturnStatus{@link #ReturnStatus} for more details
 * @note
 * + See [Custom Video Encoding and Decoding](https://docs.byteplus.com/byteplus-rtc/docs/82921#custom-video-decoding) for more details about custom video decoding. <br>
 * + This method applies to manual subscription mode and can be called either before or after entering the room. It is recommended to call it before entering the room. <br>
 * + Unregister the observer before the engine is destroyed, by calling this method with the parameter set to nullptr.
 */
virtual int registerRemoteEncodedVideoFrameObserver(IRemoteEncodedVideoFrameObserver* observer) = 0;
m_video->registerRemoteEncodedVideoFrameObserver(m_handler.get());
2、自定义视频渲染器
0)、IVideoSink
/**
 * @locale zh
 * @type keytype
 * @brief 自定义视频渲染器
 */
/**
 * @locale en
 * @type keytype
 * @brief Custom video renderer
 */
class IVideoSink {
public:
    /**
     * @locale zh
     * @type keytype
     * @brief 视频帧编码格式
     */
    /**
     * @locale en
     * @type keytype
     * @brief Video frame encoding format
     */
    enum PixelFormat {
        /**
         * @locale zh
         * @brief YUV I420 格式
         */
        /**
         * @locale en
         * @brief YUV I420 format
         */
        kI420 = VideoPixelFormat::kVideoPixelFormatI420,
        /**
         * @locale zh
         * @brief RGBA 格式, 字节序为 R8 G8 B8 A8
         */
        /**
         * @locale en
         * @brief RGBA format, byte layout R8 G8 B8 A8
         */
        kRGBA = VideoPixelFormat::kVideoPixelFormatRGBA,
        /**
         * @locale zh
         * @brief 原始视频帧格式
         */
        /**
         * @locale en
         * @brief Original format
         */
        kOriginal = VideoPixelFormat::kVideoPixelFormatUnknown,
    };
    /**
     * @locale zh
     * @type callback
     * @brief 视频帧回调
     * @param [out] video_frame 视频帧结构类,参看 IVideoFrame{@link #IVideoFrame}
     * @return 返回值暂未使用
     */
    /**
     * @locale en
     * @type callback
     * @brief Video frame callback
     * @param [out] video_frame Video frame structure. See IVideoFrame{@link #IVideoFrame}.
     * @return The return value is currently unused.
     */
    virtual bool onFrame(IVideoFrame* video_frame) = 0;
    /**
     * @locale zh
     * @type callback
     * @region 房间管理
     * @brief 获取外部渲染耗时。
     * @note 获取外部渲染耗时进行上报。开发者需要自己计算平均渲染耗时。
     */
    /**
     * @locale en
     * @type callback
     * @region Room Management
     * @brief Gets the time taken in custom rendering.
     * @note Gets the time taken in custom rendering and report. You need to calculate the average rendering time by yourself.
     */
    virtual int getRenderElapse() = 0;
    /**
     * @locale zh
     * @type callback
     * @brief 释放渲染器。
     * @note 通知开发者渲染器即将被废弃。收到该返回通知后即可释放资源。
     */
    /**
     * @locale en
     * @type callback
     * @brief Releases the renderer.
     * @note Used to notify the user that the renderer is about to be deprecated. Resources can be released upon receipt of this notification.
     */
    virtual void release() {
    }
    /**
     * @locale zh
     * @hidden constructor/destructor
     * @brief 析构函数
     */
    /**
     * @locale en
     * @hidden constructor/destructor
     * @brief Destructor
     */
    virtual ~IVideoSink() = default;
    /**
     * @locale zh
     * @hidden sink id
     * @brief sink id
     */
    /**
     * @locale en
     * @hidden sink id
     * @brief sink id
     */
    // Fix: the original used a C-style cast `(void *)this`, which silently casts
    // away const inside a const member function. Named casts make that intent
    // explicit and greppable; the returned pointer value is unchanged.
    virtual void* uniqueId() const { return const_cast<void*>(static_cast<const void*>(this)); }
};
1)、setRemoteVideoSink
/**
 * @locale zh
 * @type api
 * @deprecated since 3.57, use setRemoteVideoRender{@link #IRTCVideo#setRemoteVideoRender} instead.
 * @region 自定义视频采集渲染
 * @brief 将远端视频流与自定义渲染器绑定。
 * @param stream_key 远端流信息,用于指定需要渲染的视频流来源及属性,参看 RemoteStreamKey{@link #RemoteStreamKey}。
 * @param video_sink 自定义视频渲染器,参看 IVideoSink{@link #IVideoSink}。
 * @param required_format video_sink 适用的视频帧编码格式,参看 PixelFormat{@link #PixelFormat}。
 * @return
 * + 0: 调用成功。<br>
 * + < 0 : 调用失败。查看 ReturnStatus{@link #ReturnStatus} 获得更多错误说明
 * @note
 * + RTC SDK 默认使用 RTC SDK 自带的渲染器(内部渲染器)进行视频渲染。<br>
 * + 该方法进房前后均可以调用。若想在进房前调用,你需要在加入房间前获取远端流信息;若无法预先获取远端流信息,你可以在加入房间并通过 onUserPublishStream{@link #IRTCRoomEventHandler#onUserPublishStream} 回调获取到远端流信息之后,再调用该方法。<br>
 * + 如果需要解除绑定,必须将 video_sink 设置为 null。退房时将清除绑定状态。<br>
 * + 本方法获取的是后处理后的视频帧,如需获取其他位置的视频帧(如解码后的视频帧),请调用 setRemoteVideoRender{@link #IRTCVideo#setRemoteVideoRender}。
 */
/**
 * @locale en
 * @type api
 * @deprecated since 3.57, use setRemoteVideoRender{@link #IRTCVideo#setRemoteVideoRender} instead.
 * @region Custom Video Capturing & Rendering
 * @brief Binds the remote video stream to a custom renderer.
 * @param stream_key Remote stream information which specifies the source and type of the video stream to be rendered. See RemoteStreamKey{@link #RemoteStreamKey}.
 * @param video_sink Custom video renderer. See IVideoSink{@link #IVideoSink}.
 * @param required_format Encoding format which applies to the custom renderer. See PixelFormat{@link #PixelFormat}.
 * @return
 * + 0: Success.<br>
 * + < 0 : Fail. See ReturnStatus{@link #ReturnStatus} for more details
 * @note
 * + RTC SDK uses its own renderer (internal renderer) for video rendering by default. <br>
 * + This API can be called before and after entering the room. To call before entering the room, you need to get the remote stream information before joining the room; if you cannot get the remote stream information in advance, you can call the API after joining the room and getting the remote stream information via onUserPublishStream{@link #IRTCRoomEventHandler#onUserPublishStream}.<br>
 * + If you need to unbind, you must set video_sink to null. The binding state is cleared when you leave the room. <br>
 * + This API provides post-processed video frames. To get frames at other stages (for example, decoded frames), call setRemoteVideoRender{@link #IRTCVideo#setRemoteVideoRender} instead.
 */
virtual int setRemoteVideoSink(RemoteStreamKey stream_key, IVideoSink* video_sink, IVideoSink::PixelFormat required_format) = 0;
2)、远端用户发布流时,设置渲染方式
注意:调用 registerRemoteEncodedVideoFrameObserver 注册远端编码后视频数据回调之后,setRemoteVideoSink 绑定的自定义渲染器将不再收到帧回调。
// Invoked when a remote user publishes a media stream in the room.
// Logs the event, then binds the first published remote main stream to a
// renderer (internal canvas or custom IVideoSink, selected by a compile-time flag).
void QuickStartWidget::onSigUserPublishStream(std::string roomid, std::string uid, bytertc::MediaStreamType type)
{
    QString log_str = QString("onUserPublishStream,roomid:")
        + QString::fromStdString(roomid)
        + ",uid:" + QString::fromStdString(uid)
        + ",type:" + QString::number(type);
    appendCallback(log_str);

    // Only bind the first remote stream; later publishers are logged but ignored.
    if (m_remote_rendered) {
        return;
    }

    // Fix: the original toggled between the two render paths with a dead
    // `if (0)` block and a duplicated commented-out line; a named constant
    // states the intent and keeps both paths compilable.
    constexpr bool kUseInternalRenderer = false;

    bytertc::RemoteStreamKey key;
    // NOTE(review): assumes the SDK copies room_id/user_id during the call —
    // the c_str() pointers below only live for this function's scope; confirm.
    key.room_id = roomid.c_str();
    key.user_id = uid.c_str();
    key.stream_index = bytertc::kStreamIndexMain;

    if (kUseInternalRenderer) {
        // Render with the SDK's built-in renderer on the remote widget.
        bytertc::VideoCanvas cas;
        cas.background_color = 0;
        cas.render_mode = bytertc::kRenderModeHidden;
        cas.view = nullptr;
        m_video->setRemoteVideoCanvas(key, cas);  // clear any previous binding first
        cas.view = (void*)ui->widget_remote->getWinId();
        m_video->setRemoteVideoCanvas(key, cas);
        ui->widget_remote->setUserInfo(roomid, uid);
    } else {
        // Custom rendering: RGBA frames are delivered to ByteRTCEventHandler::onFrame.
        // NOTE: if registerRemoteEncodedVideoFrameObserver is active, this sink
        // does not receive frames (see the note in this document).
        m_video->setRemoteVideoSink(key, m_handler.get(), bytertc::IVideoSink::PixelFormat::kRGBA);
    }
    m_remote_rendered = true;
}
3)、获得远端裸数据
// Custom-render callback (IVideoSink::onFrame): receives decoded remote frames
// after setRemoteVideoSink(..., PixelFormat::kRGBA) was called.
// Return value is currently unused by the SDK (per the IVideoSink docs).
bool ByteRTCEventHandler::onFrame(bytertc::IVideoFrame* video_frame) {
    if (video_frame == nullptr) {
        return false;  // Defensive: nothing to render.
    }
    const bytertc::VideoFrameType frame_type = video_frame->frameType();
    const bytertc::VideoPixelFormat format = video_frame->pixelFormat();
    const bytertc::VideoContentType content_type = video_frame->videoContentType();
    const int width = video_frame->width();
    const int height = video_frame->height();
    const bytertc::VideoRotation rotation = video_frame->rotation();
    const bytertc::ColorSpace color_space = video_frame->colorSpace();
    const int num_planes = video_frame->numberOfPlanes();
    (void)frame_type; (void)content_type; (void)rotation; (void)color_space;

    // Fix: the original read plane `numberOfPlanes() - 1` and saved it as RGBA
    // unconditionally. RGBA pixel data lives in plane 0; for an I420 frame the
    // last plane is only the V plane, so dumping it as a width*height RGBA image
    // reads out of bounds and produces garbage. Only save when the frame really
    // is RGBA.
    if (format == bytertc::VideoPixelFormat::kVideoPixelFormatRGBA
            && num_planes > 0 && width > 0 && height > 0) {
        uint8_t* data = video_frame->getPlaneData(0);
        // NOTE(review): SaveRGBAToPNG assumes a tightly packed row stride of
        // width * 4 bytes; if the SDK reports a larger plane stride, copy row
        // by row instead — TODO confirm against the IVideoFrame stride accessor.
        if (data != nullptr) {
            SaveRGBAToPNG(data, width, height, "output.png");
        }
    }
    return true;
}
测试
#define STB_IMAGE_WRITE_IMPLEMENTATION
#include "stb_image_write.h"
#include <cstdio>

// Saves a tightly packed RGBA buffer as a PNG file.
//
// rgbaData : pointer to width*height*4 bytes, byte order R8 G8 B8 A8
// width    : image width in pixels (> 0)
// height   : image height in pixels (> 0)
// filePath : destination path for the PNG
//
// The row stride passed to stb is width * 4 bytes, i.e. the buffer must have
// no per-row padding.
void SaveRGBAToPNG(uint8_t* rgbaData, int width, int height, const std::string& filePath) {
    // Guard: stbi_write_png would dereference the buffer / compute a bogus size.
    if (rgbaData == nullptr || width <= 0 || height <= 0) {
        std::fprintf(stderr, "SaveRGBAToPNG: invalid arguments\n");
        return;
    }
    // 4 = channels per pixel (RGBA); last argument is the row stride in bytes.
    // Fix: stbi_write_png returns 0 on failure — the original ignored it.
    if (stbi_write_png(filePath.c_str(), width, height, 4, rgbaData, width * 4) == 0) {
        std::fprintf(stderr, "SaveRGBAToPNG: failed to write %s\n", filePath.c_str());
    }
}