Implementing a WebSocket streaming server
that can transmit a raw H.264 stream
Following on from the previous chapter, this one builds on the WebSocket server to transmit a raw H.264 stream, so that the front end can receive the raw stream and play it back.
Overall, the captured data is encoded and then transmitted to the front end over WebSocket. After the front end receives the data, it uses the JMuxer library to remux it into fMP4 and feed it to a video tag, which lets the video play in the browser.
The server side and the client side are described separately below.
The server side consists of two services: an RTSP service and a WebSocket service.
The key server-side code is shown below. It is the collection module that pulls the RTSP stream with FFmpeg and hands each raw H.264 packet to a callback; a sketch of how that callback can feed the WebSocket push side follows the code.
#ifndef _COLLECTION_CFG_H_
#define _COLLECTION_CFG_H_
#include <memory>
#include <functional>
#include <string>
#include <cstdint>
#include "CollectionCfg.h"
// COLLECTION_API and the 'interface' macro are assumed to be defined in CollectionCfg.h
interface ICollection;
typedef std::shared_ptr<ICollection> spICollection;
// Callback invoked for every raw H.264 packet: (data pointer, size in bytes)
typedef std::function<void(uint8_t*, int)> NotifyVideo;
interface COLLECTION_API ICollection
{
virtual void start(const std::string& url) = 0;
virtual void stop() = 0;
};
interface COLLECTION_API CollectionFactory
{
static spICollection createCollection(const NotifyVideo& video);
};
#endif // _COLLECTION_CFG_H_
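As a quick usage sketch (an assumption, not code from the article): the factory returns a collection object and the NotifyVideo callback fires once for every raw H.264 packet; the header name Collection.h and the RTSP url are placeholders.

#include <cstdio>
#include <cstdint>
#include "Collection.h"  // assumed name of the interface header above

int main()
{
    // The callback receives each raw H.264 packet pulled from the RTSP stream.
    spICollection collection = CollectionFactory::createCollection(
        [](uint8_t* /*data*/, int size) {
            printf("video packet: %d bytes\n", size);  // a real server would push this to WebSocket clients
        });

    collection->start("rtsp://127.0.0.1:8554/live");  // placeholder RTSP url
    getchar();                                        // keep pulling until Enter is pressed
    collection->stop();
    return 0;
}

The implementation of this interface, CollectionData, follows.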
#include "CollectionData.h"
CollectionData::CollectionData(const NotifyVideo& video)
: m_video(video)
, m_i32Len(1024000)
, m_bQuit(false)
, m_nVideoIndex(-1)
, m_iFmtCtx(nullptr)
{
m_spBuffer.reset(new uint8_t[m_i32Len], std::default_delete<uint8_t[]>());
av_register_all();
avcodec_register_all();
avformat_network_init();
}
CollectionData::~CollectionData()
{
release();
}
void CollectionData::start(const std::string& url)
{
int ret = 0;
AVDictionary* opts = nullptr;
av_dict_set(&opts, "buffer_size", "1024000", 0);
av_dict_set(&opts, "max_delay", "50000", 0);
av_dict_set(&opts, "stimeout", "20000000", 0);
av_dict_set(&opts, "rtsp_transport", "tcp", 0);
av_dict_set(&opts, "tune", "zerolatency", 0);
av_dict_set(&opts, "preset", "superfast", 0);
setQuit(false);
if (url == "")
{
return;
}
do
{
if ((ret = avformat_open_input(&m_iFmtCtx, url.c_str(), 0, &opts)) < 0)
{
fprintf(stderr, "Could not open input stream file '%s'", url.c_str());
break;
}
ret = avformat_find_stream_info(m_iFmtCtx, nullptr);
if (ret < 0)
{
fprintf(stderr, "avformat_find_stream_info fail");
break;
}
m_nVideoIndex = av_find_best_stream(m_iFmtCtx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
if (m_nVideoIndex < 0)
{
fprintf(stderr, "no video stream fail");
break;
}
av_dump_format(m_iFmtCtx, 0, url.c_str(), 0);
m_spThread.reset(new std::thread(
std::bind(&CollectionData::doThread, this)));
return;
} while(0);
release();
}
void CollectionData::doThread()
{
int ret = 0;
AVPacket pkt;
av_init_packet(&pkt);
while (!m_bQuit)
{
ret = av_read_frame(m_iFmtCtx, &pkt);
if (ret < 0)
{
break; // end of stream or read error
}
if (pkt.stream_index != m_nVideoIndex)
{
av_packet_unref(&pkt); // drop packets from non-video streams
continue;
}
if (m_video)
{
m_video(pkt.data, pkt.size);
}
av_packet_unref(&pkt);
std::this_thread::sleep_for(std::chrono::milliseconds(10));
}
}
void CollectionData::stop()
{
setQuit(true);
}
bool CollectionData::getQuit()
{
return m_bQuit;
}
void CollectionData::setQuit(bool bQuit)
{
m_bQuit = bQuit;
}
void CollectionData::thdReset()
{
if (m_spThread)
{
if (m_spThread->joinable())
{
m_spThread->join();
}
}
m_spThread.reset();
}
void CollectionData::release()
{
setQuit(true);
thdReset();
if (m_iFmtCtx != nullptr)
{
avformat_close_input(&m_iFmtCtx);
}
}
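The article does not list the WebSocket push code itself (that part lives in WebSocketServed.exe), so the following is only a hedged sketch of how the NotifyVideo callback could be bridged to it. WsServer and its broadcast() method are hypothetical placeholders for whatever WebSocket library is actually used; the port matches the 8000 default used by the web page below.

#include <cstdio>
#include <cstdint>
#include "Collection.h"  // interface header above (assumed file name)
#include "WsServer.h"    // hypothetical wrapper around the WebSocket library

int main()
{
    WsServer ws(8000);   // listen on the port the web page connects to
    ws.start();

    // Every H.264 packet read in CollectionData::doThread() arrives here and is
    // broadcast unchanged (Annex-B NAL units) to all connected clients.
    spICollection collection = CollectionFactory::createCollection(
        [&ws](uint8_t* data, int size) {
            ws.broadcast(data, size);  // one packet = one binary WebSocket message
        });

    collection->start("rtsp://127.0.0.1:8554/live");  // placeholder RTSP source
    getchar();
    collection->stop();
    ws.stop();
    return 0;
}

Each binary message sent this way shows up on the browser side as one 'message' event, which is exactly what the page below passes to player.feed().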
Create a video tag to display the video.
Create a WebSocket to receive the H.264 stream.
Create a JMuxer instance to play the H.264 stream.
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Web playback test</title>
</head>
<body>
<div>
<label>Server IP</label>
<input class="te" type="text" value = "127.0.0.1" id="txtip">
</div>
<div>
<label>Server port</label>
<input class="te" type="text" value = "8000" id="txtport">
</div>
<div>
<button id="play" type="button" onclick="play()">Play</button>
<button id="stop" type="button" onclick="stop()">Stop</button>
</div>
<div>
<video id = "player" width="640" height="480" defaultPlaybackRate autoplay></video>
</div>
<script src="jmuxer.min.js"></script>
<script src="adapter-latest.js"></script>
<script type = "text/javascript">
var player = null
var websocket = null;
function play() {
if (player == null) {
player = new JMuxer({
node: 'player',
mode: 'video',
flushingTime: 200,
fps: 30,
debug: false
})
}
if (websocket == null) {
var ip = document.getElementById("txtip").value;
var port = document.getElementById("txtport").value;
var socketURL = "ws://" + ip + ":" + port;
console.log("ws url: " + socketURL)
websocket = new WebSocket(socketURL);
websocket.binaryType = 'arraybuffer';
websocket.addEventListener('message', function(event) {
player.feed({
video: new Uint8Array(event.data)
})
});
websocket.addEventListener('error', function(e) {
console.log('Socket Error');
});
}
}
function stop() {
if (websocket != null) {
websocket.close()
websocket = null
}
if (player != null) {
var video = document.querySelector("#player");
video.srcObject = null
player.destroy()
delete player
player = null
}
}
</script>
</body>
</html>
The IP address and port input boxes:
<input class="te" type="text" value = "127.0.0.1" id="txtip">
<input class="te" type="text" value = "8000" id="txtport">
The video tag used to display the stream:
<video id = "player" width="640" height="480" defaultPlaybackRate autoplay></video>
play() creates the JMuxer instance, opens the WebSocket and registers its listeners:
function play() {
if (player == null) {
player = new JMuxer({
node: 'player',
mode: 'video',
flushingTime: 200,
fps: 30,
debug: false
})
}
if (websocket == null) {
var ip = document.getElementById("txtip").value;
var port = document.getElementById("txtport").value;
var socketURL = "ws://" + ip + ":" + port;
console.log("ws url: " + socketURL)
websocket = new WebSocket(socketURL);
websocket.binaryType = 'arraybuffer';
websocket.addEventListener('message', function(event) {
player.feed({
video: new Uint8Array(event.data)
})
});
websocket.addEventListener('error', function(e) {
console.log('Socket Error');
});
}
}
stop() closes the WebSocket and destroys the JMuxer instance:
function stop() {
if (websocket != null) {
websocket.close()
websocket = null
}
if (player != null) {
var video = document.querySelector("#player");
video.srcObject = null
player.destroy()
delete player
player = null
}
}

JMuxer can play a raw H.264 stream and can also handle audio; this demo only tests video and does not process audio.
The test results are shown below.
In the figures below, testRtspServer is an RTSP server; see the article 基于Qt和ffmpeg的抓屏rtsp服务(二) (a screen-capture RTSP service based on Qt and FFmpeg, part 2) for details. WebSocketServed.exe pulls the RTSP stream, extracts the raw H.264 stream with FFmpeg, and transmits it to the front end over WebSocket.

