
In earlier versions of the SkeyePlayer RTSP Windows player (hereafter: SkeyePlayer), the snapshot code was implemented with OpenCV, and its pixel-format conversion was far too slow; a worker-thread mechanism was therefore introduced at the time to keep snapshots from stuttering video playback. In the latest SkeyePlayer, to simplify the code and improve snapshot efficiency, we now take snapshots with ffmpeg; to keep playback smooth, the worker-thread mechanism is retained.

The ffmpeg-based snapshot code is as follows:

// Snapshot implementation: convert one raw frame and encode it to a JPEG or PNG file
int take_snapshot(char *file, int w, int h, uint8_t *buffer, AVPixelFormat Format)
{
    char              *fileext    = NULL;
    enum AVCodecID     codecid    = AV_CODEC_ID_NONE;
    struct SwsContext *sws_ctx    = NULL;
    AVPixelFormat      swsofmt    = AV_PIX_FMT_NONE;
    AVFrame            picture    = {};
    int                ret        = -1;

    AVFormatContext   *fmt_ctxt   = NULL;
    AVOutputFormat    *out_fmt    = NULL;
    AVStream          *stream     = NULL;
    AVCodecContext    *codec_ctxt = NULL;
    AVCodec           *codec      = NULL;
    AVPacket           packet     = {};
    int                retry      = 8;
    int                got        = 0;

    // init ffmpeg
    av_register_all();

    // pick the output codec and conversion format from the file extension
    fileext = file + strlen(file) - 3;
    if (_stricmp(fileext, "png") == 0) {
        codecid = AV_CODEC_ID_APNG;
        swsofmt = AV_PIX_FMT_RGB24;
    }
    else {
        codecid = AV_CODEC_ID_MJPEG;
        swsofmt = AV_PIX_FMT_YUVJ420P;
    }

    // wrap the caller's raw buffer in an AVFrame
    AVFrame video = {};
    av_image_fill_arrays(video.data, video.linesize, buffer, Format, w, h, 1);
    video.width  = w;
    video.height = h;
    video.format = Format;

    // alloc picture
    picture.format = swsofmt;
    picture.width  = w > 0 ? w : video.width;
    picture.height = h > 0 ? h : video.height;

    int numBytes = av_image_get_buffer_size(swsofmt, picture.width, picture.height, 1);
    buffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t)); // reuse the parameter as the destination buffer
    av_image_fill_arrays(picture.data, picture.linesize, buffer, swsofmt, picture.width, picture.height, 1);

    // scale picture
    sws_ctx = sws_getContext(video.width, video.height, (AVPixelFormat)Format/*video->format*/,
                             picture.width, picture.height, swsofmt,
                             SWS_FAST_BILINEAR, NULL, NULL, NULL);
    if (!sws_ctx) {
        //av_log(NULL, AV_LOG_ERROR, "could not initialize the conversion context jpg\n");
        goto done;
    }
    sws_scale(sws_ctx, video.data, video.linesize, 0, video.height, picture.data, picture.linesize);

    // do encoding
    fmt_ctxt = avformat_alloc_context();
    out_fmt  = av_guess_format(codecid == AV_CODEC_ID_APNG ? "apng" : "mjpeg", NULL, NULL);
    if (!out_fmt) {
        //av_log(NULL, AV_LOG_ERROR, "failed to guess format !\n");
        goto done;
    }
    fmt_ctxt->oformat = out_fmt;

    if (avio_open(&fmt_ctxt->pb, file, AVIO_FLAG_READ_WRITE) < 0) {
        //av_log(NULL, AV_LOG_ERROR, "failed to open output file: %s !\n", file);
        goto done;
    }

    stream = avformat_new_stream(fmt_ctxt, 0);
    if (!stream) {
        //av_log(NULL, AV_LOG_ERROR, "failed to create a new stream !\n");
        goto done;
    }

    codec_ctxt                = stream->codec;
    codec_ctxt->codec_id      = out_fmt->video_codec;
    codec_ctxt->codec_type    = AVMEDIA_TYPE_VIDEO;
    codec_ctxt->pix_fmt       = swsofmt;
    codec_ctxt->width         = picture.width;
    codec_ctxt->height        = picture.height;
    codec_ctxt->time_base.num = 1;
    codec_ctxt->time_base.den = 25;

    codec = avcodec_find_encoder(codec_ctxt->codec_id);
    if (!codec) {
        //av_log(NULL, AV_LOG_ERROR, "failed to find encoder !\n");
        goto done;
    }

    if (avcodec_open2(codec_ctxt, codec, NULL) < 0) {
        //av_log(NULL, AV_LOG_ERROR, "failed to open encoder !\n");
        goto done;
    }

    // some encoders need to be fed more than once before a packet comes out
    while (retry-- && !got) {
        if (avcodec_encode_video2(codec_ctxt, &packet, &picture, &got) < 0) {
            //av_log(NULL, AV_LOG_ERROR, "failed to do picture encoding !\n");
            goto done;
        }

        if (got) {
            ret = avformat_write_header(fmt_ctxt, NULL);
            if (ret < 0) {
                //av_log(NULL, AV_LOG_ERROR, "error occurred when opening output file !\n");
                goto done;
            }
            av_write_frame(fmt_ctxt, &packet);
            av_write_trailer(fmt_ctxt);
        }
    }

    // ok
    ret = 0;

done:
    if (codec_ctxt) {
        avcodec_close(codec_ctxt);
    }
    if (fmt_ctxt) {
        avio_close(fmt_ctxt->pb);
        avformat_free_context(fmt_ctxt);
    }
    av_packet_unref(&packet);

    sws_freeContext(sws_ctx);
    av_free(buffer);

    return ret;
}
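
Before moving on, a hedged usage example: the resolution, the flat grey test frame and the output file name below are illustrative, not taken from SkeyePlayer, where the buffer would come from the decoder instead.

    // illustrative call: snapshot a dummy 1920x1080 YUV420P frame as JPEG
    int w = 1920, h = 1080;
    int size = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, w, h, 1);
    uint8_t *yuvBuf = (uint8_t *)av_malloc(size);
    memset(yuvBuf, 0x80, size);                       // a flat grey frame as test input
    if (take_snapshot((char *)"snapshot.jpg", w, h, yuvBuf, AV_PIX_FMT_YUV420P) == 0) {
        // success: the ".jpg" extension selected the MJPEG/YUVJ420P path inside take_snapshot
    }
    av_free(yuvBuf);                                  // the caller still owns this buffer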

With ffmpeg's powerful video-processing and conversion features we can turn a frame of video into an image of essentially any format; as the code shows, we chose to support only the "jpeg" and "png" image formats. Taking a snapshot with ffmpeg is a two-step process:

  1. Convert the frame to the required pixel format; the conversion function is powerful, also handles scaling, and is very efficient (a standalone sketch of this step follows the list);
  2. Encode the image. Careful readers will notice that ffmpeg's encoding and file-writing/stream-pushing code is generic: the same code path used here for snapshots can also encode H.264/H.265 and then write it to a file (e.g. MP4) or push it over RTMP/RTSP.
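
For reference, here is a minimal standalone sketch of step 1, converting a 1920x1080 YUV420P frame to RGB24 with libswscale; the resolution and buffer names are illustrative, not taken from the SkeyePlayer code.

    // standalone conversion sketch (needs libswscale/swscale.h and libavutil/imgutils.h)
    struct SwsContext *ctx = sws_getContext(1920, 1080, AV_PIX_FMT_YUV420P,
                                            1920, 1080, AV_PIX_FMT_RGB24,
                                            SWS_FAST_BILINEAR, NULL, NULL, NULL);

    uint8_t *src_data[4], *dst_data[4];
    int      src_linesize[4], dst_linesize[4];
    av_image_alloc(src_data, src_linesize, 1920, 1080, AV_PIX_FMT_YUV420P, 1);
    av_image_alloc(dst_data, dst_linesize, 1920, 1080, AV_PIX_FMT_RGB24, 1);
    // ... fill src_data with one decoded frame here ...

    // one call converts the pixel format and, if the output size differed, would also scale
    sws_scale(ctx, src_data, src_linesize, 0, 1080, dst_data, dst_linesize);

    av_freep(&src_data[0]);
    av_freep(&dst_data[0]);
    sws_freeContext(ctx);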

With the snapshot code finished, calling it is straightforward: simply replace the old snapshot function. Note, however, that the old snapshot path assumed a fixed YUY2 format, so the copy buffer was only Width*Height*2 bytes; RGB24 data would clearly overflow it, so we need to redefine the buffer size, as shown in the following code:

// snapshot: only jpeg/png are supported
if (pThread->manuScreenshot == 0x01)
{
    // build a timestamped file name
    unsigned int timestamp = (unsigned int)time(NULL);
    time_t tt = timestamp;
    struct tm *_time = localtime(&tt);
    char szTime[64] = {0,};
    strftime(szTime, 32, "%Y%m%d-%H%M%S", _time);

    // char strPath[512] = {0,};
    // sprintf(strPath, "%sch%d_%s.jpg", pThread->strScreenCapturePath, pThread->channelId, szTime);

    // copy the current frame and hand it to a worker thread, so encoding and
    // file I/O never block decoding or rendering
    PhotoShotThreadInfo* pShotThreadInfo = new PhotoShotThreadInfo;
    sprintf(pShotThreadInfo->strPath, "%sch%d_%s.jpg", pThread->strScreenCapturePath, pThread->channelId, szTime);

    int nYuvBufLen = frameinfo.width * frameinfo.height * 3; // worst case = RGB24; the RGBA render type is not supported
    pShotThreadInfo->pYuvBuf = new unsigned char[nYuvBufLen];
    pShotThreadInfo->width = frameinfo.width;
    pShotThreadInfo->height = frameinfo.height;
    pShotThreadInfo->renderFormat = pThread->renderFormat;

    memcpy(pShotThreadInfo->pYuvBuf, pThread->yuvFrame[pThread->decodeYuvIdx].pYuvBuf, pThread->yuvFrame[pThread->decodeYuvIdx].Yuvsize-1);
    CreateThread(NULL, 0, (LPTHREAD_START_ROUTINE)_lpPhotoShotThread, pShotThreadInfo, 0, NULL);
    pThread->manuScreenshot = 0;
}
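
The worker routine _lpPhotoShotThread itself is not listed in this article; assuming it simply hands the copied frame to take_snapshot and then releases the per-shot context, a plausible sketch would look like this (the cast of renderFormat to AVPixelFormat is also an assumption):

    // hypothetical worker routine: encode and write the snapshot off the decode/render thread
    static DWORD WINAPI _lpPhotoShotThread(LPVOID lpParam)
    {
        PhotoShotThreadInfo *pInfo = (PhotoShotThreadInfo *)lpParam;

        take_snapshot(pInfo->strPath, pInfo->width, pInfo->height,
                      pInfo->pYuvBuf, (AVPixelFormat)pInfo->renderFormat);

        delete [] pInfo->pYuvBuf;   // allocated with new[] before CreateThread
        delete pInfo;               // the worker owns the per-shot context
        return 0;
    }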

At present the largest pixel format we support is RGB24, so we allocate a maximum buffer of Width*Height*3 bytes. This could be optimized by sizing the buffer according to the actual renderFormat, avoiding unnecessary memory waste; that optimization is planned for a later version.
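
As a rough sketch of that optimization (the helper name is illustrative and not part of the current code), the copy buffer could be sized from the real pixel format with av_image_get_buffer_size:

    // hypothetical helper: size the snapshot copy buffer from the actual pixel format
    static int GetShotBufferSize(AVPixelFormat fmt, int width, int height)
    {
        int size = av_image_get_buffer_size(fmt, width, height, 1);
        return size > 0 ? size : width * height * 3;  // fall back to the RGB24 upper bound
    }

    // e.g. replacing the fixed allocation above:
    // int nYuvBufLen = GetShotBufferSize((AVPixelFormat)pThread->renderFormat,
    //                                    frameinfo.width, frameinfo.height);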

SkeyePlayer technical discussion QQ group: 102644504
