Fixing skewed ffmpeg video output on Android
Published: 2019-06-13


On some devices, video decoded with ffmpeg comes out garbled (skewed) when rendered on screen.

The original rendering code:

while(av_read_frame(formatCtx, &packet)>=0 && !_stop && NULL!=window && bInit) {
    // Is this a packet from the video stream?
    if(packet.stream_index==videoStream) {
        // Decode video frame
        avcodec_decode_video2(codecCtx, decodedFrame, &frameFinished, &packet);
        // Did we get a video frame?
        if(frameFinished) {
            // Convert the image from its native format to RGBA
            sws_scale(
                sws_ctx,
                (uint8_t const * const *)decodedFrame->data,
                decodedFrame->linesize,
                0,
                codecCtx->height,
                frameRGBA->data,
                frameRGBA->linesize
            );
            if(packet.dts == AV_NOPTS_VALUE
                    && decodedFrame->opaque && *(uint64_t*)decodedFrame->opaque != AV_NOPTS_VALUE)
            {
                pts = *(uint64_t *)decodedFrame->opaque;
                LOGD("pst1: %d",pts);
            }
            else if(packet.dts != AV_NOPTS_VALUE) {
                pts = packet.dts;
                LOGD("pst2: %d",pts);
            } else {
                pts = 0;
                LOGD("pst3: %d",pts);
            }
            //pts = av_q2d(codecCtx->time_base) * 1000000.0 * i * 2;
            pts *= 1000;
            //LOGD("debug %d,%d,%f",pts, (long)(av_q2d(codecCtx->time_base) * 1000000.0 * i * 2), av_q2d(codecCtx->time_base));
            if(0 == pts || 0 == baseTime)
            {
                baseTime = av_gettime() - pts;
                LOGD("BASETIME: %d",baseTime);
            }else{
                waitTime = (baseTime + pts) - av_gettime();
                LOGD("WAITTIME: %d, %d",waitTime,pts);
            }
            //waitTime = (av_q2d(codecCtx->time_base) * 1000.0 - 0.0) * 1000;
            if(waitTime>0)
                usleep(waitTime);
            if(!_stop)
            {
                synchronized(lockWindow)
                {
                    if(!_stop && NULL!=window)
                    {
                        // lock the window buffer
                        if (ANativeWindow_lock(pWin, &windowBuffer, NULL) < 0) {
                            LOGE("cannot lock window");
                        } else {
                            // draw the frame on buffer
                            //LOGD("copy buffer %d:%d:%d", width, height, width*height*RGB_SIZE);
                            //LOGD("window buffer: %d:%d:%d", windowBuffer.width, windowBuffer.height, windowBuffer.stride);
                            memcpy(windowBuffer.bits, buffer, width * height * RGB_SIZE);
                            // unlock the window buffer and post it to display
                            ANativeWindow_unlockAndPost(pWin);
                            // count number of frames
                            ++i;
                        }
                    }
                }
            }
        }
    }
Closer analysis showed that some resolutions display correctly while others do not, which looked as if the rows were being offset by the frame width. The test results:

Scale   Resolution    Result (O = displays correctly, F = garbled)
ORG     176 x 144     F
X2      352 x 288     O
X3      528 x 432     F
X4      704 x 576     O
X6      1056 x *      O
X1.1    193 x 158     F
X1.2    211 x 172     F
X1.5    264 x 216     F
X0.5    88 x 72       F
X2?     352 x 290     O
X2?     352 x 600     O
X2?     352 x 720     O
X4?     704 x 720     O
X6?     1056 x 720    O

Additional width notes: 1280 -> 1312; 1056, 1184, 1248 ok.

The working widths appear to follow a %64+32 alignment pattern, which points to a memory-alignment problem. The definition of ANativeWindow_Buffer:

typedef struct ANativeWindow_Buffer {
    // The number of pixels that are shown horizontally.
    int32_t width;

    // The number of pixels that are shown vertically.
    int32_t height;

    // The number of *pixels* that a line in the buffer takes in
    // memory.  This may be >= width.
    int32_t stride;

    // The format of the buffer.  One of WINDOW_FORMAT_*
    int32_t format;

    // The actual bits.
    void* bits;

    // Do not touch.
    uint32_t reserved[6];
} ANativeWindow_Buffer;
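The stride field is the key: the window may pad each row so that a line occupies stride pixels in memory even though only width of them are visible. A minimal sketch of the addressing this implies, assuming a 4-byte RGBA format such as WINDOW_FORMAT_RGBA_8888 (the alignment value used below is only a guess for illustration; the real value is device-dependent):

#include <stdint.h>
#include <stddef.h>

// Byte offset of pixel (x, y) in an ANativeWindow buffer: rows advance
// by `stride` pixels, not by `width` pixels.
static inline size_t window_pixel_offset(int32_t stride, int x, int y) {
    return ((size_t)y * (size_t)stride + (size_t)x) * 4;   // 4 bytes per RGBA pixel
}

// Purely illustrative: if a device rounded the stride up to a multiple
// of, say, 32 pixels, a 176-pixel-wide frame would get stride 192, and a
// flat memcpy of width * height * 4 bytes would then shift every row by
// (192 - 176) pixels, producing exactly the skewed picture described above.
static inline int32_t guess_stride(int32_t width, int32_t align) {
    return (width + align - 1) / align * align;
}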

Logging stride and width showed that whenever the picture displays correctly, stride == width. Together with the comments in the struct, this confirms the distortion comes from the alignment padding at the end of each row. Adjusted code:

if(packet.stream_index==videoStream) {
    // Decode video frame
    avcodec_decode_video2(codecCtx, decodedFrame, &frameFinished, &packet);
    // Did we get a video frame?
    if(frameFinished) {
        // Convert the image from its native format to RGBA
        sws_scale(
            sws_ctx,
            (uint8_t const * const *)decodedFrame->data,
            decodedFrame->linesize,
            0,
            codecCtx->height,
            frameRGBA->data,
            frameRGBA->linesize
        );
        if(packet.dts == AV_NOPTS_VALUE
                && decodedFrame->opaque && *(uint64_t*)decodedFrame->opaque != AV_NOPTS_VALUE)
        {
            pts = *(uint64_t *)decodedFrame->opaque;
            LOGD("pst1: %d",pts);
        }
        else if(packet.dts != AV_NOPTS_VALUE) {
            pts = packet.dts;
            LOGD("pst2: %d",pts);
        } else {
            pts = 0;
            LOGD("pst3: %d",pts);
        }
        //pts = av_q2d(codecCtx->time_base) * 1000000.0 * i * 2;
        pts *= 1000;
        //LOGD("debug %d,%d,%f",pts, (long)(av_q2d(codecCtx->time_base) * 1000000.0 * i * 2), av_q2d(codecCtx->time_base));
        if(0 == pts || 0 == baseTime)
        {
            baseTime = av_gettime() - pts;
            LOGD("BASETIME: %d",baseTime);
        }else{
            waitTime = (baseTime + pts) - av_gettime();
            LOGD("WAITTIME: %d, %d",waitTime,pts);
        }
        //waitTime = (av_q2d(codecCtx->time_base) * 1000.0 - 0.0) * 1000;
        if(waitTime>0)
            usleep(waitTime);
        if(!_stop)
        {
            synchronized(lockWindow)
            {
                if(!_stop && NULL!=window)
                {
                    // lock the window buffer
                    if (ANativeWindow_lock(pWin, &windowBuffer, NULL) < 0) {
                        LOGE("cannot lock window");
                    } else {
                        // draw the frame on buffer
                        //LOGD("copy buffer %d:%d:%d", width, height, width*height*RGB_SIZE);
                        //LOGD("window buffer: %d:%d:%d", windowBuffer.width, windowBuffer.height, windowBuffer.stride);
                        //memcpy(windowBuffer.bits, buffer, width * height * RGB_SIZE);
                        if(windowBuffer.width >= windowBuffer.stride){
                            // no padding: rows are contiguous, copy the whole frame at once
                            memcpy(windowBuffer.bits, buffer, width * height * RGB_SIZE);
                        }else{
                            // stride > width: copy row by row, skipping the padding
                            // at the end of each destination line
                            for(int row = 0; row < height; ++row)
                                memcpy((uint8_t*)windowBuffer.bits + windowBuffer.stride * row * RGB_SIZE,
                                       buffer + width * row * RGB_SIZE,
                                       width * RGB_SIZE);
                        }
                        // unlock the window buffer and post it to display
                        ANativeWindow_unlockAndPost(pWin);
                        // count number of frames
                        ++i;
                    }
                }
            }
        }
    }
}

Copying the frame row by row, and skipping the alignment padding at the end of each destination line, fixes the problem.
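As a standalone illustration, the same fix can be pulled out into a small helper. This is only a sketch under the same assumptions (RGBA_8888, i.e. 4 bytes per pixel); the function name and parameters are mine, not from the original code:

#include <stdint.h>
#include <string.h>
#include <android/native_window.h>

#define RGBA_BYTES 4   // assuming WINDOW_FORMAT_RGBA_8888

// Copy a tightly packed width x height RGBA frame into an ANativeWindow,
// honoring the stride of the locked buffer.
// Returns 0 on success, -1 if the window could not be locked.
static int blit_frame(ANativeWindow *win, const uint8_t *src,
                      int width, int height) {
    ANativeWindow_Buffer buf;
    if (ANativeWindow_lock(win, &buf, NULL) < 0)
        return -1;

    uint8_t *dst = (uint8_t *)buf.bits;
    if (buf.stride == width) {
        // No padding: one contiguous copy is enough.
        memcpy(dst, src, (size_t)width * height * RGBA_BYTES);
    } else {
        // Padded rows: copy line by line, skipping the padding at the
        // end of each destination row.
        for (int row = 0; row < height; ++row) {
            memcpy(dst + (size_t)row * buf.stride * RGBA_BYTES,
                   src + (size_t)row * width * RGBA_BYTES,
                   (size_t)width * RGBA_BYTES);
        }
    }

    ANativeWindow_unlockAndPost(win);
    return 0;
}

In practice the RGBA assumption would be pinned down beforehand with ANativeWindow_setBuffersGeometry(win, width, height, WINDOW_FORMAT_RGBA_8888). Another option, not used in the original post, is to let sws_scale write straight into buf.bits with a destination linesize of stride * 4, which removes the intermediate copy altogether.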

Reposted from: https://www.cnblogs.com/yangykaifa/p/6992349.html
