Fixing the sheared ("sliding screen") video output from ffmpeg on Android

On some devices the video rendered through ffmpeg comes out garbled.


The original code:

while(av_read_frame(formatCtx, &packet)>=0 && !_stop && NULL!=window && bInit) {
		// Is this a packet from the video stream?
		if(packet.stream_index==videoStream) {
			// Decode video frame
			avcodec_decode_video2(codecCtx, decodedFrame, &frameFinished,
			   &packet);
			// Did we get a video frame?
			if(frameFinished) {
				// Convert the image from its native format to RGBA
				sws_scale
				(
					sws_ctx,
					(uint8_t const * const *)decodedFrame->data,
					decodedFrame->linesize,
					0,
					codecCtx->height,
					frameRGBA->data,
					frameRGBA->linesize
				);

				if(packet.dts == AV_NOPTS_VALUE
						 && decodedFrame->opaque && *(uint64_t*)decodedFrame->opaque != AV_NOPTS_VALUE)
				{
					pts = *(uint64_t *)decodedFrame->opaque;
					LOGD("pst1: %d",pts);
				}
				else if(packet.dts != AV_NOPTS_VALUE) {
				  pts = packet.dts;
				  LOGD("pst2: %d",pts);
				} else {
				  pts = 0;
				  LOGD("pst3: %d",pts);
				}
				//pts = av_q2d(codecCtx->time_base) * 1000000.0 * i * 2;
				pts *= 1000;
				//LOGD("debug %d,%d,%f",pts, (long)(av_q2d(codecCtx->time_base) * 1000000.0 * i * 2), av_q2d(codecCtx->time_base));
				if(0 == pts || 0 == baseTime)
				{
					baseTime = av_gettime() - pts;
					LOGD("BASETIME: %d",baseTime);
				}else{
					waitTime = (baseTime + pts) - av_gettime();
					LOGD("WAITTIME: %d, %d",waitTime,pts);
				}

				//waitTime = (av_q2d(codecCtx->time_base) * 1000.0 - 0.0) * 1000;
				if(waitTime>0)
					usleep(waitTime);
				if(!_stop)
				{
					synchronized(lockWindow)
					{
						if(!_stop && NULL!=window)
						{
							// lock the window buffer
							if (ANativeWindow_lock(pWin, &windowBuffer, NULL) < 0) {
								LOGE("cannot lock window");
							} else {
								// draw the frame on buffer
								//LOGD("copy buffer %d:%d:%d", width, height, width*height*RGB_SIZE);
								//LOGD("window buffer: %d:%d:%d", windowBuffer.width, windowBuffer.height, windowBuffer.stride);
								memcpy(windowBuffer.bits, buffer,  width * height * RGB_SIZE);
								// unlock the window buffer and post it to display
								ANativeWindow_unlockAndPost(pWin);
								// count number of frames
								++i;
							}
						}
					}
				}
			}
		}
Closer analysis showed that some resolutions do display correctly, which suggested the rows were being shifted by a width mismatch. The test results:

(ORG = original size, Xn = scaled by n; O = displays correctly, F = corrupted)

ORG:  176  x 144    F
X2:   352  x 288    O
X3:   528  x 432    F
X4:   704  x 576    O
X6:   1056 x *      O

X1.1: 193  x 158    F
X1.2: 211  x 172    F
X1.5: 264  x 216    F

X0.5: 88   x 72     F

X2?:  352  x 290    O
X2?:  352  x 600    O
X2?:  352  x 720    O
X4?:  704  x 720    O
X6?:  1056 x 720    O

Other widths tried:
1280 -> 1312
        1056
1184
1248 ok
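One reading of these results: every width that displays correctly (352, 704, 1056) is a multiple of 32 pixels, while every corrupted one is not, which hints that the window buffer pads each row up to an alignment boundary. A throwaway check of that idea, assuming a hypothetical 32-pixel row alignment (the real boundary is device and driver specific, as the 1280 -> 1312 note above suggests):

#include <stdio.h>
#include <stdint.h>

/* Hypothetical row alignment in pixels; the real value depends on the
 * device's gralloc implementation and need not be this simple. */
#define ROW_ALIGN 32

/* Round a requested width up to the next ROW_ALIGN boundary. */
static int32_t padded_stride(int32_t width) {
	return (width + ROW_ALIGN - 1) / ROW_ALIGN * ROW_ALIGN;
}

int main(void) {
	const int32_t widths[] = { 176, 352, 528, 704, 1056, 193, 211, 264, 88 };
	for (size_t i = 0; i < sizeof widths / sizeof widths[0]; ++i) {
		int32_t w = widths[i];
		int32_t s = padded_stride(w);
		/* When stride != width, a flat memcpy of width*height pixels shifts
		 * every row a little further sideways, producing the sheared image. */
		printf("width=%4d  stride=%4d  %s\n", (int) w, (int) s,
		       s == w ? "no padding (displays fine)" : "padded (corrupted)");
	}
	return 0;
}

Under that assumed alignment, the padded/unpadded split reproduces the F/O column above.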

The working resolutions appear to line up on a %64+32 boundary, which points to memory alignment. The ANativeWindow_Buffer struct:

typedef struct ANativeWindow_Buffer {
    // The number of pixels that are shown horizontally.
    int32_t width;

    // The number of pixels that are shown vertically.
    int32_t height;

    // The number of *pixels* that a line in the buffer takes in
    // memory.  This may be >= width.
    int32_t stride;

    // The format of the buffer.  One of WINDOW_FORMAT_*
    int32_t format;

    // The actual bits.
    void* bits;
    
    // Do not touch.
    uint32_t reserved[6];
} ANativeWindow_Buffer;
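A quick way to confirm this on a given device is to log the buffer geometry right after locking the window. A minimal diagnostic sketch against the NDK ANativeWindow API (dump_window_geometry and the log tag are made-up names; pWin is the native window used in the code above):

#include <android/native_window.h>
#include <android/log.h>

#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, "ffmpeg-render", __VA_ARGS__)

/* Lock the window once and report whether its rows carry padding. */
static void dump_window_geometry(ANativeWindow *pWin) {
	ANativeWindow_Buffer wb;
	if (ANativeWindow_lock(pWin, &wb, NULL) < 0) {
		LOGD("cannot lock window");
		return;
	}
	/* stride is measured in pixels, not bytes; stride > width means padded rows. */
	LOGD("window: width=%d height=%d stride=%d format=%d padding=%d px",
			wb.width, wb.height, wb.stride, wb.format, wb.stride - wb.width);
	/* Posting the untouched buffer back is acceptable for a one-off diagnostic. */
	ANativeWindow_unlockAndPost(pWin);
}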


Logging stride and width confirmed it: whenever the picture displays correctly, stride == width. Combined with the comment on stride above, the corruption is clearly caused by row padding from memory alignment. Adjusted code:

if(packet.stream_index==videoStream) {
			// Decode video frame
			avcodec_decode_video2(codecCtx, decodedFrame, &frameFinished,
			   &packet);
			// Did we get a video frame?
			if(frameFinished) {
				// Convert the image from its native format to RGBA
				sws_scale
				(
					sws_ctx,
					(uint8_t const * const *)decodedFrame->data,
					decodedFrame->linesize,
					0,
					codecCtx->height,
					frameRGBA->data,
					frameRGBA->linesize
				);

				if(packet.dts == AV_NOPTS_VALUE
						 && decodedFrame->opaque && *(uint64_t*)decodedFrame->opaque != AV_NOPTS_VALUE)
				{
					pts = *(uint64_t *)decodedFrame->opaque;
					LOGD("pst1: %d",pts);
				}
				else if(packet.dts != AV_NOPTS_VALUE) {
				  pts = packet.dts;
				  LOGD("pst2: %d",pts);
				} else {
				  pts = 0;
				  LOGD("pst3: %d",pts);
				}
				//pts = av_q2d(codecCtx->time_base) * 1000000.0 * i * 2;
				pts *= 1000;
				//LOGD("debug %d,%d,%f",pts, (long)(av_q2d(codecCtx->time_base) * 1000000.0 * i * 2), av_q2d(codecCtx->time_base));
				if(0 == pts || 0 == baseTime)
				{
					baseTime = av_gettime() - pts;
					LOGD("BASETIME: %d",baseTime);
				}else{
					waitTime = (baseTime + pts) - av_gettime();
					LOGD("WAITTIME: %d, %d",waitTime,pts);
				}

				//waitTime = (av_q2d(codecCtx->time_base) * 1000.0 - 0.0) * 1000;
				if(waitTime>0)
					usleep(waitTime);
				if(!_stop)
				{
					synchronized(lockWindow)
					{
						if(!_stop && NULL!=window)
						{
							// lock the window buffer
							if (ANativeWindow_lock(pWin, &windowBuffer, NULL) < 0) {
								LOGE("cannot lock window");
							} else {
								// draw the frame on buffer
								//LOGD("copy buffer %d:%d:%d", width, height, width*height*RGB_SIZE);
								//LOGD("window buffer: %d:%d:%d", windowBuffer.width, windowBuffer.height, windowBuffer.stride);
								//memcpy(windowBuffer.bits, buffer,  width * height * RGB_SIZE);
								if(windowBuffer.width >= windowBuffer.stride){
									// stride == width: no row padding, one bulk copy is fine
									memcpy(windowBuffer.bits, buffer,  width * height * RGB_SIZE);
								}else{
									// stride > width: copy line by line and skip the
									// padding at the end of each window-buffer row
									uint8_t *dst = (uint8_t *) windowBuffer.bits;
									for(int row = 0; row < height; ++row)
										memcpy(dst + windowBuffer.stride * row * RGB_SIZE
												, buffer + width * row * RGB_SIZE
												, width * RGB_SIZE);
								}
								// unlock the window buffer and post it to display
								ANativeWindow_unlockAndPost(pWin);
								// count number of frames
								++i;
							}
						}
					}
				}
			}
		}

Copying the frame line by line and skipping the alignment padding at the end of each row fixes the sheared output.
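For reuse, the stride-aware copy can be pulled into a small helper. A sketch assuming a WINDOW_FORMAT_RGBA_8888 buffer (4 bytes per pixel, the RGB_SIZE above) and a tightly packed source frame; copy_frame_to_window is a hypothetical name:

#include <stdint.h>
#include <string.h>
#include <android/native_window.h>

#define RGB_SIZE 4  /* bytes per pixel for WINDOW_FORMAT_RGBA_8888 */

/* Copy a tightly packed RGBA frame of width x height pixels into a locked
 * window buffer, honoring the buffer's stride so padded rows do not shear
 * the image. Caller must ensure width <= wb->width and height <= wb->height. */
static void copy_frame_to_window(const ANativeWindow_Buffer *wb,
		const uint8_t *src, int width, int height) {
	uint8_t *dst = (uint8_t *) wb->bits;
	if (wb->stride == width) {
		/* No row padding: one bulk copy is enough. */
		memcpy(dst, src, (size_t) width * height * RGB_SIZE);
	} else {
		/* Padded rows: copy line by line, skipping stride - width pixels per row. */
		for (int row = 0; row < height; ++row)
			memcpy(dst + (size_t) wb->stride * row * RGB_SIZE,
					src + (size_t) width * row * RGB_SIZE,
					(size_t) width * RGB_SIZE);
	}
}

The decoder side has the same concept: an AVFrame's linesize can also be larger than the visible width, which is why sws_scale works with linesize arrays rather than plain widths.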

