各位高手,大家好。我是一个android新手,原来也没有怎么接触过多媒体方面的知识,我想问下在android 2.0下如何解析h.264 的视频文件得到视频帧数据。
解决方案 »
- 被分到一个优化算法的任务,求大神给点自己的经验和开发的思路~~
- android apk java混淆加密 分享一下,欢迎探讨
- android的桌面小工具怎么开发?各位大侠能提点建议和文档吗?
- eclipse里面layout的xml文件无法打开,求助!
- Android里怎么获取WAP Push类型信息是由哪个短信中心(SMSC)发来?
- 躲开危险的洗虾粉,全面的健康饮食-饮食禁忌--Android应用!
- 大家好,请教一个小问题
- Android 中string.xml资源放在 static final string 数组中
- android sqlite 一般在什么时候建表
- qq空间评论功能的实现细节(android版qq)
- android使用mediaRecorder录制视频问题
- android 随机数生成后不能比较
/** Activity entry point: the whole content view is the custom video view. */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    final VView videoView = new VView(this);
    vv = videoView;
    setContentView(videoView);
}
// Option-menu item identifiers.
public static final int PLAY_ID = Menu.FIRST;
public static final int EXIT_ID = Menu.FIRST + 1;

/** Builds the options menu: a "play" entry followed by an "exit" entry. */
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    super.onCreateOptionsMenu(menu);
    menu.add(Menu.NONE, PLAY_ID, 0, R.string.play);
    menu.add(Menu.NONE, EXIT_ID, 1, R.string.exit);
    return true;
}

@Override
/** Dispatches menu selections: PLAY starts decoding, EXIT closes the activity. */
public boolean onOptionsItemSelected(MenuItem item) {
    final int id = item.getItemId();
    if (id == PLAY_ID) {
        // NOTE(review): hard-coded test clip; assumes a 352x288 raw H.264
        // elementary stream exists at this sdcard path on the device.
        vv.PlayVideo("/sdcard/352x288.264");
        return true;
    }
    if (id == EXIT_ID) {
        finish();
        return true;
    }
    return super.onOptionsItemSelected(item);
}
}

/**
 * View that decodes a raw H.264 elementary stream through a native (JNI)
 * decoder and renders each decoded frame as an RGB565 bitmap.
 *
 * Threading: {@link #PlayVideo} spawns a background thread ({@link #run})
 * that reads the file, reassembles NAL units on 00 00 00 01 start-code
 * boundaries and feeds them to the native decoder; decoded pixels land in
 * {@code mPixel} and are pushed to the UI via postInvalidate()/onDraw().
 */
class VView extends View implements Runnable {
    // Unused in this class; kept for interface compatibility.
    Bitmap mBitQQ = null;
    Paint mPaint = null;
    Bitmap mSCBitmap = null;

    // Fixed decode size; must match the stream being played.
    int width = 352;
    int height = 288;

    // RGB565 frame buffer: 2 bytes per pixel. Java byte arrays are
    // zero-initialized, so no explicit clearing is required.
    byte[] mPixel = new byte[width * height * 2];
    ByteBuffer buffer = ByteBuffer.wrap(mPixel);
    Bitmap VideoBit = Bitmap.createBitmap(width, height, Config.RGB_565);

    // Rolling 32-bit window over the input bytes, used by MergeBuffer() to
    // detect the 00 00 00 01 NAL start code (window == 1 once shifted in).
    int mTrans = 0x0F0F0F0F;

    String PathFileName;

    // NOTE(review): these declarations must match the JNI C signatures
    // exactly (parameter lists and return types) or the native calls read
    // garbage — verify against the C side of libH264Android.
    public native int InitDecoder(int width, int height);
    public native int UninitDecoder();
    public native int DecoderNal(byte[] in, int insize, byte[] out);

    static {
        System.loadLibrary("H264Android");
    }

    public VView(Context context) {
        super(context);
        setFocusable(true);
        // mPixel is already all zeroes; the original clear loop was redundant.
    }

    /** Starts decoding/playing the given file on a new background thread. */
    public void PlayVideo(String file) {
        PathFileName = file;
        new Thread(this).start();
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        // Fix: copyPixelsFromBuffer advances the buffer position, so without
        // a rewind every redraw after the first would run out of data.
        buffer.rewind();
        VideoBit.copyPixelsFromBuffer(buffer);
        canvas.drawBitmap(VideoBit, 0, 0, null);
    }

    /**
     * Copies bytes from SockBuf (starting at SockBufUsed) into NalBuf
     * (starting at NalBufUsed) until either the 00 00 00 01 start code of
     * the next NAL unit has been shifted through mTrans, or SockRemain
     * bytes have been consumed.
     *
     * @return the number of bytes consumed (including the final 0x01 byte
     *         of the start code when one was found; mTrans == 1 then).
     */
    int MergeBuffer(byte[] NalBuf, int NalBufUsed, byte[] SockBuf, int SockBufUsed, int SockRemain) {
        int i = 0;
        byte Temp;
        for (i = 0; i < SockRemain; i++) {
            Temp = SockBuf[i + SockBufUsed];
            NalBuf[i + NalBufUsed] = Temp;
            // Sign extension of negative bytes is harmless here: the stray
            // high bits shift out before mTrans can ever equal exactly 1.
            mTrans <<= 8;
            mTrans |= Temp;
            if (mTrans == 1) { // saw 00 00 00 01
                i++;
                break;
            }
        }
        return i;
    }

    /**
     * Background decode loop: reads the file in 2 KB chunks, reassembles
     * NAL units on start-code boundaries, skips everything before the first
     * SPS, then decodes each complete NAL and requests a redraw per frame.
     */
    public void run() {
        int iTemp = 0;
        int nalLen;
        boolean bFirst = true;   // true until the first start code is seen
        boolean bFindPPS = true; // true until an SPS (NAL type 7) is found
        int bytesRead;
        int NalBufUsed = 0;
        int SockBufUsed = 0;
        byte[] NalBuf = new byte[40980]; // one reassembled NAL unit
        byte[] SockBuf = new byte[2048]; // raw file-read chunk

        FileInputStream fileIS;
        try {
            fileIS = new FileInputStream(PathFileName);
        } catch (IOException e) {
            return;
        }

        InitDecoder(width, height);
        try {
            while (!Thread.currentThread().isInterrupted()) {
                try {
                    bytesRead = fileIS.read(SockBuf, 0, 2048);
                } catch (IOException e) {
                    // Fix: the original swallowed this exception and kept
                    // looping on stale data; treat a failed read as EOF.
                    break;
                }
                if (bytesRead <= 0)
                    break;
                SockBufUsed = 0;
                while (bytesRead - SockBufUsed > 0) {
                    nalLen = MergeBuffer(NalBuf, NalBufUsed, SockBuf, SockBufUsed, bytesRead - SockBufUsed);
                    NalBufUsed += nalLen;
                    SockBufUsed += nalLen;
                    // mTrans == 1 means a full start code just arrived:
                    // NalBuf now holds one complete NAL unit plus the four
                    // start-code bytes of the next one.
                    while (mTrans == 1) {
                        mTrans = 0xFFFFFFFF; // re-arm the detector window
                        if (bFirst) {
                            // Bytes before the very first start code are junk.
                            bFirst = false;
                        } else {
                            if (bFindPPS) {
                                // Drop NALs until the SPS (type 7) arrives so
                                // the decoder sees its config before frames.
                                if ((NalBuf[4] & 0x1F) == 7) {
                                    bFindPPS = false;
                                } else {
                                    NalBuf[0] = 0;
                                    NalBuf[1] = 0;
                                    NalBuf[2] = 0;
                                    NalBuf[3] = 1;
                                    NalBufUsed = 4;
                                    break;
                                }
                            }
                            // Exclude the 4 trailing start-code bytes that
                            // belong to the NEXT NAL unit.
                            iTemp = DecoderNal(NalBuf, NalBufUsed - 4, mPixel);
                            if (iTemp > 0)
                                postInvalidate(); // a frame was decoded
                        }
                        // Re-seed NalBuf with a start code for the next NAL.
                        NalBuf[0] = 0;
                        NalBuf[1] = 0;
                        NalBuf[2] = 0;
                        NalBuf[3] = 1;
                        NalBufUsed = 4;
                    }
                }
            }
        } finally {
            try {
                fileIS.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
            UninitDecoder();
        }
    }
}
/*
 * Class:     h264_VView
 * Method:    InitDecoder
 * Signature: ()I
 *
 * Allocates one H.264 decoder instance plus the YUV->RGB565 lookup tables
 * and returns the Decoder handle cast to a jlong.
 *
 * NOTE(review): the Java side declares
 *   public native int InitDecoder(int width, int height);
 * — it passes two ints and expects a 32-bit result, while this function
 * takes no width/height and returns jlong. Reconcile the two signatures:
 * as written the handle is truncated to int on the Java side.
 */
jlong Java_h264_VView_InitDecoder(JNIEnv* env, jobject thiz)
{
/* Build the YUV -> RGB565 conversion tables used later by DisplayYUV_16(). */
CreateYUVTab_16();
Decoder * de = (Decoder *)av_malloc(sizeof(Decoder));
de->c = avcodec_alloc_context();
/* NOTE(review): stock ffmpeg's avcodec_open() takes (context, codec); a
 * single-argument call only compiles against a stripped/customized build —
 * confirm this matches the bundled libH264Android ffmpeg sources. */
avcodec_open(de->c);
de->picture = avcodec_alloc_frame(); /* was: picture= malloc(sizeof(AVFrame)); */
return (jlong)de;
}
/*
 * Class:     h264_VView
 * Method:    UninitDecoder
 * Signature: ()I
 */
/*
 * Tears down the decoder instance created by InitDecoder and releases the
 * color-conversion tables. Returns 1 on success, 0 when pDecoder is NULL.
 *
 * NOTE(review): the Java side declares "public native int UninitDecoder()"
 * with no argument, yet this implementation takes a jlong pDecoder — the
 * two signatures must be reconciled or the handle read here is garbage.
 */
jint Java_h264_VView_UninitDecoder(JNIEnv* env, jobject thiz,jlong pDecoder)
{
/* Free the lookup tables built by CreateYUVTab_16(). */
DeleteYUVTab();
if (pDecoder)
{
Decoder * de=(Decoder * )pDecoder;
if(de->c)
{
decode_end(de->c);
/* NOTE(review): de->c came from avcodec_alloc_context() and de itself from
 * av_malloc(); freeing with plain free() only matches if this ffmpeg
 * build's av_malloc is plain malloc — verify, otherwise use av_free(). */
free(de->c->priv_data); free(de->c);
de->c = NULL;
}
/* de->picture came from avcodec_alloc_frame(); same free()/av_free caveat. */
if(de->picture)
{
free(de->picture);
de->picture = NULL;
}
free(de);
return 1;
}
else
return 0;
}
/*
 * Class:     h264_VView
 * Method:    DecoderNal
 * Signature: ([B[I)I
 */
/*
 * Decodes one NAL unit from `in` (nalLen bytes). When the decoder consumed
 * data, converts the current picture's YUV planes to RGB565 into `out`.
 * Returns the number of bytes consumed by decode_frame(), or 0 when
 * pDecoder is NULL.
 *
 * NOTE(review): the Java side declares
 *   public native int DecoderNal(byte[] in, int insize, byte[] out);
 * — no decoder handle is passed, yet this implementation expects a jlong
 * pDecoder before the arrays. The argument lists must be reconciled.
 */
jint Java_h264_VView_DecoderNal(JNIEnv* env, jobject thiz,jlong pDecoder,jbyteArray in, jint nalLen, jbyteArray out)
{
int i;    /* unused */
int imod; /* unused */
int got_picture;
if (pDecoder)
{
Decoder * de=(Decoder * )pDecoder;
/* Pin (or copy) the Java byte arrays into native memory. */
jbyte * Buf = (jbyte*)(*env)->GetByteArrayElements(env, in, 0);
jbyte * Pixel= (jbyte*)(*env)->GetByteArrayElements(env, out, 0);
int consumed_bytes = decode_frame(de->c, de->picture, &got_picture, Buf, nalLen);
if(consumed_bytes > 0)
{
/* Convert the decoded YUV planes to RGB565 at the coded width/height. */
DisplayYUV_16((int*)Pixel, de->picture->data[0], de->picture->data[1], de->picture->data[2], de->c->width, de->c->height, de->picture->linesize[0], de->picture->linesize[1], de->c->width);
}
/* Mode 0 copies any native-side changes back to the Java arrays and unpins. */
(*env)->ReleaseByteArrayElements(env, in, Buf, 0);
(*env)->ReleaseByteArrayElements(env, out, Pixel, 0);
return consumed_bytes;
}
else
return 0;
}