请教各位大牛几个问题O(∩_∩)O哈!
有个解码器是用C语言写的,我想通过NDK把C代码封装成android调用的so文件,用jni调用,有几个问题请教哈
1.我把opensvc给移植过去了,我怎么从main.c函数中选择哪些函数作为入口函数呢,要选择哪些函数作为java的native函数入口呢?选择有什么原则吗?是在main.c中所有被调用的函数都要当做这个java的接口函数?
2.接口函数选定后,用在.java把这些函数在main.c中的定义全部修改到java层吗?
3.比如修改后的代码为opensvcAndroid.c,这个函数是不是要参考由javah生成的.h文件的格式,把main.c的中所有的的函数格式改成jint(JNIEnv* env, jobject thiz),还是只改从原来的main.c的函数入口处开始改,而main.c中函数入口前面的每个函数的定义不用改?
有个解码器是用C语言写的,我想通过NDK把C代码封装成android调用的so文件,用jni调用,有几个问题请教哈
1.我把opensvc给移植过去了,我怎么从main.c函数中选择哪些函数作为入口函数呢,要选择哪些函数作为java的native函数入口呢?选择有什么原则吗?是在main.c中所有被调用的函数都要当做这个java的接口函数?
2.接口函数选定后,用在.java把这些函数在main.c中的定义全部修改到java层吗?
3.比如修改后的代码为opensvcAndroid.c,这个函数是不是要参考由javah生成的.h文件的格式,把main.c的中所有的的函数格式改成jint(JNIEnv* env, jobject thiz),还是只改从原来的main.c的函数入口处开始改,而main.c中函数入口前面的每个函数的定义不用改?
对于库和接口这个话题就深了,简单够用就好吧
不太明白lz的意思,最好能贴点源码上来示意一下
网上一位大牛写的main.c函数代码如下:
#include "avcodec.h"
#include "h264.h"

/*
 * Decoder demo globals.  Target surface is CIF (352x288);
 * iBytesPixel == 2 selects the RGB565 output path.
 */
int iWidth  = 352;
int iHeight = 288;
int iBytesPixel = 2;            /* H.264 globals */

struct AVCodec        *codec;   /* Codec */
struct AVCodecContext *c;       /* Codec context */
struct AVFrame        *picture; /* Decoded frame */

/*
 * YUV -> RGB lookup tables, built from:
 *   R = Y + 1.402  * Cr
 *   G = Y - 0.344  * Cb - 0.714 * Cr
 *   B = Y + 1.772  * Cb
 * colortab holds four 256-entry chroma-contribution tables;
 * rgb_2_pix holds three 768-entry clamp-and-pack tables (one per channel).
 */
int *colortab;
int *u_b_tab;
int *u_g_tab;
int *v_g_tab;
int *v_r_tab;

unsigned int *rgb_2_pix;
unsigned int *r_2_pix;
unsigned int *g_2_pix;
unsigned int *b_2_pix;

/* Release the lookup tables allocated by CreateYUVTab_16(). */
void DeleteYUVTab()
{
    av_free(colortab);
    av_free(rgb_2_pix);
    /* BUGFIX: null the base pointers so an accidental second call (or a
     * later DisplayYUV_16) cannot double-free / use freed memory. */
    colortab  = NULL;
    rgb_2_pix = NULL;
}

/*
 * Build the YUV -> RGB565 lookup tables.  Call once before DisplayYUV_16();
 * pair with DeleteYUVTab() on shutdown.
 */
void CreateYUVTab_16()
{
    int i;
    int u, v;

    colortab = (int *)av_malloc(4 * 256 * sizeof(int));
    u_b_tab = &colortab[0 * 256];
    u_g_tab = &colortab[1 * 256];
    v_g_tab = &colortab[2 * 256];
    v_r_tab = &colortab[3 * 256];

    for (i = 0; i < 256; i++) {
        u = v = (i - 128);
        u_b_tab[i] = (int)( 1.772   * u);
        u_g_tab[i] = (int)( 0.34414 * u);
        v_g_tab[i] = (int)( 0.71414 * v);
        v_r_tab[i] = (int)( 1.402   * v);
    }

    /* Three 768-entry tables: [0,255] clamps low, [256,511] is the valid
     * band, [512,767] clamps high.  The +256 bias applied below lets a
     * (possibly negative) channel value index the table directly. */
    rgb_2_pix = (unsigned int *)av_malloc(3 * 768 * sizeof(unsigned int));
    r_2_pix = &rgb_2_pix[0 * 768];
    g_2_pix = &rgb_2_pix[1 * 768];
    b_2_pix = &rgb_2_pix[2 * 768];

    for (i = 0; i < 256; i++) {          /* underflow -> black */
        r_2_pix[i] = 0;
        g_2_pix[i] = 0;
        b_2_pix[i] = 0;
    }
    for (i = 0; i < 256; i++) {          /* in range -> RGB565 bit layout */
        r_2_pix[i + 256] = (i & 0xF8) << 8;
        g_2_pix[i + 256] = (i & 0xFC) << 3;
        b_2_pix[i + 256] = (i) >> 3;
    }
    for (i = 0; i < 256; i++) {          /* overflow -> saturated channel */
        r_2_pix[i + 512] = 0xF8 << 8;
        g_2_pix[i + 512] = 0xFC << 3;
        b_2_pix[i + 512] = 0x1F;
    }

    r_2_pix += 256;
    g_2_pix += 256;
    b_2_pix += 256;
}

/*
 * Convert one YUV420 frame to RGB565, packing two 16-bit pixels per 32-bit
 * word in pdst.  Processes the image as 2x2 blocks (one chroma sample per
 * block).  Strides are in samples; dst_ystride is in 16-bit pixels.
 * Crops the source to the iWidth/iHeight display window.
 */
void DisplayYUV_16(unsigned int *pdst, unsigned char *y, unsigned char *u,
                   unsigned char *v, int width, int height,
                   int src_ystride, int src_uvstride, int dst_ystride)
{
    int i, j;
    int r, g, b, rgb;
    int yy, ub, ug, vg, vr;
    unsigned char *yoff;
    unsigned char *uoff;
    unsigned char *voff;

    int width2  = width / 2;
    int height2 = height / 2;

    /* Center-crop horizontally when the source is wider than the display. */
    if (width2 > iWidth / 2) {
        width2 = iWidth / 2;
        y += (width - iWidth) / 4 * 2;
        u += (width - iWidth) / 4;
        v += (width - iWidth) / 4;
    }
    /* NOTE(review): this compares the HALF-height against the FULL iHeight;
     * by symmetry with the width clamp it looks like it should be iHeight/2.
     * Harmless for sources <= 2*iHeight tall; kept as-is pending confirmation. */
    if (height2 > iHeight)
        height2 = iHeight;

    for (j = 0; j < height2; j++) {
        yoff = y + j * 2 * src_ystride;
        uoff = u + j * src_uvstride;
        voff = v + j * src_uvstride;

        for (i = 0; i < width2; i++) {
            /* One chroma sample drives all four luma samples of the block. */
            yy = *(yoff + (i << 1));
            ub = u_b_tab[*(uoff + i)];
            ug = u_g_tab[*(uoff + i)];
            vg = v_g_tab[*(voff + i)];
            vr = v_r_tab[*(voff + i)];

            /* top-left pixel */
            b = yy + ub;
            g = yy - ug - vg;
            r = yy + vr;
            rgb = r_2_pix[r] + g_2_pix[g] + b_2_pix[b];

            /* top-right pixel; both packed into one 32-bit word */
            yy = *(yoff + (i << 1) + 1);
            b = yy + ub;
            g = yy - ug - vg;
            r = yy + vr;
            pdst[(j * dst_ystride + i)] =
                (rgb) + ((r_2_pix[r] + g_2_pix[g] + b_2_pix[b]) << 16);

            /* bottom-left pixel */
            yy = *(yoff + (i << 1) + src_ystride);
            b = yy + ub;
            g = yy - ug - vg;
            r = yy + vr;
            rgb = r_2_pix[r] + g_2_pix[g] + b_2_pix[b];

            /* bottom-right pixel */
            yy = *(yoff + (i << 1) + src_ystride + 1);
            b = yy + ub;
            g = yy - ug - vg;
            r = yy + vr;
            pdst[((2 * j + 1) * dst_ystride + i * 2) >> 1] =
                (rgb) + ((r_2_pix[r] + g_2_pix[g] + b_2_pix[b]) << 16);
        }
    }
}

// ================= MEMBER FUNCTIONS =======================
//
/*
 * FindStartCode: return 1 if Buf points at an Annex-B start code consisting
 * of `zeros_in_startcode` zero bytes followed by 0x01, else 0.
 * Reads zeros_in_startcode + 1 bytes from Buf.
 */
int FindStartCode (unsigned char *Buf, int zeros_in_startcode)
{
    int info = 1;
    int i;

    for (i = 0; i < zeros_in_startcode; i++)
        if (Buf[i] != 0)
            info = 0;
    if (Buf[i] != 1)        /* the byte after the zeros must be 0x01 */
        info = 0;
    return info;
}

/*
 * getNextNal: copy the next NAL unit (without its leading start code) from
 * `inpf` into `Buf` and return its length in bytes.  On return the file
 * position is at the start code of the following NAL (or at EOF for the
 * last NAL).  Buf must be large enough for the largest NAL in the stream.
 */
int getNextNal(FILE* inpf, unsigned char* Buf)
{
    int pos = 0;
    int StartCodeFound = 0;
    int info2 = 0;
    int info3 = 0;

    /* Skip the zero bytes of the current start code. */
    while (!feof(inpf) && (Buf[pos++] = fgetc(inpf)) == 0)
        ;

    while (!StartCodeFound) {
        if (feof(inpf))
            return pos - 1;                 /* last NAL in the file */

        Buf[pos++] = fgetc(inpf);

        /* BUGFIX: guard the look-behind so we never read before Buf[0]
         * on short prefixes. */
        info3 = (pos >= 4) ? FindStartCode(&Buf[pos - 4], 3) : 0;
        if (info3 != 1)
            info2 = (pos >= 3) ? FindStartCode(&Buf[pos - 3], 2) : 0;
        StartCodeFound = (info2 == 1 || info3 == 1);
    }

    /* Rewind past the start code we just consumed.
     * BUGFIX: the original always rewound 4 bytes; a 3-byte start code
     * (00 00 01) must rewind only 3, otherwise the last payload byte of
     * this NAL is lost. */
    if (info3 == 1) {
        fseek(inpf, -4, SEEK_CUR);
        return pos - 4;
    }
    fseek(inpf, -3, SEEK_CUR);
    return pos - 3;
}

/*
 * Decode "352x288.264" NAL by NAL and append the raw YUV420 planes to
 * "test_s.yuv".  Returns 0 on success, 1 on any setup failure.
 *
 * BUGFIX: every early-exit path now goes through the Decodereturn cleanup
 * label; the original `return 0` early exits leaked both FILE handles,
 * Buf, the codec context and the frame, and never checked outf or calloc.
 */
int main()
{
    char *fname = "352x288.264";
    FILE *inpf = NULL;
    FILE *outf = NULL;
    int i;
    int nalLen = 0;
    unsigned char *Buf = NULL;
    int got_picture, consumed_bytes;
    int ret = 1;                        /* pessimistic default */

    inpf = fopen(fname, "rb");
    if (!inpf)
        goto Decodereturn;
    outf = fopen("test_s.yuv", "wb");
    if (!outf)                          /* was never checked */
        goto Decodereturn;

    Buf = (unsigned char *)calloc(500000, sizeof(char));
    if (!Buf)                           /* allocation was unchecked */
        goto Decodereturn;

    c = avcodec_alloc_context();
    if (!c)
        goto Decodereturn;
    if (avcodec_open(c) < 0) {
        /* Context was never opened: free it directly, skip avcodec_close. */
        av_free(c);
        c = NULL;
        goto Decodereturn;
    }
    picture = avcodec_alloc_frame();
    if (!picture)
        goto Decodereturn;

    CreateYUVTab_16();

    while (!feof(inpf)) {
        nalLen = getNextNal(inpf, Buf);
        consumed_bytes = decode_frame(c, picture, &got_picture, Buf, nalLen);
        if (consumed_bytes > 0) {
            /* Dump Y, then U, then V, honoring each plane's linesize. */
            for (i = 0; i < c->height; i++)
                fwrite(picture->data[0] + i * picture->linesize[0], 1, c->width, outf);
            for (i = 0; i < c->height / 2; i++)
                fwrite(picture->data[1] + i * picture->linesize[1], 1, c->width / 2, outf);
            for (i = 0; i < c->height / 2; i++)
                fwrite(picture->data[2] + i * picture->linesize[2], 1, c->width / 2, outf);
        }
    }

    DeleteYUVTab();
    ret = 0;

Decodereturn:
    if (inpf)
        fclose(inpf);
    if (outf)
        fclose(outf);
    if (c) {
        avcodec_close(c);
        av_free(c);
        c = NULL;
    }
    if (picture) {
        av_free(picture);
        picture = NULL;
    }
    free(Buf);                          /* free(NULL) is a no-op */
    Buf = NULL;
    return ret;
}
1.设置哪些函数作为native函数,主要还是看你自己的程序结构了,抽象的层级是怎么设计的。原则我也谈不上什么,通常我都是放一些简单的初始化,配置类的函数以及jni回调java的函数。我觉得有许多相对数据交互的最好还是放在底层
2.这个其实更主要是程序几层之间接口设计的问题,不要太死板,也不需要一定按照javah生成的头文件来定义,可以自己做函数名的映射。
3.既然都封装so了,哪里又出来的main入口?函数的签名根据具体函数而定
你贴的这一大坨代码没什么分析价值吧。问题看得有点乱,就不照顺序答了
1.设置哪些函数作为native函数,主要还是看你自己的程序结构了,抽象的层级是怎么设计的。原则我也谈不上什么,通常我都是放一些简单的初始化,配置类的函数以及jni回调java的函数。我觉得有许多相对数据交互的最好还是放在底层
2.这个其实更主要是程序几层之间接口设计的问题,不要太死板,也不需要一定按照javah生成的头文件来……不好意思,我把问题描述清楚的有点乱O(∩_∩)O哈~
就上面的main.c函数具体来说,怎样选择java的native函数呢?
/*
 * NOTE(review): this is a duplicated re-paste of the main.c code quoted
 * earlier in the thread.  The first line of the original (the #include
 * directives and `int iWidth = 352;`) was lost in the re-paste, so `iWidth`
 * is undeclared in this copy, and the forum stripped whitespace so several
 * statements are fused onto single lines.  Kept verbatim as quoted.
 */
int iHeight = 288;int iBytesPixel =2;//H.264 globals
struct AVCodec *codec; // Codec
struct AVCodecContext *c; // Codec Context
/* NOTE(review): `int *colortab;` was swallowed into the comment below by
 * the paste — in this copy colortab is never declared as code. */
struct AVFrame *picture; // Frame int *colortab;
int *u_b_tab;
int *u_g_tab;
int *v_g_tab;
int *v_r_tab;unsigned int *rgb_2_pix;
unsigned int *r_2_pix;
unsigned int *g_2_pix;
/* Conversion formulas the tables below implement: */
unsigned int *b_2_pix;/*
R=Y+1.402Cr
G=Y-0.344Cb-0.714Cr
B=Y+1.772Cb
// */void DeleteYUVTab()
{
av_free(colortab);
av_free(rgb_2_pix);
/* CreateYUVTab_16 (begins on the next fused line): builds the four chroma
 * tables and the three 768-entry clamp/pack tables for RGB565. */
}void CreateYUVTab_16()
{
int i;
int u, v;colortab = (int *)av_malloc(4*256*sizeof(int));
u_b_tab = &colortab[0*256];
u_g_tab = &colortab[1*256];
v_g_tab = &colortab[2*256];
v_r_tab = &colortab[3*256];for (i=0; i<256; i++)
{
u = v = (i-128);u_b_tab[i] = (int) ( 1.772 * u);
u_g_tab[i] = (int) ( 0.34414 * u);
v_g_tab[i] = (int) ( 0.71414 * v);
v_r_tab[i] = (int) ( 1.402 * v);
}rgb_2_pix = (unsigned int *)av_malloc(3*768*sizeof(unsigned int));r_2_pix = &rgb_2_pix[0*768];
g_2_pix = &rgb_2_pix[1*768];
b_2_pix = &rgb_2_pix[2*768];for(i=0; i<256; i++)
{
r_2_pix[i] = 0;
g_2_pix[i] = 0;
b_2_pix[i] = 0;
}for(i=0; i<256; i++)
{
r_2_pix[i+256] = (i & 0xF8) << 8;
g_2_pix[i+256] = (i & 0xFC) << 3;
b_2_pix[i+256] = (i ) >> 3;
}for(i=0; i<256; i++)
{
r_2_pix[i+512] = 0xF8 << 8;
g_2_pix[i+512] = 0xFC << 3;
b_2_pix[i+512] = 0x1F;
}r_2_pix += 256;
g_2_pix += 256;
b_2_pix += 256;
/* DisplayYUV_16 (begins on the next fused line): converts one YUV420 frame
 * to RGB565, two packed pixels per 32-bit word in pdst. */
}void DisplayYUV_16(unsigned int *pdst, unsigned char *y, unsigned char *u, unsigned char *v, int width, int height, int src_ystride, int src_uvstride, int dst_ystride)
{
int i, j;
int r, g, b, rgb;int yy, ub, ug, vg, vr;unsigned char* yoff;
unsigned char* uoff;
unsigned char* voff;int width2 = width/2;
int height2 = height/2;if(width2>iWidth/2)
{
width2=iWidth/2;y+=(width-iWidth)/4*2;
u+=(width-iWidth)/4;
v+=(width-iWidth)/4;
}if(height2>iHeight)
height2=iHeight;for(j=0; j<height2; j++)
{
yoff = y + j * 2 * src_ystride;
uoff = u + j * src_uvstride;
voff = v + j * src_uvstride;for(i=0; i<width2; i++)
{
yy = *(yoff+(i<<1));
ub = u_b_tab[*(uoff+i)];
ug = u_g_tab[*(uoff+i)];
vg = v_g_tab[*(voff+i)];
vr = v_r_tab[*(voff+i)];b = yy + ub;
g = yy - ug - vg;
r = yy + vr;rgb = r_2_pix[r] + g_2_pix[g] + b_2_pix[b];yy = *(yoff+(i<<1)+1);
b = yy + ub;
g = yy - ug - vg;
r = yy + vr;pdst[(j*dst_ystride+i)] = (rgb)+((r_2_pix[r] + g_2_pix[g] + b_2_pix[b])<<16);yy = *(yoff+(i<<1)+src_ystride);
b = yy + ub;
g = yy - ug - vg;
r = yy + vr;rgb = r_2_pix[r] + g_2_pix[g] + b_2_pix[b];yy = *(yoff+(i<<1)+src_ystride+1);
b = yy + ub;
g = yy - ug - vg;
r = yy + vr;pdst [((2*j+1)*dst_ystride+i*2)>>1] = (rgb)+((r_2_pix[r] + g_2_pix[g] + b_2_pix[b])<<16);
}
}
}// ================= MEMBER FUNCTIONS =======================
//
/*
 * NOTE(review): duplicated re-paste of the earlier quote (same forum
 * whitespace mangling).  FindStartCode tests for an Annex-B start code of
 * `zeros_in_startcode` zero bytes followed by 0x01.
 */
int FindStartCode (unsigned char *Buf, int zeros_in_startcode)
{
int info;
int i;info = 1;
for (i = 0; i < zeros_in_startcode; i++)
if(Buf[i] != 0)
info = 0;if(Buf[i] != 1)
info = 0;
return info;
/* getNextNal (begins on the next fused line): reads the next NAL unit into
 * Buf and returns its length; leaves the file position at the next start
 * code.  NOTE(review): it always rewinds 4 bytes even for a 3-byte start
 * code — see the corrected version earlier in the thread. */
}int getNextNal(FILE* inpf, unsigned char* Buf)
{
int pos = 0;
int StartCodeFound = 0;
int info2 = 0;
int info3 = 0;while(!feof(inpf) && (Buf[pos++]=fgetc(inpf))==0);while (!StartCodeFound)
{
if (feof (inpf))
{
// return -1;
return pos-1;
}
Buf[pos++] = fgetc (inpf);
info3 = FindStartCode(&Buf[pos-4], 3);
if(info3 != 1)
info2 = FindStartCode(&Buf[pos-3], 2);
StartCodeFound = (info2 == 1 || info3 == 1);
}
fseek (inpf, -4, SEEK_CUR);
return pos - 4;
/* main: decode "352x288.264" NAL by NAL, writing raw YUV420 planes to
 * "test_s.yuv".  NOTE(review): the early `return 0` paths leak both FILE
 * handles, Buf, the context and the frame — the Decodereturn cleanup block
 * below is only reached by fall-through. */
}int main()
{
char *fname= "352x288.264";FILE * inpf=NULL;
FILE * outf=NULL;int i;
int nalLen=0;
unsigned char* Buf=NULL;
int got_picture, consumed_bytes; inpf = fopen(fname, "rb");
outf = fopen("test_s.yuv", "wb");if(!inpf)
return 0;Buf = (unsigned char*)calloc ( 500000, sizeof(char));c = avcodec_alloc_context();
if(!c)
return 0;if (avcodec_open(c) < 0)
return 0; picture = avcodec_alloc_frame();
if(!picture)
return 0;CreateYUVTab_16();while(!feof(inpf))
{
nalLen = getNextNal(inpf, Buf);consumed_bytes= decode_frame(c, picture, &got_picture, Buf, nalLen); if(consumed_bytes > 0)
{
/* Active branch: dump the Y, U and V planes line by line. */
//*
for(i=0; i<c->height; i++)
fwrite(picture->data[0] + i * picture->linesize[0], 1, c->width, outf);
for(i=0; i<c->height/2; i++)
fwrite(picture->data[1] + i * picture->linesize[1], 1, c->width/2, outf);
for(i=0; i<c->height/2; i++)
fwrite(picture->data[2] + i * picture->linesize[2], 1, c->width/2, outf);
// */
/* Disabled branch: direct-draw display path from the original desktop
 * sample (iDDraw is not available in this port). */
/*
if(iBytesPixel==2)
{
unsigned int *rgb = (unsigned int*)(iDDraw->BeginDraw());
DisplayYUV_16(rgb, picture->data[0], picture->data[1], picture->data[2], c->width, c->height, picture->linesize[0], picture->linesize[1], iWidth);
iDDraw->EndDraw();
}
else if(iBytesPixel==3)
{
unsigned char *rgb = (unsigned char*)(iDDraw->BeginDraw());
DisplayYUV_24(rgb, picture->data[0], picture->data[1], picture->data[2], c->width, c->height, picture->linesize[0], picture->linesize[1], iWidth);
iDDraw->EndDraw();
}
else if(iBytesPixel==4)
{
unsigned int *rgb = (unsigned int*)(iDDraw->BeginDraw());
DisplayYUV_32(rgb, picture->data[0], picture->data[1], picture->data[2], c->width, c->height, picture->linesize[0], picture->linesize[1], iWidth);
iDDraw->EndDraw();
}
// */
}
/* Cleanup; the `Decodereturn:` label below was fused onto the fclose line
 * by the paste. */
}DeleteYUVTab();if(inpf)
fclose(inpf);if(outf)
fclose(outf);Decodereturn:if(c)
{
avcodec_close(c);
av_free(c);
c = NULL;
}
if(picture)
{
av_free(picture);
picture = NULL;
}if(Buf)
{
free(Buf);
Buf = NULL;
} return 0;
}
想请问大家:在main.c中选择java的native接口函数时,是不是从main.c 的函数入口处把main.c代码分为三部分就好,或者大家有什么开源的移植jni的android项目推荐下O(∩_∩)O哈!
You can download(下载地址) a PRE-RELEASE source snapshot for evaluation.
2.avcodec_register_all();3.avcodec_find_decoder(CodecID); // enum CodecID (AVCodec* m_codec)4.avcodec_alloc_context(); // (AVCodecContext* m_codeccontext)5.avcodec_open(m_codeccontext, m_codec);6.avcodec_alloc_frame(); // AVFrame* m_framepicture7.SDL_SetVideoMode();8.SDL_WM_SetCaption();9.SDL_CreateYUVOverlay();10.while()10.1.avcodec_decode_video();(if>0)显示10.2.SDL_LockYUVOverlay();10.3.sws_getContext() // 只进行一次10.4.sws_scale();10.5.SDL_UnlockYUVOverlay();10.6.SDL_DisplayYUVOverlay();11.av_free();12.avcodec_close();
2.“照上面的main函数的流程封装ffmpeg,最好封装成init(),decoder(),uninit()三个函数"
2.1我能理解为在java中声明native本地方法时,看库的主函数main的流程即可?
2.2随便给我一个开源库的main.c,按照你的意思,都有三个接口函数:init(),功能函数(),uninit ()?
2.3不用考虑库的对外头文件"avcodec.h"中暴露的接口声明一个对应的java中的本地方法?
3.还有就是按照你的封装成init(),decoder(),uninit()三个函数,那么在移植中创建的.c文件中,这三个接口函数的实现是什么?从main.c中怎么选择哪些段代码呢?
首先,不好意思,前几天一直没空,所以一直没回复。其次:1.我没有说h264不能移植,我现在接的一个项目就是我移植的ffmpeg的h264解码。我只是说你不能直接把这个main函数进行移植,这只能做为一个例子,你可以参考它进行封装。
2.1.因为C是一个过程语言,一般给出的main函数都是一个标准流程,所以你可以按照main 里面的流程进行封装
2.2.因为是解码,如果只考虑单一的h264的话,你最好就定义这三个函数,一般来说就够用了,初始化传格式进去,解码传帧数据,反初始化时进行回收操作,一般这三个就可以用了,而且方便、简单
2.3.如果你封成库的话,用jni,在java层用native做好了,就跟本不用.h文件了,后面直接可以把库拷过去用,再加一个.java的native封装文件就行了
3.实现你可以参照main函数进行,有的可以直接拿main中的代码来用,但用到jni,所以,细节部分还需要自己修改,但整体流程不变