最近在有个项目是关于android的视频采集,要求是捕捉某个事件发生前后30秒的视频片段。以前从没有做过android或多媒体开发,很多概念和技术不是很清楚,在网上各论坛转了很久,大致有个思路(MediaRecorder类不知道能不能满足上面的要求),就是利用Camera的PreviewCallback接口,通过实现onPreviewFrame(byte[] data, Camera camera)来捕捉每一帧数据,然后将data保存到文件里,问题是直接保存的数据无法播放,网上有人提供过decodeYUV420SP方法来将默认的YUV格式转码为RGB格式,问题是无论转码前还是转码后我都不知道应该保存为什么文件,是不是直接保存为后缀名为".3gp"或".mp4"就可以了?
我一直是在模拟器上调试的,不知道是不是一定需要在真机上运行才行。
跪求各达人显显灵,谢啦!以下是我的代码,请各位达人斧正
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;

import android.app.Activity;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.Window;
import android.view.WindowManager;

/**
 * Full-screen camera preview activity that installs a
 * {@link Camera.PreviewCallback} to capture every preview frame, convert it
 * from NV21 (YUV420SP) to RGB24 and append it to a raw file on the SD card.
 *
 * NOTE(review): the output is a headerless RGB24 frame dump, NOT a .3gp/.mp4
 * container — media players cannot open it. Tools such as ffplay can, if told
 * the exact pixel format and frame size. Producing a playable video requires
 * a video encoder plus a container muxer (e.g. MediaRecorder).
 */
public class AndroidCamera extends Activity implements SurfaceHolder.Callback {

    private static final String TAG = "AndroidCamera";

    private SurfaceView mSurfaceView = null;
    private SurfaceHolder mSurfaceHolder = null;
    private Camera mCamera = null;
    private boolean mPreviewRunning = false;

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().setFormat(PixelFormat.TRANSLUCENT);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.main);
        mSurfaceView = (SurfaceView) findViewById(R.id.surface_camera);
        mSurfaceHolder = mSurfaceView.getHolder();
        mSurfaceHolder.addCallback(this);
        // Required on pre-Honeycomb devices so the camera HAL can push
        // buffers straight to the surface.
        mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        if (mCamera == null) {
            return; // camera failed to open or was already released
        }
        if (mPreviewRunning) {
            mCamera.stopPreview();
            mPreviewRunning = false;
        }
        Camera.Parameters p = mCamera.getParameters();
        // NOTE(review): cameras accept only a discrete set of preview sizes;
        // the surface size is not guaranteed to be one of them. A robust
        // implementation should pick the closest match from
        // p.getSupportedPreviewSizes().
        p.setPreviewSize(width, height);
        mCamera.setParameters(p);
        // Read back the size the driver actually accepted and size the
        // frame sink from that, not from the requested surface size.
        Camera.Size actual = mCamera.getParameters().getPreviewSize();
        mCamera.setPreviewCallback(new VideoData(actual.width, actual.height));
        try {
            mCamera.setPreviewDisplay(holder);
        } catch (IOException e) {
            // Without a preview surface the camera cannot start; bail out
            // instead of calling startPreview() on a broken setup.
            Log.e(TAG, "setPreviewDisplay failed", e);
            return;
        }
        mCamera.startPreview();
        mPreviewRunning = true;
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        mCamera = Camera.open();
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        Log.v(TAG, "surfaceDestroyed");
        if (mCamera != null) {
            // Detach the callback before release so no frame arrives on a
            // dead camera object.
            mCamera.setPreviewCallback(null);
            mCamera.stopPreview();
            mPreviewRunning = false;
            mCamera.release();
            mCamera = null;
        }
    }
}

/**
 * Frame sink: converts each NV21 preview frame to RGB24 and appends it to a
 * raw dump file. One instance is bound to a fixed frame size at construction.
 */
class VideoData implements Camera.PreviewCallback {

    private static final String TAG = "androidCamera";

    private final int mWidth;   // preview frame width in pixels
    private final int mHeight;  // preview frame height in pixels
    // Reusable per-frame output buffer: 3 bytes (R,G,B) per pixel.
    // Allocating once avoids a multi-hundred-KB allocation on every frame.
    private final byte[] mRgbBuffer;
    private RandomAccessFile raf = null;

    public VideoData(int width, int height) {
        Log.v(TAG, "new VideoData " + width + "x" + height);
        mWidth = width;
        mHeight = height;
        mRgbBuffer = new byte[width * height * 3];
        try {
            // Raw RGB24 dump. Deliberately NOT named .3gp/.mp4: the file is
            // just concatenated frames, not a valid media container.
            File file = new File("/sdcard/camera.rgb");
            Log.v(TAG, "opening " + file.getPath());
            raf = new RandomAccessFile(file, "rw");
        } catch (Exception ex) {
            // Leave raf null; onPreviewFrame will skip writing.
            Log.e(TAG, "failed to open output file", ex);
        }
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        if (data == null || raf == null) {
            return;
        }
        decodeYUV420SP(mRgbBuffer, data, mWidth, mHeight);
        try {
            // BUG FIX: the original wrote only data.length (w*h*3/2) bytes of
            // the w*h*3-byte RGB buffer, truncating the bottom third of
            // every frame.
            raf.write(mRgbBuffer, 0, mRgbBuffer.length);
        } catch (IOException ex) {
            Log.e(TAG, "frame write failed", ex);
        }
    }

    /** Closes the output file; call this when capture is finished. */
    public void close() {
        if (raf != null) {
            try {
                raf.close();
            } catch (IOException ex) {
                Log.e(TAG, "close failed", ex);
            }
            raf = null;
        }
    }

    // Safety net only — finalize() timing is not guaranteed by the VM;
    // callers should use close() explicitly.
    @Override
    protected void finalize() throws Throwable {
        try {
            close();
        } finally {
            super.finalize();
        }
    }

    /**
     * Converts one NV21 (YUV420 semi-planar) frame to packed RGB24 using
     * fixed-point ITU-R BT.601 coefficients (scaled by 1024, hence the
     * final {@code >> 10}).
     *
     * @param rgbBuf   output, at least {@code width*height*3} bytes (R,G,B per pixel)
     * @param yuv420sp input, at least {@code width*height*3/2} bytes
     * @param width    frame width in pixels
     * @param height   frame height in pixels
     * @throws NullPointerException     if either buffer is null
     * @throws IllegalArgumentException if either buffer is too small
     */
    private static void decodeYUV420SP(byte[] rgbBuf, byte[] yuv420sp, int width, int height) {
        final int frameSize = width * height;
        if (rgbBuf == null)
            throw new NullPointerException("buffer 'rgbBuf' is null");
        if (rgbBuf.length < frameSize * 3)
            throw new IllegalArgumentException("buffer 'rgbBuf' size "
                    + rgbBuf.length + " < minimum " + frameSize * 3);
        if (yuv420sp == null)
            throw new NullPointerException("buffer 'yuv420sp' is null");
        if (yuv420sp.length < frameSize * 3 / 2)
            throw new IllegalArgumentException("buffer 'yuv420sp' size " + yuv420sp.length
                    + " < minimum " + frameSize * 3 / 2);
        int i = 0, y = 0;
        int uvp = 0, u = 0, v = 0;
        int y1192 = 0, r = 0, g = 0, b = 0;
        for (int j = 0, yp = 0; j < height; j++) {
            // Chroma plane is subsampled 2x2: one interleaved V/U pair per
            // 2x2 luma block, stored after the luma plane.
            uvp = frameSize + (j >> 1) * width;
            u = 0;
            v = 0;
            for (i = 0; i < width; i++, yp++) {
                y = (0xff & ((int) yuv420sp[yp])) - 16;
                if (y < 0) y = 0;
                if ((i & 1) == 0) { // new chroma sample every second column
                    v = (0xff & yuv420sp[uvp++]) - 128;
                    u = (0xff & yuv420sp[uvp++]) - 128;
                }
                y1192 = 1192 * y;
                r = (y1192 + 1634 * v);
                g = (y1192 - 833 * v - 400 * u);
                b = (y1192 + 2066 * u);
                // Clamp to [0, 255 << 10] before the fixed-point shift.
                if (r < 0) r = 0; else if (r > 262143) r = 262143;
                if (g < 0) g = 0; else if (g > 262143) g = 262143;
                if (b < 0) b = 0; else if (b > 262143) b = 262143;
                rgbBuf[yp * 3] = (byte) (r >> 10);
                rgbBuf[yp * 3 + 1] = (byte) (g >> 10);
                rgbBuf[yp * 3 + 2] = (byte) (b >> 10);
            }
        }
    }
}
我一直是在模拟器上调试的,不知道是不是一定需要在真机上运行才行。
跪求各达人显显灵,谢啦!以下是我的代码,请各位达人斧正
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;

import android.app.Activity;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.Window;
import android.view.WindowManager;

/**
 * Full-screen camera preview activity that installs a
 * {@link Camera.PreviewCallback} to capture every preview frame, convert it
 * from NV21 (YUV420SP) to RGB24 and append it to a raw file on the SD card.
 *
 * NOTE(review): the output is a headerless RGB24 frame dump, NOT a .3gp/.mp4
 * container — media players cannot open it. Tools such as ffplay can, if told
 * the exact pixel format and frame size. Producing a playable video requires
 * a video encoder plus a container muxer (e.g. MediaRecorder).
 */
public class AndroidCamera extends Activity implements SurfaceHolder.Callback {

    private static final String TAG = "AndroidCamera";

    private SurfaceView mSurfaceView = null;
    private SurfaceHolder mSurfaceHolder = null;
    private Camera mCamera = null;
    private boolean mPreviewRunning = false;

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().setFormat(PixelFormat.TRANSLUCENT);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.main);
        mSurfaceView = (SurfaceView) findViewById(R.id.surface_camera);
        mSurfaceHolder = mSurfaceView.getHolder();
        mSurfaceHolder.addCallback(this);
        // Required on pre-Honeycomb devices so the camera HAL can push
        // buffers straight to the surface.
        mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        if (mCamera == null) {
            return; // camera failed to open or was already released
        }
        if (mPreviewRunning) {
            mCamera.stopPreview();
            mPreviewRunning = false;
        }
        Camera.Parameters p = mCamera.getParameters();
        // NOTE(review): cameras accept only a discrete set of preview sizes;
        // the surface size is not guaranteed to be one of them. A robust
        // implementation should pick the closest match from
        // p.getSupportedPreviewSizes().
        p.setPreviewSize(width, height);
        mCamera.setParameters(p);
        // Read back the size the driver actually accepted and size the
        // frame sink from that, not from the requested surface size.
        Camera.Size actual = mCamera.getParameters().getPreviewSize();
        mCamera.setPreviewCallback(new VideoData(actual.width, actual.height));
        try {
            mCamera.setPreviewDisplay(holder);
        } catch (IOException e) {
            // Without a preview surface the camera cannot start; bail out
            // instead of calling startPreview() on a broken setup.
            Log.e(TAG, "setPreviewDisplay failed", e);
            return;
        }
        mCamera.startPreview();
        mPreviewRunning = true;
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        mCamera = Camera.open();
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        Log.v(TAG, "surfaceDestroyed");
        if (mCamera != null) {
            // Detach the callback before release so no frame arrives on a
            // dead camera object.
            mCamera.setPreviewCallback(null);
            mCamera.stopPreview();
            mPreviewRunning = false;
            mCamera.release();
            mCamera = null;
        }
    }
}

/**
 * Frame sink: converts each NV21 preview frame to RGB24 and appends it to a
 * raw dump file. One instance is bound to a fixed frame size at construction.
 */
class VideoData implements Camera.PreviewCallback {

    private static final String TAG = "androidCamera";

    private final int mWidth;   // preview frame width in pixels
    private final int mHeight;  // preview frame height in pixels
    // Reusable per-frame output buffer: 3 bytes (R,G,B) per pixel.
    // Allocating once avoids a multi-hundred-KB allocation on every frame.
    private final byte[] mRgbBuffer;
    private RandomAccessFile raf = null;

    public VideoData(int width, int height) {
        Log.v(TAG, "new VideoData " + width + "x" + height);
        mWidth = width;
        mHeight = height;
        mRgbBuffer = new byte[width * height * 3];
        try {
            // Raw RGB24 dump. Deliberately NOT named .3gp/.mp4: the file is
            // just concatenated frames, not a valid media container.
            File file = new File("/sdcard/camera.rgb");
            Log.v(TAG, "opening " + file.getPath());
            raf = new RandomAccessFile(file, "rw");
        } catch (Exception ex) {
            // Leave raf null; onPreviewFrame will skip writing.
            Log.e(TAG, "failed to open output file", ex);
        }
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        if (data == null || raf == null) {
            return;
        }
        decodeYUV420SP(mRgbBuffer, data, mWidth, mHeight);
        try {
            // BUG FIX: the original wrote only data.length (w*h*3/2) bytes of
            // the w*h*3-byte RGB buffer, truncating the bottom third of
            // every frame.
            raf.write(mRgbBuffer, 0, mRgbBuffer.length);
        } catch (IOException ex) {
            Log.e(TAG, "frame write failed", ex);
        }
    }

    /** Closes the output file; call this when capture is finished. */
    public void close() {
        if (raf != null) {
            try {
                raf.close();
            } catch (IOException ex) {
                Log.e(TAG, "close failed", ex);
            }
            raf = null;
        }
    }

    // Safety net only — finalize() timing is not guaranteed by the VM;
    // callers should use close() explicitly.
    @Override
    protected void finalize() throws Throwable {
        try {
            close();
        } finally {
            super.finalize();
        }
    }

    /**
     * Converts one NV21 (YUV420 semi-planar) frame to packed RGB24 using
     * fixed-point ITU-R BT.601 coefficients (scaled by 1024, hence the
     * final {@code >> 10}).
     *
     * @param rgbBuf   output, at least {@code width*height*3} bytes (R,G,B per pixel)
     * @param yuv420sp input, at least {@code width*height*3/2} bytes
     * @param width    frame width in pixels
     * @param height   frame height in pixels
     * @throws NullPointerException     if either buffer is null
     * @throws IllegalArgumentException if either buffer is too small
     */
    private static void decodeYUV420SP(byte[] rgbBuf, byte[] yuv420sp, int width, int height) {
        final int frameSize = width * height;
        if (rgbBuf == null)
            throw new NullPointerException("buffer 'rgbBuf' is null");
        if (rgbBuf.length < frameSize * 3)
            throw new IllegalArgumentException("buffer 'rgbBuf' size "
                    + rgbBuf.length + " < minimum " + frameSize * 3);
        if (yuv420sp == null)
            throw new NullPointerException("buffer 'yuv420sp' is null");
        if (yuv420sp.length < frameSize * 3 / 2)
            throw new IllegalArgumentException("buffer 'yuv420sp' size " + yuv420sp.length
                    + " < minimum " + frameSize * 3 / 2);
        int i = 0, y = 0;
        int uvp = 0, u = 0, v = 0;
        int y1192 = 0, r = 0, g = 0, b = 0;
        for (int j = 0, yp = 0; j < height; j++) {
            // Chroma plane is subsampled 2x2: one interleaved V/U pair per
            // 2x2 luma block, stored after the luma plane.
            uvp = frameSize + (j >> 1) * width;
            u = 0;
            v = 0;
            for (i = 0; i < width; i++, yp++) {
                y = (0xff & ((int) yuv420sp[yp])) - 16;
                if (y < 0) y = 0;
                if ((i & 1) == 0) { // new chroma sample every second column
                    v = (0xff & yuv420sp[uvp++]) - 128;
                    u = (0xff & yuv420sp[uvp++]) - 128;
                }
                y1192 = 1192 * y;
                r = (y1192 + 1634 * v);
                g = (y1192 - 833 * v - 400 * u);
                b = (y1192 + 2066 * u);
                // Clamp to [0, 255 << 10] before the fixed-point shift.
                if (r < 0) r = 0; else if (r > 262143) r = 262143;
                if (g < 0) g = 0; else if (g > 262143) g = 262143;
                if (b < 0) b = 0; else if (b > 262143) b = 262143;
                rgbBuf[yp * 3] = (byte) (r >> 10);
                rgbBuf[yp * 3 + 1] = (byte) (g >> 10);
                rgbBuf[yp * 3 + 2] = (byte) (b >> 10);
            }
        }
    }
}
解决方案 »
- 求助,急求各位大神帮忙啊
- 项目中需要开发android播放器,需要能缓冲网络视频资源及在线播放,请教各位大牛!
- ListView怎么样横向显示内容
- android后台服务用servlet怎么搭建
- 如何通过CMWAP连接WAP网站跳过移动资费提示
- android Home键的后台运行处理问题
- 请教读文件
- 多线程HandlerThread和getLooper
- 可否用win平台读写system文件夹
- studio新人安装问题在线求大哥解决一下!
- android程序添加中文数据到mysql数据库中 出现乱码
- android下用ViewPager或者ViewFlow加载Activity,但是自定义的Activity切换动画失效
没看你的代码,但是如果存在本地的话,不用一帧一帧的那么保存,设置几个属性就好
/**
 * Starts recording camera video (with microphone audio) to the given file
 * using MediaRecorder, which performs the encoding and MP4 muxing itself —
 * no manual per-frame capture or YUV conversion is needed.
 *
 * @param filePath
 *            absolute path of the output video file
 */
public void captureVideo(String filePath) {
    mCamera.unlock(); // hand camera ownership over to MediaRecorder
    mRecorder = new MediaRecorder();
    mRecorder.setCamera(mCamera);
    mRecorder.setPreviewDisplay(mHolder.getSurface());
    // Order matters: sources -> output format -> encoders -> output file.
    mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
    mRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
    // mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
    mRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
    mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
    mRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.MPEG_4_SP);
    mRecorder.setVideoSize(320, 240);
    // mRecorder.setVideoFrameRate(15);
    mRecorder.setOutputFile(filePath);
    try {
        mRecorder.prepare();
    } catch (Exception e) {
        // BUG FIX: the original swallowed this exception and called start()
        // anyway, which throws IllegalStateException on an unprepared
        // recorder. Clean up and return the camera to the app instead.
        e.printStackTrace();
        mRecorder.release();
        mRecorder = null;
        try {
            mCamera.reconnect();
        } catch (Exception re) {
            re.printStackTrace();
        }
        return;
    }
    mRecorder.start(); // Recording is now started
}

/**
 * Stops an in-progress recording, frees the recorder and returns camera
 * ownership to the application. Safe to call when nothing is recording.
 */
public void stopVideo() {
    if (mRecorder == null) {
        return;
    }
    mRecorder.stop();
    mRecorder.reset();   // could be reused by going back to setAudioSource()
    mRecorder.release(); // now the object cannot be reused
    mRecorder = null;
    try {
        mCamera.reconnect();
    } catch (Exception e) {
        // BUG FIX: was silently swallowed; at least report the failure.
        e.printStackTrace();
    }
}
还需要预览功能,使用SurfaceView就能实现, 网上很多相关资料
问题是,这样捕捉的视频数据需要转换格式,还要保存为某种可以看的视频文件,这里实在是没有概念。
在其他论坛上有人说我这种想法完全错误,不知道问题出在哪里,对多媒体这一块没有任何经验。
yellshine用MediaRecorder实现,认为是设置了MPEG_4_SP属性,这样输出的数据是mpeg4编码后的数据
看你的程序,你的程序获取的数据是RGB格式的,可能和你使用的MediaRecorder中播放数据格式不一样。只要采集到的数据和MediaRecorder能播放的数据一致,应该就可以播放出来
你可以把摄像头输出的数据设为RGB格式,然后用Bitmap的方式进行播放
File file = new File(Environment.getExternalStorageDirectory(),"myvideo.yuv");
outStream = new FileOutputStream(file);class YourStream implements Camera.PreviewCallback{ ......
byte[] pdata = new byte[data.length];
int value = MetroEncoderJNI.VideoDecode(data,pdata);
outStream.write(pdata,0,value);
......
}
MediaRecorder比较适合用录制和采集,虽然不知道是软还是硬编码,但是设置起来还是相对轻松的。只是如果用于实时的话则有点问题,因为你无法即时的获取没帧的数据,你能获取的只是一个长串的流。
如果采用MediaRecorder的话,感觉问题就出在如何截取数据上了。
你好,你说的这个mediarecorder我尝试了很多demo都没有实现,总是在setAudioSource时出异常,请问你有没有使用mediarecorder录制视频的小例子?而且我感觉这个录制视频还和机器适配有关,谢谢
请问,设置编码格式MPEG_4_SP后录制成的视频格式为何还是3gp的呢?