Reputation: 1920
Using this url, I wrote the code below to encode onPreviewFrame data into an MP4 video, and I used a thread to handle the work, but it doesn't seem to work properly.
private void initCodec() {
    String root = Environment.getExternalStorageDirectory().toString();
    File myDir = new File(root + "/Vocalist");
    if (!myDir.exists()) {
        myDir.mkdirs();
    }
    try {
        File file = new File(myDir, "myVideo.mp4");
        if (file.exists()) {
            file.delete();
        }
        fos = new FileOutputStream(file, false);
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    }
    try {
        mMediaCodec = MediaCodec.createEncoderByType("video/avc");
    } catch (Exception e) {
    }
    MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", 320, 240);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 500000);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
    mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mMediaCodec.start();
    inputBuffers = mMediaCodec.getInputBuffers();
    outputBuffers = mMediaCodec.getOutputBuffers();
}
private synchronized void encode(byte[] dataInput) {
    byte[] data = dataInput;
    inputBuffers = mMediaCodec.getInputBuffers(); // here changes
    outputBuffers = mMediaCodec.getOutputBuffers();
    int inputBufferIndex = mMediaCodec.dequeueInputBuffer(-1);
    if (inputBufferIndex >= 0) {
        ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
        inputBuffer.clear();
        inputBuffer.put(data);
        mMediaCodec.queueInputBuffer(inputBufferIndex, 0, data.length, 0, 0);
    } else {
        return;
    }
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
    Log.i("tag", "outputBufferIndex-->" + outputBufferIndex);
    do {
        if (outputBufferIndex >= 0) {
            ByteBuffer outBuffer = outputBuffers[outputBufferIndex];
            System.out.println("buffer info-->" + bufferInfo.offset + "--"
                    + bufferInfo.size + "--" + bufferInfo.flags + "--"
                    + bufferInfo.presentationTimeUs);
            byte[] outData = new byte[bufferInfo.size];
            outBuffer.get(outData);
            try {
                if (bufferInfo.offset != 0) {
                    fos.write(outData, bufferInfo.offset, outData.length - bufferInfo.offset);
                } else {
                    fos.write(outData, 0, outData.length);
                }
                fos.flush();
                Log.i("camera", "out data -- > " + outData.length);
                mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            } catch (IOException e) {
                e.printStackTrace();
            }
        } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            outputBuffers = mMediaCodec.getOutputBuffers();
        } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat format = mMediaCodec.getOutputFormat();
        }
    } while (outputBufferIndex >= 0);
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
    if (mHolder.getSurface() == null) {
        return;
    }
    try {
        initCodec();
        mCamera.setPreviewDisplay(mHolder);
        mCamera.setPreviewCallback(new Camera.PreviewCallback() {
            @Override
            public void onPreviewFrame(final byte[] bytes, Camera camera) {
                if (recording == true) {
                    if (mThread.isAlive())
                        encode(bytes);
                }
            }
        });
    } catch (Exception e) {
        Log.d("TAG", "Error starting camera preview: " + e.getMessage());
    }
}
public void newOpenCamera() {
    if (mThread == null) {
        mThread = new CameraHandlerThread();
    }
    synchronized (mThread) {
        mThread.openCamera();
    }
}

private static void oldOpenCamera() {
    try {
        c = Camera.open(1);
        Camera.Parameters parameters = c.getParameters();
        parameters.set("orientation", "portrait");
        parameters.setJpegQuality(100);
        parameters.setPreviewFormat(ImageFormat.NV21);
        parameters.setPreviewSize(320, 240);
        c.setParameters(parameters);
    } catch (RuntimeException e) {
        Log.e("camera", "failed to open front camera");
    }
}
public CameraHandlerThread mThread = null;

public static class CameraHandlerThread extends HandlerThread {
    Handler mHandler = null;

    CameraHandlerThread() {
        super("CameraHandlerThread");
        start();
        mHandler = new Handler(getLooper());
    }

    synchronized void notifyCameraOpened() {
        notify();
    }

    public void openCamera() {
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                oldOpenCamera();
                notifyCameraOpened();
            }
        });
    }
}
I converted the onPreviewFrame data to a video, but after the first second the video doesn't play smoothly. What should I do?
Upvotes: 0
Views: 325
Reputation: 52353
First, you're not forwarding the timing information with the frames:
mMediaCodec.queueInputBuffer(inputBufferIndex, 0, data.length, 0, 0)
So your BufferInfo.presentationTimeUs will always be zero when you dequeue the buffer.
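A minimal sketch of what that could look like, assuming a hypothetical startNs field recorded once before the first frame arrives (System.nanoTime() is just one way to get a monotonic clock; the camera frame's own timestamp works too):

long ptsUs = (System.nanoTime() - startNs) / 1000L; // microseconds since recording started
int inputBufferIndex = mMediaCodec.dequeueInputBuffer(-1);
if (inputBufferIndex >= 0) {
    ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
    inputBuffer.clear();
    inputBuffer.put(data);
    // pass the real timestamp instead of 0
    mMediaCodec.queueInputBuffer(inputBufferIndex, 0, data.length, ptsUs, 0);
}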
Second, you don't appear to be using MediaMuxer, which means you're just writing the raw H.264 stream to a file. This is not ".mp4"; it doesn't include the timing information at all. Many video players don't even know what to do with plain H.264.
Wrapping the file as .mp4, with the frame timing from the camera, should yield better results.
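A rough sketch of the MediaMuxer approach (API 18+), replacing the FileOutputStream writes; mMuxer, mTrackIndex and mMuxerStarted are illustrative names, not from your code:

MediaMuxer mMuxer = new MediaMuxer(file.getAbsolutePath(),
        MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); // throws IOException
int mTrackIndex = -1;
boolean mMuxerStarted = false;

// inside the output drain loop, instead of fos.write(...):
if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
    // the encoder's real output format (with csd-0/csd-1) shows up here
    mTrackIndex = mMuxer.addTrack(mMediaCodec.getOutputFormat());
    mMuxer.start();
    mMuxerStarted = true;
} else if (outputBufferIndex >= 0) {
    if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        bufferInfo.size = 0; // codec config data is already in the track format
    }
    if (bufferInfo.size != 0 && mMuxerStarted) {
        outBuffer.position(bufferInfo.offset);
        outBuffer.limit(bufferInfo.offset + bufferInfo.size);
        mMuxer.writeSampleData(mTrackIndex, outBuffer, bufferInfo);
    }
    mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
}

// when recording stops:
mMuxer.stop();
mMuxer.release();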
Your code structure appears to be assuming that it can feed one frame of input and get one frame of output, which isn't always the case. You want to keep the input full, and drain the output as it becomes available.
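One way to structure that is a separate drain pass you call after each submitted frame (and again at end of stream); this is only a sketch, with an arbitrary 10 ms timeout and the muxer handling from the sketch above:

private void drainEncoder() {
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    while (true) {
        int index = mMediaCodec.dequeueOutputBuffer(bufferInfo, 10000 /* us */);
        if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
            break; // nothing ready yet; go back to feeding input
        } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            outputBuffers = mMediaCodec.getOutputBuffers();
        } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // add the track to the muxer and start it, as in the sketch above
        } else if (index >= 0) {
            // write outputBuffers[index] out (via the muxer), then release it
            mMediaCodec.releaseOutputBuffer(index, false);
        }
    }
}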
You can find more information and some sample code on bigflake and in Grafika.
Upvotes: 2