
Encoding optimization: support simultaneous live streaming and local recording

main v1.1.0
jiangdongguo 7 years ago
parent
commit 33810d5384
  1. app/src/main/AndroidManifest.xml (4 lines changed)
  2. app/src/main/java/com/jiangdg/usbcamera/view/USBCameraActivity.java (13 lines changed)
  3. libusbcamera/src/main/java/com/jiangdg/usbcamera/FileUtils.java (60 lines changed)
  4. libusbcamera/src/main/java/com/jiangdg/usbcamera/USBCameraManager.java (5 lines changed)
  5. libusbcamera/src/main/java/com/serenegiant/usb/common/AbstractUVCCameraHandler.java (225 lines changed)
  6. libusbcamera/src/main/java/com/serenegiant/usb/encoder/MediaEncoder.java (223 lines changed)
  7. libusbcamera/src/main/java/com/serenegiant/usb/encoder/MediaVideoBufferEncoder.java (35 lines changed)
  8. libusbcamera/src/main/java/com/serenegiant/usb/encoder/biz/AACEncodeConsumer.java (365 lines changed)
  9. libusbcamera/src/main/java/com/serenegiant/usb/encoder/biz/H264EncodeConsumer.java (371 lines changed)
  10. libusbcamera/src/main/java/com/serenegiant/usb/encoder/biz/Mp4MediaMuxer.java (148 lines changed)
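Taken together, the commit replaces the old MediaMuxerWrapper-based recording path with two dedicated encoder threads (H264EncodeConsumer for video, AACEncodeConsumer for audio) feeding a segmenting Mp4MediaMuxer, while the raw encoded packets are also handed back to the app through a callback so they can be pushed to a live stream while the MP4 is being written. A rough sketch of the wiring, condensed from AbstractUVCCameraHandler.handleStartRecording() below; the output path and duration are illustrative, and lambdas are used only for brevity:

// Sketch of the new recording pipeline; not the literal handler code.
H264EncodeConsumer videoConsumer = new H264EncodeConsumer();
AACEncodeConsumer audioConsumer = new AACEncodeConsumer();

// Encoded packets come back on these callbacks; the handler forwards them to the app
// tagged with type = 1 (H.264) or type = 0 (AAC).
videoConsumer.setOnH264EncodeResultListener((data, offset, length, timestamp) -> { /* push or dump */ });
audioConsumer.setOnAACEncodeResultListener((data, offset, length, timestamp) -> { /* push or dump */ });
videoConsumer.start();
audioConsumer.start();

// Local MP4 recording: both threads pump their tracks into one Mp4MediaMuxer,
// which rolls over to a new "<path>-<n>.mp4" segment every durationMillis.
Mp4MediaMuxer muxer = new Mp4MediaMuxer("/sdcard/record_demo", 30 * 60 * 1000L); // illustrative path
videoConsumer.setTmpuMuxer(muxer);
audioConsumer.setTmpuMuxer(muxer);

// NV21 preview frames are fed to the video thread from the camera frame callback:
// videoConsumer.setRawYuv(nv21, 640, 480);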

app/src/main/AndroidManifest.xml (4 lines changed)

@@ -4,8 +4,8 @@
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<!-- Required for global crash detection -->
<!--android:name=".application.MyApplication"-->
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"

app/src/main/java/com/jiangdg/usbcamera/view/USBCameraActivity.java (13 lines changed)

@@ -12,6 +12,7 @@ import android.view.View;
import android.widget.Button;
import android.widget.Toast;
import com.jiangdg.usbcamera.FileUtils;
import com.jiangdg.usbcamera.R;
import com.jiangdg.usbcamera.USBCameraManager;
import com.serenegiant.usb.CameraDialog;
@@ -19,6 +20,7 @@ import com.serenegiant.usb.USBMonitor;
import com.serenegiant.usb.common.AbstractUVCCameraHandler;
import com.serenegiant.usb.widget.CameraViewInterface;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
@@ -46,6 +48,8 @@ public class USBCameraActivity extends AppCompatActivity implements CameraDialog
private CameraViewInterface mUVCCameraView;
private boolean isRequest;
private static String path = Environment.getExternalStorageDirectory().getAbsolutePath() + "/test1.h264";
private BufferedOutputStream outputStream;
/**
* USB device event listener
@@ -164,15 +168,22 @@ public class USBCameraActivity extends AppCompatActivity implements CameraDialog
if(! mUSBManager.isRecording()){
String videoPath = USBCameraManager.ROOT_PATH+System.currentTimeMillis()
+USBCameraManager.SUFFIX_MP4;
FileUtils.createfile(FileUtils.ROOT_PATH+"test666.h264");
mUSBManager.startRecording(videoPath, new AbstractUVCCameraHandler.OnEncodeResultListener() {
@Override
public void onEncodeResult(byte[] data, int offset, int length, long timestamp, int type) {
// type = 0: AAC audio stream
// type = 1: H.264 video stream
if(type == 1){
FileUtils.putFileStream(data,offset,length);
}
}
});
mBtnRecord.setText("正在录制");
} else {
FileUtils.releaseFile();
mUSBManager.stopRecording();
mBtnRecord.setText("开始录制");
}

libusbcamera/src/main/java/com/jiangdg/usbcamera/FileUtils.java (60 lines changed)

@@ -0,0 +1,60 @@
package com.jiangdg.usbcamera;
import android.os.Environment;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
/** Helper for creating a dump file and writing byte streams to it
*
* Created by jiangdongguo on 2017/10/18.
*/
public class FileUtils {
private static BufferedOutputStream outputStream;
public static String ROOT_PATH = Environment.getExternalStorageDirectory().getAbsolutePath()+File.separator;
public static void createfile(String path){
File file = new File(path);
if(file.exists()){
file.delete();
}
try {
outputStream = new BufferedOutputStream(new FileOutputStream(file));
} catch (Exception e){
e.printStackTrace();
}
}
public static void releaseFile(){
try {
outputStream.flush();
outputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
public static void putFileStream(byte[] data,int offset,int length){
if(outputStream != null) {
try {
outputStream.write(data,offset,length);
} catch (IOException e) {
e.printStackTrace();
}
}
}
public static void putFileStream(byte[] data){
if(outputStream != null) {
try {
outputStream.write(data);
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
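FileUtils is the helper the sample activity above uses to dump the raw H.264 callback data to disk. A minimal sketch of the intended call order; the file name is illustrative:

// Open the dump file once (an existing file with the same name is deleted first).
FileUtils.createfile(FileUtils.ROOT_PATH + "demo.h264");
// Append each encoded packet as it arrives, e.g. from onEncodeResult() with type == 1.
FileUtils.putFileStream(data, offset, length);
// Flush and close when recording stops.
FileUtils.releaseFile();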

libusbcamera/src/main/java/com/jiangdg/usbcamera/USBCameraManager.java (5 lines changed)

@@ -31,9 +31,10 @@ public class USBCameraManager{
private static final String TAG = "USBCameraManager";
private static final int PREVIEW_WIDTH = 640;
private static final int PREVIEW_HEIGHT = 480;
private static final int ENCODER_TYPE = 1;
// use MediaVideoBufferEncoder
private static final int ENCODER_TYPE = 2;
// 0 = YUYV, 1 = MJPEG
private static final int PREVIEW_FORMAT = 1;
private static final int PREVIEW_FORMAT = 0;
private static USBCameraManager mUsbCamManager;
// USB device manager

libusbcamera/src/main/java/com/serenegiant/usb/common/AbstractUVCCameraHandler.java (225 lines changed)

@@ -2,13 +2,10 @@ package com.serenegiant.usb.common;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.hardware.usb.UsbDevice;
import android.media.AudioManager;
import android.media.MediaScannerConnection;
import android.media.SoundPool;
import android.os.Build;
import android.os.Environment;
import android.os.Handler;
@@ -19,7 +16,7 @@ import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import com.jiangdg.libusbcamera.R;
import com.jiangdg.usbcamera.FileUtils;
import com.serenegiant.usb.IFrameCallback;
import com.serenegiant.usb.USBMonitor;
import com.serenegiant.usb.UVCCamera;
@@ -29,6 +26,9 @@ import com.serenegiant.usb.encoder.MediaMuxerWrapper;
import com.serenegiant.usb.encoder.MediaSurfaceEncoder;
import com.serenegiant.usb.encoder.MediaVideoBufferEncoder;
import com.serenegiant.usb.encoder.MediaVideoEncoder;
import com.serenegiant.usb.encoder.biz.AACEncodeConsumer;
import com.serenegiant.usb.encoder.biz.H264EncodeConsumer;
import com.serenegiant.usb.encoder.biz.Mp4MediaMuxer;
import com.serenegiant.usb.widget.CameraViewInterface;
import java.io.BufferedOutputStream;
@@ -37,9 +37,10 @@ import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.nio.ByteBuffer;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;
@@ -48,6 +49,7 @@ import java.util.concurrent.CopyOnWriteArraySet;
*
* */
public abstract class AbstractUVCCameraHandler extends Handler {
private static final boolean DEBUG = true; // TODO set false on release
private static final String TAG = "AbsUVCCameraHandler";
@@ -347,8 +349,9 @@ public abstract class AbstractUVCCameraHandler extends Handler {
// Handles camera-related logic, e.g. grabbing the raw byte stream
private UVCCamera mUVCCamera;
private MediaMuxerWrapper mMuxer;
// private MediaMuxerWrapper mMuxer;
private MediaVideoBufferEncoder mVideoEncoder;
private Mp4MediaMuxer mMuxer;
/** Constructor
*
@@ -536,82 +539,180 @@ public abstract class AbstractUVCCameraHandler extends Handler {
}
// Start video recording
public void handleStartRecording(String path) {
if (DEBUG) Log.v(TAG_THREAD, "handleStartRecording:");
try {
if ((mUVCCamera == null) || (mMuxer != null)) return;
// final MediaMuxerWrapper muxer = new MediaMuxerWrapper(".mp4"); // if you record audio only, ".m4a" is also OK.
final MediaMuxerWrapper muxer = new MediaMuxerWrapper(path);
MediaVideoBufferEncoder videoEncoder = null;
switch (mEncoderType) {
case 1: // for video capturing using MediaVideoEncoder
// start the video encoder thread
new MediaVideoEncoder(muxer,getWidth(), getHeight(), mMediaEncoderListener);
break;
case 2: // for video capturing using MediaVideoBufferEncoder
videoEncoder = new MediaVideoBufferEncoder(muxer, getWidth(), getHeight(), mMediaEncoderListener);
break;
// case 0: // for video capturing using MediaSurfaceEncoder
default:
new MediaSurfaceEncoder(muxer, getWidth(), getHeight(), mMediaEncoderListener);
break;
// public void handleStartRecording2(String path) {
// if (DEBUG) Log.v(TAG_THREAD, "handleStartRecording:");
// try {
// if ((mUVCCamera == null) || (mMuxer != null)) return;
//// final MediaMuxerWrapper muxer = new MediaMuxerWrapper(".mp4"); // if you record audio only, ".m4a" is also OK.
// final MediaMuxerWrapper muxer = new MediaMuxerWrapper(path);
// MediaVideoBufferEncoder videoEncoder = null;
// switch (mEncoderType) {
// case 1: // for video capturing using MediaVideoEncoder
// // start the video encoder thread
// new MediaVideoEncoder(muxer,getWidth(), getHeight(), mMediaEncoderListener);
// break;
// case 2: // for video capturing using MediaVideoBufferEncoder
// videoEncoder = new MediaVideoBufferEncoder(muxer, getWidth(), getHeight(), mMediaEncoderListener);
// break;
// // case 0: // for video capturing using MediaSurfaceEncoder
// default:
// new MediaSurfaceEncoder(muxer, getWidth(), getHeight(), mMediaEncoderListener);
// break;
// }
// // start the audio encoder thread
// if (true) {
// // for audio capturing
//// new MediaAudioEncoder(muxer, mMediaEncoderListener);
// }
// muxer.prepare();
// muxer.startRecording();
// if (videoEncoder != null) {
// mUVCCamera.setFrameCallback(mIFrameCallback, UVCCamera.PIXEL_FORMAT_NV21);
// }
// synchronized (mSync) {
// mMuxer = muxer;
// mVideoEncoder = videoEncoder;
// }
// callOnStartRecording();
// } catch (final IOException e) {
// callOnError(e);
// Log.e(TAG, "startCapture:", e);
// }
// }
private AACEncodeConsumer mAacConsumer;
private H264EncodeConsumer mH264Consumer;
public void handleStartRecording(String path){
if ((mUVCCamera == null) || (mMuxer != null))
return;
// Grab preview frames from the USB camera
mUVCCamera.setFrameCallback(mIFrameCallback, UVCCamera.PIXEL_FORMAT_NV21);
// Start the video encoder thread (type = 1)
mH264Consumer = new H264EncodeConsumer();
mH264Consumer.setOnH264EncodeResultListener(new H264EncodeConsumer.OnH264EncodeResultListener() {
@Override
public void onEncodeResult(byte[] data, int offset, int length, long timestamp) {
if(mListener != null){
mListener.onEncodeResult(data,offset,length,timestamp,1);
}
// start the audio encoder thread
if (true) {
// for audio capturing
new MediaAudioEncoder(muxer, mMediaEncoderListener);
}
muxer.prepare();
muxer.startRecording();
if (videoEncoder != null) {
mUVCCamera.setFrameCallback(mIFrameCallback, UVCCamera.PIXEL_FORMAT_NV21);
});
mH264Consumer.start();
// Start the audio encoder thread (type = 0)
mAacConsumer = new AACEncodeConsumer();
mAacConsumer.setOnAACEncodeResultListener(new AACEncodeConsumer.OnAACEncodeResultListener() {
@Override
public void onEncodeResult(byte[] data, int offset, int length, long timestamp) {
if(mListener != null){
mListener.onEncodeResult(data,offset,length,timestamp,0);
}
synchronized (mSync) {
mMuxer = muxer;
mVideoEncoder = videoEncoder;
}
callOnStartRecording();
} catch (final IOException e) {
callOnError(e);
Log.e(TAG, "startCapture:", e);
});
mAacConsumer.start();
// Start the muxer
long millis = 30 * 60 * 1000;
mMuxer = new Mp4MediaMuxer(new File(FileUtils.ROOT_PATH, new SimpleDateFormat("yyyy_MM_dd_HH_mm_ss").format(new Date())).toString(), millis);
if(mH264Consumer != null){
mH264Consumer.setTmpuMuxer(mMuxer);
}
if(mAacConsumer != null){
mAacConsumer.setTmpuMuxer(mMuxer);
}
callOnStartRecording();
}
// Stop video recording
public void handleStopRecording() {
if (DEBUG) Log.v(TAG_THREAD, "handleStopRecording:mMuxer=" + mMuxer);
final MediaMuxerWrapper muxer;
synchronized (mSync) {
muxer = mMuxer;
public void handleStopRecording(){
// Stop the muxer
if (mMuxer != null){
mMuxer.release();
mMuxer = null;
mVideoEncoder = null;
if (mUVCCamera != null) {
mUVCCamera.stopCapture();
Log.i(TAG,TAG+"---->停止本地录制");
}
// Stop the audio and video encoder threads
if(mH264Consumer != null){
mH264Consumer.setTmpuMuxer(null);
}
if(mAacConsumer != null){
mAacConsumer.setTmpuMuxer(null);
}
if(mH264Consumer != null){
mH264Consumer.exit();
try {
mWeakCameraView.get().setVideoEncoder(null);
} catch (final Exception e) {
// ignore
Thread t2 = mH264Consumer;
mH264Consumer = null;
if(t2 != null){
t2.interrupt();
t2.join();
}
} catch (InterruptedException e) {
e.printStackTrace();
}
if (muxer != null) {
muxer.stopRecording();
}
if(mAacConsumer != null){
mAacConsumer.exit();
try {
Thread t1 = mAacConsumer;
mAacConsumer = null;
if(t1 != null){
t1.interrupt();
t1.join();
}
} catch (InterruptedException e) {
e.printStackTrace();
}
}
// Stop capturing video data
if (mUVCCamera != null) {
mUVCCamera.stopCapture();
mUVCCamera.setFrameCallback(null, 0);
}
mWeakCameraView.get().setVideoEncoder(null);
// you should not wait here
callOnStopRecording();
}
}
// Stop video recording
// public void handleStopRecording2() {
// if (DEBUG) Log.v(TAG_THREAD, "handleStopRecording:mMuxer=" + mMuxer);
// final MediaMuxerWrapper muxer;
// synchronized (mSync) {
// muxer = mMuxer;
// mMuxer = null;
// mVideoEncoder = null;
// if (mUVCCamera != null) {
// mUVCCamera.stopCapture();
// }
// }
// try {
// mWeakCameraView.get().setVideoEncoder(null);
// } catch (final Exception e) {
// // ignore
// }
// if (muxer != null) {
// muxer.stopRecording();
// mUVCCamera.setFrameCallback(null, 0);
// // you should not wait here
// callOnStopRecording();
// }
// }
private final IFrameCallback mIFrameCallback = new IFrameCallback() {
@Override
public void onFrame(final ByteBuffer frame) {
final MediaVideoBufferEncoder videoEncoder;
synchronized (mSync) {
videoEncoder = mVideoEncoder;
}
if (videoEncoder != null) {
videoEncoder.frameAvailableSoon();
videoEncoder.encode(frame);
// final MediaVideoBufferEncoder videoEncoder;
// synchronized (mSync) {
// videoEncoder = mVideoEncoder;
// }
// if (videoEncoder != null) {
// videoEncoder.frameAvailableSoon();
// videoEncoder.encode(frame);
// }
int len = frame.capacity();
byte[] yuv = new byte[len];
frame.get(yuv);
if(mH264Consumer != null){
mH264Consumer.setRawYuv(yuv,640,480);
}
}
};
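With the handler tagging every packet it forwards (type = 0 for AAC with an ADTS header, type = 1 for H.264 with SPS/PPS prepended to key frames), an app can record and push at the same time from a single listener. A sketch only; RtmpPusher is a made-up placeholder for whatever live-push client the app uses and is not part of this commit:

mUSBManager.startRecording(videoPath, new AbstractUVCCameraHandler.OnEncodeResultListener() {
    @Override
    public void onEncodeResult(byte[] data, int offset, int length, long timestamp, int type) {
        if (type == 1) {            // H.264 video
            FileUtils.putFileStream(data, offset, length);          // local raw dump
            rtmpPusher.pushVideo(data, offset, length, timestamp);  // hypothetical pusher
        } else if (type == 0) {     // AAC audio (7-byte ADTS header already added)
            rtmpPusher.pushAudio(data, offset, length, timestamp);  // hypothetical pusher
        }
    }
});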

libusbcamera/src/main/java/com/serenegiant/usb/encoder/MediaEncoder.java (223 lines changed)

@@ -1,26 +1,3 @@
/*
* UVCCamera
* library and sample to access to UVC web camera on non-rooted Android device
*
* Copyright (c) 2014-2017 saki t_saki@serenegiant.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* All files in the folder are under this Apache License, Version 2.0.
* Files in the libjpeg-turbo, libusb, libuvc, rapidjson folder
* may have a different license, see the respective files.
*/
package com.serenegiant.usb.encoder;
import android.media.MediaCodec;
@@ -30,11 +7,14 @@ import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import com.jiangdg.usbcamera.FileUtils;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.Buffer;
import java.nio.ByteBuffer;
public abstract class MediaEncoder implements Runnable {
@@ -43,9 +23,12 @@ public abstract class MediaEncoder implements Runnable {
public static final int TYPE_AUDIO = 0; // audio data
public static final int TYPE_VIDEO = 1; // video data
protected static final int TIMEOUT_USEC = 10000; // 10[msec]
protected static final int TIMEOUT_USEC = 10000; // 10 ms
protected static final int MSG_FRAME_AVAILABLE = 1;
protected static final int MSG_STOP_RECORDING = 9;
private long lastPush;
private long millisPerframe;
private boolean isExit;
public interface MediaEncoderListener {
void onPrepared(MediaEncoder encoder);
@@ -169,6 +152,10 @@ public abstract class MediaEncoder implements Runnable {
final boolean isRunning = true;
boolean localRequestStop;
boolean localRequestDrain;
boolean localIsNotExit;
// create the .h264 dump file
FileUtils.createfile(Environment.getExternalStorageDirectory().getAbsolutePath()+"/test222.h264");
while (isRunning) {
synchronized (mSync) {
localRequestStop = mRequestStop;
@@ -198,11 +185,11 @@
}
}
} // end of while
if (DEBUG) Log.d(TAG, "Encoder thread exiting");
synchronized (mSync) {
mRequestStop = true;
mIsCapturing = false;
}
FileUtils.releaseFile();
}
/*
@@ -217,6 +204,7 @@
synchronized (mSync) {
mIsCapturing = true;
mRequestStop = false;
isExit = false;
mSync.notifyAll();
}
}
@@ -231,6 +219,7 @@
return;
}
mRequestStop = true; // for rejecting newer frame
isExit = true;
mSync.notifyAll();
// We can not know when the encoding and writing finish.
// so we return immediately after request to avoid delay of caller thread
@@ -288,7 +277,6 @@
*/
@SuppressWarnings("deprecation")
protected void encode(final byte[] buffer, final int length, final long presentationTimeUs) {
// if (DEBUG) Log.v(TAG, "encode:buffer=" + buffer);
if (!mIsCapturing) return;
int ix = 0, sz;
final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
@@ -323,6 +311,46 @@
}
}
protected void encode(ByteBuffer yuvBuffer,int len){
if (!mIsCapturing) return;
try {
if (lastPush == 0) {
lastPush = System.currentTimeMillis();
}
long time = System.currentTimeMillis() - lastPush;
if (time >= 0) {
time = millisPerframe - time;
if (time > 0)
Thread.sleep(time / 2);
}
final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
int bufferIndex = -1;
try{
bufferIndex = mMediaCodec.dequeueInputBuffer(0);
}catch (IllegalStateException e){
e.printStackTrace();
}
if (bufferIndex >= 0) {
ByteBuffer mBuffer;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
mBuffer = mMediaCodec.getInputBuffer(bufferIndex);
} else {
mBuffer = inputBuffers[bufferIndex];
}
byte[] yuvData = new byte[yuvBuffer.capacity()];
yuvBuffer.get(yuvData);
mBuffer.clear();
mBuffer.put(yuvData);
mBuffer.clear();
mMediaCodec.queueInputBuffer(bufferIndex, 0, yuvData.length, System.nanoTime() / 1000, MediaCodec.BUFFER_FLAG_KEY_FRAME);
}
lastPush = System.currentTimeMillis();
} catch (InterruptedException ex) {
ex.printStackTrace();
}
}
/**
* Method to set ByteBuffer to the MediaCodec encoder
* @param buffer null means EOS
@@ -367,135 +395,133 @@
}
}
byte[] mPpsSps = new byte[0];
byte[] h264 = new byte[640 * 480 * 3 / 2];
ByteBuffer mBuffer = ByteBuffer.allocate(10240);
long timeStamp = System.currentTimeMillis();
/**
* drain encoded data and write them to muxer
*/
@SuppressWarnings("deprecation")
protected void drain() {
if (mMediaCodec == null) return;
if (mMediaCodec == null)
return;
ByteBuffer[] encoderOutputBuffers = mMediaCodec.getOutputBuffers();
int encoderStatus, count = 0;
final MediaMuxerWrapper muxer = mWeakMuxer.get();
if (muxer == null) {
// throw new NullPointerException("muxer is unexpectedly null");
Log.w(TAG, "muxer is unexpectedly null");
return;
}
byte[] mPpsSps = new byte[0];
byte[] h264 = new byte[640 * 480];
LOOP: while (mIsCapturing) {
// get encoded data with maximum timeout duration of TIMEOUT_USEC(=10[msec])
while (mIsCapturing) {
encoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// wait 5 counts(=TIMEOUT_USEC x 5 = 50msec) until data/EOS come
// wait TIMEOUT_USEC x 5 = 50 ms
// if there is still no data, break out of the loop
if (!mIsEOS) {
if (++count > 5)
break LOOP; // out of while
break;
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
if (DEBUG) Log.v(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
// this should not come when encoding
encoderOutputBuffers = mMediaCodec.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
if (DEBUG) Log.v(TAG, "INFO_OUTPUT_FORMAT_CHANGED");
// this status indicate the output format of codec is changed
// this should come only once before actual encoded data
// but this status never come on Android4.3 or less
// and in that case, you should treat when MediaCodec.BUFFER_FLAG_CODEC_CONFIG come.
if (mMuxerStarted) { // second time request is error
if (mMuxerStarted) {
throw new RuntimeException("format changed twice");
}
// get output format from codec and pass them to muxer
// getOutputFormat should be called after INFO_OUTPUT_FORMAT_CHANGED otherwise crash.
final MediaFormat format = mMediaCodec.getOutputFormat(); // API >= 16
final MediaFormat format = mMediaCodec.getOutputFormat();
mTrackIndex = muxer.addTrack(format);
mMuxerStarted = true;
if (!muxer.start()) {
// we should wait until muxer is ready
synchronized (muxer) {
while (!muxer.isStarted())
try {
muxer.wait(100);
} catch (final InterruptedException e) {
break LOOP;
break;
}
}
}
} else if (encoderStatus < 0) {
// unexpected status
if (DEBUG) Log.w(TAG, "drain:unexpected result from encoder#dequeueOutputBuffer: " + encoderStatus);
} else {
final ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
ByteBuffer encodedData;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
encodedData = mMediaCodec.getOutputBuffer(encoderStatus);
} else {
encodedData = encoderOutputBuffers[encoderStatus];
}
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
// final ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
// this never should come...may be a MediaCodec internal error
throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
}
// BUFFER_FLAG_CODEC_CONFIG flag:
// zero out BufferInfo.size
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// You should set the output format on the muxer here when you target Android 4.3 or less,
// but MediaCodec#getOutputFormat can not be called here (because INFO_OUTPUT_FORMAT_CHANGED hasn't come yet),
// therefore we would have to expand and prepare the output format from the buffer data.
// This sample is for API>=18 (>=Android 4.3), so just ignore this flag here
if (DEBUG) Log.d(TAG, "drain:BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}
// BUFFER_FLAG_END_OF_STREAM flag:
// end of stream, break out of the loop
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
mMuxerStarted = mIsCapturing = false;
break;
}
// valid encoded data
if (mBufferInfo.size != 0) {
// encoded data is ready, clear waiting counter
count = 0;
if (!mMuxerStarted) {
// muxer is not ready... this is a programming failure.
throw new RuntimeException("drain:muxer hasn't started");
}
// write encoded data to the muxer (presentationTimeUs needs adjusting)
// write the audio or video sample to the muxer
mBufferInfo.presentationTimeUs = getPTSUs();
muxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
prevOutputPTSUs = mBufferInfo.presentationTimeUs;
// for pushing: get the H.264 data stream
// audio and video are told apart by mBufferInfo.size:
// > 1000: video; < 1000: audio
synchronized (this){
if(mBufferInfo.size > 1000) {
int type = encodedData.get(4) & 0x07;
if(type == 7 || type == 8) {
byte[] outData = new byte[mBufferInfo.size];
encodedData.get(outData);
mPpsSps = outData;
}else if(type == 5) {
System.arraycopy(mPpsSps,0,h264,0,mPpsSps.length);
if(mBufferInfo.size > h264.length) {
// mTrackIndex = 0: video; mTrackIndex = 1: audio
if(mTrackIndex == 0) {
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
boolean sync = false;
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {// sps
sync = (mBufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
if (!sync) {
byte[] temp = new byte[mBufferInfo.size];
encodedData.get(temp);
mPpsSps = temp;
mMediaCodec.releaseOutputBuffer(encoderStatus, false);
continue;
} else {
mPpsSps = new byte[0];
}
}
sync |= (mBufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
int len = mPpsSps.length + mBufferInfo.size;
if (len > h264.length) {
h264 = new byte[len];
}
encodedData.get(h264,mPpsSps.length,mBufferInfo.size);
if (sync) {
System.arraycopy(mPpsSps, 0, h264, 0, mPpsSps.length);
encodedData.get(h264, mPpsSps.length, mBufferInfo.size);
if(mListener != null) {
mListener.onEncodeResult(h264, 0,mPpsSps.length + mBufferInfo.size,
mBufferInfo.presentationTimeUs / 1000,TYPE_VIDEO);
}
// dump the stream to file
FileUtils.putFileStream(h264, 0,mPpsSps.length + mBufferInfo.size);
} else {
if(mBufferInfo.size > h264.length){
continue ;
}
encodedData.get(h264,0,mBufferInfo.size);
if(System.currentTimeMillis() - timeStamp >= 3000) {
timeStamp = System.currentTimeMillis();
if(Build.VERSION.SDK_INT >= 23) {
Bundle params = new Bundle();
params.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
mMediaCodec.setParameters(params);
}
}
encodedData.get(h264, 0, mBufferInfo.size);
if(mListener != null) {
mListener.onEncodeResult(h264, 0,mBufferInfo.size,
mBufferInfo.presentationTimeUs / 1000,TYPE_VIDEO);
}
FileUtils.putFileStream(h264, 0,mBufferInfo.size);
}
} else {
} else if(mTrackIndex == 1){
mBuffer.clear();
encodedData.get(mBuffer.array(), 7, mBufferInfo.size);
encodedData.clear();
@@ -503,19 +529,13 @@ LOOP: while (mIsCapturing) {
addADTStoPacket(mBuffer.array(), mBufferInfo.size + 7);
mBuffer.flip();
if(mListener != null){
mListener.onEncodeResult(mBuffer.array(),0, mBufferInfo.size + 7, mBufferInfo.presentationTimeUs / 1000,TYPE_AUDIO);
mListener.onEncodeResult(mBuffer.array(),0, mBufferInfo.size + 7,
mBufferInfo.presentationTimeUs / 1000,TYPE_AUDIO);
}
}
}
}
// return buffer to encoder
// release the output buffer back to the encoder
mMediaCodec.releaseOutputBuffer(encoderStatus, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
// when EOS come.
mMuxerStarted = mIsCapturing = false;
break; // out of while
}
}
}
}
@@ -541,18 +561,11 @@ LOOP: while (mIsCapturing) {
return mSamplingRateIndex;
}
/**
* previous presentationTimeUs for writing
*/
private long prevOutputPTSUs = 0;
/**
* get next encoding presentationTimeUs
* @return
*/
protected long getPTSUs() {
long result = System.nanoTime() / 1000L;
// presentationTimeUs should be monotonic
// otherwise muxer fail to write
if (result < prevOutputPTSUs)
result = (prevOutputPTSUs - result) + result;
return result;
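One detail in the reworked drain(): in the non-key-frame branch, once 3 seconds have passed since the last request, the encoder is asked to emit a fresh sync frame via MediaCodec.setParameters(), presumably so a consumer of the pushed stream never waits long for a decodable frame. The call in isolation (the code above gates it on API 23; the parameter key itself has existed since API 19):

// Isolated from drain() above: ask the encoder to produce a sync (key) frame soon.
Bundle params = new Bundle();
params.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0); // 0 = as soon as possible
mMediaCodec.setParameters(params);                             // available since API 19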

libusbcamera/src/main/java/com/serenegiant/usb/encoder/MediaVideoBufferEncoder.java (35 lines changed)

@@ -27,24 +27,18 @@ import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
* This class receives video images as ByteBuffer(strongly recommend direct ByteBuffer) as NV21(YUV420SP)
* and encode them to h.264.
* If you use this directly with IFrameCallback, you should know UVCCamera and its backend native libraries
* never execute color space conversion. This means that color tone of resulted movie will be different
* from that you expected/can see on screen.
*/
public class MediaVideoBufferEncoder extends MediaEncoder implements IVideoEncoder {
private static final boolean DEBUG = true; // TODO set false on release
private static final String TAG = "MediaVideoBufferEncoder";
private static final String MIME_TYPE = "video/avc";
// parameters for recording
private static final int FRAME_RATE = 15;
private static final float BPP = 0.50f;
@@ -59,11 +53,11 @@ public class MediaVideoBufferEncoder extends MediaEncoder implements IVideoEncod
}
public void encode(final ByteBuffer buffer) {
// if (DEBUG) Log.v(TAG, "encode:buffer=" + buffer);
synchronized (mSync) {
if (!mIsCapturing || mRequestStop) return;
}
encode(buffer, buffer.capacity(), getPTSUs());
// encode(buffer, buffer.capacity(), getPTSUs());
encode(buffer, buffer.capacity());
}
@Override
@@ -89,6 +83,12 @@ public class MediaVideoBufferEncoder extends MediaEncoder implements IVideoEncod
mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mMediaCodec.start();
Bundle params = new Bundle();
params.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
mMediaCodec.setParameters(params);
}
if (DEBUG) Log.i(TAG, "prepare finishing");
if (mListener != null) {
try {
@@ -105,11 +105,7 @@ public class MediaVideoBufferEncoder extends MediaEncoder implements IVideoEncod
return bitrate;
}
/**
* select the first codec that match a specific MIME type
* @param mimeType
* @return null if no codec matched
*/
// Select the first encoder that matches the given MIME type
@SuppressWarnings("deprecation")
protected final MediaCodecInfo selectVideoCodec(final String mimeType) {
if (DEBUG) Log.v(TAG, "selectVideoCodec:");
@@ -138,10 +134,7 @@ public class MediaVideoBufferEncoder extends MediaEncoder implements IVideoEncod
return null;
}
/**
* select color format available on specific codec and we can use.
* @return 0 if no colorFormat is matched
*/
// Select a color format the encoder supports
protected static final int selectColorFormat(final MediaCodecInfo codecInfo, final String mimeType) {
if (DEBUG) Log.i(TAG, "selectColorFormat: ");
int result = 0;
@@ -166,9 +159,7 @@ public class MediaVideoBufferEncoder extends MediaEncoder implements IVideoEncod
return result;
}
/**
* color formats that we can use in this class
*/
// usable YUV color formats
protected static int[] recognizedFormats;
static {
recognizedFormats = new int[] {

libusbcamera/src/main/java/com/serenegiant/usb/encoder/biz/AACEncodeConsumer.java (365 lines changed)

@@ -0,0 +1,365 @@
package com.serenegiant.usb.encoder.biz;
import android.annotation.TargetApi;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.os.Build;
import android.os.Process;
import android.util.Log;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
import java.text.SimpleDateFormat;
import java.util.Date;
/** Encodes PCM audio to AAC
*
* Created by jianddongguo on 2017/7/21.
*/
public class AACEncodeConsumer extends Thread{
private static final boolean DEBUG = false;
private static final String TAG = "TMPU";
private static final String MIME_TYPE = "audio/mp4a-latm";
private static final long TIMES_OUT = 1000;
private static final int SAMPLE_RATE = 8000; // sample rate
private static final int BIT_RATE = 16000; // bit rate
private static final int BUFFER_SIZE = 1920; // minimum buffer size
private int outChannel = 1;
private int bitRateForLame = 32;
private int qaulityDegree = 7;
private int bufferSizeInBytes;
private AudioRecord mAudioRecord; // audio capture
private MediaCodec mAudioEncoder; // audio encoder
private OnAACEncodeResultListener listener;
private int mSamplingRateIndex = 0;//ADTS
private boolean isEncoderStart = false;
private boolean isRecMp3 = false;
private boolean isExit = false;
private long prevPresentationTimes = 0;
private WeakReference<Mp4MediaMuxer> mMuxerRef;
private MediaFormat newFormat;
/**
* There are 13 supported frequencies by ADTS.
**/
public static final int[] AUDIO_SAMPLING_RATES = { 96000, // 0
88200, // 1
64000, // 2
48000, // 3
44100, // 4
32000, // 5
24000, // 6
22050, // 7
16000, // 8
12000, // 9
11025, // 10
8000, // 11
7350, // 12
-1, // 13
-1, // 14
-1, // 15
};
private FileOutputStream fops;
// Callback for encoded AAC frames
public interface OnAACEncodeResultListener{
void onEncodeResult(byte[] data, int offset,
int length, long timestamp);
}
public AACEncodeConsumer(){
for (int i=0;i < AUDIO_SAMPLING_RATES.length; i++) {
if (AUDIO_SAMPLING_RATES[i] == SAMPLE_RATE) {
mSamplingRateIndex = i;
break;
}
}
}
public void setOnAACEncodeResultListener(OnAACEncodeResultListener listener){
this.listener = listener;
}
public void exit(){
isExit = true;
}
public synchronized void setTmpuMuxer(Mp4MediaMuxer mMuxer){
this.mMuxerRef = new WeakReference<>(mMuxer);
Mp4MediaMuxer muxer = mMuxerRef.get();
if (muxer != null && newFormat != null) {
muxer.addTrack(newFormat, false);
}
}
@Override
public void run() {
// Start audio capture and the encoder
if(! isEncoderStart){
initAudioRecord();
initMediaCodec();
}
// Initialize the audio file parameters
byte[] mp3Buffer = new byte[1024];
// Problem here: after local recording ends, nothing more gets written
while(! isExit){
byte[] audioBuffer = new byte[2048];
// capture audio
int readBytes = mAudioRecord.read(audioBuffer,0,BUFFER_SIZE);
if(DEBUG)
Log.i(TAG,"采集音频readBytes = "+readBytes);
// encode the audio
if(readBytes > 0){
encodeBytes(audioBuffer,readBytes);
}
}
// Stop audio capture and the encoder
stopMediaCodec();
stopAudioRecord();
}
@TargetApi(21)
private void encodeBytes(byte[] audioBuf, int readBytes) {
ByteBuffer[] inputBuffers = mAudioEncoder.getInputBuffers();
ByteBuffer[] outputBuffers = mAudioEncoder.getOutputBuffers();
// Returns the index of an input buffer; -1 means none is available right now
int inputBufferIndex = mAudioEncoder.dequeueInputBuffer(TIMES_OUT);
if(inputBufferIndex >= 0){
// Get an empty, writable input buffer for the client
ByteBuffer inputBuffer = null;
if(!isLollipop()){
inputBuffer = inputBuffers[inputBufferIndex];
}else{
inputBuffer = mAudioEncoder.getInputBuffer(inputBufferIndex);
}
// Write the raw PCM data into the input buffer and queue it to the encoder
if(audioBuf==null || readBytes<=0){
mAudioEncoder.queueInputBuffer(inputBufferIndex,0,0,getPTSUs(),MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}else{
inputBuffer.clear();
inputBuffer.put(audioBuf);
mAudioEncoder.queueInputBuffer(inputBufferIndex,0,readBytes,getPTSUs(),0);
}
}
// Returns the index of an output buffer; -1 means none is available right now
// mBufferInfo describes the encoded data; TIMES_OUT is how long to wait
MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
int outputBufferIndex = -1;
do{
outputBufferIndex = mAudioEncoder.dequeueOutputBuffer(mBufferInfo,TIMES_OUT);
if(outputBufferIndex == MediaCodec. INFO_TRY_AGAIN_LATER){
if(DEBUG)
Log.i(TAG,"获得编码器输出缓存区超时");
}else if(outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED){
// On API < 21 the app must re-fetch the encoder's output buffers;
// on API >= 21, INFO_OUTPUT_BUFFERS_CHANGED needs no handling
if(!isLollipop()){
outputBuffers = mAudioEncoder.getOutputBuffers();
}
}else if(outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
// The encoder output format changed; this happens once, before any encoded data arrives
// Add the audio track to the muxer here; once both tracks are added the muxer starts (keeps audio and video in sync)
if(DEBUG)
Log.i(TAG,"编码器输出缓存区格式改变,添加视频轨道到混合器");
synchronized (AACEncodeConsumer.this) {
newFormat = mAudioEncoder.getOutputFormat();
if(mMuxerRef != null){
Mp4MediaMuxer muxer = mMuxerRef.get();
if (muxer != null) {
muxer.addTrack(newFormat, false);
}
}
}
}else{
// When BUFFER_FLAG_CODEC_CONFIG is set, the output buffer holds codec config data rather than media data
if((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0){
if(DEBUG)
Log.i(TAG,"编码数据被消费,BufferInfo的size属性置0");
mBufferInfo.size = 0;
}
// End-of-stream flag: leave the loop
if((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0){
if(DEBUG)
Log.i(TAG,"数据流结束,退出循环");
break;
}
// Get a read-only output buffer holding the encoded data
ByteBuffer mBuffer = ByteBuffer.allocate(10240);
ByteBuffer outputBuffer = null;
if(!isLollipop()){
outputBuffer = outputBuffers[outputBufferIndex];
}else{
outputBuffer = mAudioEncoder.getOutputBuffer(outputBufferIndex);
}
if(mBufferInfo.size != 0){
// Getting the output buffer failed; throw
if(outputBuffer == null){
throw new RuntimeException("encodecOutputBuffer"+outputBufferIndex+"was null");
}
// Feed the audio stream to the muxer
if(mMuxerRef != null){
Mp4MediaMuxer muxer = mMuxerRef.get();
if (muxer != null) {
muxer.pumpStream(outputBuffer, mBufferInfo, false);
}
}
// Prepend a 7-byte ADTS header to the AAC frame, staged in mBuffer
mBuffer.clear();
outputBuffer.get(mBuffer.array(), 7, mBufferInfo.size);
outputBuffer.clear();
mBuffer.position(7 + mBufferInfo.size);
addADTStoPacket(mBuffer.array(), mBufferInfo.size + 7);
mBuffer.flip();
// Hand the AAC frame back to the caller (MainModelImpl) for pushing
if(listener != null){
Log.i(TAG,"----->得到aac数据流<-----");
listener.onEncodeResult(mBuffer.array(),0, mBufferInfo.size + 7, mBufferInfo.presentationTimeUs / 1000);
}
}
// Done with this buffer; release it back to the encoder
mAudioEncoder.releaseOutputBuffer(outputBufferIndex,false);
}
}while (outputBufferIndex >= 0);
}
private void initAudioRecord(){
if(DEBUG)
Log.d(TAG,"AACEncodeConsumer-->开始采集音频");
// Set this thread's priority for audio
Process.setThreadPriority(Process.THREAD_PRIORITY_AUDIO);
int bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
mAudioRecord.startRecording();
}
private void initMediaCodec(){
if(DEBUG)
Log.d(TAG,"AACEncodeConsumer-->开始编码音频");
MediaCodecInfo mCodecInfo = selectSupportCodec(MIME_TYPE);
if(mCodecInfo == null){
Log.e(TAG,"编码器不支持"+MIME_TYPE+"类型");
return;
}
try{
mAudioEncoder = MediaCodec.createByCodecName(mCodecInfo.getName());
}catch(IOException e){
Log.e(TAG,"创建编码器失败"+e.getMessage());
e.printStackTrace();
}
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, SAMPLE_RATE);
format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, BUFFER_SIZE);
mAudioEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mAudioEncoder.start();
isEncoderStart = true;
}
private void stopAudioRecord() {
if(DEBUG)
Log.d(TAG,"AACEncodeConsumer-->停止采集音频");
if(mAudioRecord != null){
mAudioRecord.stop();
mAudioRecord.release();
mAudioRecord = null;
}
}
private void stopMediaCodec() {
if(DEBUG)
Log.d(TAG,"AACEncodeConsumer-->停止编码音频");
if(mAudioEncoder != null){
mAudioEncoder.stop();
mAudioEncoder.release();
mAudioEncoder = null;
}
isEncoderStart = false;
}
// API>=21
private boolean isLollipop(){
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP;
}
// API<=19
private boolean isKITKAT(){
return Build.VERSION.SDK_INT <= Build.VERSION_CODES.KITKAT;
}
private long getPTSUs(){
long result = System.nanoTime()/1000;
if(result < prevPresentationTimes){
result = (prevPresentationTimes - result ) + result;
}
return result;
}
/**
* Iterate over all codecs and return the first encoder matching the given MIME type,
* i.e. check whether any encoder supports the MIME type
* */
private MediaCodecInfo selectSupportCodec(String mimeType){
int numCodecs = MediaCodecList.getCodecCount();
for (int i = 0; i < numCodecs; i++) {
MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
// Skip entries that are not encoders
if (!codecInfo.isEncoder()) {
continue;
}
// For encoders, check whether the MIME type is supported
String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
if (types[j].equalsIgnoreCase(mimeType)) {
return codecInfo;
}
}
}
return null;
}
// Writes a 7-byte ADTS header in front of each AAC frame (packetLen = payload + 7)
private void addADTStoPacket(byte[] packet, int packetLen) {
packet[0] = (byte) 0xFF; // syncword, high 8 bits
packet[1] = (byte) 0xF1; // syncword low 4 bits, MPEG-4, layer 0, no CRC
packet[2] = (byte) (((2 - 1) << 6) + (mSamplingRateIndex << 2) + (1 >> 2)); // profile (AAC LC), sampling index, channel config high bit
packet[3] = (byte) (((1 & 3) << 6) + (packetLen >> 11)); // channel config low bits, frame length bits 12..11
packet[4] = (byte) ((packetLen & 0x7FF) >> 3); // frame length bits 10..3
packet[5] = (byte) (((packetLen & 7) << 5) + 0x1F); // frame length bits 2..0, buffer fullness high bits
packet[6] = (byte) 0xFC; // buffer fullness low bits, one raw data block
}
private short[] transferByte2Short(byte[] data,int readBytes){
// byte[] to short[]: the array length halves
int shortLen = readBytes / 2;
// wrap the byte[] in a ByteBuffer
ByteBuffer byteBuffer = ByteBuffer.wrap(data, 0, readBytes);
// switch to little-endian and view it as a ShortBuffer
ShortBuffer shortBuffer = byteBuffer.order(ByteOrder.LITTLE_ENDIAN).asShortBuffer();
short[] shortData = new short[shortLen];
shortBuffer.get(shortData, 0, shortLen);
return shortData;
}
}
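For reference, with the configuration used here (AAC-LC, 8 kHz so mSamplingRateIndex = 11, mono) the 7-byte header written by addADTStoPacket() works out as below; the 371-byte total packet length (364-byte AAC frame + 7-byte header) is only an example:

// Worked example of addADTStoPacket(), assuming packetLen = 371 (illustrative).
int packetLen = 371;
int samplingRateIndex = 11;                                               // 8000 Hz in AUDIO_SAMPLING_RATES
byte b2 = (byte) (((2 - 1) << 6) + (samplingRateIndex << 2) + (1 >> 2));  // 0x6C: AAC LC, 8 kHz, mono (high bit)
byte b3 = (byte) (((1 & 3) << 6) + (packetLen >> 11));                    // 0x40: mono (low bits), length bits 12..11
byte b4 = (byte) ((packetLen & 0x7FF) >> 3);                              // 0x2E: length bits 10..3
byte b5 = (byte) (((packetLen & 7) << 5) + 0x1F);                         // 0x7F: length bits 2..0, buffer fullness
// Full header: FF F1 6C 40 2E 7F FC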

libusbcamera/src/main/java/com/serenegiant/usb/encoder/biz/H264EncodeConsumer.java (371 lines changed)

@@ -0,0 +1,371 @@
package com.serenegiant.usb.encoder.biz;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import com.jiangdg.usbcamera.FileUtils;
import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar;
import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar;
import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
/** Encodes the raw YUV video stream to H.264
* Created by jiangdongguo on 2017/5/6.
*/
@SuppressWarnings("deprecation")
public class H264EncodeConsumer extends Thread {
private static final boolean DEBUG = false;
private static final String TAG = "H264EncodeConsumer";
private static final String MIME_TYPE = "video/avc";
// insert a key frame every 1 s
private static final int FRAME_INTERVAL = 1;
// timeout for dequeuing encoder buffers: 10,000 µs (10 ms)
private static final int TIMES_OUT = 10000;
// hardware encoder
private MediaCodec mMediaCodec;
private int mColorFormat;
private boolean isExit = false;
private boolean isEncoderStart = false;
private MediaFormat mFormat;
private static String path = Environment.getExternalStorageDirectory().getAbsolutePath() + "/test2.h264";
private BufferedOutputStream outputStream;
final int millisPerframe = 1000 / 20;
long lastPush = 0;
private OnH264EncodeResultListener listener;
private int mWidth ;
private int mHeight ;
private MediaFormat newFormat;
private WeakReference<Mp4MediaMuxer> mMuxerRef;
private boolean isAddKeyFrame = false;
public interface OnH264EncodeResultListener{
void onEncodeResult(byte[] data, int offset,
int length, long timestamp);
}
public void setOnH264EncodeResultListener(OnH264EncodeResultListener listener) {
this.listener = listener;
}
public synchronized void setTmpuMuxer(Mp4MediaMuxer mMuxer){
this.mMuxerRef = new WeakReference<>(mMuxer);
Mp4MediaMuxer muxer = mMuxerRef.get();
if (muxer != null && newFormat != null) {
muxer.addTrack(newFormat, true);
}
}
private ByteBuffer[] inputBuffers;
private ByteBuffer[] outputBuffers;
public void setRawYuv(byte[] yuvData,int width,int height){
if (! isEncoderStart)
return;
// Convert the color space to whatever the encoder supports,
// i.e. NV21 ---> YUV420SP (color format 21)
//      NV21 ---> YUV420P  (color format 19)
mWidth = width;
mHeight = height;
try {
if (lastPush == 0) {
lastPush = System.currentTimeMillis();
}
long time = System.currentTimeMillis() - lastPush;
if (time >= 0) {
time = millisPerframe - time;
if (time > 0)
Thread.sleep(time / 2);
}
// feed the frame to the encoder
feedMediaCodecData(yuvData);
if (time > 0)
Thread.sleep(time / 2);
lastPush = System.currentTimeMillis();
} catch (InterruptedException ex) {
ex.printStackTrace();
}
}
private void feedMediaCodecData(byte[] data){
if (! isEncoderStart)
return;
int bufferIndex = -1;
try{
bufferIndex = mMediaCodec.dequeueInputBuffer(0);
}catch (IllegalStateException e){
e.printStackTrace();
}
if (bufferIndex >= 0) {
ByteBuffer buffer;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
buffer = mMediaCodec.getInputBuffer(bufferIndex);
} else {
buffer = inputBuffers[bufferIndex];
}
buffer.clear();
buffer.put(data);
buffer.clear();
mMediaCodec.queueInputBuffer(bufferIndex, 0, data.length, System.nanoTime() / 1000, MediaCodec.BUFFER_FLAG_KEY_FRAME);
}
}
public void exit(){
isExit = true;
}
@Override
public void run() {
if(!isEncoderStart){
startMediaCodec();
}
// Sleep 200 ms so the audio thread can start first,
// otherwise the first second of video freezes
try {
Thread.sleep(200);
} catch (InterruptedException e1) {
e1.printStackTrace();
}
// If the encoder has not started or there is no image data, the thread waits
while(!isExit){
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int outputBufferIndex = 0;
byte[] mPpsSps = new byte[0];
byte[] h264 = new byte[mWidth * mHeight];
do {
outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 10000);
if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
} else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
outputBuffers = mMediaCodec.getOutputBuffers();
} else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
synchronized (H264EncodeConsumer.this) {
newFormat = mMediaCodec.getOutputFormat();
if(mMuxerRef != null){
Mp4MediaMuxer muxer = mMuxerRef.get();
if (muxer != null) {
muxer.addTrack(newFormat, true);
}
}
}
} else if (outputBufferIndex < 0) {
// let's ignore it
} else {
ByteBuffer outputBuffer;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
outputBuffer = mMediaCodec.getOutputBuffer(outputBufferIndex);
} else {
outputBuffer = outputBuffers[outputBufferIndex];
}
outputBuffer.position(bufferInfo.offset);
outputBuffer.limit(bufferInfo.offset + bufferInfo.size);
boolean sync = false;
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {// sps
sync = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
if (!sync) {
byte[] temp = new byte[bufferInfo.size];
outputBuffer.get(temp);
mPpsSps = temp;
mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
continue;
} else {
mPpsSps = new byte[0];
}
}
sync |= (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
int len = mPpsSps.length + bufferInfo.size;
if (len > h264.length) {
h264 = new byte[len];
}
if (sync) {
System.arraycopy(mPpsSps, 0, h264, 0, mPpsSps.length);
outputBuffer.get(h264, mPpsSps.length, bufferInfo.size);
if(listener != null){
listener.onEncodeResult(h264, 0,mPpsSps.length + bufferInfo.size, bufferInfo.presentationTimeUs / 1000);
}
// feed the video stream to the muxer
if(mMuxerRef != null){
Mp4MediaMuxer muxer = mMuxerRef.get();
if (muxer != null) {
muxer.pumpStream(outputBuffer, bufferInfo, true);
}
isAddKeyFrame = true;
}
if(DEBUG)
Log.i(TAG,"关键帧 h264.length = "+h264.length+";mPpsSps.length="+mPpsSps.length
+ " bufferInfo.size = " + bufferInfo.size);
} else {
outputBuffer.get(h264, 0, bufferInfo.size);
if(listener != null){
listener.onEncodeResult(h264, 0,bufferInfo.size, bufferInfo.presentationTimeUs / 1000);
}
// feed the video stream to the muxer
if(isAddKeyFrame && mMuxerRef != null){
Mp4MediaMuxer muxer = mMuxerRef.get();
if (muxer != null) {
muxer.pumpStream(outputBuffer, bufferInfo, true);
}
}
if(DEBUG)
Log.i(TAG,"普通帧 h264.length = "+h264.length+ " bufferInfo.size = " + bufferInfo.size);
}
mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
}
} while (!isExit && isEncoderStart);
}
stopMediaCodec();
}
private void startMediaCodec() {
final MediaCodecInfo videoCodecInfo = selectVideoCodec(MIME_TYPE);
if (videoCodecInfo == null) {
Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
return;
}
final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, 640, 480);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, mColorFormat);
format.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate());
format.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
try {
mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
} catch (IOException e) {
e.printStackTrace();
}
mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mMediaCodec.start();
isEncoderStart = true;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP + 1) {
inputBuffers = outputBuffers = null;
} else {
inputBuffers = mMediaCodec.getInputBuffers();
outputBuffers = mMediaCodec.getOutputBuffers();
}
Bundle params = new Bundle();
params.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
mMediaCodec.setParameters(params);
}
}
private void stopMediaCodec(){
isEncoderStart = false;
if(mMediaCodec != null){
mMediaCodec.stop();
mMediaCodec.release();
Log.d(TAG,"关闭视频编码器");
}
}
private static final int FRAME_RATE = 15;
private static final float BPP = 0.50f;
private int calcBitRate() {
final int bitrate = (int)(BPP * FRAME_RATE * 640 * 480);
Log.i(TAG, String.format("bitrate=%5.2f[Mbps]", bitrate / 1024f / 1024f));
return bitrate;
}
/**
* select the first codec that match a specific MIME type
* @param mimeType
* @return null if no codec matched
*/
@SuppressWarnings("deprecation")
protected final MediaCodecInfo selectVideoCodec(final String mimeType) {
// get the list of available codecs
final int numCodecs = MediaCodecList.getCodecCount();
for (int i = 0; i < numCodecs; i++) {
final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) { // skip decoders
continue;
}
// select first codec that match a specific MIME type and color format
final String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
if (types[j].equalsIgnoreCase(mimeType)) {
final int format = selectColorFormat(codecInfo, mimeType);
if (format > 0) {
mColorFormat = format;
return codecInfo;
}
}
}
}
return null;
}
/**
* select color format available on specific codec and we can use.
* @return 0 if no colorFormat is matched
*/
protected static final int selectColorFormat(final MediaCodecInfo codecInfo, final String mimeType) {
int result = 0;
final MediaCodecInfo.CodecCapabilities caps;
try {
Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
caps = codecInfo.getCapabilitiesForType(mimeType);
} finally {
Thread.currentThread().setPriority(Thread.NORM_PRIORITY);
}
int colorFormat;
for (int i = 0; i < caps.colorFormats.length; i++) {
colorFormat = caps.colorFormats[i];
if (isRecognizedViewoFormat(colorFormat)) {
if (result == 0)
result = colorFormat;
break;
}
}
if (result == 0)
Log.e(TAG, "couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
return result;
}
/**
* color formats that we can use in this class
*/
protected static int[] recognizedFormats;
static {
recognizedFormats = new int[] {
// MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
// MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface,
};
}
private static final boolean isRecognizedViewoFormat(final int colorFormat) {
final int n = recognizedFormats != null ? recognizedFormats.length : 0;
for (int i = 0; i < n; i++) {
if (recognizedFormats[i] == colorFormat) {
return true;
}
}
return false;
}
}
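A note on the framing in the drain loop above: the encoder delivers SPS/PPS in a BUFFER_FLAG_CODEC_CONFIG buffer, which the consumer caches in mPpsSps and prepends to every sync frame, so each key frame handed to the listener is self-contained. A condensed sketch of that decision (error handling and the muxer path omitted; ptsMs stands for bufferInfo.presentationTimeUs / 1000):

// Condensed from the drain loop above, not the literal code.
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
    byte[] temp = new byte[bufferInfo.size];
    outputBuffer.get(temp);
    mPpsSps = temp;                                           // cache SPS/PPS, emit nothing yet
} else if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
    System.arraycopy(mPpsSps, 0, h264, 0, mPpsSps.length);    // key frame: SPS/PPS first
    outputBuffer.get(h264, mPpsSps.length, bufferInfo.size);
    listener.onEncodeResult(h264, 0, mPpsSps.length + bufferInfo.size, ptsMs);
} else {
    outputBuffer.get(h264, 0, bufferInfo.size);               // non-key frame: payload only
    listener.onEncodeResult(h264, 0, bufferInfo.size, ptsMs);
}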

libusbcamera/src/main/java/com/serenegiant/usb/encoder/biz/Mp4MediaMuxer.java (148 lines changed)

@@ -0,0 +1,148 @@
package com.serenegiant.usb.encoder.biz;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Build;
import android.util.Log;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
/** MP4 container muxer
*
* Created by jianddongguo on 2017/7/28.
*/
public class Mp4MediaMuxer {
private static final boolean VERBOSE = false;
private static final String TAG = Mp4MediaMuxer.class.getSimpleName();
private final String mFilePath;
private MediaMuxer mMuxer;
private final long durationMillis;
private int index = 0;
private int mVideoTrackIndex = -1;
private int mAudioTrackIndex = -1;
private long mBeginMillis;
private MediaFormat mVideoFormat;
private MediaFormat mAudioFormat;
// file path prefix; max duration per output file
public Mp4MediaMuxer(String path, long durationMillis) {
mFilePath = path;
this.durationMillis = durationMillis;
Object mux = null;
try {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
mux = new MediaMuxer(path + "-" + index++ + ".mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
}
} catch (IOException e) {
e.printStackTrace();
} finally {
mMuxer = (MediaMuxer) mux;
}
}
public synchronized void addTrack(MediaFormat format, boolean isVideo) {
// now that we have the Magic Goodies, start the muxer
if (mAudioTrackIndex != -1 && mVideoTrackIndex != -1)
throw new RuntimeException("already add all tracks");
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
int track = mMuxer.addTrack(format);
if (VERBOSE)
Log.i(TAG, String.format("addTrack %s result %d", isVideo ? "video" : "audio", track));
if (isVideo) {
mVideoFormat = format;
mVideoTrackIndex = track;
if (mAudioTrackIndex != -1) {
if (VERBOSE)
Log.i(TAG, "both audio and video added,and muxer is started");
mMuxer.start();
mBeginMillis = System.currentTimeMillis();
}
} else {
mAudioFormat = format;
mAudioTrackIndex = track;
if (mVideoTrackIndex != -1) {
mMuxer.start();
mBeginMillis = System.currentTimeMillis();
}
}
}
}
public synchronized void pumpStream(ByteBuffer outputBuffer, MediaCodec.BufferInfo bufferInfo, boolean isVideo) {
if (mAudioTrackIndex == -1 || mVideoTrackIndex == -1) {
// Log.i(TAG, String.format("pumpStream [%s] but muxer is not start.ignore..", isVideo ? "video" : "audio"));
return;
}
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
} else if (bufferInfo.size != 0) {
if (isVideo && mVideoTrackIndex == -1) {
throw new RuntimeException("muxer hasn't started");
}
// adjust the ByteBuffer values to match BufferInfo (not needed?)
outputBuffer.position(bufferInfo.offset);
outputBuffer.limit(bufferInfo.offset + bufferInfo.size);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
mMuxer.writeSampleData(isVideo ? mVideoTrackIndex : mAudioTrackIndex, outputBuffer, bufferInfo);
}
// if (VERBOSE)
// Log.d(TAG, String.format("sent %s [" + bufferInfo.size + "] with timestamp:[%d] to muxer", isVideo ? "video" : "audio", bufferInfo.presentationTimeUs / 1000));
}
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
// if (VERBOSE)
// Log.i(TAG, "BUFFER_FLAG_END_OF_STREAM received");
}
if (System.currentTimeMillis() - mBeginMillis >= durationMillis) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
// if (VERBOSE)
// Log.i(TAG, String.format("record file reach expiration.create new file:" + index));
mMuxer.stop();
mMuxer.release();
mMuxer = null;
mVideoTrackIndex = mAudioTrackIndex = -1;
try {
mMuxer = new MediaMuxer(mFilePath + "-" + ++index + ".mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
addTrack(mVideoFormat, true);
addTrack(mAudioFormat, false);
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
public synchronized void release() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
if (mMuxer != null) {
if (mAudioTrackIndex != -1 && mVideoTrackIndex != -1) {
if (VERBOSE)
Log.i(TAG, String.format("muxer is started. now it will be stoped."));
try {
mMuxer.stop();
mMuxer.release();
} catch (IllegalStateException ex) {
ex.printStackTrace();
}
if (System.currentTimeMillis() - mBeginMillis <= 1500){
new File(mFilePath + "-" + index + ".mp4").delete();
}
mAudioTrackIndex = mVideoTrackIndex = -1;
}else{
if (VERBOSE)
Log.i(TAG, String.format("muxer is failed to be stoped."));
}
}
}
}
}
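Because pumpStream() rolls over to a fresh MediaMuxer whenever durationMillis elapses, a single Mp4MediaMuxer instance produces a numbered series of files rather than one MP4. A minimal usage sketch mirroring how the handler and encoder threads drive it; the base path is illustrative:

// A base path plus a rotation interval yields "<base>-0.mp4", "<base>-1.mp4", ...
Mp4MediaMuxer muxer = new Mp4MediaMuxer("/sdcard/2017_10_18_12_00_00", 30 * 60 * 1000L);
// Each encoder thread registers its track once INFO_OUTPUT_FORMAT_CHANGED fires:
muxer.addTrack(videoFormat, true);   // from H264EncodeConsumer
muxer.addTrack(audioFormat, false);  // from AACEncodeConsumer
// Encoded buffers are fed in as they are drained:
muxer.pumpStream(encodedData, bufferInfo, true);   // video sample
muxer.pumpStream(aacData, aacInfo, false);         // audio sample
// Stop: the current segment is finalized; segments shorter than 1.5 s are deleted.
muxer.release();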