
Fix color distortion in captured photos

main
jiangdongguo 7 years ago
commit 824d277921
Changed files:
  1. app/src/main/java/com/jiangdg/usbcamera/view/USBCameraActivity.java (3 changed lines)
  2. app/src/main/res/layout/activity_usbcamera.xml (3 changed lines)
  3. libusbcamera/src/main/java/com/jiangdg/usbcamera/UVCCameraHelper.java (26 changed lines)
  4. libusbcamera/src/main/java/com/serenegiant/usb/UVCCamera.java (6 changed lines)
  5. libusbcamera/src/main/java/com/serenegiant/usb/common/AbstractUVCCameraHandler.java (6 changed lines)
  6. libusbcamera/src/main/java/com/serenegiant/usb/common/UVCCameraHandler.java (6 changed lines)
  7. libusbcamera/src/main/java/com/serenegiant/usb/common/UVCCameraHandlerMultiSurface.java (6 changed lines)
  8. libusbcamera/src/main/java/com/serenegiant/usb/encoder/biz/H264EncodeConsumer.java (188 changed lines)

app/src/main/java/com/jiangdg/usbcamera/view/USBCameraActivity.java (3 changed lines)

@@ -139,8 +139,10 @@ public class USBCameraActivity extends AppCompatActivity implements CameraDialog
          mUVCCameraView = (CameraViewInterface) mTextureView;
          mUVCCameraView.setCallback(this);
          mCameraHelper = UVCCameraHelper.getInstance();
+         mCameraHelper.setDefaultPreviewSize(320,240);
          mCameraHelper.initUSBMonitor(this, mUVCCameraView, listener);
          mCameraHelper.setOnPreviewFrameListener(new AbstractUVCCameraHandler.OnPreViewResultListener() {
              @Override
              public void onPreviewResult(byte[] nv21Yuv) {
@@ -232,6 +234,7 @@ public class USBCameraActivity extends AppCompatActivity implements CameraDialog
                          Log.i(TAG,"save path:" + path);
                      }
                  });
                  break;
              case R.id.menu_recording:
                  if (mCameraHelper == null || !mCameraHelper.isCameraOpened()) {
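Note on the new call: setDefaultPreviewSize() has to run before initUSBMonitor(); the helper (see UVCCameraHelper.java below) now throws an IllegalStateException otherwise. A minimal sketch of the expected order, reusing the fields from USBCameraActivity above:

```java
// Sketch only (not part of the commit): expected call order inside onCreate().
mCameraHelper = UVCCameraHelper.getInstance();
// Must come before initUSBMonitor(); the helper throws IllegalStateException otherwise.
mCameraHelper.setDefaultPreviewSize(320, 240);
mCameraHelper.initUSBMonitor(this, mUVCCameraView, listener);
```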

app/src/main/res/layout/activity_usbcamera.xml (3 changed lines)

@@ -21,7 +21,8 @@
          android:id="@+id/camera_view"
          android:layout_below="@id/toolbar"
          android:layout_width="match_parent"
-         android:layout_height="match_parent"
+         android:layout_height="wrap_content"
+         android:layout_centerInParent="true"
          android:layout_centerHorizontal="true"
          android:layout_centerVertical="true" />

libusbcamera/src/main/java/com/jiangdg/usbcamera/UVCCameraHelper.java (26 changed lines)

@@ -34,8 +34,6 @@ public class UVCCameraHelper {
      private int previewHeight = 480;
      public static int MODE_BRIGHTNESS = UVCCamera.PU_BRIGHTNESS;
      public static int MODE_CONTRAST = UVCCamera.PU_CONTRAST;
-     //0-YUYV
-     private static final int PREVIEW_FORMAT = 0;
      private static UVCCameraHelper mCameraHelper;
      // USB Manager
@@ -135,9 +133,9 @@ public class UVCCameraHelper {
              mCameraHandler = null;
          }
          // initialize camera handler
-         // cameraView.setAspectRatio(previewWidth / (float)previewHeight);
+         // mCamViewWrf.get().setAspectRatio(previewWidth / (float)previewHeight);
          mCameraHandler = UVCCameraHandler.createHandler(mActivityWrf.get(), mCamViewWrf.get(), 2,
-                 previewWidth, previewHeight, PREVIEW_FORMAT);
+                 previewWidth, previewHeight, UVCCamera.FRAME_FORMAT_YUYV);
      }
      public void updateResolution(int width, int height) {
@@ -150,9 +148,9 @@ public class UVCCameraHelper {
              mCameraHandler.release();
              mCameraHandler = null;
          }
-         // cameraView.setAspectRatio(previewWidth / (float)previewHeight);
+         // mCamViewWrf.get().setAspectRatio(previewWidth / (float)previewHeight);
          mCameraHandler = UVCCameraHandler.createHandler(mActivityWrf.get(), mCamViewWrf.get(), 2,
-                 previewWidth, previewHeight, PREVIEW_FORMAT);
+                 previewWidth, previewHeight, UVCCamera.FRAME_FORMAT_YUYV);
          openCamera(mCtrlBlock);
          startPreview(mCamViewWrf.get());
      }
@@ -299,4 +297,20 @@ public class UVCCameraHelper {
              return null;
          return mCameraHandler.getSupportedPreviewSizes();
      }
+     public void setDefaultPreviewSize(int defaultWidth,int defaultHeight) {
+         if(mUSBMonitor != null) {
+             throw new IllegalStateException("setDefaultPreviewSize should be call before initMonitor");
+         }
+         this.previewWidth = defaultWidth;
+         this.previewHeight = defaultHeight;
+     }
+     public int getPreviewWidth() {
+         return previewWidth;
+     }
+     public int getPreviewHeight() {
+         return previewHeight;
+     }
  }

libusbcamera/src/main/java/com/serenegiant/usb/UVCCamera.java (6 changed lines)

@@ -58,8 +58,8 @@ public class UVCCamera {
      public static final int PIXEL_FORMAT_YUV = 1;
      public static final int PIXEL_FORMAT_RGB565 = 2;
      public static final int PIXEL_FORMAT_RGBX = 3;
-     public static final int PIXEL_FORMAT_YUV420SP = 4;
-     public static final int PIXEL_FORMAT_NV21 = 5;     // = YVU420SemiPlanar
+     public static final int PIXEL_FORMAT_YUV420SP = 4; // NV12
+     public static final int PIXEL_FORMAT_NV21 = 5;     // = YVU420SemiPlanar, NV21; colors are distorted when saved to JPEG
      //--------------------------------------------------------------------------------
      public static final int CTRL_SCANNING = 0x00000001; // D0: Scanning Mode
@@ -127,7 +127,7 @@ public class UVCCamera {
      private UsbControlBlock mCtrlBlock;
      protected long mControlSupports; // feature flags supported by the camera control unit
      protected long mProcSupports;    // feature flags supported by the processing unit
-     protected int mCurrentFrameFormat = FRAME_FORMAT_MJPEG;
+     protected int mCurrentFrameFormat = FRAME_FORMAT_YUYV;
      protected int mCurrentWidth = DEFAULT_PREVIEW_WIDTH, mCurrentHeight = DEFAULT_PREVIEW_HEIGHT;
      protected float mCurrentBandwidthFactor = DEFAULT_BANDWIDTH;
      protected String mSupportedSize;

libusbcamera/src/main/java/com/serenegiant/usb/common/AbstractUVCCameraHandler.java (6 changed lines)

@@ -530,9 +530,9 @@ public abstract class AbstractUVCCameraHandler extends Handler {
          if ((mUVCCamera == null) || mIsPreviewing) return;
          try {
              mUVCCamera.setPreviewSize(mWidth, mHeight, 1, 31, mPreviewMode, mBandwidthFactor);
-             // Get the USB camera preview data
-             mUVCCamera.setFrameCallback(mIFrameCallback, UVCCamera.PIXEL_FORMAT_NV21);
+             // Get the USB camera preview data; with NV21 the colors are distorted
+             // mUVCCamera.setFrameCallback(mIFrameCallback, UVCCamera.PIXEL_FORMAT_NV21);
+             mUVCCamera.setFrameCallback(mIFrameCallback, UVCCamera.PIXEL_FORMAT_YUV420SP);
          } catch (final IllegalArgumentException e) {
              // try {
              //     // fallback to YUV mode
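For reference (not part of this commit): Android's YuvImage only accepts NV21 or YUY2 when compressing to JPEG, so chroma delivered in NV12 order shows up as exactly the kind of color distortion this commit describes. A hypothetical sketch of the JPEG save path, assuming the frame really is NV21 at that point:

```java
// Hypothetical helper (not in this commit): compress an NV21 frame to JPEG.
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import java.io.IOException;
import java.io.OutputStream;

final class JpegSaver {
    static void saveNv21AsJpeg(byte[] nv21, int width, int height, OutputStream out) throws IOException {
        // YuvImage interprets the buffer as NV21; swapped U/V planes produce wrong colors here.
        YuvImage img = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
        img.compressToJpeg(new Rect(0, 0, width, height), 90, out); // quality 90
    }
}
```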

libusbcamera/src/main/java/com/serenegiant/usb/common/UVCCameraHandler.java (6 changed lines)

@@ -42,7 +42,7 @@ public class UVCCameraHandler extends AbstractUVCCameraHandler {
              final Activity parent, final CameraViewInterface cameraView,
              final int width, final int height) {
-         return createHandler(parent, cameraView, 1, width, height, UVCCamera.FRAME_FORMAT_MJPEG, UVCCamera.DEFAULT_BANDWIDTH);
+         return createHandler(parent, cameraView, 1, width, height, UVCCamera.FRAME_FORMAT_YUYV, UVCCamera.DEFAULT_BANDWIDTH);
      }
      /**
@@ -58,7 +58,7 @@ public class UVCCameraHandler extends AbstractUVCCameraHandler {
              final Activity parent, final CameraViewInterface cameraView,
              final int width, final int height, final float bandwidthFactor) {
-         return createHandler(parent, cameraView, 1, width, height, UVCCamera.FRAME_FORMAT_MJPEG, bandwidthFactor);
+         return createHandler(parent, cameraView, 1, width, height, UVCCamera.FRAME_FORMAT_YUYV, bandwidthFactor);
      }
      /**
@@ -74,7 +74,7 @@ public class UVCCameraHandler extends AbstractUVCCameraHandler {
              final Activity parent, final CameraViewInterface cameraView,
              final int encoderType, final int width, final int height) {
-         return createHandler(parent, cameraView, encoderType, width, height, UVCCamera.FRAME_FORMAT_MJPEG, UVCCamera.DEFAULT_BANDWIDTH);
+         return createHandler(parent, cameraView, encoderType, width, height, UVCCamera.FRAME_FORMAT_YUYV, UVCCamera.DEFAULT_BANDWIDTH);
      }
      /**

libusbcamera/src/main/java/com/serenegiant/usb/common/UVCCameraHandlerMultiSurface.java (6 changed lines)

@@ -43,7 +43,7 @@ public class UVCCameraHandlerMultiSurface extends AbstractUVCCameraHandler {
              final Activity parent, final CameraViewInterface cameraView,
              final int width, final int height) {
-         return createHandler(parent, cameraView, 1, width, height, UVCCamera.FRAME_FORMAT_MJPEG, UVCCamera.DEFAULT_BANDWIDTH);
+         return createHandler(parent, cameraView, 1, width, height, UVCCamera.FRAME_FORMAT_YUYV, UVCCamera.DEFAULT_BANDWIDTH);
      }
      /**
@@ -59,7 +59,7 @@ public class UVCCameraHandlerMultiSurface extends AbstractUVCCameraHandler {
              final Activity parent, final CameraViewInterface cameraView,
              final int width, final int height, final float bandwidthFactor) {
-         return createHandler(parent, cameraView, 1, width, height, UVCCamera.FRAME_FORMAT_MJPEG, bandwidthFactor);
+         return createHandler(parent, cameraView, 1, width, height, UVCCamera.FRAME_FORMAT_YUYV, bandwidthFactor);
      }
      /**
@@ -75,7 +75,7 @@ public class UVCCameraHandlerMultiSurface extends AbstractUVCCameraHandler {
              final Activity parent, final CameraViewInterface cameraView,
              final int encoderType, final int width, final int height) {
-         return createHandler(parent, cameraView, encoderType, width, height, UVCCamera.FRAME_FORMAT_MJPEG, UVCCamera.DEFAULT_BANDWIDTH);
+         return createHandler(parent, cameraView, encoderType, width, height, UVCCamera.FRAME_FORMAT_YUYV, UVCCamera.DEFAULT_BANDWIDTH);
      }
      /**

libusbcamera/src/main/java/com/serenegiant/usb/encoder/biz/H264EncodeConsumer.java (188 changed lines)

@@ -1,10 +1,6 @@
  package com.serenegiant.usb.encoder.biz;
- import java.io.BufferedOutputStream;
- import java.io.IOException;
- import java.lang.ref.WeakReference;
- import java.nio.ByteBuffer;
+ import android.annotation.SuppressLint;
  import android.media.MediaCodec;
  import android.media.MediaCodecInfo;
  import android.media.MediaCodecList;
@@ -14,7 +10,13 @@ import android.os.Bundle;
  import android.os.Environment;
  import android.util.Log;
- /** Encode the YUV video stream
+ import java.io.BufferedOutputStream;
+ import java.io.IOException;
+ import java.lang.ref.WeakReference;
+ import java.nio.ByteBuffer;
+ /**
+  * Encode the YUV video stream
   * Created by jiangdongguo on 2017/5/6.
   */
@@ -33,19 +35,19 @@ public class H264EncodeConsumer extends Thread {
      private boolean isExit = false;
      private boolean isEncoderStart = false;
      private MediaFormat mFormat;
      private static String path = Environment.getExternalStorageDirectory().getAbsolutePath() + "/test2.h264";
      private BufferedOutputStream outputStream;
      final int millisPerframe = 1000 / 20;
      long lastPush = 0;
      private OnH264EncodeResultListener listener;
-     private int mWidth ;
-     private int mHeight ;
+     private int mWidth;
+     private int mHeight;
      private MediaFormat newFormat;
      private WeakReference<Mp4MediaMuxer> mMuxerRef;
      private boolean isAddKeyFrame = false;
-     public interface OnH264EncodeResultListener{
+     public interface OnH264EncodeResultListener {
          void onEncodeResult(byte[] data, int offset,
                              int length, long timestamp);
      }
@@ -54,17 +56,13 @@ public class H264EncodeConsumer extends Thread {
          this.listener = listener;
      }
-     public H264EncodeConsumer(){
-     }
-     public H264EncodeConsumer(int width,int height){
+     public H264EncodeConsumer(int width, int height) {
          this.mWidth = width;
          this.mHeight = height;
      }
-     public synchronized void setTmpuMuxer(Mp4MediaMuxer mMuxer){
+     public synchronized void setTmpuMuxer(Mp4MediaMuxer mMuxer) {
          this.mMuxerRef = new WeakReference<>(mMuxer);
          Mp4MediaMuxer muxer = mMuxerRef.get();
          if (muxer != null && newFormat != null) {
              muxer.addTrack(newFormat, true);
@@ -74,13 +72,10 @@ public class H264EncodeConsumer extends Thread {
      private ByteBuffer[] inputBuffers;
      private ByteBuffer[] outputBuffers;
-     public void setRawYuv(byte[] yuvData,int width,int height){
-         if (! isEncoderStart)
+     public void setRawYuv(byte[] yuvData, int width, int height) {
+         if (!isEncoderStart)
              return;
-         // Convert the color space to a format the encoder supports,
-         // i.e. nv21 ---> YUV420sp (21)
-         //      nv21 ---> YUV420p  (19)
-         if(mWidth != width || mHeight != height){
+         if (mWidth != width || mHeight != height) {
              mWidth = width;
              mHeight = height;
              return;
@@ -96,7 +91,8 @@ public class H264EncodeConsumer extends Thread {
                  Thread.sleep(time / 2);
              }
              // Write the data into the encoder
-             feedMediaCodecData(yuvData);
+             feedMediaCodecData(nv12ToNV21(yuvData, mWidth, mHeight));
              if (time > 0)
                  Thread.sleep(time / 2);
@@ -106,13 +102,13 @@ public class H264EncodeConsumer extends Thread {
          }
      }
-     private void feedMediaCodecData(byte[] data){
-         if (! isEncoderStart)
+     private void feedMediaCodecData(byte[] data) {
+         if (!isEncoderStart)
              return;
          int bufferIndex = -1;
-         try{
+         try {
              bufferIndex = mMediaCodec.dequeueInputBuffer(0);
-         }catch (IllegalStateException e){
+         } catch (IllegalStateException e) {
              e.printStackTrace();
          }
          if (bufferIndex >= 0) {
@@ -129,15 +125,16 @@ public class H264EncodeConsumer extends Thread {
          }
      }
-     public void exit(){
+     public void exit() {
          isExit = true;
      }
+     @SuppressLint("WrongConstant")
      @Override
      public void run() {
-         if(!isEncoderStart){
+         if (!isEncoderStart) {
              startMediaCodec();
          }
          // Sleep for 200 ms to wait for the audio thread to start;
          // otherwise the first second of the video stalls
          try {
@@ -147,7 +144,7 @@ public class H264EncodeConsumer extends Thread {
          }
          // If the encoder has not started or there is no image data, the thread blocks and waits
-         while(!isExit){
+         while (!isExit) {
              MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
              int outputBufferIndex = 0;
              byte[] mPpsSps = new byte[0];
@@ -162,7 +159,7 @@ public class H264EncodeConsumer extends Thread {
              } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                  synchronized (H264EncodeConsumer.this) {
                      newFormat = mMediaCodec.getOutputFormat();
-                     if(mMuxerRef != null){
+                     if (mMuxerRef != null) {
                          Mp4MediaMuxer muxer = mMuxerRef.get();
                          if (muxer != null) {
                              muxer.addTrack(newFormat, true);
@@ -202,35 +199,35 @@ public class H264EncodeConsumer extends Thread {
                  if (sync) {
                      System.arraycopy(mPpsSps, 0, h264, 0, mPpsSps.length);
                      outputBuffer.get(h264, mPpsSps.length, bufferInfo.size);
-                     if(listener != null){
-                         listener.onEncodeResult(h264, 0,mPpsSps.length + bufferInfo.size, bufferInfo.presentationTimeUs / 1000);
+                     if (listener != null) {
+                         listener.onEncodeResult(h264, 0, mPpsSps.length + bufferInfo.size, bufferInfo.presentationTimeUs / 1000);
                      }
                      // Add the video stream to the muxer
-                     if(mMuxerRef != null){
+                     if (mMuxerRef != null) {
                          Mp4MediaMuxer muxer = mMuxerRef.get();
                          if (muxer != null) {
                              muxer.pumpStream(outputBuffer, bufferInfo, true);
                          }
                          isAddKeyFrame = true;
                      }
-                     if(DEBUG)
-                         Log.i(TAG,"关键帧 h264.length = "+h264.length+";mPpsSps.length="+mPpsSps.length
+                     if (DEBUG)
+                         Log.i(TAG, "关键帧 h264.length = " + h264.length + ";mPpsSps.length=" + mPpsSps.length
                                  + " bufferInfo.size = " + bufferInfo.size);
                  } else {
                      outputBuffer.get(h264, 0, bufferInfo.size);
-                     if(listener != null){
-                         listener.onEncodeResult(h264, 0,bufferInfo.size, bufferInfo.presentationTimeUs / 1000);
+                     if (listener != null) {
+                         listener.onEncodeResult(h264, 0, bufferInfo.size, bufferInfo.presentationTimeUs / 1000);
                      }
                      // Add the video stream to the muxer
-                     if(isAddKeyFrame && mMuxerRef != null){
+                     if (isAddKeyFrame && mMuxerRef != null) {
                          Mp4MediaMuxer muxer = mMuxerRef.get();
                          if (muxer != null) {
                              muxer.pumpStream(outputBuffer, bufferInfo, true);
                          }
                      }
-                     if(DEBUG)
-                         Log.i(TAG,"普通帧 h264.length = "+h264.length+ " bufferInfo.size = " + bufferInfo.size);
+                     if (DEBUG)
+                         Log.i(TAG, "普通帧 h264.length = " + h264.length + " bufferInfo.size = " + bufferInfo.size);
                  }
                  mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
              }
@@ -261,7 +258,6 @@ public class H264EncodeConsumer extends Thread {
          mMediaCodec.start();
          isEncoderStart = true;
          if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP + 1) {
              inputBuffers = outputBuffers = null;
@@ -277,25 +273,27 @@ public class H264EncodeConsumer extends Thread {
          }
      }
-     private void stopMediaCodec(){
+     private void stopMediaCodec() {
          isEncoderStart = false;
-         if(mMediaCodec != null){
+         if (mMediaCodec != null) {
              mMediaCodec.stop();
              mMediaCodec.release();
-             Log.d(TAG,"关闭视频编码器");
+             Log.d(TAG, "关闭视频编码器");
          }
      }
      private static final int FRAME_RATE = 15;
      private static final float BPP = 0.50f;
      private int calcBitRate() {
-         final int bitrate = (int)(BPP * FRAME_RATE * mWidth * mHeight);
+         final int bitrate = (int) (BPP * FRAME_RATE * mWidth * mHeight);
          Log.i(TAG, String.format("bitrate=%5.2f[Mbps]", bitrate / 1024f / 1024f));
          return bitrate;
      }
      /**
       * select the first codec that match a specific MIME type
+      *
       * @param mimeType
       * @return null if no codec matched
       */
@@ -307,7 +305,7 @@ public class H264EncodeConsumer extends Thread {
          for (int i = 0; i < numCodecs; i++) {
              final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
              if (!codecInfo.isEncoder()) {    // skipp decoder
                  continue;
              }
              // select first codec that match a specific MIME type and color format
@@ -327,6 +325,7 @@ public class H264EncodeConsumer extends Thread {
      /**
       * select color format available on specific codec and we can use.
+      *
       * @return 0 if no colorFormat is matched
       */
      protected static final int selectColorFormat(final MediaCodecInfo codecInfo, final String mimeType) {
@@ -356,12 +355,14 @@ public class H264EncodeConsumer extends Thread {
       * color formats that we can use in this class
       */
      protected static int[] recognizedFormats;
      static {
-         recognizedFormats = new int[] {
-                 // MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
+         recognizedFormats = new int[]{
+                 // MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar,
+                 // MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
+                 // MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar,
                  MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
                  MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
+                 // MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface,
          };
      }
@@ -374,4 +375,87 @@ public class H264EncodeConsumer extends Thread {
          }
          return false;
      }
+     private byte[] nv21ToI420(byte[] data, int width, int height) {
+         byte[] ret = new byte[width * height * 3 / 2];
+         int total = width * height;
+         ByteBuffer bufferY = ByteBuffer.wrap(ret, 0, total);                       // I420 Y plane
+         ByteBuffer bufferU = ByteBuffer.wrap(ret, total, total / 4);               // I420 U plane
+         ByteBuffer bufferV = ByteBuffer.wrap(ret, total + total / 4, total / 4);   // I420 V plane
+         // NV21: YYYYYYYY VUVU
+         bufferY.put(data, 0, total);
+         for (int i = total; i < data.length; i += 2) {
+             bufferV.put(data[i]);
+             bufferU.put(data[i + 1]);
+         }
+         return ret;
+     }
+     private byte[] nv12ToI420(byte[] data, int width, int height) {
+         byte[] ret = new byte[width * height * 3 / 2];
+         int total = width * height;
+         ByteBuffer bufferY = ByteBuffer.wrap(ret, 0, total);                       // I420 Y plane
+         ByteBuffer bufferU = ByteBuffer.wrap(ret, total, total / 4);               // I420 U plane
+         ByteBuffer bufferV = ByteBuffer.wrap(ret, total + total / 4, total / 4);   // I420 V plane
+         // NV12: YYYYYYYY UVUV
+         bufferY.put(data, 0, total);
+         for (int i = total; i < data.length; i += 2) {
+             bufferU.put(data[i]);
+             bufferV.put(data[i + 1]);
+         }
+         return ret;
+     }
+     private byte[] nv12ToNv21(byte[] data, int width, int height) {
+         byte[] ret = new byte[width * height * 3 / 2];
+         int total = width * height;
+         ByteBuffer bufferY = ByteBuffer.wrap(ret, 0, total);                       // I420 Y plane
+         ByteBuffer bufferU = ByteBuffer.wrap(ret, total, total / 4);               // I420 U plane
+         ByteBuffer bufferV = ByteBuffer.wrap(ret, total + total / 4, total / 4);   // I420 V plane
+         // NV12: YYYYYYYY UVUV
+         bufferY.put(data, 0, total);
+         for (int i = total; i < data.length; i += 2) {
+             bufferU.put(data[i]);
+             bufferV.put(data[i + 1]);
+         }
+         return ret;
+     }
+     // YYYYYYYY UVUV (nv21) --> YYYYYYYY VUVU (nv12)
+     private byte[] nv21ToNV12(byte[] nv21, int width, int height) {
+         byte[] ret = new byte[width * height * 3 / 2];
+         int framesize = width * height;
+         int i = 0, j = 0;
+         // copy the Y plane
+         System.arraycopy(nv21, 0, ret, 0, framesize);
+         // copy the UV plane
+         for (j = framesize; j < nv21.length; j += 2) {
+             ret[j + 1] = nv21[j + 1];
+             ret[j] = nv21[j];
+         }
+         return ret;
+     }
+     // YYYYYYYY UVUV (nv12) --> YYYYYYYY VUVU (nv21)
+     private byte[] nv12ToNV21(byte[] nv12, int width, int height) {
+         byte[] ret = new byte[width * height * 3 / 2];
+         int framesize = width * height;
+         int i = 0, j = 0;
+         // copy the Y plane
+         System.arraycopy(nv12, 0, ret, 0, framesize);
+         // copy the UV plane, swapping each U/V pair
+         for (j = framesize; j < nv12.length; j += 2) {
+             ret[j] = nv12[j + 1];
+             ret[j + 1] = nv12[j];
+         }
+         return ret;
+     }
  }
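Putting the pieces together, a rough sketch (not part of the commit) of how preview frames are expected to reach the encoder after this change: frames now arrive as NV12 (PIXEL_FORMAT_YUV420SP), and setRawYuv() applies the NV12-to-NV21 swap above before queueing them. The getPreviewWidth()/getPreviewHeight() accessors are the ones added to UVCCameraHelper in this commit:

```java
// Sketch only: field names mirror USBCameraActivity; error handling omitted.
final H264EncodeConsumer encoder = new H264EncodeConsumer(
        mCameraHelper.getPreviewWidth(), mCameraHelper.getPreviewHeight());
encoder.start();
mCameraHelper.setOnPreviewFrameListener(new AbstractUVCCameraHandler.OnPreViewResultListener() {
    @Override
    public void onPreviewResult(byte[] frame) {
        // frame is NV12 here; setRawYuv() converts it to NV21 before feeding MediaCodec
        encoder.setRawYuv(frame, mCameraHelper.getPreviewWidth(), mCameraHelper.getPreviewHeight());
    }
});
```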
