Browse Source

修复拍照颜色失真问题

main
jiangdongguo 7 years ago
parent
commit
824d277921
  1. 3
      app/src/main/java/com/jiangdg/usbcamera/view/USBCameraActivity.java
  2. 3
      app/src/main/res/layout/activity_usbcamera.xml
  3. 26
      libusbcamera/src/main/java/com/jiangdg/usbcamera/UVCCameraHelper.java
  4. 6
      libusbcamera/src/main/java/com/serenegiant/usb/UVCCamera.java
  5. 6
      libusbcamera/src/main/java/com/serenegiant/usb/common/AbstractUVCCameraHandler.java
  6. 6
      libusbcamera/src/main/java/com/serenegiant/usb/common/UVCCameraHandler.java
  7. 6
      libusbcamera/src/main/java/com/serenegiant/usb/common/UVCCameraHandlerMultiSurface.java
  8. 116
      libusbcamera/src/main/java/com/serenegiant/usb/encoder/biz/H264EncodeConsumer.java

3
app/src/main/java/com/jiangdg/usbcamera/view/USBCameraActivity.java

@ -139,8 +139,10 @@ public class USBCameraActivity extends AppCompatActivity implements CameraDialog
mUVCCameraView = (CameraViewInterface) mTextureView; mUVCCameraView = (CameraViewInterface) mTextureView;
mUVCCameraView.setCallback(this); mUVCCameraView.setCallback(this);
mCameraHelper = UVCCameraHelper.getInstance(); mCameraHelper = UVCCameraHelper.getInstance();
mCameraHelper.setDefaultPreviewSize(320,240);
mCameraHelper.initUSBMonitor(this, mUVCCameraView, listener); mCameraHelper.initUSBMonitor(this, mUVCCameraView, listener);
mCameraHelper.setOnPreviewFrameListener(new AbstractUVCCameraHandler.OnPreViewResultListener() { mCameraHelper.setOnPreviewFrameListener(new AbstractUVCCameraHandler.OnPreViewResultListener() {
@Override @Override
public void onPreviewResult(byte[] nv21Yuv) { public void onPreviewResult(byte[] nv21Yuv) {
@ -232,6 +234,7 @@ public class USBCameraActivity extends AppCompatActivity implements CameraDialog
Log.i(TAG,"save path:" + path); Log.i(TAG,"save path:" + path);
} }
}); });
break; break;
case R.id.menu_recording: case R.id.menu_recording:
if (mCameraHelper == null || !mCameraHelper.isCameraOpened()) { if (mCameraHelper == null || !mCameraHelper.isCameraOpened()) {

3
app/src/main/res/layout/activity_usbcamera.xml

@ -21,7 +21,8 @@
android:id="@+id/camera_view" android:id="@+id/camera_view"
android:layout_below="@id/toolbar" android:layout_below="@id/toolbar"
android:layout_width="match_parent" android:layout_width="match_parent"
android:layout_height="match_parent" android:layout_height="wrap_content"
android:layout_centerInParent="true"
android:layout_centerHorizontal="true" android:layout_centerHorizontal="true"
android:layout_centerVertical="true" /> android:layout_centerVertical="true" />

26
libusbcamera/src/main/java/com/jiangdg/usbcamera/UVCCameraHelper.java

@ -34,8 +34,6 @@ public class UVCCameraHelper {
private int previewHeight = 480; private int previewHeight = 480;
public static int MODE_BRIGHTNESS = UVCCamera.PU_BRIGHTNESS; public static int MODE_BRIGHTNESS = UVCCamera.PU_BRIGHTNESS;
public static int MODE_CONTRAST = UVCCamera.PU_CONTRAST; public static int MODE_CONTRAST = UVCCamera.PU_CONTRAST;
//0-YUYV
private static final int PREVIEW_FORMAT = 0;
private static UVCCameraHelper mCameraHelper; private static UVCCameraHelper mCameraHelper;
// USB Manager // USB Manager
@ -135,9 +133,9 @@ public class UVCCameraHelper {
mCameraHandler = null; mCameraHandler = null;
} }
// initialize camera handler // initialize camera handler
// cameraView.setAspectRatio(previewWidth / (float)previewHeight); // mCamViewWrf.get().setAspectRatio(previewWidth / (float)previewHeight);
mCameraHandler = UVCCameraHandler.createHandler(mActivityWrf.get(), mCamViewWrf.get(), 2, mCameraHandler = UVCCameraHandler.createHandler(mActivityWrf.get(), mCamViewWrf.get(), 2,
previewWidth, previewHeight, PREVIEW_FORMAT); previewWidth, previewHeight, UVCCamera.FRAME_FORMAT_YUYV);
} }
public void updateResolution(int width, int height) { public void updateResolution(int width, int height) {
@ -150,9 +148,9 @@ public class UVCCameraHelper {
mCameraHandler.release(); mCameraHandler.release();
mCameraHandler = null; mCameraHandler = null;
} }
// cameraView.setAspectRatio(previewWidth / (float)previewHeight); // mCamViewWrf.get().setAspectRatio(previewWidth / (float)previewHeight);
mCameraHandler = UVCCameraHandler.createHandler(mActivityWrf.get(), mCamViewWrf.get(), 2, mCameraHandler = UVCCameraHandler.createHandler(mActivityWrf.get(), mCamViewWrf.get(), 2,
previewWidth, previewHeight, PREVIEW_FORMAT); previewWidth, previewHeight, UVCCamera.FRAME_FORMAT_YUYV);
openCamera(mCtrlBlock); openCamera(mCtrlBlock);
startPreview(mCamViewWrf.get()); startPreview(mCamViewWrf.get());
} }
@ -299,4 +297,20 @@ public class UVCCameraHelper {
return null; return null;
return mCameraHandler.getSupportedPreviewSizes(); return mCameraHandler.getSupportedPreviewSizes();
} }
/**
 * Sets the preview resolution that will be used when the camera handler is
 * created. Must be invoked before {@code initUSBMonitor}, because the
 * handler is built from these values during initialization.
 *
 * @param defaultWidth  preview width in pixels; must be positive
 * @param defaultHeight preview height in pixels; must be positive
 * @throws IllegalStateException    if the USB monitor has already been initialized
 * @throws IllegalArgumentException if either dimension is not positive
 */
public void setDefaultPreviewSize(int defaultWidth, int defaultHeight) {
    if (mUSBMonitor != null) {
        // Fixed message: previous text was ungrammatical and referenced a
        // nonexistent method name ("initMonitor").
        throw new IllegalStateException("setDefaultPreviewSize should be called before initUSBMonitor");
    }
    if (defaultWidth <= 0 || defaultHeight <= 0) {
        throw new IllegalArgumentException(
                "preview size must be positive: " + defaultWidth + "x" + defaultHeight);
    }
    this.previewWidth = defaultWidth;
    this.previewHeight = defaultHeight;
}
/**
 * Returns the preview width (in pixels) currently configured for the camera.
 *
 * @return configured preview width
 */
public int getPreviewWidth() {
    return this.previewWidth;
}
/**
 * Returns the preview height (in pixels) currently configured for the camera.
 *
 * @return configured preview height
 */
public int getPreviewHeight() {
    return this.previewHeight;
}
} }

6
libusbcamera/src/main/java/com/serenegiant/usb/UVCCamera.java

@ -58,8 +58,8 @@ public class UVCCamera {
public static final int PIXEL_FORMAT_YUV = 1; public static final int PIXEL_FORMAT_YUV = 1;
public static final int PIXEL_FORMAT_RGB565 = 2; public static final int PIXEL_FORMAT_RGB565 = 2;
public static final int PIXEL_FORMAT_RGBX = 3; public static final int PIXEL_FORMAT_RGBX = 3;
public static final int PIXEL_FORMAT_YUV420SP = 4; public static final int PIXEL_FORMAT_YUV420SP = 4; // NV12
public static final int PIXEL_FORMAT_NV21 = 5; // = YVU420SemiPlanar public static final int PIXEL_FORMAT_NV21 = 5; // = YVU420SemiPlanar,NV21,但是保存到jpg颜色失真
//-------------------------------------------------------------------------------- //--------------------------------------------------------------------------------
public static final int CTRL_SCANNING = 0x00000001; // D0: Scanning Mode public static final int CTRL_SCANNING = 0x00000001; // D0: Scanning Mode
@ -127,7 +127,7 @@ public class UVCCamera {
private UsbControlBlock mCtrlBlock; private UsbControlBlock mCtrlBlock;
protected long mControlSupports; // カメラコントロールでサポートしている機能フラグ protected long mControlSupports; // カメラコントロールでサポートしている機能フラグ
protected long mProcSupports; // プロセッシングユニットでサポートしている機能フラグ protected long mProcSupports; // プロセッシングユニットでサポートしている機能フラグ
protected int mCurrentFrameFormat = FRAME_FORMAT_MJPEG; protected int mCurrentFrameFormat = FRAME_FORMAT_YUYV;
protected int mCurrentWidth = DEFAULT_PREVIEW_WIDTH, mCurrentHeight = DEFAULT_PREVIEW_HEIGHT; protected int mCurrentWidth = DEFAULT_PREVIEW_WIDTH, mCurrentHeight = DEFAULT_PREVIEW_HEIGHT;
protected float mCurrentBandwidthFactor = DEFAULT_BANDWIDTH; protected float mCurrentBandwidthFactor = DEFAULT_BANDWIDTH;
protected String mSupportedSize; protected String mSupportedSize;

6
libusbcamera/src/main/java/com/serenegiant/usb/common/AbstractUVCCameraHandler.java

@ -530,9 +530,9 @@ public abstract class AbstractUVCCameraHandler extends Handler {
if ((mUVCCamera == null) || mIsPreviewing) return; if ((mUVCCamera == null) || mIsPreviewing) return;
try { try {
mUVCCamera.setPreviewSize(mWidth, mHeight, 1, 31, mPreviewMode, mBandwidthFactor); mUVCCamera.setPreviewSize(mWidth, mHeight, 1, 31, mPreviewMode, mBandwidthFactor);
// 获取USB Camera预览数据 // 获取USB Camera预览数据,使用NV21颜色会失真
mUVCCamera.setFrameCallback(mIFrameCallback, UVCCamera.PIXEL_FORMAT_NV21); // mUVCCamera.setFrameCallback(mIFrameCallback, UVCCamera.PIXEL_FORMAT_NV21);
mUVCCamera.setFrameCallback(mIFrameCallback, UVCCamera.PIXEL_FORMAT_YUV420SP);
} catch (final IllegalArgumentException e) { } catch (final IllegalArgumentException e) {
// try { // try {
// // fallback to YUV mode // // fallback to YUV mode

6
libusbcamera/src/main/java/com/serenegiant/usb/common/UVCCameraHandler.java

@ -42,7 +42,7 @@ public class UVCCameraHandler extends AbstractUVCCameraHandler {
final Activity parent, final CameraViewInterface cameraView, final Activity parent, final CameraViewInterface cameraView,
final int width, final int height) { final int width, final int height) {
return createHandler(parent, cameraView, 1, width, height, UVCCamera.FRAME_FORMAT_MJPEG, UVCCamera.DEFAULT_BANDWIDTH); return createHandler(parent, cameraView, 1, width, height, UVCCamera.FRAME_FORMAT_YUYV, UVCCamera.DEFAULT_BANDWIDTH);
} }
/** /**
@ -58,7 +58,7 @@ public class UVCCameraHandler extends AbstractUVCCameraHandler {
final Activity parent, final CameraViewInterface cameraView, final Activity parent, final CameraViewInterface cameraView,
final int width, final int height, final float bandwidthFactor) { final int width, final int height, final float bandwidthFactor) {
return createHandler(parent, cameraView, 1, width, height, UVCCamera.FRAME_FORMAT_MJPEG, bandwidthFactor); return createHandler(parent, cameraView, 1, width, height, UVCCamera.FRAME_FORMAT_YUYV, bandwidthFactor);
} }
/** /**
@ -74,7 +74,7 @@ public class UVCCameraHandler extends AbstractUVCCameraHandler {
final Activity parent, final CameraViewInterface cameraView, final Activity parent, final CameraViewInterface cameraView,
final int encoderType, final int width, final int height) { final int encoderType, final int width, final int height) {
return createHandler(parent, cameraView, encoderType, width, height, UVCCamera.FRAME_FORMAT_MJPEG, UVCCamera.DEFAULT_BANDWIDTH); return createHandler(parent, cameraView, encoderType, width, height, UVCCamera.FRAME_FORMAT_YUYV, UVCCamera.DEFAULT_BANDWIDTH);
} }
/** /**

6
libusbcamera/src/main/java/com/serenegiant/usb/common/UVCCameraHandlerMultiSurface.java

@ -43,7 +43,7 @@ public class UVCCameraHandlerMultiSurface extends AbstractUVCCameraHandler {
final Activity parent, final CameraViewInterface cameraView, final Activity parent, final CameraViewInterface cameraView,
final int width, final int height) { final int width, final int height) {
return createHandler(parent, cameraView, 1, width, height, UVCCamera.FRAME_FORMAT_MJPEG, UVCCamera.DEFAULT_BANDWIDTH); return createHandler(parent, cameraView, 1, width, height, UVCCamera.FRAME_FORMAT_YUYV, UVCCamera.DEFAULT_BANDWIDTH);
} }
/** /**
@ -59,7 +59,7 @@ public class UVCCameraHandlerMultiSurface extends AbstractUVCCameraHandler {
final Activity parent, final CameraViewInterface cameraView, final Activity parent, final CameraViewInterface cameraView,
final int width, final int height, final float bandwidthFactor) { final int width, final int height, final float bandwidthFactor) {
return createHandler(parent, cameraView, 1, width, height, UVCCamera.FRAME_FORMAT_MJPEG, bandwidthFactor); return createHandler(parent, cameraView, 1, width, height, UVCCamera.FRAME_FORMAT_YUYV, bandwidthFactor);
} }
/** /**
@ -75,7 +75,7 @@ public class UVCCameraHandlerMultiSurface extends AbstractUVCCameraHandler {
final Activity parent, final CameraViewInterface cameraView, final Activity parent, final CameraViewInterface cameraView,
final int encoderType, final int width, final int height) { final int encoderType, final int width, final int height) {
return createHandler(parent, cameraView, encoderType, width, height, UVCCamera.FRAME_FORMAT_MJPEG, UVCCamera.DEFAULT_BANDWIDTH); return createHandler(parent, cameraView, encoderType, width, height, UVCCamera.FRAME_FORMAT_YUYV, UVCCamera.DEFAULT_BANDWIDTH);
} }
/** /**

116
libusbcamera/src/main/java/com/serenegiant/usb/encoder/biz/H264EncodeConsumer.java

@ -1,10 +1,6 @@
package com.serenegiant.usb.encoder.biz; package com.serenegiant.usb.encoder.biz;
import java.io.BufferedOutputStream; import android.annotation.SuppressLint;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
import android.media.MediaCodec; import android.media.MediaCodec;
import android.media.MediaCodecInfo; import android.media.MediaCodecInfo;
import android.media.MediaCodecList; import android.media.MediaCodecList;
@ -14,7 +10,13 @@ import android.os.Bundle;
import android.os.Environment; import android.os.Environment;
import android.util.Log; import android.util.Log;
/** 对YUV视频流进行编码 import java.io.BufferedOutputStream;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
/**
* 对YUV视频流进行编码
* Created by jiangdongguo on 2017/5/6. * Created by jiangdongguo on 2017/5/6.
*/ */
@ -54,10 +56,6 @@ public class H264EncodeConsumer extends Thread {
this.listener = listener; this.listener = listener;
} }
public H264EncodeConsumer(){
}
public H264EncodeConsumer(int width, int height) { public H264EncodeConsumer(int width, int height) {
this.mWidth = width; this.mWidth = width;
this.mHeight = height; this.mHeight = height;
@ -77,9 +75,6 @@ public class H264EncodeConsumer extends Thread {
public void setRawYuv(byte[] yuvData, int width, int height) { public void setRawYuv(byte[] yuvData, int width, int height) {
if (!isEncoderStart) if (!isEncoderStart)
return; return;
// 根据编码器支持转换颜色空间格式
// 即 nv21 ---> YUV420sp(21)
// nv21 ---> YUV420p (19)
if (mWidth != width || mHeight != height) { if (mWidth != width || mHeight != height) {
mWidth = width; mWidth = width;
mHeight = height; mHeight = height;
@ -96,7 +91,8 @@ public class H264EncodeConsumer extends Thread {
Thread.sleep(time / 2); Thread.sleep(time / 2);
} }
// 将数据写入编码器 // 将数据写入编码器
feedMediaCodecData(yuvData);
feedMediaCodecData(nv12ToNV21(yuvData, mWidth, mHeight));
if (time > 0) if (time > 0)
Thread.sleep(time / 2); Thread.sleep(time / 2);
@ -133,6 +129,7 @@ public class H264EncodeConsumer extends Thread {
isExit = true; isExit = true;
} }
@SuppressLint("WrongConstant")
@Override @Override
public void run() { public void run() {
if (!isEncoderStart) { if (!isEncoderStart) {
@ -261,7 +258,6 @@ public class H264EncodeConsumer extends Thread {
mMediaCodec.start(); mMediaCodec.start();
isEncoderStart = true; isEncoderStart = true;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP + 1) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP + 1) {
inputBuffers = outputBuffers = null; inputBuffers = outputBuffers = null;
@ -288,6 +284,7 @@ public class H264EncodeConsumer extends Thread {
private static final int FRAME_RATE = 15; private static final int FRAME_RATE = 15;
private static final float BPP = 0.50f; private static final float BPP = 0.50f;
private int calcBitRate() { private int calcBitRate() {
final int bitrate = (int) (BPP * FRAME_RATE * mWidth * mHeight); final int bitrate = (int) (BPP * FRAME_RATE * mWidth * mHeight);
Log.i(TAG, String.format("bitrate=%5.2f[Mbps]", bitrate / 1024f / 1024f)); Log.i(TAG, String.format("bitrate=%5.2f[Mbps]", bitrate / 1024f / 1024f));
@ -296,6 +293,7 @@ public class H264EncodeConsumer extends Thread {
/** /**
* select the first codec that match a specific MIME type * select the first codec that match a specific MIME type
*
* @param mimeType * @param mimeType
* @return null if no codec matched * @return null if no codec matched
*/ */
@ -327,6 +325,7 @@ public class H264EncodeConsumer extends Thread {
/** /**
* select color format available on specific codec and we can use. * select color format available on specific codec and we can use.
*
* @return 0 if no colorFormat is matched * @return 0 if no colorFormat is matched
*/ */
protected static final int selectColorFormat(final MediaCodecInfo codecInfo, final String mimeType) { protected static final int selectColorFormat(final MediaCodecInfo codecInfo, final String mimeType) {
@ -356,12 +355,14 @@ public class H264EncodeConsumer extends Thread {
* color formats that we can use in this class * color formats that we can use in this class
*/ */
protected static int[] recognizedFormats; protected static int[] recognizedFormats;
static { static {
recognizedFormats = new int[]{ recognizedFormats = new int[]{
// MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar,
// MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar, // MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
// MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar,
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar, MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
// MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface,
}; };
} }
@ -374,4 +375,87 @@ public class H264EncodeConsumer extends Thread {
} }
return false; return false;
} }
/**
 * Converts an NV21 frame (Y plane followed by interleaved V/U samples)
 * into I420 layout (Y plane, then the planar U plane, then the planar V plane).
 *
 * @param data   source frame in NV21 layout, length = width * height * 3 / 2
 * @param width  frame width in pixels
 * @param height frame height in pixels
 * @return a newly allocated buffer holding the same frame in I420 layout
 */
private byte[] nv21ToI420(byte[] data, int width, int height) {
    final int ySize = width * height;
    final byte[] i420 = new byte[ySize * 3 / 2];
    // The luma plane is identical in both layouts; copy it wholesale.
    System.arraycopy(data, 0, i420, 0, ySize);
    int uPos = ySize;              // start of the planar U data in I420
    int vPos = ySize + ySize / 4;  // start of the planar V data in I420
    // NV21 chroma order is V then U within each interleaved pair.
    for (int src = ySize; src < data.length; src += 2) {
        i420[vPos++] = data[src];
        i420[uPos++] = data[src + 1];
    }
    return i420;
}
/**
 * Converts an NV12 frame (Y plane followed by interleaved U/V samples)
 * into I420 layout (Y plane, then the planar U plane, then the planar V plane).
 *
 * @param data   source frame in NV12 layout, length = width * height * 3 / 2
 * @param width  frame width in pixels
 * @param height frame height in pixels
 * @return a newly allocated buffer holding the same frame in I420 layout
 */
private byte[] nv12ToI420(byte[] data, int width, int height) {
    final int ySize = width * height;
    final byte[] i420 = new byte[ySize * 3 / 2];
    // The luma plane is identical in both layouts; copy it wholesale.
    System.arraycopy(data, 0, i420, 0, ySize);
    int uPos = ySize;              // start of the planar U data in I420
    int vPos = ySize + ySize / 4;  // start of the planar V data in I420
    // NV12 chroma order is U then V within each interleaved pair.
    for (int src = ySize; src < data.length; src += 2) {
        i420[uPos++] = data[src];
        i420[vPos++] = data[src + 1];
    }
    return i420;
}
/**
 * Converts an NV12 frame (Y plane followed by interleaved U/V samples)
 * into NV21 layout (Y plane followed by interleaved V/U samples).
 *
 * <p>Fix: the previous implementation was a copy-paste of {@code nv12ToI420}
 * and wrote planar U and V data (I420 layout) even though the method name
 * promises NV21 output. NV21 keeps the chroma interleaved, only with the
 * V/U byte order swapped relative to NV12.
 *
 * @param data   source frame in NV12 layout, length = width * height * 3 / 2
 * @param width  frame width in pixels
 * @param height frame height in pixels
 * @return a newly allocated buffer holding the same frame in NV21 layout
 */
private byte[] nv12ToNv21(byte[] data, int width, int height) {
    final int ySize = width * height;
    final byte[] ret = new byte[ySize * 3 / 2];
    // The luma plane is shared between the two layouts.
    System.arraycopy(data, 0, ret, 0, ySize);
    // Swap each interleaved U/V pair into V/U order.
    for (int i = ySize; i + 1 < data.length; i += 2) {
        ret[i] = data[i + 1];     // V
        ret[i + 1] = data[i];     // U
    }
    return ret;
}
// NV21 (YYYYYYYY VUVU) --> NV12 (YYYYYYYY UVUV)
/**
 * Converts an NV21 frame (Y plane followed by interleaved V/U samples)
 * into NV12 layout (Y plane followed by interleaved U/V samples).
 *
 * <p>Fix: the previous loop copied the chroma bytes unchanged
 * ({@code ret[j] = nv21[j]; ret[j+1] = nv21[j+1]}), performing no V/U swap,
 * so the "conversion" was a plain copy. The header comment also mislabeled
 * NV21 as UVUV; NV21 interleaves V first.
 *
 * @param nv21   source frame in NV21 layout, length = width * height * 3 / 2
 * @param width  frame width in pixels
 * @param height frame height in pixels
 * @return a newly allocated buffer holding the same frame in NV12 layout
 */
private byte[] nv21ToNV12(byte[] nv21, int width, int height) {
    final int frameSize = width * height;
    final byte[] ret = new byte[frameSize * 3 / 2];
    // The luma plane is shared between the two layouts.
    System.arraycopy(nv21, 0, ret, 0, frameSize);
    // Swap each interleaved V/U pair into U/V order.
    for (int j = frameSize; j + 1 < nv21.length; j += 2) {
        ret[j] = nv21[j + 1];     // U
        ret[j + 1] = nv21[j];     // V
    }
    return ret;
}
// NV12 (YYYYYYYY UVUV) --> NV21 (YYYYYYYY VUVU)
/**
 * Converts an NV12 frame (Y plane followed by interleaved U/V samples)
 * into NV21 layout (Y plane followed by interleaved V/U samples).
 *
 * @param nv12   source frame in NV12 layout, length = width * height * 3 / 2
 * @param width  frame width in pixels
 * @param height frame height in pixels
 * @return a newly allocated buffer holding the same frame in NV21 layout
 */
private byte[] nv12ToNV21(byte[] nv12, int width, int height) {
    final int frameSize = width * height;
    final byte[] ret = new byte[frameSize * 3 / 2];
    // The luma plane is shared between the two layouts.
    System.arraycopy(nv12, 0, ret, 0, frameSize);
    // Swap each interleaved U/V pair into V/U order; the j + 1 bound guards
    // against a truncated (odd-length) chroma tail.
    for (int j = frameSize; j + 1 < nv12.length; j += 2) {
        ret[j] = nv12[j + 1];     // V
        ret[j + 1] = nv12[j];     // U
    }
    return ret;
}
} }

Loading…
Cancel
Save