
Merge remote-tracking branch 'origin/master'

# Conflicts:
#	app/app.iml
#	gradle/wrapper/gradle-wrapper.properties
main
jiangdongguo 7 years ago
parent
commit
186f69e217
35 changed files (number of changed lines in parentheses; BIN = binary):
  1. .gradle/3.3/taskArtifacts/fileHashes.bin (BIN)
  2. .gradle/3.3/taskArtifacts/fileSnapshots.bin (BIN)
  3. .gradle/3.3/taskArtifacts/taskArtifacts.bin (BIN)
  4. .gradle/3.3/taskArtifacts/taskArtifacts.lock (BIN)
  5. .gradle/3.3/tasks/_app_compileDebugJavaWithJavac/localClassSetAnalysis/localClassSetAnalysis.bin (BIN)
  6. .gradle/3.3/tasks/_app_compileDebugJavaWithJavac/localClassSetAnalysis/localClassSetAnalysis.lock (BIN)
  7. .gradle/3.3/tasks/_app_compileDebugJavaWithJavac/localJarClasspathSnapshot/localJarClasspathSnapshot.bin (BIN)
  8. .gradle/3.3/tasks/_app_compileDebugJavaWithJavac/localJarClasspathSnapshot/localJarClasspathSnapshot.lock (BIN)
  9. .gradle/3.3/tasks/_libusbcamera_compileReleaseJavaWithJavac/localClassSetAnalysis/localClassSetAnalysis.bin (BIN)
  10. .gradle/3.3/tasks/_libusbcamera_compileReleaseJavaWithJavac/localClassSetAnalysis/localClassSetAnalysis.lock (BIN)
  11. .gradle/3.3/tasks/_libusbcamera_compileReleaseJavaWithJavac/localJarClasspathSnapshot/localJarClasspathSnapshot.bin (BIN)
  12. .gradle/3.3/tasks/_libusbcamera_compileReleaseJavaWithJavac/localJarClasspathSnapshot/localJarClasspathSnapshot.lock (BIN)
  13. .idea/gradle.xml (2)
  14. .idea/libraries/animated_vector_drawable_25_3_1.xml (13)
  15. .idea/libraries/butterknife_annotations_8_8_1.xml (11)
  16. .idea/libraries/butterknife_compiler_8_8_1.xml (11)
  17. .idea/libraries/espresso_idling_resource_2_2_2.xml (13)
  18. .idea/libraries/exposed_instrumentation_api_publish_0_5.xml (13)
  19. .idea/libraries/support_annotations_25_3_1.xml (11)
  20. .idea/libraries/support_media_compat_25_3_1.xml (17)
  21. .idea/libraries/support_vector_drawable_25_3_1.xml (13)
  22. .idea/misc.xml (15)
  23. README.md (21)
  24. app/src/main/java/com/jiangdg/usbcamera/view/USBCameraActivity.java (9)
  25. app/src/main/res/layout/activity_usbcamera.xml (34)
  26. build.gradle (3)
  27. build/generated/mockable-android-25.jar (BIN)
  28. gifs/2.1.0.gif (BIN)
  29. libusbcamera/src/main/java/com/jiangdg/usbcamera/UVCCameraHelper.java (88)
  30. libusbcamera/src/main/java/com/jiangdg/usbcamera/task/SaveYuvImageTask.java (82)
  31. libusbcamera/src/main/java/com/jiangdg/usbcamera/utils/YUVBean.java (45)
  32. libusbcamera/src/main/java/com/serenegiant/usb/UVCCamera.java (6)
  33. libusbcamera/src/main/java/com/serenegiant/usb/common/AbstractUVCCameraHandler.java (24)
  34. libusbcamera/src/main/java/com/serenegiant/usb/encoder/biz/H264EncodeConsumer.java (178)
  35. local.properties (5)

BIN
.gradle/3.3/taskArtifacts/fileHashes.bin

Binary file not shown.

BIN
.gradle/3.3/taskArtifacts/fileSnapshots.bin

Binary file not shown.

BIN
.gradle/3.3/taskArtifacts/taskArtifacts.bin

Binary file not shown.

BIN
.gradle/3.3/taskArtifacts/taskArtifacts.lock

Binary file not shown.

BIN
.gradle/3.3/tasks/_app_compileDebugJavaWithJavac/localClassSetAnalysis/localClassSetAnalysis.bin

Binary file not shown.

BIN
.gradle/3.3/tasks/_app_compileDebugJavaWithJavac/localClassSetAnalysis/localClassSetAnalysis.lock

Binary file not shown.

BIN
.gradle/3.3/tasks/_app_compileDebugJavaWithJavac/localJarClasspathSnapshot/localJarClasspathSnapshot.bin

Binary file not shown.

BIN
.gradle/3.3/tasks/_app_compileDebugJavaWithJavac/localJarClasspathSnapshot/localJarClasspathSnapshot.lock

Binary file not shown.

BIN
.gradle/3.3/tasks/_libusbcamera_compileReleaseJavaWithJavac/localClassSetAnalysis/localClassSetAnalysis.bin

Binary file not shown.

BIN
.gradle/3.3/tasks/_libusbcamera_compileReleaseJavaWithJavac/localClassSetAnalysis/localClassSetAnalysis.lock

Binary file not shown.

BIN
.gradle/3.3/tasks/_libusbcamera_compileReleaseJavaWithJavac/localJarClasspathSnapshot/localJarClasspathSnapshot.bin

Binary file not shown.

BIN
.gradle/3.3/tasks/_libusbcamera_compileReleaseJavaWithJavac/localJarClasspathSnapshot/localJarClasspathSnapshot.lock

Binary file not shown.

2
.idea/gradle.xml

@@ -5,7 +5,7 @@
     <GradleProjectSettings>
       <option name="distributionType" value="DEFAULT_WRAPPED" />
       <option name="externalProjectPath" value="$PROJECT_DIR$" />
-      <option name="gradleHome" value="$APPLICATION_HOME_DIR$/gradle/gradle-3.3" />
+      <option name="gradleHome" value="$APPLICATION_HOME_DIR$/gradle/gradle-4.1" />
       <option name="modules">
         <set>
           <option value="$PROJECT_DIR$" />

13
.idea/libraries/animated_vector_drawable_25_3_1.xml

@@ -1,13 +0,0 @@
<component name="libraryTable">
<library name="animated-vector-drawable-25.3.1">
<CLASSES>
<root url="jar://$USER_HOME$/.android/build-cache/17dbf24c311d327da7565d7b9b8c7205d2b3f2c6/output/jars/classes.jar!/" />
<root url="file://$USER_HOME$/.android/build-cache/17dbf24c311d327da7565d7b9b8c7205d2b3f2c6/output/res" />
</CLASSES>
<JAVADOC />
<SOURCES>
<root url="jar://E:/Environment/android-sdk-windows/extras/android/m2repository/com/android/support/animated-vector-drawable/25.3.1/animated-vector-drawable-25.3.1-sources.jar!/" />
<root url="jar://E:/Environment/android-sdk-windows/extras/android/m2repository/com/android/support/animated-vector-drawable/25.3.1/animated-vector-drawable-25.3.1-sources.jar!/" />
</SOURCES>
</library>
</component>

11
.idea/libraries/butterknife_annotations_8_8_1.xml

@@ -1,11 +0,0 @@
<component name="libraryTable">
<library name="butterknife-annotations-8.8.1">
<CLASSES>
<root url="jar://$USER_HOME$/.gradle/caches/modules-2/files-2.1/com.jakewharton/butterknife-annotations/8.8.1/bc373fb6bc7bca3035041b924f158fd2b946ee8d/butterknife-annotations-8.8.1.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES>
<root url="jar://$USER_HOME$/.gradle/caches/modules-2/files-2.1/com.jakewharton/butterknife-annotations/8.8.1/4f68b7085132dfeb48a0473bc589fdee8a3de757/butterknife-annotations-8.8.1-sources.jar!/" />
</SOURCES>
</library>
</component>

11
.idea/libraries/butterknife_compiler_8_8_1.xml

@@ -1,11 +0,0 @@
<component name="libraryTable">
<library name="butterknife-compiler-8.8.1">
<CLASSES>
<root url="jar://$USER_HOME$/.gradle/caches/modules-2/files-2.1/com.jakewharton/butterknife-compiler/8.8.1/b2f4505a1babb7b7c11abbbf8ea4c90b18c3aeac/butterknife-compiler-8.8.1.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES>
<root url="jar://$USER_HOME$/.gradle/caches/modules-2/files-2.1/com.jakewharton/butterknife-compiler/8.8.1/97cf58a258bb347d7b8642f3f4996abd0f6adb6/butterknife-compiler-8.8.1-sources.jar!/" />
</SOURCES>
</library>
</component>

13
.idea/libraries/espresso_idling_resource_2_2_2.xml

@@ -1,13 +0,0 @@
<component name="libraryTable">
<library name="espresso-idling-resource-2.2.2">
<CLASSES>
<root url="jar://$USER_HOME$/.android/build-cache/5c86e8eab3d72b00394bf9e598645f884c7c63a5/output/jars/classes.jar!/" />
<root url="file://$USER_HOME$/.android/build-cache/5c86e8eab3d72b00394bf9e598645f884c7c63a5/output/res" />
</CLASSES>
<JAVADOC />
<SOURCES>
<root url="jar://E:/Environment/android-sdk-windows/extras/android/m2repository/com/android/support/test/espresso/espresso-idling-resource/2.2.2/espresso-idling-resource-2.2.2-sources.jar!/" />
<root url="jar://E:/Environment/android-sdk-windows/extras/android/m2repository/com/android/support/test/espresso/espresso-idling-resource/2.2.2/espresso-idling-resource-2.2.2-sources.jar!/" />
</SOURCES>
</library>
</component>

13
.idea/libraries/exposed_instrumentation_api_publish_0_5.xml

@@ -1,13 +0,0 @@
<component name="libraryTable">
<library name="exposed-instrumentation-api-publish-0.5">
<CLASSES>
<root url="file://$USER_HOME$/.android/build-cache/a6ea49c91460d946e46649e01e209781e6cf4b4a/output/res" />
<root url="jar://$USER_HOME$/.android/build-cache/a6ea49c91460d946e46649e01e209781e6cf4b4a/output/jars/classes.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES>
<root url="jar://E:/Environment/android-sdk-windows/extras/android/m2repository/com/android/support/test/exposed-instrumentation-api-publish/0.5/exposed-instrumentation-api-publish-0.5-sources.jar!/" />
<root url="jar://E:/Environment/android-sdk-windows/extras/android/m2repository/com/android/support/test/exposed-instrumentation-api-publish/0.5/exposed-instrumentation-api-publish-0.5-sources.jar!/" />
</SOURCES>
</library>
</component>

11
.idea/libraries/support_annotations_25_3_1.xml

@@ -1,11 +0,0 @@
<component name="libraryTable">
<library name="support-annotations-25.3.1">
<CLASSES>
<root url="jar://E:/Environment/android-sdk-windows/extras/android/m2repository/com/android/support/support-annotations/25.3.1/support-annotations-25.3.1.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES>
<root url="jar://E:/Environment/android-sdk-windows/extras/android/m2repository/com/android/support/support-annotations/25.3.1/support-annotations-25.3.1-sources.jar!/" />
</SOURCES>
</library>
</component>

17
.idea/libraries/support_media_compat_25_3_1.xml

@@ -1,17 +0,0 @@
<component name="libraryTable">
<library name="support-media-compat-25.3.1">
<ANNOTATIONS>
<root url="jar://$USER_HOME$/.android/build-cache/f1ca6919d3e9176e08e199c56de54351aac5f05a/output/annotations.zip!/" />
<root url="jar://$USER_HOME$/.android/build-cache/f1ca6919d3e9176e08e199c56de54351aac5f05a/output/annotations.zip!/" />
</ANNOTATIONS>
<CLASSES>
<root url="file://$USER_HOME$/.android/build-cache/f1ca6919d3e9176e08e199c56de54351aac5f05a/output/res" />
<root url="jar://$USER_HOME$/.android/build-cache/f1ca6919d3e9176e08e199c56de54351aac5f05a/output/jars/classes.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES>
<root url="jar://E:/Environment/android-sdk-windows/extras/android/m2repository/com/android/support/support-media-compat/25.3.1/support-media-compat-25.3.1-sources.jar!/" />
<root url="jar://E:/Environment/android-sdk-windows/extras/android/m2repository/com/android/support/support-media-compat/25.3.1/support-media-compat-25.3.1-sources.jar!/" />
</SOURCES>
</library>
</component>

13
.idea/libraries/support_vector_drawable_25_3_1.xml

@@ -1,13 +0,0 @@
<component name="libraryTable">
<library name="support-vector-drawable-25.3.1">
<CLASSES>
<root url="jar://$USER_HOME$/.android/build-cache/7a405513524c444d4aa139ab61911f927325318f/output/jars/classes.jar!/" />
<root url="file://$USER_HOME$/.android/build-cache/7a405513524c444d4aa139ab61911f927325318f/output/res" />
</CLASSES>
<JAVADOC />
<SOURCES>
<root url="jar://E:/Environment/android-sdk-windows/extras/android/m2repository/com/android/support/support-vector-drawable/25.3.1/support-vector-drawable-25.3.1-sources.jar!/" />
<root url="jar://E:/Environment/android-sdk-windows/extras/android/m2repository/com/android/support/support-vector-drawable/25.3.1/support-vector-drawable-25.3.1-sources.jar!/" />
</SOURCES>
</library>
</component>

15
.idea/misc.xml

@@ -1,8 +1,5 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <project version="4">
-  <component name="EntryPointsManager">
-    <entry_points version="2.0" />
-  </component>
   <component name="NullableNotNullManager">
     <option name="myDefaultNullable" value="android.support.annotation.Nullable" />
     <option name="myDefaultNotNull" value="android.support.annotation.NonNull" />
@@ -27,17 +24,7 @@
       </value>
     </option>
   </component>
-  <component name="ProjectLevelVcsManager" settingsEditedManually="false">
-    <OptionsSetting value="true" id="Add" />
-    <OptionsSetting value="true" id="Remove" />
-    <OptionsSetting value="true" id="Checkout" />
-    <OptionsSetting value="true" id="Update" />
-    <OptionsSetting value="true" id="Status" />
-    <OptionsSetting value="true" id="Edit" />
-    <ConfirmationsSetting value="0" id="Add" />
-    <ConfirmationsSetting value="0" id="Remove" />
-  </component>
-  <component name="ProjectRootManager" version="2" languageLevel="JDK_1_7" default="true" assert-keyword="true" jdk-15="true" project-jdk-name="1.8" project-jdk-type="JavaSDK">
+  <component name="ProjectRootManager" version="2" languageLevel="JDK_1_7" default="true" project-jdk-name="1.8" project-jdk-type="JavaSDK">
     <output url="file://$PROJECT_DIR$/build/classes" />
   </component>
   <component name="ProjectType">

21
README.md

@@ -1,6 +1,6 @@
-OkCamera
+AndroidUSBCamera
 ============
-AndroidUSBCamera is developed based on the [saki4510t/UVCCamera](https://github.com/saki4510t/UVCCamera), the project of USB Camera (UVC equipment) and the use of video data acquisition are highly packaged, and it can help developers using USB Camera devices easily by a few simple APIs. By using AndroidUSBCamera,you can detect and connect to a USB Camera simply.And you also can use it to realize taking picture,recording mp4,switching resolutions and setting  camera's contrast or brightness,etc.
+AndroidUSBCamera is developed based on the [saki4510t/UVCCamera](https://github.com/saki4510t/UVCCamera), the project of USB Camera (UVC equipment) and the use of video data acquisition are highly packaged, and it can help developers using USB Camera devices easily by a few simple APIs. By using AndroidUSBCamera,you can detect and connect to a USB Camera simply.And you also can use it to realize taking picture,recording mp4,switching resolutions ,getting h.264/aac/yuv(nv21) stream and setting  camera's contrast or brightness,supporting 480P、720P、1080P and higher,etc.
 [中文文档: AndroidUSBCamera,UVCCamera开发通用库](http://blog.csdn.net/andrexpert/article/details/78324181)
@@ -21,7 +21,7 @@ allprojects {
 Step 2. Add the dependency
 ```java
 dependencies {
-    compile 'com.github.jiangdongguo:AndroidUSBCamera:2.0'
+    implementation 'com.github.jiangdongguo:AndroidUSBCamera:2.1.0'
 }
 ```
 ### 2. APIs Introduction
@@ -30,6 +30,11 @@ dependencies {
 mUVCCameraView = (CameraViewInterface) mTextureView;
 mUVCCameraView.setCallback(mCallback);
 mCameraHelper = UVCCameraHelper.getInstance();
+// set default preview size
+mCameraHelper.setDefaultPreviewSize(1280,720);
+// set default frame format,defalut is UVCCameraHelper.Frame_FORMAT_MPEG
+// if using mpeg can not record mp4,please try yuv(version 2.1.0 do not support)
+// mCameraHelper.setDefaultFrameFormat(UVCCameraHelper.FRAME_FORMAT_YUYV);
 mCameraHelper.initUSBMonitor(this, mUVCCameraView, mDevConnectListener);
 ```
 To be attention,mCallback is a object of interface CameraViewInterface.Callback,and it's used to be listenering surfaceView
@@ -92,6 +97,7 @@ private UVCCameraHelper.OnMyDevConnectListener listener = new UVCCameraHelper.On
     }
 };
 ```
+![Connecting gif](https://github.com/jiangdongguo/AndroidUSBCamera/blob/master/gifs/detecting.gif)
 (2) Capturing JPG Images
 ```java
 mCameraHelper.capturePicture(picPath, new AbstractUVCCameraHandler.OnCaptureListener() {
@@ -134,15 +140,24 @@ mCameraHelper.getModelValue(UVCCameraHelper.MODE_BRIGHTNESS);
 mCameraHelper.getModelValue(UVCCameraHelper.MODE_CONTRAST);
 ...
 ```
+![Connecting gif](https://github.com/jiangdongguo/AndroidUSBCamera/blob/master/gifs/brightness.gif)
 (5) switch resolutions and camera.
 ```java
 mCameraHelper.updateResolution(widht, height);
 ```
+![Connecting gif](https://github.com/jiangdongguo/AndroidUSBCamera/blob/master/gifs/2.1.0.gif)
 At last,remember adding permissions:  
 ```
 <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
 <uses-permission android:name="android.permission.RECORD_AUDIO" />
 ```
+Other Library about Android Camera
+-------
+[OkCamera](https://github.com/jiangdongguo/OkCamera) Android Camera univsersally operation.  
+[AndroidRecordMp4](https://github.com/jiangdongguo/AndroidRecordMp4) Using MediaCodec realize record mp4.  
+[AndroidYuvOsd](https://github.com/jiangdongguo/AndroidYuvOsd) YUV data operation.  
+[Lame4Mp3](https://github.com/jiangdongguo/Lame4Mp3) pcm to mp3 and pcm to aac.  
 License
 -------
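The README hunks above introduce the 2.1.0 setup calls in several separate places. As a single reference point, the snippet below consolidates them into one sketch; the field names (mTextureView, mUVCCameraView, mCameraHelper, mCallback, mDevConnectListener) are the ones the README itself uses, and the snippet is illustrative only, not the library's canonical sample.

```java
// Consolidated sketch of the calls shown in the README hunks above (assumed Activity context).
mUVCCameraView = (CameraViewInterface) mTextureView;
mUVCCameraView.setCallback(mCallback);
mCameraHelper = UVCCameraHelper.getInstance();
// New in 2.1.0: defaults must be chosen before initUSBMonitor().
mCameraHelper.setDefaultPreviewSize(1280, 720);
// Default frame format is MJPEG; YUYV can be selected instead, but per the README
// note it cannot be used for MP4 recording in 2.1.0.
// mCameraHelper.setDefaultFrameFormat(UVCCameraHelper.FRAME_FORMAT_YUYV);
mCameraHelper.initUSBMonitor(this, mUVCCameraView, mDevConnectListener);

// Once a device is connected, the README's remaining calls apply unchanged:
mCameraHelper.updateResolution(1280, 720);                       // switch resolution
mCameraHelper.getModelValue(UVCCameraHelper.MODE_BRIGHTNESS);    // query brightness
mCameraHelper.getModelValue(UVCCameraHelper.MODE_CONTRAST);      // query contrast
```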

9
app/src/main/java/com/jiangdg/usbcamera/view/USBCameraActivity.java

@@ -139,8 +139,10 @@ public class USBCameraActivity extends AppCompatActivity implements CameraDialog
         mUVCCameraView = (CameraViewInterface) mTextureView;
         mUVCCameraView.setCallback(this);
         mCameraHelper = UVCCameraHelper.getInstance();
+        mCameraHelper.setDefaultFrameFormat(UVCCameraHelper.FRAME_FORMAT_YUYV);
         mCameraHelper.initUSBMonitor(this, mUVCCameraView, listener);
         mCameraHelper.setOnPreviewFrameListener(new AbstractUVCCameraHandler.OnPreViewResultListener() {
             @Override
             public void onPreviewResult(byte[] nv21Yuv) {
@@ -232,6 +234,7 @@ public class USBCameraActivity extends AppCompatActivity implements CameraDialog
                         Log.i(TAG,"save path:" + path);
                     }
                 });
                 break;
             case R.id.menu_recording:
                 if (mCameraHelper == null || !mCameraHelper.isCameraOpened()) {
@@ -240,7 +243,7 @@ public class USBCameraActivity extends AppCompatActivity implements CameraDialog
                 }
                 if (!mCameraHelper.isRecording()) {
                     String videoPath = UVCCameraHelper.ROOT_PATH + System.currentTimeMillis();
-                    // FileUtils.createfile(FileUtils.ROOT_PATH + "test666.h264");
+                    FileUtils.createfile(FileUtils.ROOT_PATH + "test666.h264");
                     RecordParams params = new RecordParams();
                     params.setRecordPath(videoPath);
                     params.setRecordDuration(0);    // 设置为0,不分割保存
@@ -250,7 +253,7 @@ public class USBCameraActivity extends AppCompatActivity implements CameraDialog
                         public void onEncodeResult(byte[] data, int offset, int length, long timestamp, int type) {
                             // type = 1,h264 video stream
                             if (type == 1) {
-                                // FileUtils.putFileStream(data, offset, length);
+                                FileUtils.putFileStream(data, offset, length);
                             }
                             // type = 0,aac audio stream
                             if(type == 0) {
@@ -266,7 +269,7 @@ public class USBCameraActivity extends AppCompatActivity implements CameraDialog
                     showShortMsg("start record...");
                     mSwitchVoice.setEnabled(false);
                 } else {
-                    // FileUtils.releaseFile();
+                    FileUtils.releaseFile();
                     mCameraHelper.stopRecording();
                     showShortMsg("stop record...");
                     mSwitchVoice.setEnabled(true);
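Read together, the activity hunks above re-enable the raw H.264 dump that was previously commented out: test666.h264 is created via FileUtils.createfile before recording starts, every type == 1 packet from onEncodeResult is appended to it, and FileUtils.releaseFile closes it when recording stops. A condensed sketch of just the callback, using only the calls visible in this diff:

```java
// Condensed sketch of the encode callback wired up above; the type codes come
// from the diff's own comments (1 = H.264 video, 0 = AAC audio).
public void onEncodeResult(byte[] data, int offset, int length, long timestamp, int type) {
    if (type == 1) {
        // Raw H.264 elementary stream, dumped to test666.h264 for inspection.
        FileUtils.putFileStream(data, offset, length);
    }
    if (type == 0) {
        // AAC audio frames; their handling is outside this hunk.
    }
}
```

Stopping the recording mirrors this: FileUtils.releaseFile() runs before mCameraHelper.stopRecording(), so the dump file is closed before the encoder is torn down.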

34
app/src/main/res/layout/activity_usbcamera.xml

@@ -17,38 +17,42 @@
         app:navigationIcon="@null"
         app:popupTheme="@style/ThemeOverlay.AppCompat.Light" />
+    <FrameLayout
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        android:layout_below="@id/toolbar">
     <com.serenegiant.usb.widget.UVCCameraTextureView
         android:id="@+id/camera_view"
-        android:layout_below="@id/toolbar"
         android:layout_width="match_parent"
-        android:layout_height="match_parent"
-        android:layout_centerHorizontal="true"
-        android:layout_centerVertical="true" />
+        android:layout_height="wrap_content"
+        android:layout_gravity="center"/>
+    </FrameLayout>
     <LinearLayout
         android:id="@+id/llayout_contrast"
         android:layout_width="match_parent"
         android:layout_height="wrap_content"
         android:layout_alignParentBottom="true"
+        android:layout_marginBottom="10dp"
         android:layout_marginLeft="10dp"
         android:layout_marginRight="10dp"
-        android:layout_marginBottom="10dp"
         android:orientation="horizontal">
         <TextView
             android:layout_width="0dp"
             android:layout_height="wrap_content"
+            android:layout_weight="1"
            android:paddingBottom="7dp"
             android:paddingTop="7dp"
-            android:layout_weight="1"
-            android:textColor="@color/colorWhite"
-            android:text="对比度(contrast)" />
+            android:text="对比度(contrast)"
+            android:textColor="@color/colorWhite" />
         <SeekBar
             android:id="@+id/seekbar_contrast"
             android:layout_width="0dp"
-            android:layout_weight="2"
-            android:layout_height="match_parent" />
+            android:layout_height="match_parent"
+            android:layout_weight="2" />
     </LinearLayout>
     <LinearLayout
@@ -64,17 +68,17 @@
         <TextView
             android:layout_width="0dp"
             android:layout_height="wrap_content"
+            android:layout_weight="1"
             android:paddingBottom="7dp"
             android:paddingTop="7dp"
-            android:layout_weight="1"
             android:text="亮度(brightness)"
             android:textColor="@color/colorWhite" />
         <SeekBar
             android:id="@+id/seekbar_brightness"
             android:layout_width="0dp"
-            android:layout_weight="2"
-            android:layout_height="match_parent" />
+            android:layout_height="match_parent"
+            android:layout_weight="2" />
     </LinearLayout>
     <Switch
@@ -88,7 +92,7 @@
         android:layout_marginStart="13dp"
         android:layout_marginTop="27dp"
         android:checked="true"
-        android:textColor="@color/colorWhite"
-        android:text="closeVoice" />
+        android:text="closeVoice"
+        android:textColor="@color/colorWhite" />
 </RelativeLayout>

3
build.gradle

@@ -4,7 +4,8 @@ buildscript {
         jcenter()
     }
     dependencies {
-        classpath 'com.android.tools.build:gradle:3.0.0'
+        // gradle插件版本
+        classpath 'com.android.tools.build:gradle:2.3.3'
     }
 }

BIN
build/generated/mockable-android-25.jar

Binary file not shown.

BIN
gifs/2.1.0.gif

Binary file not shown.


88
libusbcamera/src/main/java/com/jiangdg/usbcamera/UVCCameraHelper.java

@@ -32,10 +32,13 @@ public class UVCCameraHelper {
     private static final String TAG = "UVCCameraHelper";
     private int previewWidth = 640;
     private int previewHeight = 480;
-    public static int MODE_BRIGHTNESS = UVCCamera.PU_BRIGHTNESS;
-    public static int MODE_CONTRAST = UVCCamera.PU_CONTRAST;
-    //0-YUYV,1-MJPEG
-    private static final int PREVIEW_FORMAT = 0;
+    // 高分辨率YUV格式帧率较低
+    public static final int FRAME_FORMAT_YUYV = UVCCamera.FRAME_FORMAT_YUYV;
+    // 默认使用MJPEG
+    public static final int FRAME_FORMAT_MJPEG = UVCCamera.FRAME_FORMAT_MJPEG;
+    public static final int MODE_BRIGHTNESS = UVCCamera.PU_BRIGHTNESS;
+    public static final int MODE_CONTRAST = UVCCamera.PU_CONTRAST;
+    private int mFrameFormat = FRAME_FORMAT_MJPEG;
     private static UVCCameraHelper mCameraHelper;
     // USB Manager
@@ -44,8 +47,8 @@ public class UVCCameraHelper {
     private UVCCameraHandler mCameraHandler;
     private USBMonitor.UsbControlBlock mCtrlBlock;
-    private WeakReference<Activity> mActivityWrf;
-    private WeakReference<CameraViewInterface> mCamViewWrf;
+    private Activity mActivity;
+    private CameraViewInterface mCamView;
     private UVCCameraHelper() {
     }
@@ -74,8 +77,8 @@ public class UVCCameraHelper {
     }
     public void initUSBMonitor(Activity activity, CameraViewInterface cameraView, final OnMyDevConnectListener listener) {
-        this.mActivityWrf = new WeakReference<>(activity);
-        this.mCamViewWrf = new WeakReference<>(cameraView);
+        this.mActivity = activity;
+        this.mCamView = cameraView;
         mUSBMonitor = new USBMonitor(activity.getApplicationContext(), new USBMonitor.OnDeviceConnectListener() {
             // called by checking usb device
@@ -102,7 +105,19 @@ public class UVCCameraHelper {
             public void onConnect(final UsbDevice device, USBMonitor.UsbControlBlock ctrlBlock, boolean createNew) {
                 mCtrlBlock = ctrlBlock;
                 openCamera(ctrlBlock);
-                startPreview(mCamViewWrf.get());
+                new Thread(new Runnable() {
+                    @Override
+                    public void run() {
+                        // 休眠500ms,等待Camera创建完毕
+                        try {
+                            Thread.sleep(500);
+                        } catch (InterruptedException e) {
+                            e.printStackTrace();
+                        }
+                        // 开启预览
+                        startPreview(mCamView);
+                    }
+                }).start();
                 if(listener != null) {
                     listener.onConnectDev(device,true);
                 }
@@ -126,7 +141,7 @@ public class UVCCameraHelper {
     }
     public void createUVCCamera() {
-        if (mCamViewWrf.get() == null)
+        if (mCamView == null)
             throw new NullPointerException("CameraViewInterface cannot be null!");
         // release resources for initializing camera handler
@@ -135,9 +150,9 @@ public class UVCCameraHelper {
             mCameraHandler = null;
         }
         // initialize camera handler
-        // cameraView.setAspectRatio(previewWidth / (float)previewHeight);
-        mCameraHandler = UVCCameraHandler.createHandler(mActivityWrf.get(), mCamViewWrf.get(), 2,
-                previewWidth, previewHeight, PREVIEW_FORMAT);
+        mCamView.setAspectRatio(previewWidth / (float)previewHeight);
+        mCameraHandler = UVCCameraHandler.createHandler(mActivity, mCamView, 2,
+                previewWidth, previewHeight, mFrameFormat);
     }
     public void updateResolution(int width, int height) {
@@ -150,11 +165,23 @@ public class UVCCameraHelper {
             mCameraHandler.release();
             mCameraHandler = null;
         }
-        // cameraView.setAspectRatio(previewWidth / (float)previewHeight);
-        mCameraHandler = UVCCameraHandler.createHandler(mActivityWrf.get(), mCamViewWrf.get(), 2,
-                previewWidth, previewHeight, PREVIEW_FORMAT);
+        mCamView.setAspectRatio(previewWidth / (float)previewHeight);
+        mCameraHandler = UVCCameraHandler.createHandler(mActivity,mCamView, 2,
+                previewWidth, previewHeight, mFrameFormat);
         openCamera(mCtrlBlock);
-        startPreview(mCamViewWrf.get());
+        new Thread(new Runnable() {
+            @Override
+            public void run() {
+                // 休眠500ms,等待Camera创建完毕
+                try {
+                    Thread.sleep(500);
+                } catch (InterruptedException e) {
+                    e.printStackTrace();
+                }
+                // 开启预览
+                startPreview(mCamView);
+            }
+        }).start();
     }
     public void registerUSB() {
@@ -208,7 +235,7 @@ public class UVCCameraHelper {
     public List<UsbDevice> getUsbDeviceList() {
         List<DeviceFilter> deviceFilters = DeviceFilter
-                .getDeviceFilters(mActivityWrf.get().getApplicationContext(), R.xml.device_filter);
+                .getDeviceFilters(mActivity.getApplicationContext(), R.xml.device_filter);
         if (mUSBMonitor == null || deviceFilters == null)
             return null;
         return mUSBMonitor.getDeviceList(deviceFilters.get(0));
@@ -247,8 +274,6 @@ public class UVCCameraHelper {
     }
     public void release() {
-        mCamViewWrf.clear();
-        mActivityWrf.clear();
         if (mCameraHandler != null) {
             mCameraHandler.release();
             mCameraHandler = null;
@@ -299,4 +324,27 @@ public class UVCCameraHelper {
             return null;
         return mCameraHandler.getSupportedPreviewSizes();
     }
+    public void setDefaultPreviewSize(int defaultWidth,int defaultHeight) {
+        if(mUSBMonitor != null) {
+            throw new IllegalStateException("setDefaultPreviewSize should be call before initMonitor");
+        }
+        this.previewWidth = defaultWidth;
+        this.previewHeight = defaultHeight;
+    }
+    public void setDefaultFrameFormat(int format) {
+        if(mUSBMonitor != null) {
+            throw new IllegalStateException("setDefaultFrameFormat should be call before initMonitor");
+        }
+        this.mFrameFormat = format;
+    }
+    public int getPreviewWidth() {
+        return previewWidth;
+    }
+    public int getPreviewHeight() {
+        return previewHeight;
+    }
 }
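Two details of the UVCCameraHelper rewrite above are easy to miss: the new setters refuse to run once initUSBMonitor has been called (they throw IllegalStateException when mUSBMonitor is already non-null), and preview no longer starts synchronously in onConnect; it is kicked off from a worker thread after a 500 ms sleep that waits for the camera handler to be created. A minimal ordering sketch under those constraints, with local names assumed for illustration:

```java
// Call order required by the new setters (hypothetical local names).
UVCCameraHelper helper = UVCCameraHelper.getInstance();
// Must happen before initUSBMonitor(); afterwards both calls throw IllegalStateException.
helper.setDefaultPreviewSize(1280, 720);
helper.setDefaultFrameFormat(UVCCameraHelper.FRAME_FORMAT_MJPEG);   // or FRAME_FORMAT_YUYV
helper.initUSBMonitor(activity, cameraView, listener);
// After this point only the getters and runtime calls are safe:
int w = helper.getPreviewWidth();
int h = helper.getPreviewHeight();
```

A side effect worth noting: the WeakReference wrappers around the Activity and CameraViewInterface were replaced by plain fields and release() no longer clears them, so the helper now holds strong references; callers should still make sure release() runs when the hosting Activity goes away.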

82
libusbcamera/src/main/java/com/jiangdg/usbcamera/task/SaveYuvImageTask.java

@@ -1,82 +0,0 @@
package com.jiangdg.usbcamera.task;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.os.AsyncTask;
import com.jiangdg.usbcamera.utils.YUVBean;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
/**保存YUV格式NV21图片
*
* Created by jiangdongguo on 2017-12-25下午9:13:01
*/
public class SaveYuvImageTask extends AsyncTask<Void, Void, Void> {
private static final String TAG = "SaveYuvImageTask";
private YUVBean yuvBean;
private Context mContext;
//转换结果回调接口
private OnSaveYuvResultListener mListener;
public interface OnSaveYuvResultListener{
void onSaveResult(String savePath);
}
public SaveYuvImageTask(YUVBean yuvBean, OnSaveYuvResultListener mListener) {
this.yuvBean = yuvBean;
this.mListener = mListener;
}
@Override
protected Void doInBackground(Void... params) {
if (yuvBean == null || yuvBean.getWidth() == 0
|| yuvBean.getHeight() == 0 || yuvBean.getYuvData() == null) {
return null;
}
saveYuv2Jpeg(yuvBean.getYuvData(),yuvBean.getWidth(),yuvBean.getHeight());
return null;
}
private void saveYuv2Jpeg(byte[] data,int width,int height){
YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, width, height, null);
ByteArrayOutputStream bos = new ByteArrayOutputStream(data.length);
boolean result = yuvImage.compressToJpeg(new Rect(0, 0, width, height), 100, bos);
if(result){
byte[] buffer = bos.toByteArray();
Bitmap bmp = BitmapFactory.decodeByteArray(buffer, 0, buffer.length);
bmp.recycle();
String savPath = yuvBean.getPicPath();
File file = new File(savPath);
FileOutputStream fos = null;
try {
fos = new FileOutputStream(file);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
bmp.compress(Bitmap.CompressFormat.JPEG, 100, fos);
try {
fos.flush();
fos.close();
//传递转换结果给调用者
mListener.onSaveResult(savPath);
} catch (IOException e) {
e.printStackTrace();
mListener.onSaveResult(null);
}
}
try {
bos.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}

45
libusbcamera/src/main/java/com/jiangdg/usbcamera/utils/YUVBean.java

@@ -1,45 +0,0 @@
package com.jiangdg.usbcamera.utils;
/** NV21数据类
*
* Created by jiangdongguo on 2018/1/26.
*/
public class YUVBean {
private int width;
private int height;
private byte[] yuvData;
private String picPath;
public int getWidth() {
return width;
}
public void setWidth(int width) {
this.width = width;
}
public int getHeight() {
return height;
}
public void setHeight(int height) {
this.height = height;
}
public byte[] getYuvData() {
return yuvData;
}
public void setYuvData(byte[] yuvData) {
this.yuvData = yuvData;
}
public String getPicPath() {
return picPath;
}
public void setPicPath(String picPath) {
this.picPath = picPath;
}
}

6
libusbcamera/src/main/java/com/serenegiant/usb/UVCCamera.java

@@ -48,7 +48,7 @@ public class UVCCamera {
     public static final int DEFAULT_PREVIEW_HEIGHT = 480;
     public static final int DEFAULT_PREVIEW_MODE = 0;
     public static final int DEFAULT_PREVIEW_MIN_FPS = 1;
-    public static final int DEFAULT_PREVIEW_MAX_FPS = 30;
+    public static final int DEFAULT_PREVIEW_MAX_FPS = 31;
     public static final float DEFAULT_BANDWIDTH = 1.0f;
     public static final int FRAME_FORMAT_YUYV = 0;
@@ -58,8 +58,8 @@ public class UVCCamera {
     public static final int PIXEL_FORMAT_YUV = 1;
     public static final int PIXEL_FORMAT_RGB565 = 2;
     public static final int PIXEL_FORMAT_RGBX = 3;
-    public static final int PIXEL_FORMAT_YUV420SP = 4;
-    public static final int PIXEL_FORMAT_NV21 = 5;        // = YVU420SemiPlanar
+    public static final int PIXEL_FORMAT_YUV420SP = 4;    // NV12
+    public static final int PIXEL_FORMAT_NV21 = 5;        // = YVU420SemiPlanar,NV21,但是保存到jpg颜色失真
     //--------------------------------------------------------------------------------
     public static final int CTRL_SCANNING = 0x00000001;   // D0: Scanning Mode

24
libusbcamera/src/main/java/com/serenegiant/usb/common/AbstractUVCCameraHandler.java

@@ -20,9 +20,6 @@ import android.util.Log;
 import android.view.Surface;
 import android.view.SurfaceHolder;
-import com.jiangdg.usbcamera.task.SaveYuvImageTask;
-import com.jiangdg.usbcamera.utils.FileUtils;
-import com.jiangdg.usbcamera.utils.YUVBean;
 import com.serenegiant.usb.IFrameCallback;
 import com.serenegiant.usb.Size;
 import com.serenegiant.usb.USBMonitor;
@@ -533,17 +530,18 @@ public abstract class AbstractUVCCameraHandler extends Handler {
             if ((mUVCCamera == null) || mIsPreviewing) return;
             try {
                 mUVCCamera.setPreviewSize(mWidth, mHeight, 1, 31, mPreviewMode, mBandwidthFactor);
-                // 获取USB Camera预览数据
-                mUVCCamera.setFrameCallback(mIFrameCallback, UVCCamera.PIXEL_FORMAT_NV21);
+                // 获取USB Camera预览数据,使用NV21颜色会失真
+                // 无论使用YUV还是MPEG,setFrameCallback的设置效果一致
+                // mUVCCamera.setFrameCallback(mIFrameCallback, UVCCamera.PIXEL_FORMAT_NV21);
+                mUVCCamera.setFrameCallback(mIFrameCallback, UVCCamera.PIXEL_FORMAT_YUV420SP);
             } catch (final IllegalArgumentException e) {
-                // try {
-                //     // fallback to YUV mode
-                //     mUVCCamera.setPreviewSize(mWidth, mHeight, 1, 31, UVCCamera.DEFAULT_PREVIEW_MODE, mBandwidthFactor);
-                // } catch (final IllegalArgumentException e1) {
-                //     callOnError(e1);
-                //     return;
-                // }
+                try {
+                    // fallback to YUV mode
+                    mUVCCamera.setPreviewSize(mWidth, mHeight, 1, 31, UVCCamera.DEFAULT_PREVIEW_MODE, mBandwidthFactor);
+                } catch (final IllegalArgumentException e1) {
+                    callOnError(e1);
+                    return;
+                }
             }
             if (surface instanceof SurfaceHolder) {
                 mUVCCamera.setPreviewDisplay((SurfaceHolder)surface);
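With the change above the frame callback is registered with UVCCamera.PIXEL_FORMAT_YUV420SP, which the UVCCamera diff earlier annotates as NV12, and the NV21 registration is kept only as a comment because saving those frames to JPEG distorted colors. If the preview listener in USBCameraActivity is fed from this callback (the wiring is not shown in this diff), the bytes it receives are now NV12 even though the parameter is still named nv21Yuv. A hedged sketch of handling that:

```java
// Sketch only: assumes the preview listener receives the frames configured above.
mCameraHelper.setOnPreviewFrameListener(new AbstractUVCCameraHandler.OnPreViewResultListener() {
    @Override
    public void onPreviewResult(byte[] nv21Yuv) {
        // After this commit the buffer is YUV420SP (NV12): all Y bytes first,
        // then interleaved U,V pairs. Swap each U/V pair (as H264EncodeConsumer's
        // nv12ToNV21 does below) before handing the buffer to APIs that expect
        // NV21, e.g. android.graphics.YuvImage with ImageFormat.NV21.
    }
});
```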

178
libusbcamera/src/main/java/com/serenegiant/usb/encoder/biz/H264EncodeConsumer.java

@@ -1,10 +1,6 @@
 package com.serenegiant.usb.encoder.biz;
-import java.io.BufferedOutputStream;
-import java.io.IOException;
-import java.lang.ref.WeakReference;
-import java.nio.ByteBuffer;
+import android.annotation.SuppressLint;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecList;
@@ -14,7 +10,13 @@ import android.os.Bundle;
 import android.os.Environment;
 import android.util.Log;
-/** 对YUV视频流进行编码
+import java.io.BufferedOutputStream;
+import java.io.IOException;
+import java.lang.ref.WeakReference;
+import java.nio.ByteBuffer;
+
+/**
+ * 对YUV视频流进行编码
  * Created by jiangdongguo on 2017/5/6.
  */
@@ -39,13 +41,13 @@ public class H264EncodeConsumer extends Thread {
     final int millisPerframe = 1000 / 20;
     long lastPush = 0;
     private OnH264EncodeResultListener listener;
-    private int mWidth ;
-    private int mHeight ;
+    private int mWidth;
+    private int mHeight;
     private MediaFormat newFormat;
     private WeakReference<Mp4MediaMuxer> mMuxerRef;
     private boolean isAddKeyFrame = false;
-    public interface OnH264EncodeResultListener{
+    public interface OnH264EncodeResultListener {
         void onEncodeResult(byte[] data, int offset,
                             int length, long timestamp);
     }
@@ -54,16 +56,12 @@ public class H264EncodeConsumer extends Thread {
         this.listener = listener;
     }
-    public H264EncodeConsumer(){
-    }
-    public H264EncodeConsumer(int width,int height){
+    public H264EncodeConsumer(int width, int height) {
         this.mWidth = width;
         this.mHeight = height;
     }
-    public synchronized void setTmpuMuxer(Mp4MediaMuxer mMuxer){
+    public synchronized void setTmpuMuxer(Mp4MediaMuxer mMuxer) {
         this.mMuxerRef = new WeakReference<>(mMuxer);
         Mp4MediaMuxer muxer = mMuxerRef.get();
         if (muxer != null && newFormat != null) {
@@ -74,13 +72,10 @@ public class H264EncodeConsumer extends Thread {
     private ByteBuffer[] inputBuffers;
     private ByteBuffer[] outputBuffers;
-    public void setRawYuv(byte[] yuvData,int width,int height){
-        if (! isEncoderStart)
+    public void setRawYuv(byte[] yuvData, int width, int height) {
+        if (!isEncoderStart)
             return;
-        // 根据编码器支持转换颜色空间格式
-        // 即 nv21 ---> YUV420sp(21)
-        //    nv21 ---> YUV420p (19)
-        if(mWidth != width || mHeight != height){
+        if (mWidth != width || mHeight != height) {
             mWidth = width;
             mHeight = height;
             return;
@@ -96,7 +91,8 @@ public class H264EncodeConsumer extends Thread {
                 Thread.sleep(time / 2);
             }
             // 将数据写入编码器
-            feedMediaCodecData(yuvData);
+            feedMediaCodecData(nv12ToNV21(yuvData, mWidth, mHeight));
             if (time > 0)
                 Thread.sleep(time / 2);
@@ -106,13 +102,13 @@ public class H264EncodeConsumer extends Thread {
         }
     }
-    private void feedMediaCodecData(byte[] data){
-        if (! isEncoderStart)
+    private void feedMediaCodecData(byte[] data) {
+        if (!isEncoderStart)
             return;
         int bufferIndex = -1;
-        try{
+        try {
             bufferIndex = mMediaCodec.dequeueInputBuffer(0);
-        }catch (IllegalStateException e){
+        } catch (IllegalStateException e) {
             e.printStackTrace();
         }
         if (bufferIndex >= 0) {
@@ -129,13 +125,14 @@ public class H264EncodeConsumer extends Thread {
         }
     }
-    public void exit(){
+    public void exit() {
         isExit = true;
     }
+    @SuppressLint("WrongConstant")
     @Override
     public void run() {
-        if(!isEncoderStart){
+        if (!isEncoderStart) {
             startMediaCodec();
         }
         // 休眠200ms,等待音频线程开启
@@ -147,7 +144,7 @@ public class H264EncodeConsumer extends Thread {
         }
         // 如果编码器没有启动或者没有图像数据,线程阻塞等待
-        while(!isExit){
+        while (!isExit) {
             MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
             int outputBufferIndex = 0;
             byte[] mPpsSps = new byte[0];
@@ -162,7 +159,7 @@ public class H264EncodeConsumer extends Thread {
             } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                 synchronized (H264EncodeConsumer.this) {
                     newFormat = mMediaCodec.getOutputFormat();
-                    if(mMuxerRef != null){
+                    if (mMuxerRef != null) {
                         Mp4MediaMuxer muxer = mMuxerRef.get();
                         if (muxer != null) {
                             muxer.addTrack(newFormat, true);
@@ -202,35 +199,35 @@ public class H264EncodeConsumer extends Thread {
                 if (sync) {
                     System.arraycopy(mPpsSps, 0, h264, 0, mPpsSps.length);
                     outputBuffer.get(h264, mPpsSps.length, bufferInfo.size);
-                    if(listener != null){
-                        listener.onEncodeResult(h264, 0,mPpsSps.length + bufferInfo.size, bufferInfo.presentationTimeUs / 1000);
+                    if (listener != null) {
+                        listener.onEncodeResult(h264, 0, mPpsSps.length + bufferInfo.size, bufferInfo.presentationTimeUs / 1000);
                     }
                     // 添加视频流到混合器
-                    if(mMuxerRef != null){
+                    if (mMuxerRef != null) {
                         Mp4MediaMuxer muxer = mMuxerRef.get();
                         if (muxer != null) {
                             muxer.pumpStream(outputBuffer, bufferInfo, true);
                         }
                         isAddKeyFrame = true;
                     }
-                    if(DEBUG)
-                        Log.i(TAG,"关键帧 h264.length = "+h264.length+";mPpsSps.length="+mPpsSps.length
+                    if (DEBUG)
+                        Log.i(TAG, "关键帧 h264.length = " + h264.length + ";mPpsSps.length=" + mPpsSps.length
                                 + " bufferInfo.size = " + bufferInfo.size);
                 } else {
                     outputBuffer.get(h264, 0, bufferInfo.size);
-                    if(listener != null){
-                        listener.onEncodeResult(h264, 0,bufferInfo.size, bufferInfo.presentationTimeUs / 1000);
+                    if (listener != null) {
+                        listener.onEncodeResult(h264, 0, bufferInfo.size, bufferInfo.presentationTimeUs / 1000);
                     }
                     // 添加视频流到混合器
-                    if(isAddKeyFrame && mMuxerRef != null){
+                    if (isAddKeyFrame && mMuxerRef != null) {
                         Mp4MediaMuxer muxer = mMuxerRef.get();
                         if (muxer != null) {
                             muxer.pumpStream(outputBuffer, bufferInfo, true);
                         }
                     }
-                    if(DEBUG)
-                        Log.i(TAG,"普通帧 h264.length = "+h264.length+ " bufferInfo.size = " + bufferInfo.size);
+                    if (DEBUG)
+                        Log.i(TAG, "普通帧 h264.length = " + h264.length + " bufferInfo.size = " + bufferInfo.size);
                 }
                 mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
             }
@@ -261,7 +258,6 @@ public class H264EncodeConsumer extends Thread {
         mMediaCodec.start();
         isEncoderStart = true;
         if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP + 1) {
             inputBuffers = outputBuffers = null;
@@ -277,25 +273,27 @@ public class H264EncodeConsumer extends Thread {
         }
     }
-    private void stopMediaCodec(){
+    private void stopMediaCodec() {
         isEncoderStart = false;
-        if(mMediaCodec != null){
+        if (mMediaCodec != null) {
             mMediaCodec.stop();
             mMediaCodec.release();
-            Log.d(TAG,"关闭视频编码器");
+            Log.d(TAG, "关闭视频编码器");
         }
     }
     private static final int FRAME_RATE = 15;
     private static final float BPP = 0.50f;
     private int calcBitRate() {
-        final int bitrate = (int)(BPP * FRAME_RATE * mWidth * mHeight);
+        final int bitrate = (int) (BPP * FRAME_RATE * mWidth * mHeight);
         Log.i(TAG, String.format("bitrate=%5.2f[Mbps]", bitrate / 1024f / 1024f));
         return bitrate;
     }
     /**
      * select the first codec that match a specific MIME type
+     *
      * @param mimeType
      * @return null if no codec matched
      */
@@ -327,6 +325,7 @@ public class H264EncodeConsumer extends Thread {
     /**
      * select color format available on specific codec and we can use.
+     *
      * @return 0 if no colorFormat is matched
      */
     protected static final int selectColorFormat(final MediaCodecInfo codecInfo, final String mimeType) {
@@ -356,12 +355,14 @@ public class H264EncodeConsumer extends Thread {
      * color formats that we can use in this class
      */
     protected static int[] recognizedFormats;
     static {
-        recognizedFormats = new int[] {
+        recognizedFormats = new int[]{
+                // MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar,
                 // MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
+                // MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar,
                 MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
                 MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
+                // MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface,
         };
     }
@@ -374,4 +375,87 @@ public class H264EncodeConsumer extends Thread {
         }
         return false;
     }
+    private byte[] nv21ToI420(byte[] data, int width, int height) {
+        byte[] ret = new byte[width * height * 3 / 2];
+        int total = width * height;
+        ByteBuffer bufferY = ByteBuffer.wrap(ret, 0, total);                        // I420的Y分量
+        ByteBuffer bufferU = ByteBuffer.wrap(ret, total, total / 4);                // I420的U分量
+        ByteBuffer bufferV = ByteBuffer.wrap(ret, total + total / 4, total / 4);    // I420的V分量
+        // NV21 YYYYYYYY VUVU
+        bufferY.put(data, 0, total);
+        for (int i = total; i < data.length; i += 2) {
+            bufferV.put(data[i]);
+            bufferU.put(data[i + 1]);
+        }
+        return ret;
+    }
+    private byte[] nv12ToI420(byte[] data, int width, int height) {
+        byte[] ret = new byte[width * height * 3 / 2];
+        int total = width * height;
+        ByteBuffer bufferY = ByteBuffer.wrap(ret, 0, total);                        // I420的Y分量
+        ByteBuffer bufferU = ByteBuffer.wrap(ret, total, total / 4);                // I420的U分量
+        ByteBuffer bufferV = ByteBuffer.wrap(ret, total + total / 4, total / 4);    // I420的V分量
+        // NV12 YYYYYYYY UVUV
+        bufferY.put(data, 0, total);
+        for (int i = total; i < data.length; i += 2) {
+            bufferU.put(data[i]);
+            bufferV.put(data[i + 1]);
+        }
+        return ret;
+    }
+    private byte[] nv12ToNv21(byte[] data, int width, int height) {
+        byte[] ret = new byte[width * height * 3 / 2];
+        int total = width * height;
+        ByteBuffer bufferY = ByteBuffer.wrap(ret, 0, total);                        // I420的Y分量
+        ByteBuffer bufferU = ByteBuffer.wrap(ret, total, total / 4);                // I420的U分量
+        ByteBuffer bufferV = ByteBuffer.wrap(ret, total + total / 4, total / 4);    // I420的V分量
+        // NV12 YYYYYYYY UVUV
+        bufferY.put(data, 0, total);
+        for (int i = total; i < data.length; i += 2) {
+            bufferU.put(data[i]);
+            bufferV.put(data[i + 1]);
+        }
+        return ret;
+    }
+    // YYYYYYYY UVUV(nv21)--> YYYYYYYY VUVU(nv12)
+    private byte[] nv21ToNV12(byte[] nv21, int width, int height) {
+        byte[] ret = new byte[width * height * 3 / 2];
+        int framesize = width * height;
+        int i = 0, j = 0;
+        // 拷贝Y分量
+        System.arraycopy(nv21, 0, ret, 0, framesize);
+        // 拷贝UV分量
+        for (j = framesize; j < nv21.length; j += 2) {
+            ret[j + 1] = nv21[j + 1];
+            ret[j] = nv21[j];
+        }
+        return ret;
+    }
+    // YYYYYYYY UVUV(nv12)--> YYYYYYYY VUVU(nv21)
+    private byte[] nv12ToNV21(byte[] nv12, int width, int height) {
+        byte[] ret = new byte[width * height * 3 / 2];
+        int framesize = width * height;
+        int i = 0, j = 0;
+        // 拷贝Y分量
+        System.arraycopy(nv12, 0, ret, 0, framesize);
+        // 拷贝UV分量
+        for (j = framesize; j < nv12.length; j += 2) {
+            ret[j] = nv12[j + 1];
+            ret[j + 1] = nv12[j];
+        }
+        return ret;
+    }
 }
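The helpers added above all rearrange the chroma bytes of a 4:2:0 frame; the only one the diff shows being called on the encoder path is nv12ToNV21, which swaps every interleaved U/V pair before feedMediaCodecData. The toy program below (a hypothetical TinyNv12Demo class, not part of the repository) runs the same swap over a 2x2 frame so the layout change is easy to see:

```java
import java.util.Arrays;

// Hypothetical standalone demo of the NV12 -> NV21 swap used in H264EncodeConsumer.
public class TinyNv12Demo {
    static byte[] nv12ToNv21(byte[] nv12, int width, int height) {
        byte[] out = new byte[width * height * 3 / 2];
        int ySize = width * height;
        // The Y plane is identical in both layouts.
        System.arraycopy(nv12, 0, out, 0, ySize);
        // NV12 interleaves U,V; NV21 interleaves V,U: swap each pair.
        for (int i = ySize; i < nv12.length; i += 2) {
            out[i] = nv12[i + 1];     // V comes first in NV21
            out[i + 1] = nv12[i];     // then U
        }
        return out;
    }

    public static void main(String[] args) {
        // A 2x2 frame: 4 Y bytes followed by one U/V pair (4:2:0 subsampling).
        byte[] nv12 = {10, 11, 12, 13, /* U */ 40, /* V */ 50};
        // Prints [10, 11, 12, 13, 50, 40]: Y untouched, chroma pair swapped.
        System.out.println(Arrays.toString(nv12ToNv21(nv12, 2, 2)));
    }
}
```

Note that the nv21ToNV12 helper in the diff appears to copy the U/V pair back to the same positions without swapping, so of the two mirror-named methods only nv12ToNV21 actually performs the swap.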

5
local.properties

@@ -7,6 +7,5 @@
 # Location of the SDK. This is only used by Gradle.
 # For customization when using a Version Control System, please read the
 # header note.
-#Fri Sep 29 23:06:03 CST 2017
-ndk.dir=E\:\\Android\\Evironment\\android-sdk-windows\\ndk-bundle
-sdk.dir=E\:\\Android\\Evironment\\android-sdk-windows
+#Mon Nov 13 09:14:43 CST 2017
+sdk.dir=E\:\\Environment\\android-sdk-windows
