Browse Source

新增图片抓取、视频录制功能

main
jiangdongguo 7 years ago
parent
commit
c9bdb7739a
  1. 4
      app/build.gradle
  2. 4
      app/src/main/AndroidManifest.xml
  3. 198
      app/src/main/java/com/jiangdg/usbcamera/view/BaseActivity.java
  4. 250
      app/src/main/java/com/jiangdg/usbcamera/view/MainActivity.java
  5. 169
      app/src/main/java/com/jiangdg/usbcamera/view/USBCameraActivity.java
  6. 15
      app/src/main/res/layout/activity_main.xml
  7. 39
      app/src/main/res/layout/activity_usbcamera.xml
  8. 2
      libusbcamera/build.gradle
  9. 207
      libusbcamera/src/main/java/com/jiangdg/usbcamera/USBCameraManager.java
  10. 450
      libusbcamera/src/main/java/com/serenegiant/usb/common/AbstractUVCCameraHandler.java
  11. 27
      libusbcamera/src/main/java/com/serenegiant/usb/encoder/IAudioEncoder.java
  12. 28
      libusbcamera/src/main/java/com/serenegiant/usb/encoder/IVideoEncoder.java
  13. 233
      libusbcamera/src/main/java/com/serenegiant/usb/encoder/MediaAudioEncoder.java
  14. 448
      libusbcamera/src/main/java/com/serenegiant/usb/encoder/MediaEncoder.java
  15. 188
      libusbcamera/src/main/java/com/serenegiant/usb/encoder/MediaMuxerWrapper.java
  16. 196
      libusbcamera/src/main/java/com/serenegiant/usb/encoder/MediaSurfaceEncoder.java
  17. 193
      libusbcamera/src/main/java/com/serenegiant/usb/encoder/MediaVideoBufferEncoder.java
  18. 228
      libusbcamera/src/main/java/com/serenegiant/usb/encoder/MediaVideoEncoder.java
  19. 3
      libusbcamera/src/main/java/com/serenegiant/usb/widget/CameraViewInterface.java
  20. 222
      libusbcamera/src/main/java/com/serenegiant/usb/widget/UVCCameraTextureView.java
  21. BIN
      libusbcamera/src/main/res/raw/camera_click.ogg

4
app/build.gradle

@@ -6,7 +6,7 @@ android {
defaultConfig { defaultConfig {
applicationId "com.jiangdg.usbcamera" applicationId "com.jiangdg.usbcamera"
minSdkVersion 18 minSdkVersion 18
targetSdkVersion 25 targetSdkVersion 22
versionCode 1 versionCode 1
versionName "1.0" versionName "1.0"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner" testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
@@ -29,4 +29,6 @@ dependencies {
compile project(':libusbcamera') compile project(':libusbcamera')
compile 'com.jakewharton:butterknife:8.8.1' compile 'com.jakewharton:butterknife:8.8.1'
compile 'com.jakewharton:butterknife-compiler:8.8.1' compile 'com.jakewharton:butterknife-compiler:8.8.1'
compile 'com.jakewharton:butterknife:8.8.1'
compile 'com.jakewharton:butterknife-compiler:8.8.1'
} }

4
app/src/main/AndroidManifest.xml

@@ -2,8 +2,10 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android" <manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.jiangdg.usbcamera"> package="com.jiangdg.usbcamera">
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/> <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<application <application
android:name=".application.MyApplication" android:name=".application.MyApplication"
android:allowBackup="true" android:allowBackup="true"
@@ -11,7 +13,7 @@
android:label="@string/app_name" android:label="@string/app_name"
android:supportsRtl="true" android:supportsRtl="true"
android:theme="@style/AppTheme"> android:theme="@style/AppTheme">
<activity android:name="com.jiangdg.usbcamera.view.MainActivity"> <activity android:name="com.jiangdg.usbcamera.view.USBCameraActivity">
<intent-filter> <intent-filter>
<action android:name="android.intent.action.MAIN" /> <action android:name="android.intent.action.MAIN" />

198
app/src/main/java/com/jiangdg/usbcamera/view/BaseActivity.java

@@ -1,198 +0,0 @@
/*
* UVCCamera
* library and sample to access to UVC web camera on non-rooted Android device
*
* Copyright (c) 2014-2017 saki t_saki@serenegiant.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* All files in the folder are under this Apache License, Version 2.0.
* Files in the libjpeg-turbo, libusb, libuvc, rapidjson folder
* may have a different license, see the respective files.
*/
package com.jiangdg.usbcamera.view;
import android.Manifest;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.support.annotation.NonNull;
import android.support.annotation.StringRes;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.widget.Toast;
import com.serenegiant.dialog.MessageDialogFragment;
import com.serenegiant.utils.BuildCheck;
import com.serenegiant.utils.HandlerThreadHandler;
import com.serenegiant.utils.PermissionCheck;
/**
* Created by saki on 2016/11/18.
*
*/
/**
 * Base Activity that owns two handlers: one bound to the main (UI) looper for
 * posting UI work, and a private worker {@code HandlerThread} for background
 * tasks. The worker thread is created in {@link #onCreate} and its looper is
 * quit in {@link #onDestroy}. Also provides a single-instance Toast helper so
 * repeated messages replace each other instead of queuing.
 */
public class BaseActivity extends AppCompatActivity {
    private static boolean DEBUG = false;
    private static final String TAG = BaseActivity.class.getSimpleName();

    // Handler bound to the main looper; used by runOnUiThread(Runnable, long).
    private final Handler mUIHandler = new Handler(Looper.getMainLooper());
    private final Thread mUiThread = mUIHandler.getLooper().getThread();
    // Background worker handler and the id of its thread; null / -1 after onDestroy.
    private Handler mWorkerHandler;
    private long mWorkerThreadID = -1;

    @Override
    protected void onCreate(final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Lazily create the worker thread backing queueEvent()/removeEvent().
        if (mWorkerHandler == null) {
            mWorkerHandler = HandlerThreadHandler.createHandler(TAG);
            mWorkerThreadID = mWorkerHandler.getLooper().getThread().getId();
        }
    }

    @Override
    protected void onPause() {
        // Drop any pending/visible Toast so it does not outlive the Activity.
        clearToast();
        super.onPause();
    }

    @Override
    protected synchronized void onDestroy() {
        // Quit the worker looper; subsequent queueEvent()/removeEvent() calls become no-ops.
        if (mWorkerHandler != null) {
            try {
                mWorkerHandler.getLooper().quit();
            } catch (final Exception e) {
                // ignore: looper may already be quitting
            }
            mWorkerHandler = null;
        }
        super.onDestroy();
    }

    //================================================================================

    /**
     * Runs {@code task} on the UI thread. With {@code duration > 0} the task is
     * delayed by that many milliseconds; with 0 it executes inline when already
     * on the UI thread, otherwise it is posted.
     *
     * @param task     work to run; ignored when null
     * @param duration delay in milliseconds, 0 for "as soon as possible"
     */
    public final void runOnUiThread(final Runnable task, final long duration) {
        if (task == null) return;
        mUIHandler.removeCallbacks(task);
        if ((duration > 0) || Thread.currentThread() != mUiThread) {
            mUIHandler.postDelayed(task, duration);
        } else {
            try {
                task.run();
            } catch (final Exception e) {
                Log.w(TAG, e);
            }
        }
    }

    /**
     * Removes a pending UI task previously passed to {@link #runOnUiThread(Runnable, long)}.
     *
     * @param task task to cancel; ignored when null
     */
    public final void removeFromUiThread(final Runnable task) {
        if (task == null) return;
        mUIHandler.removeCallbacks(task);
    }

    /**
     * Runs {@code task} on the worker thread. Executes inline when already on the
     * worker thread and {@code delayMillis} is 0; otherwise posts (optionally delayed).
     * No-op after {@link #onDestroy}.
     */
    protected final synchronized void queueEvent(final Runnable task, final long delayMillis) {
        if ((task == null) || (mWorkerHandler == null)) return;
        try {
            mWorkerHandler.removeCallbacks(task);
            if (delayMillis > 0) {
                mWorkerHandler.postDelayed(task, delayMillis);
            } else if (mWorkerThreadID == Thread.currentThread().getId()) {
                task.run();
            } else {
                mWorkerHandler.post(task);
            }
        } catch (final Exception e) {
            // ignore
        }
    }

    /** Cancels a pending worker task. No-op when null or after {@link #onDestroy}. */
    protected final synchronized void removeEvent(final Runnable task) {
        // FIX: null-check mWorkerHandler (consistent with queueEvent) instead of
        // relying on a swallowed NullPointerException after onDestroy.
        if ((task == null) || (mWorkerHandler == null)) return;
        try {
            mWorkerHandler.removeCallbacks(task);
        } catch (final Exception e) {
            // ignore
        }
    }

    //================================================================================
    private Toast mToast;

    /**
     * Shows a short Toast built from a string resource and optional format args,
     * replacing any Toast currently pending or showing.
     */
    protected void showToast(@StringRes final int msg, final Object... args) {
        removeFromUiThread(mShowToastTask);
        mShowToastTask = new ShowToastTask(msg, args);
        runOnUiThread(mShowToastTask, 0);
    }

    /** Cancels any pending or visible Toast. */
    protected void clearToast() {
        removeFromUiThread(mShowToastTask);
        mShowToastTask = null;
        try {
            if (mToast != null) {
                mToast.cancel();
                mToast = null;
            }
        } catch (final Exception e) {
            // ignore
        }
    }

    private ShowToastTask mShowToastTask;

    private final class ShowToastTask implements Runnable {
        final int msg;
        // FIX: was `Object`, which made getString(msg, args) re-wrap the varargs
        // array as a single argument so format placeholders never expanded.
        final Object[] args;

        private ShowToastTask(@StringRes final int msg, final Object... args) {
            this.msg = msg;
            this.args = args;
        }

        @Override
        public void run() {
            try {
                if (mToast != null) {
                    mToast.cancel();
                    mToast = null;
                }
                if (args != null) {
                    final String _msg = getString(msg, args);
                    mToast = Toast.makeText(BaseActivity.this, _msg, Toast.LENGTH_SHORT);
                } else {
                    mToast = Toast.makeText(BaseActivity.this, msg, Toast.LENGTH_SHORT);
                }
                mToast.show();
            } catch (final Exception e) {
                // ignore
            }
        }
    }
}

250
app/src/main/java/com/jiangdg/usbcamera/view/MainActivity.java

@@ -1,250 +0,0 @@
package com.jiangdg.usbcamera.view;
import android.hardware.usb.UsbDevice;
import android.os.Bundle;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ImageButton;
import android.widget.Toast;
import com.jiangdg.usbcamera.R;
import com.serenegiant.usb.CameraDialog;
import com.serenegiant.usb.USBMonitor;
import com.serenegiant.usb.USBMonitor.OnDeviceConnectListener;
import com.serenegiant.usb.USBMonitor.UsbControlBlock;
import com.serenegiant.usb.UVCCamera;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
/**
 * Minimal UVC camera demo: a SurfaceView preview driven directly by USBMonitor
 * callbacks. Camera open/close runs on the BaseActivity worker thread via
 * queueEvent(); shared state is guarded by {@code mSync}.
 */
public class MainActivity extends BaseActivity implements CameraDialog.CameraDialogParent {
    private static final boolean DEBUG = true;
    private static final String TAG = "MainActivity";

    // Preview target; clicking it triggers onViewClicked (open dialog / destroy camera).
    @BindView(R.id.camera_surface_view)
    public SurfaceView mUVCCameraView;

    // Guards mUVCCamera / isActive / isPreview across UI, worker and USB-callback threads.
    private final Object mSync = new Object();
    // USB attach/detach + permission management for the UVC camera.
    private USBMonitor mUSBMonitor;
    private UVCCamera mUVCCamera;
    private Surface mPreviewSurface;
    // isActive: a camera is open; isPreview: frames are being pushed to the surface.
    private boolean isActive, isPreview;

    private final SurfaceHolder.Callback mSurfaceViewCallback = new SurfaceHolder.Callback() {
        @Override
        public void surfaceCreated(final SurfaceHolder holder) {
            if (DEBUG) Log.v(TAG, "surfaceCreated:");
        }

        @Override
        public void surfaceChanged(final SurfaceHolder holder, final int format, final int width, final int height) {
            if ((width == 0) || (height == 0)) return;
            if (DEBUG) Log.v(TAG, "surfaceChanged:");
            mPreviewSurface = holder.getSurface();
            synchronized (mSync) {
                // A camera is open but not previewing yet (surface arrived late): start now.
                if (isActive && !isPreview && (mUVCCamera != null)) {
                    mUVCCamera.setPreviewDisplay(mPreviewSurface);
                    mUVCCamera.startPreview();
                    isPreview = true;
                }
            }
        }

        @Override
        public void surfaceDestroyed(final SurfaceHolder holder) {
            if (DEBUG) Log.v(TAG, "surfaceDestroyed:");
            synchronized (mSync) {
                if (mUVCCamera != null) {
                    mUVCCamera.stopPreview();
                }
                isPreview = false;
            }
            mPreviewSurface = null;
        }
    };

    @Override
    protected void onCreate(final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // Bind ButterKnife views.
        ButterKnife.bind(this);
        mUVCCameraView.getHolder().addCallback(mSurfaceViewCallback);
        // Create the USBMonitor and register the device-connection listener.
        mUSBMonitor = new USBMonitor(this, new OnDeviceConnectListener() {
            @Override
            public void onAttach(final UsbDevice device) {
                if (DEBUG) Log.v(TAG, "onAttach:");
                Toast.makeText(MainActivity.this, "检测到USB设备", Toast.LENGTH_SHORT).show();
            }

            @Override
            public void onConnect(final UsbDevice device, final UsbControlBlock ctrlBlock, final boolean createNew) {
                if (DEBUG) Log.v(TAG, "onConnect:");
                Toast.makeText(MainActivity.this, "成功连接到USB设备", Toast.LENGTH_SHORT).show();
                // Destroy any previous camera before opening the new one.
                synchronized (mSync) {
                    if (mUVCCamera != null) {
                        mUVCCamera.destroy();
                    }
                    isActive = isPreview = false;
                }
                // Open + configure on the worker thread (camera I/O must not block the UI).
                queueEvent(new Runnable() {
                    @Override
                    public void run() {
                        synchronized (mSync) {
                            final UVCCamera camera = new UVCCamera();
                            camera.open(ctrlBlock);
                            if (DEBUG) Log.i(TAG, "supportedSize:" + camera.getSupportedSize());
                            try {
                                // Prefer MJPEG; not every camera supports it.
                                camera.setPreviewSize(UVCCamera.DEFAULT_PREVIEW_WIDTH, UVCCamera.DEFAULT_PREVIEW_HEIGHT, UVCCamera.FRAME_FORMAT_MJPEG);
                            } catch (final IllegalArgumentException e) {
                                try {
                                    // fallback to YUV mode
                                    camera.setPreviewSize(UVCCamera.DEFAULT_PREVIEW_WIDTH, UVCCamera.DEFAULT_PREVIEW_HEIGHT, UVCCamera.DEFAULT_PREVIEW_MODE);
                                } catch (final IllegalArgumentException e1) {
                                    // Neither format accepted: give up on this device.
                                    camera.destroy();
                                    return;
                                }
                            }
                            mPreviewSurface = mUVCCameraView.getHolder().getSurface();
                            if (mPreviewSurface != null) {
                                isActive = true;
                                camera.setPreviewDisplay(mPreviewSurface);
                                camera.startPreview();
                                isPreview = true;
                            }
                            // Publish the camera only after it is fully configured.
                            synchronized (mSync) {
                                mUVCCamera = camera;
                            }
                        }
                    }
                }, 0);
            }

            @Override
            public void onDisconnect(final UsbDevice device, final UsbControlBlock ctrlBlock) {
                if (DEBUG) Log.v(TAG, "onDisconnect:");
                Toast.makeText(MainActivity.this, "与USB设备断开连接", Toast.LENGTH_SHORT).show();
                // XXX you should check whether the incoming device equals the camera device currently in use
                queueEvent(new Runnable() {
                    @Override
                    public void run() {
                        synchronized (mSync) {
                            if (mUVCCamera != null) {
                                mUVCCamera.close();
                                if (mPreviewSurface != null) {
                                    mPreviewSurface.release();
                                    mPreviewSurface = null;
                                }
                                isActive = isPreview = false;
                            }
                        }
                    }
                }, 0);
            }

            @Override
            public void onDettach(final UsbDevice device) {
                if (DEBUG) Log.v(TAG, "onDettach:");
                Toast.makeText(MainActivity.this, "未检测到USB设备", Toast.LENGTH_SHORT).show();
            }

            @Override
            public void onCancel(final UsbDevice device) {
            }
        });
    }

    @Override
    protected void onStart() {
        super.onStart();
        if (DEBUG) Log.v(TAG, "onStart:");
        synchronized (mSync) {
            // Register the USB broadcast receiver.
            if (mUSBMonitor != null) {
                mUSBMonitor.register();
            }
        }
    }

    @Override
    protected void onStop() {
        if (DEBUG) Log.v(TAG, "onStop:");
        synchronized (mSync) {
            // Unregister the USB broadcast receiver.
            if (mUSBMonitor != null) {
                mUSBMonitor.unregister();
            }
        }
        super.onStop();
    }

    @Override
    protected void onDestroy() {
        if (DEBUG) Log.v(TAG, "onDestroy:");
        synchronized (mSync) {
            isActive = isPreview = false;
            if (mUVCCamera != null) {
                mUVCCamera.destroy();
                mUVCCamera = null;
            }
            // Release USBMonitor resources.
            if (mUSBMonitor != null) {
                mUSBMonitor.destroy();
                mUSBMonitor = null;
            }
        }
        mUVCCameraView = null;
        super.onDestroy();
    }

    @OnClick({R.id.camera_surface_view})
    public void onViewClicked(View view) {
        int vId = view.getId();
        switch (vId) {
            case R.id.camera_surface_view:
                if (mUVCCamera == null) {
                    // XXX calling CameraDialog.showDialog is necessary at only first time(only when app has no permission).
                    // Show the device-selection dialog when the app has no USB permission yet.
                    CameraDialog.showDialog(MainActivity.this);
                } else {
                    // Toggle: second tap tears the camera down.
                    synchronized (mSync) {
                        mUVCCamera.destroy();
                        mUVCCamera = null;
                        isActive = isPreview = false;
                    }
                }
                break;
        }
    }

    /**
     * to access from CameraDialog
     * @return the USBMonitor owned by this Activity
     */
    @Override
    public USBMonitor getUSBMonitor() {
        return mUSBMonitor;
    }

    @Override
    public void onDialogResult(boolean canceled) {
        if (canceled) {
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    // FIXME
                }
            }, 0);
        }
    }
}

169
app/src/main/java/com/jiangdg/usbcamera/view/USBCameraActivity.java

@@ -0,0 +1,169 @@
package com.jiangdg.usbcamera.view;
import android.hardware.usb.UsbDevice;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
import com.jiangdg.usbcamera.R;
import com.jiangdg.usbcamera.USBCameraManager;
import com.serenegiant.usb.CameraDialog;
import com.serenegiant.usb.USBMonitor;
import com.serenegiant.usb.widget.CameraViewInterface;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
/**
* AndroidUSBCamera引擎使用Demo
*
* Created by jiangdongguo on 2017/9/30.
*/
/**
 * Demo Activity for the AndroidUSBCamera engine: shows a live UVC preview and
 * offers still-capture and video-recording buttons. All camera work is
 * delegated to the {@link USBCameraManager} singleton.
 */
public class USBCameraActivity extends AppCompatActivity implements CameraDialog.CameraDialogParent {
    @BindView(R.id.camera_view)
    public View mTextureView;
    @BindView(R.id.btn_capture_pic)
    public Button mBtnCapture;
    @BindView(R.id.btn_rec_video)
    public Button mBtnRecord;

    // Camera engine singleton and the preview surface it renders into.
    private USBCameraManager mCamManager;
    private CameraViewInterface mCamView;

    // Callbacks for USB attach/detach/connect/disconnect events.
    private final USBCameraManager.OnMyDevConnectListener mDevListener =
            new USBCameraManager.OnMyDevConnectListener() {
        @Override
        public void onAttachDev(UsbDevice device) {
            toast("检测到设备:" + device.getDeviceName());
        }

        @Override
        public void onDettachDev(UsbDevice device) {
            toast(device.getDeviceName() + "已拨出");
        }

        @Override
        public void onConnectDev(UsbDevice device) {
            // Hook for post-connect logic.
        }

        @Override
        public void onDisConnectDev(UsbDevice device) {
            // Hook for post-disconnect logic.
        }
    };

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_usbcamera);
        ButterKnife.bind(this);
        // Initialise the engine against the preview view.
        mCamManager = USBCameraManager.getInstance();
        mCamView = (CameraViewInterface) mTextureView;
        mCamManager.init(this, mCamView, mDevListener);
    }

    @Override
    protected void onStart() {
        super.onStart();
        // Register the USB broadcast receiver.
        if (mCamManager != null) {
            mCamManager.registerUSB();
        }
        // Resume the camera preview.
        if (mCamView != null) {
            mCamView.onResume();
        }
    }

    @Override
    protected void onStop() {
        super.onStop();
        // Unregister the USB broadcast receiver.
        if (mCamManager != null) {
            mCamManager.unregisterUSB();
        }
        // Pause the camera preview.
        if (mCamView != null) {
            mCamView.onPause();
        }
    }

    @OnClick({R.id.camera_view, R.id.btn_capture_pic, R.id.btn_rec_video})
    public void onViewClick(View view) {
        final int id = view.getId();
        if (id == R.id.camera_view) {
            // Tap the preview: open the camera via dialog, or close it if already open.
            if (mCamManager != null) {
                if (!mCamManager.isCameraOpened()) {
                    CameraDialog.showDialog(USBCameraActivity.this);
                } else {
                    mCamManager.closeCamera();
                }
            }
        } else if (id == R.id.btn_capture_pic) {
            // Still capture; requires an open camera.
            if (mCamManager == null || !mCamManager.isCameraOpened()) {
                toast("抓拍异常,摄像头未开启");
                return;
            }
            final String picPath = USBCameraManager.ROOT_PATH + System.currentTimeMillis()
                    + USBCameraManager.SUFFIX_PNG;
            mCamManager.capturePicture(picPath);
            toast("保存路径:" + picPath);
        } else if (id == R.id.btn_rec_video) {
            // Toggle video recording; requires an open camera.
            if (mCamManager == null || !mCamManager.isCameraOpened()) {
                toast("录制异常,摄像头未开启");
                return;
            }
            if (!mCamManager.isRecording()) {
                final String videoPath = USBCameraManager.ROOT_PATH + System.currentTimeMillis()
                        + USBCameraManager.SUFFIX_MP4;
                mCamManager.startRecording(videoPath);
                mBtnRecord.setText("正在录制");
            } else {
                mCamManager.stopRecording();
                mBtnRecord.setText("开始录制");
            }
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Release engine resources.
        if (mCamManager != null) {
            mCamManager.release();
        }
    }

    /** Shows a short Toast with the given message. */
    private void toast(String msg) {
        Toast.makeText(this, msg, Toast.LENGTH_SHORT).show();
    }

    @Override
    public USBMonitor getUSBMonitor() {
        return mCamManager.getUSBMonitor();
    }

    @Override
    public void onDialogResult(boolean canceled) {
        if (canceled) {
            toast("取消操作");
        }
    }
}

15
app/src/main/res/layout/activity_main.xml

@@ -1,15 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Full-screen camera preview: a single SurfaceView filling the window. -->
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:id="@+id/container"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    tools:ignore="MergeRootFrame" >
    <!-- Render target for the UVC camera preview frames. -->
    <SurfaceView
        android:id="@+id/camera_surface_view"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />
</FrameLayout>

39
app/src/main/res/layout/activity_usbcamera.xml

@@ -0,0 +1,39 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- USBCameraActivity layout: full-screen texture preview with two buttons
     (record above capture) pinned to the bottom edge. -->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    xmlns:tools="http://schemas.android.com/tools"
    android:background="#ff000000"
    tools:context=".view.USBCameraActivity"
    tools:ignore="MergeRootFrame">
    <!-- Camera preview surface; tapping it opens/closes the camera. -->
    <com.serenegiant.usb.widget.UVCCameraTextureView
        android:id="@+id/camera_view"
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:layout_centerVertical="true"
        android:layout_centerHorizontal="true"/>
    <!-- Still-capture button, anchored to the bottom of the screen. -->
    <Button
        android:id="@+id/btn_capture_pic"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_alignParentBottom="true"
        android:layout_marginBottom="10dp"
        android:layout_marginRight="10dp"
        android:layout_marginLeft="10dp"
        android:textSize="16sp"
        android:text="抓拍"/>
    <!-- Record start/stop toggle, stacked above the capture button. -->
    <Button
        android:layout_above="@id/btn_capture_pic"
        android:id="@+id/btn_rec_video"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_marginRight="10dp"
        android:layout_marginLeft="10dp"
        android:textSize="16sp"
        android:text="开始录制"/>
</RelativeLayout>

2
libusbcamera/build.gradle

@@ -6,7 +6,7 @@ android {
defaultConfig { defaultConfig {
minSdkVersion 18 minSdkVersion 18
targetSdkVersion 25 targetSdkVersion 22
versionCode 1 versionCode 1
versionName "1.0" versionName "1.0"

207
libusbcamera/src/main/java/com/jiangdg/usbcamera/USBCameraManager.java

@@ -0,0 +1,207 @@
package com.jiangdg.usbcamera;
import android.app.Activity;
import android.graphics.SurfaceTexture;
import android.hardware.usb.UsbDevice;
import android.os.Environment;
import com.serenegiant.usb.CameraDialog;
import com.serenegiant.usb.USBMonitor;
import com.serenegiant.usb.common.UVCCameraHandler;
import com.serenegiant.usb.widget.CameraViewInterface;
import java.io.File;
/**USB摄像头工具类
*
* Created by jiangdongguo on 2017/9/30.
*/
/**
 * Singleton facade over the UVC camera stack: owns the {@link USBMonitor}
 * (USB attach/detach + permission handling) and the {@link UVCCameraHandler}
 * (open/preview/capture/record). Call {@link #init} once from an Activity,
 * {@link #registerUSB}/{@link #unregisterUSB} from onStart/onStop, and
 * {@link #release} from onDestroy.
 */
public class USBCameraManager{
    /** Root directory (external storage) where captures and recordings are written. */
    public static final String ROOT_PATH = Environment.getExternalStorageDirectory().getAbsolutePath()
            + File.separator;
    public static final String SUFFIX_PNG = ".png";
    public static final String SUFFIX_MP4 = ".mp4";
    private static final String TAG = "USBCameraManager";
    private static final int PREVIEW_WIDTH = 640;
    private static final int PREVIEW_HEIGHT = 480;
    // 0: MediaSurfaceEncoder, 1: MediaVideoEncoder, 2: MediaVideoBufferEncoder
    private static final int ENCODER_TYPE = 1;
    // Preview pixel format: 0 = YUYV, 1 = MJPEG
    private static final int PREVIEW_FORMAT = 1;

    private static USBCameraManager mUsbCamManager;
    // USB device/permission management
    private USBMonitor mUSBMonitor;
    // Camera business logic (open/close/preview/capture/record)
    private UVCCameraHandler mCameraHandler;

    private USBCameraManager(){}

    /**
     * Returns the process-wide singleton.
     * FIX: synchronized — the previous unsynchronized check-then-create could
     * produce two instances when first called from different threads.
     */
    public static synchronized USBCameraManager getInstance(){
        if(mUsbCamManager == null){
            mUsbCamManager = new USBCameraManager();
        }
        return mUsbCamManager;
    }

    /** Listener for USB device attach/detach/connect/disconnect events. */
    public interface OnMyDevConnectListener{
        void onAttachDev(UsbDevice device);
        void onDettachDev(UsbDevice device);
        void onConnectDev(UsbDevice device);
        void onDisConnectDev(UsbDevice device);
    }

    /**
     * Initialises the engine.
     *
     * NOTE(review): calling init() a second time replaces mUSBMonitor and
     * mCameraHandler without destroying the previous ones — confirm callers
     * init exactly once per release() cycle.
     *
     * @param activity   hosting Activity (only its application context is retained by USBMonitor)
     * @param cameraView surface the camera renders into; must not be null
     * @param listener   USB device detection/connection event listener; may be null
     * @throws NullPointerException if {@code cameraView} is null
     */
    public void init(Activity activity, final CameraViewInterface cameraView, final OnMyDevConnectListener listener){
        if(cameraView == null)
            throw new NullPointerException("CameraViewInterface cannot be null!");
        mUSBMonitor = new USBMonitor(activity.getApplicationContext(), new USBMonitor.OnDeviceConnectListener() {
            // Called when a USB device is detected.
            @Override
            public void onAttach(UsbDevice device) {
                if(listener != null){
                    listener.onAttachDev(device);
                }
            }

            // Called when a USB device is unplugged or no longer detected.
            @Override
            public void onDettach(UsbDevice device) {
                if(listener != null){
                    listener.onDettachDev(device);
                }
            }

            // Called once permission is granted and the USB camera is connected.
            @Override
            public void onConnect(UsbDevice device, USBMonitor.UsbControlBlock ctrlBlock, boolean createNew) {
                if(listener != null){
                    listener.onConnectDev(device);
                }
                // Open the camera, then start the preview on its surface.
                openCamera(ctrlBlock);
                startPreview(cameraView);
            }

            // Called when the USB camera is disconnected.
            @Override
            public void onDisconnect(UsbDevice device, USBMonitor.UsbControlBlock ctrlBlock) {
                if(listener != null){
                    listener.onDisConnectDev(device);
                }
                closeCamera();
            }

            @Override
            public void onCancel(UsbDevice device) {
            }
        });
        // Fix the preview aspect ratio before creating the handler.
        cameraView.setAspectRatio(PREVIEW_WIDTH / (float)PREVIEW_HEIGHT);
        mCameraHandler = UVCCameraHandler.createHandler(activity,cameraView,ENCODER_TYPE,
                PREVIEW_WIDTH,PREVIEW_HEIGHT,PREVIEW_FORMAT);
    }

    /** Registers the USB-device broadcast receiver (call from onStart). */
    public void registerUSB(){
        if(mUSBMonitor != null){
            mUSBMonitor.register();
        }
    }

    /** Unregisters the USB-device broadcast receiver (call from onStop). */
    public void unregisterUSB(){
        if(mUSBMonitor != null){
            mUSBMonitor.unregister();
        }
    }

    /**
     * Captures a still image to {@code savePath}. No-op unless the camera is open.
     */
    public void capturePicture(String savePath){
        if(mCameraHandler != null && mCameraHandler.isOpened()){
            mCameraHandler.captureStill(savePath);
        }
    }

    /** Starts video recording to {@code videoPath}; no-op if already recording. */
    public void startRecording(String videoPath){
        if(mCameraHandler != null && ! isRecording()){
            mCameraHandler.startRecording(videoPath);
        }
    }

    /** Stops video recording; no-op if not recording. */
    public void stopRecording(){
        if(mCameraHandler != null && isRecording()){
            mCameraHandler.stopRecording();
        }
    }

    /** @return true while a recording is in progress. */
    public boolean isRecording(){
        if(mCameraHandler != null){
            return mCameraHandler.isRecording();
        }
        return false;
    }

    /** @return true while the camera is open. */
    public boolean isCameraOpened(){
        if(mCameraHandler != null){
            return mCameraHandler.isOpened();
        }
        return false;
    }

    /**
     * Releases the camera handler and the USBMonitor. After this call,
     * {@link #init} must be invoked again before the manager is usable.
     */
    public void release(){
        // Release resources held by the camera handler.
        if(mCameraHandler != null){
            mCameraHandler.release();
            mCameraHandler = null;
        }
        // Release resources held by the USBMonitor.
        if(mUSBMonitor != null){
            mUSBMonitor.destroy();
            mUSBMonitor = null;
        }
    }

    /** @return the USBMonitor, for CameraDialog integration; null before init()/after release(). */
    public USBMonitor getUSBMonitor() {
        return mUSBMonitor;
    }

    /** Closes the camera; no-op before init(). */
    public void closeCamera() {
        if(mCameraHandler != null){
            mCameraHandler.close();
        }
    }

    private void openCamera(USBMonitor.UsbControlBlock ctrlBlock) {
        if(mCameraHandler != null){
            mCameraHandler.open(ctrlBlock);
        }
    }

    private void startPreview(CameraViewInterface cameraView) {
        SurfaceTexture st = cameraView.getSurfaceTexture();
        if(mCameraHandler != null){
            mCameraHandler.startPreview(st);
        }
    }
}

450
libusbcamera/src/main/java/com/serenegiant/usb/common/AbstractUVCCameraHandler.java

@ -2,14 +2,17 @@ package com.serenegiant.usb.common;
import android.app.Activity; import android.app.Activity;
import android.content.Context; import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture; import android.graphics.SurfaceTexture;
import android.hardware.usb.UsbDevice; import android.hardware.usb.UsbDevice;
import android.media.AudioManager; import android.media.AudioManager;
import android.media.MediaScannerConnection; import android.media.MediaScannerConnection;
import android.media.SoundPool; import android.media.SoundPool;
import android.os.Environment;
import android.os.Handler; import android.os.Handler;
import android.os.Looper; import android.os.Looper;
import android.os.Message; import android.os.Message;
import android.text.TextUtils;
import android.util.Log; import android.util.Log;
import android.view.Surface; import android.view.Surface;
import android.view.SurfaceHolder; import android.view.SurfaceHolder;
@ -18,8 +21,18 @@ import com.jiangdg.libusbcamera.R;
import com.serenegiant.usb.IFrameCallback; import com.serenegiant.usb.IFrameCallback;
import com.serenegiant.usb.USBMonitor; import com.serenegiant.usb.USBMonitor;
import com.serenegiant.usb.UVCCamera; import com.serenegiant.usb.UVCCamera;
import com.serenegiant.usb.encoder.MediaAudioEncoder;
import com.serenegiant.usb.encoder.MediaEncoder;
import com.serenegiant.usb.encoder.MediaMuxerWrapper;
import com.serenegiant.usb.encoder.MediaSurfaceEncoder;
import com.serenegiant.usb.encoder.MediaVideoBufferEncoder;
import com.serenegiant.usb.encoder.MediaVideoEncoder;
import com.serenegiant.usb.widget.CameraViewInterface; import com.serenegiant.usb.widget.CameraViewInterface;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.ref.WeakReference; import java.lang.ref.WeakReference;
import java.lang.reflect.Constructor; import java.lang.reflect.Constructor;
import java.lang.reflect.Field; import java.lang.reflect.Field;
@ -28,23 +41,22 @@ import java.nio.ByteBuffer;
import java.util.Set; import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet; import java.util.concurrent.CopyOnWriteArraySet;
/**
* Camera业务处理抽象类
*
* */
abstract class AbstractUVCCameraHandler extends Handler { abstract class AbstractUVCCameraHandler extends Handler {
private static final boolean DEBUG = true; // TODO set false on release private static final boolean DEBUG = true; // TODO set false on release
private static final String TAG = "AbsUVCCameraHandler"; private static final String TAG = "AbsUVCCameraHandler";
// 对外回调接口
public interface CameraCallback { public interface CameraCallback {
public void onOpen(); public void onOpen();
public void onClose(); public void onClose();
public void onStartPreview(); public void onStartPreview();
public void onStopPreview(); public void onStopPreview();
public void onStartRecording(); public void onStartRecording();
public void onStopRecording(); public void onStopRecording();
public void onError(final Exception e); public void onError(final Exception e);
} }
@ -125,11 +137,13 @@ abstract class AbstractUVCCameraHandler extends Handler {
if (DEBUG) Log.v(TAG, "close:finished"); if (DEBUG) Log.v(TAG, "close:finished");
} }
// 切换分辨率
public void resize(final int width, final int height) { public void resize(final int width, final int height) {
checkReleased(); checkReleased();
throw new UnsupportedOperationException("does not support now"); throw new UnsupportedOperationException("does not support now");
} }
// 开启Camera预览
protected void startPreview(final Object surface) { protected void startPreview(final Object surface) {
checkReleased(); checkReleased();
if (!((surface instanceof SurfaceHolder) || (surface instanceof Surface) || (surface instanceof SurfaceTexture))) { if (!((surface instanceof SurfaceHolder) || (surface instanceof Surface) || (surface instanceof SurfaceTexture))) {
@ -138,6 +152,7 @@ abstract class AbstractUVCCameraHandler extends Handler {
sendMessage(obtainMessage(MSG_PREVIEW_START, surface)); sendMessage(obtainMessage(MSG_PREVIEW_START, surface));
} }
// 关闭Camera预览
public void stopPreview() { public void stopPreview() {
if (DEBUG) Log.v(TAG, "stopPreview:"); if (DEBUG) Log.v(TAG, "stopPreview:");
removeMessages(MSG_PREVIEW_START); removeMessages(MSG_PREVIEW_START);
@ -161,6 +176,7 @@ abstract class AbstractUVCCameraHandler extends Handler {
if (DEBUG) Log.v(TAG, "stopPreview:finished"); if (DEBUG) Log.v(TAG, "stopPreview:finished");
} }
// 捕获图像
protected void captureStill() { protected void captureStill() {
checkReleased(); checkReleased();
sendEmptyMessage(MSG_CAPTURE_STILL); sendEmptyMessage(MSG_CAPTURE_STILL);
@ -171,11 +187,14 @@ abstract class AbstractUVCCameraHandler extends Handler {
sendMessage(obtainMessage(MSG_CAPTURE_STILL, path)); sendMessage(obtainMessage(MSG_CAPTURE_STILL, path));
} }
public void startRecording() { // 开始录制
public void startRecording(final String path) {
checkReleased(); checkReleased();
sendEmptyMessage(MSG_CAPTURE_START); // sendEmptyMessage(MSG_CAPTURE_START);
sendMessage(obtainMessage(MSG_CAPTURE_START, path));
} }
// 停止录制
public void stopRecording() { public void stopRecording() {
sendEmptyMessage(MSG_CAPTURE_STOP); sendEmptyMessage(MSG_CAPTURE_STOP);
} }
@ -186,6 +205,7 @@ abstract class AbstractUVCCameraHandler extends Handler {
sendEmptyMessage(MSG_RELEASE); sendEmptyMessage(MSG_RELEASE);
} }
// 对外注册监听事件
public void addCallback(final CameraCallback callback) { public void addCallback(final CameraCallback callback) {
checkReleased(); checkReleased();
if (!mReleased && (callback != null)) { if (!mReleased && (callback != null)) {
@ -267,7 +287,7 @@ abstract class AbstractUVCCameraHandler extends Handler {
if (thread == null) return; if (thread == null) return;
switch (msg.what) { switch (msg.what) {
case MSG_OPEN: case MSG_OPEN:
thread.handleOpen((USBMonitor.UsbControlBlock) msg.obj); thread.handleOpen((USBMonitor.UsbControlBlock)msg.obj);
break; break;
case MSG_CLOSE: case MSG_CLOSE:
thread.handleClose(); thread.handleClose();
@ -278,17 +298,17 @@ abstract class AbstractUVCCameraHandler extends Handler {
case MSG_PREVIEW_STOP: case MSG_PREVIEW_STOP:
thread.handleStopPreview(); thread.handleStopPreview();
break; break;
// case MSG_CAPTURE_STILL: case MSG_CAPTURE_STILL:
// thread.handleCaptureStill((String) msg.obj); thread.handleCaptureStill((String)msg.obj);
// break; break;
// case MSG_CAPTURE_START: case MSG_CAPTURE_START:
// thread.handleStartRecording(); thread.handleStartRecording((String)msg.obj);
// break; break;
// case MSG_CAPTURE_STOP: case MSG_CAPTURE_STOP:
// thread.handleStopRecording(); thread.handleStopRecording();
// break; break;
case MSG_MEDIA_UPDATE: case MSG_MEDIA_UPDATE:
thread.handleUpdateMedia((String) msg.obj); thread.handleUpdateMedia((String)msg.obj);
break; break;
case MSG_RELEASE: case MSG_RELEASE:
thread.handleRelease(); thread.handleRelease();
@ -310,31 +330,27 @@ abstract class AbstractUVCCameraHandler extends Handler {
private float mBandwidthFactor; private float mBandwidthFactor;
private boolean mIsPreviewing; private boolean mIsPreviewing;
private boolean mIsRecording; private boolean mIsRecording;
/**
* shutter sound // 播放声音
*/
private SoundPool mSoundPool; private SoundPool mSoundPool;
private int mSoundId; private int mSoundId;
private AbstractUVCCameraHandler mHandler; private AbstractUVCCameraHandler mHandler;
/** // 处理与Camera相关的逻辑,比如获取byte数据流等
* for accessing UVC camera
*/
private UVCCamera mUVCCamera; private UVCCamera mUVCCamera;
/**
* muxer for audio/video recording
*/
// private MediaMuxerWrapper mMuxer;
// private MediaVideoBufferEncoder mVideoEncoder;
/** private MediaMuxerWrapper mMuxer;
* @param clazz Class extends AbstractUVCCameraHandler private MediaVideoBufferEncoder mVideoEncoder;
* @param parent parent Activity
* @param cameraView for still capturing /** 构造方法
* @param encoderType 0: use MediaSurfaceEncoder, 1: use MediaVideoEncoder, 2: use MediaVideoBufferEncoder *
* @param width * clazz 继承于AbstractUVCCameraHandler
* @param height * parent Activity子类
* @param format either FRAME_FORMAT_YUYV(0) or FRAME_FORMAT_MJPEG(1) * cameraView 用于捕获静止图像
* @param bandwidthFactor * encoderType 0表示使用MediaSurfaceEncoder;1表示使用MediaVideoEncoder, 2表示使用MediaVideoBufferEncoder
* width 分辨率的宽
* height 分辨率的高
* format 颜色格式0为FRAME_FORMAT_YUYV1为FRAME_FORMAT_MJPEG
* bandwidthFactor
*/ */
CameraThread(final Class<? extends AbstractUVCCameraHandler> clazz, CameraThread(final Class<? extends AbstractUVCCameraHandler> clazz,
final Activity parent, final CameraViewInterface cameraView, final Activity parent, final CameraViewInterface cameraView,
@ -348,8 +364,8 @@ abstract class AbstractUVCCameraHandler extends Handler {
mHeight = height; mHeight = height;
mPreviewMode = format; mPreviewMode = format;
mBandwidthFactor = bandwidthFactor; mBandwidthFactor = bandwidthFactor;
mWeakParent = new WeakReference<Activity>(parent); mWeakParent = new WeakReference<>(parent);
mWeakCameraView = new WeakReference<CameraViewInterface>(cameraView); mWeakCameraView = new WeakReference<>(cameraView);
loadShutterSound(parent); loadShutterSound(parent);
} }
@ -418,13 +434,12 @@ abstract class AbstractUVCCameraHandler extends Handler {
} catch (final Exception e) { } catch (final Exception e) {
callOnError(e); callOnError(e);
} }
if (DEBUG) if (DEBUG) Log.i(TAG, "supportedSize:" + (mUVCCamera != null ? mUVCCamera.getSupportedSize() : null));
Log.i(TAG, "supportedSize:" + (mUVCCamera != null ? mUVCCamera.getSupportedSize() : null));
} }
public void handleClose() { public void handleClose() {
if (DEBUG) Log.v(TAG_THREAD, "handleClose:"); if (DEBUG) Log.v(TAG_THREAD, "handleClose:");
// handleStopRecording(); handleStopRecording();
final UVCCamera camera; final UVCCamera camera;
synchronized (mSync) { synchronized (mSync) {
camera = mUVCCamera; camera = mUVCCamera;
@ -452,12 +467,11 @@ abstract class AbstractUVCCameraHandler extends Handler {
} }
} }
if (surface instanceof SurfaceHolder) { if (surface instanceof SurfaceHolder) {
mUVCCamera.setPreviewDisplay((SurfaceHolder) surface); mUVCCamera.setPreviewDisplay((SurfaceHolder)surface);
} } if (surface instanceof Surface) {
if (surface instanceof Surface) { mUVCCamera.setPreviewDisplay((Surface)surface);
mUVCCamera.setPreviewDisplay((Surface) surface);
} else { } else {
mUVCCamera.setPreviewTexture((SurfaceTexture) surface); mUVCCamera.setPreviewTexture((SurfaceTexture)surface);
} }
mUVCCamera.startPreview(); mUVCCamera.startPreview();
mUVCCamera.updateCameraParams(); mUVCCamera.updateCameraParams();
@ -482,108 +496,112 @@ abstract class AbstractUVCCameraHandler extends Handler {
if (DEBUG) Log.v(TAG_THREAD, "handleStopPreview:finished"); if (DEBUG) Log.v(TAG_THREAD, "handleStopPreview:finished");
} }
// public void handleCaptureStill(final String path) { // 捕获静态图片
// if (DEBUG) Log.v(TAG_THREAD, "handleCaptureStill:"); public void handleCaptureStill(final String path) {
// final Activity parent = mWeakParent.get(); if (DEBUG) Log.v(TAG_THREAD, "handleCaptureStill:");
// if (parent == null) return; final Activity parent = mWeakParent.get();
// mSoundPool.play(mSoundId, 0.2f, 0.2f, 0, 0, 1.0f); // play shutter sound if (parent == null) return;
// try { mSoundPool.play(mSoundId, 0.2f, 0.2f, 0, 0, 1.0f); // play shutter sound
// final Bitmap bitmap = mWeakCameraView.get().captureStillImage(); try {
// // get buffered output stream for saving a captured still image as a file on external storage. final Bitmap bitmap = mWeakCameraView.get().captureStillImage();
// // the file name is came from current time. // get buffered output stream for saving a captured still image as a file on external storage.
// // You should use extension name as same as CompressFormat when calling Bitmap#compress. // the file name is came from current time.
// final File outputFile = TextUtils.isEmpty(path) // You should use extension name as same as CompressFormat when calling Bitmap#compress.
// ? MediaMuxerWrapper.getCaptureFile(Environment.DIRECTORY_DCIM, ".png") final File outputFile = TextUtils.isEmpty(path)
// : new File(path); ? MediaMuxerWrapper.getCaptureFile(Environment.DIRECTORY_DCIM, ".png")
// final BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(outputFile)); : new File(path);
// try { final BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(outputFile));
// try { try {
// bitmap.compress(Bitmap.CompressFormat.PNG, 100, os); try {
// os.flush(); bitmap.compress(Bitmap.CompressFormat.PNG, 100, os);
// mHandler.sendMessage(mHandler.obtainMessage(MSG_MEDIA_UPDATE, outputFile.getPath())); os.flush();
// } catch (final IOException e) { mHandler.sendMessage(mHandler.obtainMessage(MSG_MEDIA_UPDATE, outputFile.getPath()));
// } } catch (final IOException e) {
// } finally { }
// os.close(); } finally {
// } os.close();
// } catch (final Exception e) { }
// callOnError(e); } catch (final Exception e) {
// } callOnError(e);
// } }
}
// public void handleStartRecording() { // 开始录制视频
// if (DEBUG) Log.v(TAG_THREAD, "handleStartRecording:"); public void handleStartRecording(String path) {
// try { if (DEBUG) Log.v(TAG_THREAD, "handleStartRecording:");
// if ((mUVCCamera == null) || (mMuxer != null)) return; try {
if ((mUVCCamera == null) || (mMuxer != null)) return;
// final MediaMuxerWrapper muxer = new MediaMuxerWrapper(".mp4"); // if you record audio only, ".m4a" is also OK. // final MediaMuxerWrapper muxer = new MediaMuxerWrapper(".mp4"); // if you record audio only, ".m4a" is also OK.
// MediaVideoBufferEncoder videoEncoder = null; final MediaMuxerWrapper muxer = new MediaMuxerWrapper(path);
// switch (mEncoderType) { MediaVideoBufferEncoder videoEncoder = null;
// case 1: // for video capturing using MediaVideoEncoder switch (mEncoderType) {
// new MediaVideoEncoder(muxer, getWidth(), getHeight(), mMediaEncoderListener); case 1: // for video capturing using MediaVideoEncoder
// break; new MediaVideoEncoder(muxer,getWidth(), getHeight(), mMediaEncoderListener);
// case 2: // for video capturing using MediaVideoBufferEncoder break;
// videoEncoder = new MediaVideoBufferEncoder(muxer, getWidth(), getHeight(), mMediaEncoderListener); case 2: // for video capturing using MediaVideoBufferEncoder
// break; videoEncoder = new MediaVideoBufferEncoder(muxer, getWidth(), getHeight(), mMediaEncoderListener);
// // case 0: // for video capturing using MediaSurfaceEncoder break;
// default: // case 0: // for video capturing using MediaSurfaceEncoder
// new MediaSurfaceEncoder(muxer, getWidth(), getHeight(), mMediaEncoderListener); default:
// break; new MediaSurfaceEncoder(muxer, getWidth(), getHeight(), mMediaEncoderListener);
// } break;
// if (true) { }
// // for audio capturing if (true) {
// new MediaAudioEncoder(muxer, mMediaEncoderListener); // for audio capturing
// } new MediaAudioEncoder(muxer, mMediaEncoderListener);
// muxer.prepare(); }
// muxer.startRecording(); muxer.prepare();
// if (videoEncoder != null) { muxer.startRecording();
// mUVCCamera.setFrameCallback(mIFrameCallback, UVCCamera.PIXEL_FORMAT_NV21); if (videoEncoder != null) {
// } mUVCCamera.setFrameCallback(mIFrameCallback, UVCCamera.PIXEL_FORMAT_NV21);
// synchronized (mSync) { }
// mMuxer = muxer; synchronized (mSync) {
// mVideoEncoder = videoEncoder; mMuxer = muxer;
// } mVideoEncoder = videoEncoder;
// callOnStartRecording(); }
// } catch (final IOException e) { callOnStartRecording();
// callOnError(e); } catch (final IOException e) {
// Log.e(TAG, "startCapture:", e); callOnError(e);
// } Log.e(TAG, "startCapture:", e);
// } }
}
// public void handleStopRecording() { // 停止录制视频
// if (DEBUG) Log.v(TAG_THREAD, "handleStopRecording:mMuxer=" + mMuxer); public void handleStopRecording() {
// final MediaMuxerWrapper muxer; if (DEBUG) Log.v(TAG_THREAD, "handleStopRecording:mMuxer=" + mMuxer);
// synchronized (mSync) { final MediaMuxerWrapper muxer;
// muxer = mMuxer; synchronized (mSync) {
// mMuxer = null; muxer = mMuxer;
// mVideoEncoder = null; mMuxer = null;
// if (mUVCCamera != null) { mVideoEncoder = null;
// mUVCCamera.stopCapture(); if (mUVCCamera != null) {
// } mUVCCamera.stopCapture();
// } }
// try { }
// mWeakCameraView.get().setVideoEncoder(null); try {
// } catch (final Exception e) { mWeakCameraView.get().setVideoEncoder(null);
// // ignore } catch (final Exception e) {
// } // ignore
// if (muxer != null) { }
// muxer.stopRecording(); if (muxer != null) {
// mUVCCamera.setFrameCallback(null, 0); muxer.stopRecording();
// // you should not wait here mUVCCamera.setFrameCallback(null, 0);
// callOnStopRecording(); // you should not wait here
// } callOnStopRecording();
// } }
}
private final IFrameCallback mIFrameCallback = new IFrameCallback() { private final IFrameCallback mIFrameCallback = new IFrameCallback() {
@Override @Override
public void onFrame(final ByteBuffer frame) { public void onFrame(final ByteBuffer frame) {
// final MediaVideoBufferEncoder videoEncoder; final MediaVideoBufferEncoder videoEncoder;
// synchronized (mSync) { synchronized (mSync) {
// videoEncoder = mVideoEncoder; videoEncoder = mVideoEncoder;
// } }
// if (videoEncoder != null) { if (videoEncoder != null) {
// videoEncoder.frameAvailableSoon(); videoEncoder.frameAvailableSoon();
// videoEncoder.encode(frame); videoEncoder.encode(frame);
// } }
} }
}; };
@ -594,7 +612,7 @@ abstract class AbstractUVCCameraHandler extends Handler {
if (parent != null && parent.getApplicationContext() != null) { if (parent != null && parent.getApplicationContext() != null) {
try { try {
if (DEBUG) Log.i(TAG, "MediaScannerConnection#scanFile"); if (DEBUG) Log.i(TAG, "MediaScannerConnection#scanFile");
MediaScannerConnection.scanFile(parent.getApplicationContext(), new String[]{path}, null, null); MediaScannerConnection.scanFile(parent.getApplicationContext(), new String[]{ path }, null, null);
} catch (final Exception e) { } catch (final Exception e) {
Log.e(TAG, "handleUpdateMedia:", e); Log.e(TAG, "handleUpdateMedia:", e);
} }
@ -619,59 +637,55 @@ abstract class AbstractUVCCameraHandler extends Handler {
if (DEBUG) Log.v(TAG_THREAD, "handleRelease:finished"); if (DEBUG) Log.v(TAG_THREAD, "handleRelease:finished");
} }
// private final MediaEncoder.MediaEncoderListener mMediaEncoderListener = new MediaEncoder.MediaEncoderListener() { private final MediaEncoder.MediaEncoderListener mMediaEncoderListener = new MediaEncoder.MediaEncoderListener() {
// @Override @Override
// public void onPrepared(final MediaEncoder encoder) { public void onPrepared(final MediaEncoder encoder) {
// if (DEBUG) Log.v(TAG, "onPrepared:encoder=" + encoder); if (DEBUG) Log.v(TAG, "onPrepared:encoder=" + encoder);
// mIsRecording = true; mIsRecording = true;
// if (encoder instanceof MediaVideoEncoder) if (encoder instanceof MediaVideoEncoder)
// try { try {
// mWeakCameraView.get().setVideoEncoder((MediaVideoEncoder) encoder); mWeakCameraView.get().setVideoEncoder((MediaVideoEncoder)encoder);
// } catch (final Exception e) { } catch (final Exception e) {
// Log.e(TAG, "onPrepared:", e); Log.e(TAG, "onPrepared:", e);
// } }
// if (encoder instanceof MediaSurfaceEncoder) if (encoder instanceof MediaSurfaceEncoder)
// try { try {
// mWeakCameraView.get().setVideoEncoder((MediaSurfaceEncoder) encoder); mWeakCameraView.get().setVideoEncoder((MediaSurfaceEncoder)encoder);
// mUVCCamera.startCapture(((MediaSurfaceEncoder) encoder).getInputSurface()); mUVCCamera.startCapture(((MediaSurfaceEncoder)encoder).getInputSurface());
// } catch (final Exception e) { } catch (final Exception e) {
// Log.e(TAG, "onPrepared:", e); Log.e(TAG, "onPrepared:", e);
// } }
// } }
//
// @Override @Override
// public void onStopped(final MediaEncoder encoder) { public void onStopped(final MediaEncoder encoder) {
// if (DEBUG) Log.v(TAG_THREAD, "onStopped:encoder=" + encoder); if (DEBUG) Log.v(TAG_THREAD, "onStopped:encoder=" + encoder);
// if ((encoder instanceof MediaVideoEncoder) if ((encoder instanceof MediaVideoEncoder)
// || (encoder instanceof MediaSurfaceEncoder)) || (encoder instanceof MediaSurfaceEncoder))
// try { try {
// mIsRecording = false; mIsRecording = false;
// final Activity parent = mWeakParent.get(); final Activity parent = mWeakParent.get();
// mWeakCameraView.get().setVideoEncoder(null); mWeakCameraView.get().setVideoEncoder(null);
// synchronized (mSync) { synchronized (mSync) {
// if (mUVCCamera != null) { if (mUVCCamera != null) {
// mUVCCamera.stopCapture(); mUVCCamera.stopCapture();
// } }
// } }
// final String path = encoder.getOutputPath(); final String path = encoder.getOutputPath();
// if (!TextUtils.isEmpty(path)) { if (!TextUtils.isEmpty(path)) {
// mHandler.sendMessageDelayed(mHandler.obtainMessage(MSG_MEDIA_UPDATE, path), 1000); mHandler.sendMessageDelayed(mHandler.obtainMessage(MSG_MEDIA_UPDATE, path), 1000);
// } else { } else {
// final boolean released = (mHandler == null) || mHandler.mReleased; final boolean released = (mHandler == null) || mHandler.mReleased;
// if (released || parent == null || parent.isDestroyed()) { if (released || parent == null || parent.isDestroyed()) {
// handleRelease(); handleRelease();
// } }
// } }
// } catch (final Exception e) { } catch (final Exception e) {
// Log.e(TAG, "onPrepared:", e); Log.e(TAG, "onPrepared:", e);
// } }
// } }
// }; };
/**
* prepare and load shutter sound for still image capturing
*/
@SuppressWarnings("deprecation")
private void loadShutterSound(final Context context) { private void loadShutterSound(final Context context) {
// get system stream type using reflection // get system stream type using reflection
int streamType; int streamType;
@ -691,9 +705,35 @@ abstract class AbstractUVCCameraHandler extends Handler {
} }
// load shutter sound from resource // load shutter sound from resource
mSoundPool = new SoundPool(2, streamType, 0); mSoundPool = new SoundPool(2, streamType, 0);
// mSoundId = mSoundPool.load(context, R.raw.camera_click, 1); mSoundId = mSoundPool.load(context, R.raw.camera_click, 1);
} }
/**
* prepare and load shutter sound for still image capturing
*/
@SuppressWarnings("deprecation")
// private void loadShutterSound(final Context context) {
// // get system stream type using reflection
// int streamType;
// try {
// final Class<?> audioSystemClass = Class.forName("android.media.AudioSystem");
// final Field sseField = audioSystemClass.getDeclaredField("STREAM_SYSTEM_ENFORCED");
// streamType = sseField.getInt(null);
// } catch (final Exception e) {
// streamType = AudioManager.STREAM_SYSTEM; // set appropriate according to your app policy
// }
// if (mSoundPool != null) {
// try {
// mSoundPool.release();
// } catch (final Exception e) {
// }
// mSoundPool = null;
// }
// // load shutter sound from resource
// mSoundPool = new SoundPool(2, streamType, 0);
// mSoundId = mSoundPool.load(context, R.raw.camera_click, 1);
// }
@Override @Override
public void run() { public void run() {
Looper.prepare(); Looper.prepare();
@ -732,7 +772,7 @@ abstract class AbstractUVCCameraHandler extends Handler {
} }
private void callOnOpen() { private void callOnOpen() {
for (final CameraCallback callback : mCallbacks) { for (final CameraCallback callback: mCallbacks) {
try { try {
callback.onOpen(); callback.onOpen();
} catch (final Exception e) { } catch (final Exception e) {
@ -743,7 +783,7 @@ abstract class AbstractUVCCameraHandler extends Handler {
} }
private void callOnClose() { private void callOnClose() {
for (final CameraCallback callback : mCallbacks) { for (final CameraCallback callback: mCallbacks) {
try { try {
callback.onClose(); callback.onClose();
} catch (final Exception e) { } catch (final Exception e) {
@ -754,7 +794,7 @@ abstract class AbstractUVCCameraHandler extends Handler {
} }
private void callOnStartPreview() { private void callOnStartPreview() {
for (final CameraCallback callback : mCallbacks) { for (final CameraCallback callback: mCallbacks) {
try { try {
callback.onStartPreview(); callback.onStartPreview();
} catch (final Exception e) { } catch (final Exception e) {
@ -765,7 +805,7 @@ abstract class AbstractUVCCameraHandler extends Handler {
} }
private void callOnStopPreview() { private void callOnStopPreview() {
for (final CameraCallback callback : mCallbacks) { for (final CameraCallback callback: mCallbacks) {
try { try {
callback.onStopPreview(); callback.onStopPreview();
} catch (final Exception e) { } catch (final Exception e) {
@ -776,7 +816,7 @@ abstract class AbstractUVCCameraHandler extends Handler {
} }
private void callOnStartRecording() { private void callOnStartRecording() {
for (final CameraCallback callback : mCallbacks) { for (final CameraCallback callback: mCallbacks) {
try { try {
callback.onStartRecording(); callback.onStartRecording();
} catch (final Exception e) { } catch (final Exception e) {
@ -787,7 +827,7 @@ abstract class AbstractUVCCameraHandler extends Handler {
} }
private void callOnStopRecording() { private void callOnStopRecording() {
for (final CameraCallback callback : mCallbacks) { for (final CameraCallback callback: mCallbacks) {
try { try {
callback.onStopRecording(); callback.onStopRecording();
} catch (final Exception e) { } catch (final Exception e) {
@ -798,7 +838,7 @@ abstract class AbstractUVCCameraHandler extends Handler {
} }
private void callOnError(final Exception e) { private void callOnError(final Exception e) {
for (final CameraCallback callback : mCallbacks) { for (final CameraCallback callback: mCallbacks) {
try { try {
callback.onError(e); callback.onError(e);
} catch (final Exception e1) { } catch (final Exception e1) {

27
libusbcamera/src/main/java/com/serenegiant/usb/encoder/IAudioEncoder.java

@ -0,0 +1,27 @@
/*
* UVCCamera
* library and sample to access to UVC web camera on non-rooted Android device
*
* Copyright (c) 2014-2017 saki t_saki@serenegiant.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* All files in the folder are under this Apache License, Version 2.0.
* Files in the libjpeg-turbo, libusb, libuvc, rapidjson folder
* may have a different license, see the respective files.
*/
package com.serenegiant.usb.encoder;
/**
 * Marker interface identifying an encoder as an audio encoder.
 * Declares no methods; presumably used for {@code instanceof} dispatch at the
 * muxer/handler level — confirm at call sites.
 */
public interface IAudioEncoder {
}

28
libusbcamera/src/main/java/com/serenegiant/usb/encoder/IVideoEncoder.java

@ -0,0 +1,28 @@
/*
* UVCCamera
* library and sample to access to UVC web camera on non-rooted Android device
*
* Copyright (c) 2014-2017 saki t_saki@serenegiant.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* All files in the folder are under this Apache License, Version 2.0.
* Files in the libjpeg-turbo, libusb, libuvc, rapidjson folder
* may have a different license, see the respective files.
*/
package com.serenegiant.usb.encoder;
/**
 * Interface implemented by video encoders.
 */
public interface IVideoEncoder {
	/**
	 * Notify the encoder that frame data is (or will soon be) available.
	 * @return true if the encoder accepted the notification (implementation-defined)
	 */
	public boolean frameAvailableSoon();
}

233
libusbcamera/src/main/java/com/serenegiant/usb/encoder/MediaAudioEncoder.java

@ -0,0 +1,233 @@
/*
* UVCCamera
* library and sample to access to UVC web camera on non-rooted Android device
*
* Copyright (c) 2014-2017 saki t_saki@serenegiant.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* All files in the folder are under this Apache License, Version 2.0.
* Files in the libjpeg-turbo, libusb, libuvc, rapidjson folder
* may have a different license, see the respective files.
*/
package com.serenegiant.usb.encoder;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.util.Log;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
 * Captures uncompressed 16-bit mono PCM audio from the device microphone on a
 * dedicated thread and encodes it to AAC-LC ("audio/mp4a-latm") with MediaCodec.
 * Encoded output is drained by the {@link MediaEncoder} base class into the
 * {@code MediaMuxerWrapper} supplied at construction.
 */
public class MediaAudioEncoder extends MediaEncoder implements IAudioEncoder {
	private static final boolean DEBUG = true;	// TODO set false on release
	private static final String TAG = "MediaAudioEncoder";

	// AAC audio track parameters.
	private static final String MIME_TYPE = "audio/mp4a-latm";
	private static final int SAMPLE_RATE = 44100;	// 44.1[KHz] is only setting guaranteed to be available on all devices.
	private static final int BIT_RATE = 64000;
	public static final int SAMPLES_PER_FRAME = 1024;	// AAC, bytes/frame/channel
	public static final int FRAMES_PER_BUFFER = 25;	// AAC, frame/buffer/sec

	// Background capture thread reading from AudioRecord; created in startRecording().
	private AudioThread mAudioThread = null;

	public MediaAudioEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener) {
		super(muxer, listener);
	}

	/**
	 * Configures and starts a MediaCodec AAC encoder, then notifies the listener.
	 * NOTE(review): when no suitable encoder is found this logs and returns
	 * silently, so {@code onPrepared} is never invoked — confirm callers
	 * tolerate that (recording would never start).
	 */
	@Override
	protected void prepare() throws IOException {
		if (DEBUG) Log.v(TAG, "prepare:");
		mTrackIndex = -1;
		mMuxerStarted = mIsEOS = false;
		// prepare MediaCodec for AAC encoding of audio data from inernal mic.
		final MediaCodecInfo audioCodecInfo = selectAudioCodec(MIME_TYPE);
		if (audioCodecInfo == null) {
			Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
			return;
		}
		if (DEBUG) Log.i(TAG, "selected codec: " + audioCodecInfo.getName());
		// Mono AAC-LC at SAMPLE_RATE / BIT_RATE.
		final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, 1);
		audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
		audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
		audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
		audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
//		audioFormat.setLong(MediaFormat.KEY_MAX_INPUT_SIZE, inputFile.length());
//		audioFormat.setLong(MediaFormat.KEY_DURATION, (long)durationInMs );
		if (DEBUG) Log.i(TAG, "format: " + audioFormat);
		mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
		mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
		mMediaCodec.start();
		if (DEBUG) Log.i(TAG, "prepare finishing");
		if (mListener != null) {
			try {
				mListener.onPrepared(this);
			} catch (final Exception e) {
				Log.e(TAG, "prepare:", e);
			}
		}
	}

	@Override
	protected void startRecording() {
		super.startRecording();
		// create and execute audio capturing thread using internal mic
		if (mAudioThread == null) {
			mAudioThread = new AudioThread();
			mAudioThread.start();
		}
	}

	@Override
	protected void release() {
		// Drop the reference only; the thread exits on its own once the
		// capturing/stop flags flip (see AudioThread#run loop conditions).
		mAudioThread = null;
		super.release();
	}

	// Microphone sources tried in order until one initializes successfully.
	private static final int[] AUDIO_SOURCES = new int[] {
		MediaRecorder.AudioSource.DEFAULT,
		MediaRecorder.AudioSource.MIC,
		MediaRecorder.AudioSource.CAMCORDER,
	};

	/**
	 * Thread to capture audio data from internal mic as uncompressed 16bit PCM data
	 * and write them to the MediaCodec encoder
	 */
	private class AudioThread extends Thread {
		@Override
		public void run() {
			android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);	// THREAD_PRIORITY_URGENT_AUDIO
			int cnt = 0;	// number of PCM chunks successfully fed to the encoder
			final int min_buffer_size = AudioRecord.getMinBufferSize(
					SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
			// Ensure the AudioRecord buffer is at least the device minimum.
			int buffer_size = SAMPLES_PER_FRAME * FRAMES_PER_BUFFER;
			if (buffer_size < min_buffer_size)
				buffer_size = ((min_buffer_size / SAMPLES_PER_FRAME) + 1) * SAMPLES_PER_FRAME * 2;
			final ByteBuffer buf = ByteBuffer.allocateDirect(SAMPLES_PER_FRAME).order(ByteOrder.nativeOrder());
			AudioRecord audioRecord = null;
			// Try each candidate audio source until an AudioRecord initializes.
			for (final int src: AUDIO_SOURCES) {
				try {
					audioRecord = new AudioRecord(src,
							SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buffer_size);
					if (audioRecord != null) {
						if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
							audioRecord.release();
							audioRecord = null;
						}
					}
				} catch (final Exception e) {
					audioRecord = null;
				}
				if (audioRecord != null) {
					break;
				}
			}
			if (audioRecord != null) {
				try {
					if (mIsCapturing) {
						if (DEBUG) Log.v(TAG, "AudioThread:start audio recording");
						int readBytes;
						audioRecord.startRecording();
						try {
							// Pump PCM into the encoder until stop/EOS is requested.
							for ( ; mIsCapturing && !mRequestStop && !mIsEOS ; ) {
								// read audio data from internal mic
								buf.clear();
								try {
									readBytes = audioRecord.read(buf, SAMPLES_PER_FRAME);
								} catch (final Exception e) {
									break;
								}
								if (readBytes > 0) {
									// set audio data to encoder
									buf.position(readBytes);
									buf.flip();
									encode(buf, readBytes, getPTSUs());
									frameAvailableSoon();
									cnt++;
								}
							}
							if (cnt > 0) {
								frameAvailableSoon();
							}
						} finally {
							audioRecord.stop();
						}
					}
				} catch (final Exception e) {
					Log.e(TAG, "AudioThread#run", e);
				} finally {
					audioRecord.release();
				}
			}
			if (cnt == 0) {
				// No mic data captured: push a few frames of the (untouched,
				// presumably zero-filled) buffer so the audio track is not empty
				// — NOTE(review): confirm the muxer requires this.
				for (int i = 0; mIsCapturing && (i < 5); i++) {
					buf.position(SAMPLES_PER_FRAME);
					buf.flip();
					try {
						encode(buf, SAMPLES_PER_FRAME, getPTSUs());
						frameAvailableSoon();
					} catch (final Exception e) {
						break;
					}
					synchronized(this) {
						try {
							wait(50);
						} catch (final InterruptedException e) {
						}
					}
				}
			}
			if (DEBUG) Log.v(TAG, "AudioThread:finished");
		}
	}

	/**
	 * select the first codec that match a specific MIME type
	 * @param mimeType desired MIME type, e.g. "audio/mp4a-latm"
	 * @return MediaCodecInfo of the first matching encoder, or null when none found
	 */
	private static final MediaCodecInfo selectAudioCodec(final String mimeType) {
		if (DEBUG) Log.v(TAG, "selectAudioCodec:");
		MediaCodecInfo result = null;
		// get the list of available codecs
		final int numCodecs = MediaCodecList.getCodecCount();
LOOP:	for (int i = 0; i < numCodecs; i++) {
			final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
			if (!codecInfo.isEncoder()) {	// skip decoders
				continue;
			}
			final String[] types = codecInfo.getSupportedTypes();
			for (int j = 0; j < types.length; j++) {
				if (DEBUG) Log.i(TAG, "supportedType:" + codecInfo.getName() + ",MIME=" + types[j]);
				if (types[j].equalsIgnoreCase(mimeType)) {
					if (result == null) {
						result = codecInfo;
						break LOOP;
					}
				}
			}
		}
		return result;
	}
}

448
libusbcamera/src/main/java/com/serenegiant/usb/encoder/MediaEncoder.java

@ -0,0 +1,448 @@
/*
* UVCCamera
* library and sample to access to UVC web camera on non-rooted Android device
*
* Copyright (c) 2014-2017 saki t_saki@serenegiant.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* All files in the folder are under this Apache License, Version 2.0.
* Files in the libjpeg-turbo, libusb, libuvc, rapidjson folder
* may have a different license, see the respective files.
*/
package com.serenegiant.usb.encoder;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.util.Log;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
public abstract class MediaEncoder implements Runnable {
private static final boolean DEBUG = true; // TODO set false on release
private static final String TAG = "MediaEncoder";
protected static final int TIMEOUT_USEC = 10000; // 10[msec]
protected static final int MSG_FRAME_AVAILABLE = 1;
protected static final int MSG_STOP_RECORDING = 9;
public interface MediaEncoderListener {
public void onPrepared(MediaEncoder encoder);
public void onStopped(MediaEncoder encoder);
}
protected final Object mSync = new Object();
/**
* Flag that indicate this encoder is capturing now.
*/
protected volatile boolean mIsCapturing;
/**
* Flag that indicate the frame data will be available soon.
*/
private int mRequestDrain;
/**
* Flag to request stop capturing
*/
protected volatile boolean mRequestStop;
/**
* Flag that indicate encoder received EOS(End Of Stream)
*/
protected boolean mIsEOS;
/**
* Flag the indicate the muxer is running
*/
protected boolean mMuxerStarted;
/**
* Track Number
*/
protected int mTrackIndex;
/**
* MediaCodec instance for encoding
*/
protected MediaCodec mMediaCodec; // API >= 16(Android4.1.2)
/**
* Weak refarence of MediaMuxerWarapper instance
*/
protected final WeakReference<MediaMuxerWrapper> mWeakMuxer;
/**
* BufferInfo instance for dequeuing
*/
private MediaCodec.BufferInfo mBufferInfo; // API >= 16(Android4.1.2)
protected final MediaEncoderListener mListener;
	/**
	 * Registers this encoder with the muxer and spawns the private encoding
	 * thread, blocking until that thread has entered {@link #run()} (the thread
	 * calls {@code mSync.notify()} once initialized).
	 *
	 * @param muxer    wrapper that will receive the encoded samples; must not be null
	 * @param listener callback for prepared/stopped events; must not be null
	 * @throws NullPointerException if either argument is null
	 */
	public MediaEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener) {
		if (listener == null) throw new NullPointerException("MediaEncoderListener is null");
		if (muxer == null) throw new NullPointerException("MediaMuxerWrapper is null");
		mWeakMuxer = new WeakReference<MediaMuxerWrapper>(muxer);
		muxer.addEncoder(this);
		mListener = listener;
		synchronized (mSync) {
			// create BufferInfo here for effectiveness(to reduce GC)
			mBufferInfo = new MediaCodec.BufferInfo();
			// wait for starting thread
			new Thread(this, getClass().getSimpleName()).start();
			try {
				// Handshake: released by mSync.notify() at the top of run().
				// NOTE(review): InterruptedException is swallowed without
				// re-interrupting the thread.
				mSync.wait();
			} catch (final InterruptedException e) {
			}
		}
	}
public String getOutputPath() {
final MediaMuxerWrapper muxer = mWeakMuxer.get();
return muxer != null ? muxer.getOutputPath() : null;
}
	/**
	 * Indicates that frame data is soon available or already available.
	 * Increments the pending-drain counter and wakes the encoder thread.
	 * @return true if the encoder is ready to encode; false when it is not
	 *         capturing or a stop has been requested
	 */
	public boolean frameAvailableSoon() {
//    	if (DEBUG) Log.v(TAG, "frameAvailableSoon");
		synchronized (mSync) {
			// Reject frames once capture has stopped or stop was requested.
			if (!mIsCapturing || mRequestStop) {
				return false;
			}
			mRequestDrain++;
			mSync.notifyAll();
		}
		return true;
	}
	/**
	 * Encoding loop on the private thread: drains the codec whenever
	 * {@link #frameAvailableSoon()} signals pending frames, and on a stop
	 * request performs a final drain / EOS / drain / release sequence.
	 */
	@Override
	public void run() {
//		android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
		synchronized (mSync) {
			mRequestStop = false;
			mRequestDrain = 0;
			// Release the constructor, which is blocked in mSync.wait().
			mSync.notify();
		}
		final boolean isRunning = true;
		boolean localRequestStop;
		boolean localRequestDrain;
		while (isRunning) {
			// Snapshot the shared flags under the lock, then act outside it.
			synchronized (mSync) {
				localRequestStop = mRequestStop;
				localRequestDrain = (mRequestDrain > 0);
				if (localRequestDrain)
					mRequestDrain--;
			}
			if (localRequestStop) {
				drain();
				// request stop recording
				signalEndOfInputStream();
				// process output data again for EOS signal
				drain();
				// release all related objects
				release();
				break;
			}
			if (localRequestDrain) {
				drain();
			} else {
				// Nothing pending: sleep until frameAvailableSoon()/stopRecording()
				// calls notifyAll(); interruption ends the loop.
				synchronized (mSync) {
					try {
						mSync.wait();
					} catch (final InterruptedException e) {
						break;
					}
				}
			}
		} // end of while
		if (DEBUG) Log.d(TAG, "Encoder thread exiting");
		synchronized (mSync) {
			mRequestStop = true;
			mIsCapturing = false;
		}
	}
/*
 * preparing method for each sub class
 * this method should be implemented in sub class, so set this as abstract method
 * (creates and starts the MediaCodec instance before recording begins)
 * @throws IOException
 */
/*package*/ abstract void prepare() throws IOException;
/**
 * Mark this encoder as capturing and wake the drain thread.
 */
/*package*/ void startRecording() {
    if (DEBUG) Log.v(TAG, "startRecording");
    synchronized (mSync) {
        mRequestStop = false;
        mIsCapturing = true;
        mSync.notifyAll();
    }
}
/**
 * the method to request stop encoding
 */
/*package*/ void stopRecording() {
    if (DEBUG) Log.v(TAG, "stopRecording");
    synchronized (mSync) {
        if (mRequestStop || !mIsCapturing) {
            return;	// nothing running, or already asked to stop
        }
        mRequestStop = true;	// for rejecting newer frames
        // We can not know when the encoding and writing finish,
        // so return immediately after the request to avoid blocking the caller.
        mSync.notifyAll();
    }
}
//********************************************************************************
//********************************************************************************
/**
 * Release all related objects
 * Called once from the encoder thread when encoding finishes: notifies the
 * listener, tears down the MediaCodec, and asks the muxer to stop if this
 * encoder had started it.
 */
protected void release() {
    if (DEBUG) Log.d(TAG, "release:");
    try {
        // notify first so the owner can stop feeding frames
        mListener.onStopped(this);
    } catch (final Exception e) {
        Log.e(TAG, "failed onStopped", e);
    }
    mIsCapturing = false;
    if (mMediaCodec != null) {
        try {
            mMediaCodec.stop();
            mMediaCodec.release();
            mMediaCodec = null;
        } catch (final Exception e) {
            Log.e(TAG, "failed releasing MediaCodec", e);
        }
    }
    if (mMuxerStarted) {
        // this encoder registered a track; tell the muxer this stream is done
        final MediaMuxerWrapper muxer = mWeakMuxer.get();
        if (muxer != null) {
            try {
                muxer.stop();
            } catch (final Exception e) {
                Log.e(TAG, "failed stopping muxer", e);
            }
        }
    }
    mBufferInfo = null;
}
/**
 * Ask the encoder to finish the stream.
 * signalEndOfInputStream is only available for video encoding with surface,
 * and is equivalent to sending an empty buffer with the
 * BUFFER_FLAG_END_OF_STREAM flag — which is what encode(null, 0, pts) does.
 */
protected void signalEndOfInputStream() {
    if (DEBUG) Log.d(TAG, "sending EOS to encoder");
    // mMediaCodec.signalEndOfInputStream();	// API >= 18
    encode((byte[])null, 0, getPTSUs());
}
/**
 * Method to set a byte array to the MediaCodec encoder.
 * The data is split across as many codec input buffers as needed.
 * @param buffer source frame data; may be null when sending EOS
 * @param length length of byte array, zero (or less) means EOS.
 * @param presentationTimeUs presentation timestamp in microseconds
 */
@SuppressWarnings("deprecation")
protected void encode(final byte[] buffer, final int length, final long presentationTimeUs) {
    if (!mIsCapturing) return;
    int ix = 0, sz;
    final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
    // loop until the whole source is queued or EOS has been signalled.
    // (fixed: the previous "ix < length" loop condition skipped the body
    // entirely for length == 0, so the EOS buffer was never queued and
    // signalEndOfInputStream() was a no-op)
    while (mIsCapturing) {
        final int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
        if (inputBufferIndex >= 0) {
            final ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
            inputBuffer.clear();
            sz = inputBuffer.remaining();
            // clamp chunk size to the bytes still left in the source
            sz = (ix + sz < length) ? sz : length - ix;
            if (sz > 0 && (buffer != null)) {
                inputBuffer.put(buffer, ix, sz);
            }
            ix += sz;
            if (length <= 0) {
                // send EOS
                mIsEOS = true;
                if (DEBUG) Log.i(TAG, "send BUFFER_FLAG_END_OF_STREAM");
                mMediaCodec.queueInputBuffer(inputBufferIndex, 0, 0,
                    presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                break;
            } else {
                mMediaCodec.queueInputBuffer(inputBufferIndex, 0, sz,
                    presentationTimeUs, 0);
                if (ix >= length) {
                    break;	// whole frame queued
                }
            }
        } else if (inputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // nothing to do: dequeueInputBuffer already waited up to
            // TIMEOUT_USEC (10 msec) for a free input buffer
        }
    }
}
/**
 * Method to set a ByteBuffer to the MediaCodec encoder.
 * The data is split across as many codec input buffers as needed.
 * @param buffer source data; null means EOS
 * @param length number of bytes to consume from buffer, zero (or less) means EOS
 * @param presentationTimeUs presentation timestamp in microseconds
 */
@SuppressWarnings("deprecation")
protected void encode(final ByteBuffer buffer, final int length, final long presentationTimeUs) {
    if (!mIsCapturing) return;
    int ix = 0, sz;
    final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
    // loop until the whole source is queued or EOS has been signalled.
    // (fixed: the previous "ix < length" loop condition skipped the body
    // entirely for length == 0, so the EOS buffer was never queued)
    while (mIsCapturing) {
        final int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
        if (inputBufferIndex >= 0) {
            final ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
            inputBuffer.clear();
            sz = inputBuffer.remaining();
            // clamp chunk size to the bytes still left in the source
            sz = (ix + sz < length) ? sz : length - ix;
            if (sz > 0 && (buffer != null)) {
                // expose exactly the [ix, ix+sz) window of the source.
                // (fixed: the old position(ix+sz)+flip() sequence rewound the
                // source to position 0 on every chunk, re-sending stale data
                // and overflowing the codec input buffer whenever the frame
                // needed more than one chunk)
                buffer.limit(ix + sz);
                buffer.position(ix);
                inputBuffer.put(buffer);
            }
            ix += sz;
            if (length <= 0) {
                // send EOS
                mIsEOS = true;
                if (DEBUG) Log.i(TAG, "send BUFFER_FLAG_END_OF_STREAM");
                mMediaCodec.queueInputBuffer(inputBufferIndex, 0, 0,
                    presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                break;
            } else {
                mMediaCodec.queueInputBuffer(inputBufferIndex, 0, sz,
                    presentationTimeUs, 0);
                if (ix >= length) {
                    break;	// whole frame queued
                }
            }
        } else if (inputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // nothing to do: dequeueInputBuffer already waited up to
            // TIMEOUT_USEC (10 msec) for a free input buffer
        }
    }
}
/**
 * drain encoded data and write them to muxer
 * Runs on the private encoder thread: pulls every pending output buffer from
 * MediaCodec, registers the track on the first format change, and forwards
 * encoded samples to the MediaMuxerWrapper until the queue is empty or EOS.
 */
@SuppressWarnings("deprecation")
protected void drain() {
    if (mMediaCodec == null) return;
    ByteBuffer[] encoderOutputBuffers = mMediaCodec.getOutputBuffers();
    int encoderStatus, count = 0;
    final MediaMuxerWrapper muxer = mWeakMuxer.get();
    if (muxer == null) {
        // muxer already reclaimed; nowhere to write, give up quietly
        Log.w(TAG, "muxer is unexpectedly null");
        return;
    }
    LOOP: while (mIsCapturing) {
        // get encoded data with maximum timeout duration of TIMEOUT_USEC(=10[msec])
        encoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // wait 5 counts(=TIMEOUT_USEC x 5 = 50msec) until data/EOS come
            if (!mIsEOS) {
                if (++count > 5)
                    break LOOP;	// out of while
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            if (DEBUG) Log.v(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
            // this should not come when encoding, but refresh the array anyway
            encoderOutputBuffers = mMediaCodec.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            if (DEBUG) Log.v(TAG, "INFO_OUTPUT_FORMAT_CHANGED");
            // this status indicates the output format of the codec changed;
            // it should come only once, before the actual encoded data,
            // but this status never comes on Android 4.3 or less —
            // in that case you should handle MediaCodec.BUFFER_FLAG_CODEC_CONFIG instead.
            if (mMuxerStarted) {	// a second format change is an error
                throw new RuntimeException("format changed twice");
            }
            // get output format from codec and pass it to the muxer
            // getOutputFormat should be called after INFO_OUTPUT_FORMAT_CHANGED, otherwise crash.
            final MediaFormat format = mMediaCodec.getOutputFormat();	// API >= 16
            mTrackIndex = muxer.addTrack(format);
            mMuxerStarted = true;
            if (!muxer.start()) {
                // the muxer starts only when ALL encoders have added their track;
                // block here until it is ready
                synchronized (muxer) {
                    while (!muxer.isStarted())
                        try {
                            muxer.wait(100);
                        } catch (final InterruptedException e) {
                            break LOOP;
                        }
                }
            }
        } else if (encoderStatus < 0) {
            // unexpected status
            if (DEBUG) Log.w(TAG, "drain:unexpected result from encoder#dequeueOutputBuffer: " + encoderStatus);
        } else {
            final ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null) {
                // this never should come... may be a MediaCodec internal error
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
            }
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // You should set the output format on the muxer here when targeting
                // Android 4.3 or less, but MediaCodec#getOutputFormat can not be called
                // here (INFO_OUTPUT_FORMAT_CHANGED has not come yet), so you would have
                // to build the output format from the buffer data.
                // This sample is for API>=18 (>=Android 4.3); just ignore this flag here.
                if (DEBUG) Log.d(TAG, "drain:BUFFER_FLAG_CODEC_CONFIG");
                mBufferInfo.size = 0;
            }
            if (mBufferInfo.size != 0) {
                // encoded data is ready, clear waiting counter
                count = 0;
                if (!mMuxerStarted) {
                    // muxer is not ready... this would be a programming failure.
                    throw new RuntimeException("drain:muxer hasn't started");
                }
                // write encoded data to muxer (presentationTimeUs must be monotonic)
                mBufferInfo.presentationTimeUs = getPTSUs();
                muxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                prevOutputPTSUs = mBufferInfo.presentationTimeUs;
            }
            // return buffer to encoder
            mMediaCodec.releaseOutputBuffer(encoderStatus, false);
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                // when EOS comes, encoding for this stream is complete
                mMuxerStarted = mIsCapturing = false;
                break;	// out of while
            }
        }
    }
}
/**
* previous presentationTimeUs for writing
*/
private long prevOutputPTSUs = 0;
/**
 * get next encoding presentationTimeUs
 * Clamped so it never runs backwards: the muxer fails to write when
 * timestamps are not monotonic.
 * @return monotonically non-decreasing timestamp in microseconds
 */
protected long getPTSUs() {
    final long now = System.nanoTime() / 1000L;
    // never hand out a timestamp earlier than the last one written
    return Math.max(now, prevOutputPTSUs);
}
}

188
libusbcamera/src/main/java/com/serenegiant/usb/encoder/MediaMuxerWrapper.java

@ -0,0 +1,188 @@
package com.serenegiant.usb.encoder;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Environment;
import android.text.TextUtils;
import android.util.Log;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.text.SimpleDateFormat;
import java.util.GregorianCalendar;
import java.util.Locale;
/**
 * Wraps a MediaMuxer writing an MPEG-4 file and coordinates start/stop of up
 * to one video and one audio encoder: the muxer actually starts only after
 * every assigned encoder has added its track, and stops after every started
 * encoder has reported EOS.
 */
public class MediaMuxerWrapper {
    private static final boolean DEBUG = true;	// TODO set false on release
    private static final String TAG = "MediaMuxerWrapper";

    private static final String DIR_NAME = "USBCameraTest";
    // NOTE: SimpleDateFormat is not thread-safe; only used from getDateTimeString()
    private static final SimpleDateFormat mDateTimeFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss", Locale.US);

    private String mOutputPath;
    private final MediaMuxer mMediaMuxer;	// API >= 18
    private int mEncoderCount, mStatredCount;
    private boolean mIsStarted;
    private MediaEncoder mVideoEncoder, mAudioEncoder;

    /**
     * @param path target file path; when null or empty, a timestamped file in
     *             the public Movies directory is used instead
     * @throws IOException      if the underlying MediaMuxer can not be created
     * @throws RuntimeException if external storage is not writable
     */
    public MediaMuxerWrapper(String path) throws IOException {
        try {
            // use the caller-supplied path when present, otherwise fall back to
            // the default Movies directory.
            // (fixed: the fallback used to be overwritten unconditionally by the
            // raw - possibly empty - argument, breaking the default-path case)
            if (TextUtils.isEmpty(path)) {
                mOutputPath = getCaptureFile(Environment.DIRECTORY_MOVIES, ".mp4").toString();
            } else {
                mOutputPath = path;
            }
        } catch (final NullPointerException e) {
            // getCaptureFile() returns null when the directory is not writable
            throw new RuntimeException("This app has no permission of writing external storage");
        }
        mMediaMuxer = new MediaMuxer(mOutputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        mEncoderCount = mStatredCount = 0;
        mIsStarted = false;
    }

    public String getOutputPath() {
        return mOutputPath;
    }

    /** Prepare all assigned encoders (creates their MediaCodec instances). */
    public void prepare() throws IOException {
        if (mVideoEncoder != null)
            mVideoEncoder.prepare();
        if (mAudioEncoder != null)
            mAudioEncoder.prepare();
    }

    /** Start all assigned encoders. */
    public void startRecording() {
        if (mVideoEncoder != null)
            mVideoEncoder.startRecording();
        if (mAudioEncoder != null)
            mAudioEncoder.startRecording();
    }

    /** Request stop on all assigned encoders and drop the references. */
    public void stopRecording() {
        if (mVideoEncoder != null)
            mVideoEncoder.stopRecording();
        mVideoEncoder = null;
        if (mAudioEncoder != null)
            mAudioEncoder.stopRecording();
        mAudioEncoder = null;
    }

    public synchronized boolean isStarted() {
        return mIsStarted;
    }

//**********************************************************************
//**********************************************************************
    /**
     * assign encoder to this class. this is called from the encoder constructor.
     * @param encoder instance of MediaVideoEncoder, MediaSurfaceEncoder,
     *                MediaVideoBufferEncoder or MediaAudioEncoder
     * @throws IllegalArgumentException when the slot is already taken or the
     *                                  encoder type is unknown
     */
    /*package*/ void addEncoder(final MediaEncoder encoder) {
        if ((encoder instanceof MediaVideoEncoder)
                || (encoder instanceof MediaSurfaceEncoder)
                || (encoder instanceof MediaVideoBufferEncoder)) {
            if (mVideoEncoder != null)
                throw new IllegalArgumentException("Video encoder already added.");
            mVideoEncoder = encoder;
        } else if (encoder instanceof MediaAudioEncoder) {
            if (mAudioEncoder != null)
                // fixed: this message incorrectly said "Video encoder already added."
                throw new IllegalArgumentException("Audio encoder already added.");
            mAudioEncoder = encoder;
        } else
            throw new IllegalArgumentException("unsupported encoder");
        mEncoderCount = (mVideoEncoder != null ? 1 : 0) + (mAudioEncoder != null ? 1 : 0);
    }

    /**
     * request start recording from encoder; the muxer actually starts only
     * once every assigned encoder has called this.
     * @return true when muxer is ready to write
     */
    /*package*/ synchronized boolean start() {
        if (DEBUG) Log.v(TAG, "start:");
        mStatredCount++;
        if ((mEncoderCount > 0) && (mStatredCount == mEncoderCount)) {
            mMediaMuxer.start();
            mIsStarted = true;
            notifyAll();	// wake encoders waiting in drain() for the muxer
            if (DEBUG) Log.v(TAG, "MediaMuxer started:");
        }
        return mIsStarted;
    }

    /**
     * request stop recording from encoder when the encoder received EOS;
     * the muxer actually stops once every started encoder has called this.
     */
    /*package*/ synchronized void stop() {
        if (DEBUG) Log.v(TAG, "stop:mStatredCount=" + mStatredCount);
        mStatredCount--;
        if ((mEncoderCount > 0) && (mStatredCount <= 0)) {
            try {
                mMediaMuxer.stop();
            } catch (final Exception e) {
                Log.w(TAG, e);
            }
            mIsStarted = false;
            if (DEBUG) Log.v(TAG, "MediaMuxer stopped:");
        }
    }

    /**
     * assign encoder track to muxer
     * @param format output format from MediaCodec (after INFO_OUTPUT_FORMAT_CHANGED)
     * @return track index; minus value indicates error
     */
    /*package*/ synchronized int addTrack(final MediaFormat format) {
        if (mIsStarted)
            throw new IllegalStateException("muxer already started");
        final int trackIx = mMediaMuxer.addTrack(format);
        if (DEBUG) Log.i(TAG, "addTrack:trackNum=" + mEncoderCount + ",trackIx=" + trackIx + ",format=" + format);
        return trackIx;
    }

    /**
     * write encoded data to muxer; samples arriving before start() or after
     * stop() are silently dropped.
     * @param trackIndex index returned by addTrack
     * @param byteBuf encoded sample data
     * @param bufferInfo size/offset/pts/flags of the sample
     */
    /*package*/ synchronized void writeSampleData(final int trackIndex, final ByteBuffer byteBuf, final MediaCodec.BufferInfo bufferInfo) {
        if (mStatredCount > 0)
            mMediaMuxer.writeSampleData(trackIndex, byteBuf, bufferInfo);
    }

//**********************************************************************
//**********************************************************************
    /**
     * generate output file
     * @param type Environment.DIRECTORY_MOVIES / Environment.DIRECTORY_DCIM etc.
     * @param ext .mp4(.m4a for audio) or .png
     * @return return null when this app has no writing permission to external storage.
     */
    public static final File getCaptureFile(final String type, final String ext) {
        final File dir = new File(Environment.getExternalStoragePublicDirectory(type), DIR_NAME);
        Log.d(TAG, "path=" + dir.toString());
        dir.mkdirs();
        if (dir.canWrite()) {
            return new File(dir, getDateTimeString() + ext);
        }
        return null;
    }

    /**
     * get current date and time as String
     * @return timestamp formatted as yyyy-MM-dd-HH-mm-ss
     */
    private static final String getDateTimeString() {
        final GregorianCalendar now = new GregorianCalendar();
        return mDateTimeFormat.format(now.getTime());
    }
}

196
libusbcamera/src/main/java/com/serenegiant/usb/encoder/MediaSurfaceEncoder.java

@ -0,0 +1,196 @@
/*
* UVCCamera
* library and sample to access to UVC web camera on non-rooted Android device
*
* Copyright (c) 2014-2017 saki t_saki@serenegiant.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* All files in the folder are under this Apache License, Version 2.0.
* Files in the libjpeg-turbo, libusb, libuvc, rapidjson folder
* may have a different license, see the respective files.
*/
package com.serenegiant.usb.encoder;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.util.Log;
import android.view.Surface;
import java.io.IOException;
/**
 * H.264 video encoder whose input is an Android Surface (API >= 18):
 * frames rendered into {@link #getInputSurface()} are encoded and handed
 * to the muxer by the base class drain loop.
 */
public class MediaSurfaceEncoder extends MediaEncoder implements IVideoEncoder {
    private static final boolean DEBUG = true;	// TODO set false on release
    private static final String TAG = "MediaSurfaceEncoder";

    private static final String MIME_TYPE = "video/avc";
    // parameters for recording
    private static final int FRAME_RATE = 15;
    private static final float BPP = 0.50f;

    private final int mWidth, mHeight;
    private Surface mSurface;

    /**
     * @param muxer muxer receiving the encoded stream
     * @param width frame width in pixels
     * @param height frame height in pixels
     * @param listener prepared/stopped callbacks
     */
    public MediaSurfaceEncoder(final MediaMuxerWrapper muxer, final int width, final int height, final MediaEncoderListener listener) {
        super(muxer, listener);
        if (DEBUG) Log.i(TAG, "MediaVideoEncoder: ");
        mWidth = width;
        mHeight = height;
    }

    /**
     * Returns the encoder's input surface.
     */
    public Surface getInputSurface() {
        return mSurface;
    }

    /**
     * Create, configure and start the MediaCodec encoder and its input surface.
     */
    @Override
    protected void prepare() throws IOException {
        if (DEBUG) Log.i(TAG, "prepare: ");
        mTrackIndex = -1;
        mMuxerStarted = mIsEOS = false;

        final MediaCodecInfo codecInfo = selectVideoCodec(MIME_TYPE);
        if (codecInfo == null) {
            Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return;
        }
        if (DEBUG) Log.i(TAG, "selected codec: " + codecInfo.getName());

        final MediaFormat outputFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        outputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);	// API >= 18
        outputFormat.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate());
        outputFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        outputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
        if (DEBUG) Log.i(TAG, "format: " + outputFormat);

        mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
        mMediaCodec.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        // the input surface may only be requested between configure() and start()
        mSurface = mMediaCodec.createInputSurface();	// API >= 18
        mMediaCodec.start();
        if (DEBUG) Log.i(TAG, "prepare finishing");
        if (mListener != null) {
            try {
                mListener.onPrepared(this);
            } catch (final Exception e) {
                Log.e(TAG, "prepare:", e);
            }
        }
    }

    @Override
    protected void release() {
        if (DEBUG) Log.i(TAG, "release:");
        final Surface surface = mSurface;
        mSurface = null;
        if (surface != null) {
            surface.release();
        }
        super.release();
    }

    // bits-per-pixel heuristic: BPP x fps x pixel count
    private int calcBitRate() {
        final int bitrate = (int)(BPP * FRAME_RATE * mWidth * mHeight);
        Log.i(TAG, String.format("bitrate=%5.2f[Mbps]", bitrate / 1024f / 1024f));
        return bitrate;
    }

    /**
     * select the first codec that match a specific MIME type
     * @param mimeType
     * @return null if no codec matched
     */
    protected static final MediaCodecInfo selectVideoCodec(final String mimeType) {
        if (DEBUG) Log.v(TAG, "selectVideoCodec:");
        final int numCodecs = MediaCodecList.getCodecCount();
        for (int idx = 0; idx < numCodecs; idx++) {
            final MediaCodecInfo info = MediaCodecList.getCodecInfoAt(idx);
            if (!info.isEncoder()) {
                continue;	// decoders are of no use here
            }
            for (final String type : info.getSupportedTypes()) {
                if (!type.equalsIgnoreCase(mimeType)) {
                    continue;
                }
                if (DEBUG) Log.i(TAG, "codec:" + info.getName() + ",MIME=" + type);
                if (selectColorFormat(info, mimeType) > 0) {
                    return info;
                }
            }
        }
        return null;
    }

    /**
     * select color format available on specific codec and we can use.
     * @return 0 if no colorFormat is matched
     */
    protected static final int selectColorFormat(final MediaCodecInfo codecInfo, final String mimeType) {
        if (DEBUG) Log.i(TAG, "selectColorFormat: ");
        final MediaCodecInfo.CodecCapabilities caps;
        try {
            // querying capabilities can be slow; raise priority while doing it
            Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
            caps = codecInfo.getCapabilitiesForType(mimeType);
        } finally {
            Thread.currentThread().setPriority(Thread.NORM_PRIORITY);
        }
        int result = 0;
        for (final int colorFormat : caps.colorFormats) {
            if (isRecognizedVideoFormat(colorFormat)) {
                result = colorFormat;
                break;
            }
        }
        if (result == 0)
            Log.e(TAG, "couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
        return result;
    }

    /**
     * color formats that we can use in this class
     */
    protected static int[] recognizedFormats = {
        MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface,
    };

    private static final boolean isRecognizedVideoFormat(final int colorFormat) {
        if (DEBUG) Log.i(TAG, "isRecognizedVideoFormat:colorFormat=" + colorFormat);
        final int[] formats = recognizedFormats;
        final int n = formats != null ? formats.length : 0;
        for (int i = 0; i < n; i++) {
            if (formats[i] == colorFormat) {
                return true;
            }
        }
        return false;
    }
}

193
libusbcamera/src/main/java/com/serenegiant/usb/encoder/MediaVideoBufferEncoder.java

@ -0,0 +1,193 @@
/*
* UVCCamera
* library and sample to access to UVC web camera on non-rooted Android device
*
* Copyright (c) 2014-2017 saki t_saki@serenegiant.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* All files in the folder are under this Apache License, Version 2.0.
* Files in the libjpeg-turbo, libusb, libuvc, rapidjson folder
* may have a different license, see the respective files.
*/
package com.serenegiant.usb.encoder;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.util.Log;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
 * This class receives video images as ByteBuffer (strongly recommend direct ByteBuffer) as NV21(YUV420SP)
 * and encodes them to h.264.
 * If you use this directly with IFrameCallback, you should know UVCCamera and its backend native libraries
 * never execute color space conversion. This means that the color tone of the resulting movie may differ
 * from what you expected/can see on screen.
 */
public class MediaVideoBufferEncoder extends MediaEncoder implements IVideoEncoder {
    private static final boolean DEBUG = true;	// TODO set false on release
    private static final String TAG = "MediaVideoBufferEncoder";

    private static final String MIME_TYPE = "video/avc";
    // parameters for recording
    private static final int FRAME_RATE = 15;
    private static final float BPP = 0.50f;

    private final int mWidth, mHeight;
    // color format chosen by selectVideoCodec(); passed to MediaFormat in prepare()
    protected int mColorFormat;

    /**
     * @param muxer muxer receiving the encoded stream
     * @param width frame width in pixels
     * @param height frame height in pixels
     * @param listener prepared/stopped callbacks
     */
    public MediaVideoBufferEncoder(final MediaMuxerWrapper muxer, final int width, final int height, final MediaEncoderListener listener) {
        super(muxer, listener);
        if (DEBUG) Log.i(TAG, "MediaVideoEncoder: ");
        mWidth = width;
        mHeight = height;
    }

    /**
     * Queue one raw frame for encoding.
     * NOTE(review): passes buffer.capacity() as the frame length — assumes the
     * caller hands over a buffer sized exactly to one frame; verify against callers.
     * @param buffer NV21 frame data
     */
    public void encode(final ByteBuffer buffer) {
        synchronized (mSync) {
            if (!mIsCapturing || mRequestStop) return;
        }
        encode(buffer, buffer.capacity(), getPTSUs());
    }

    /**
     * Create, configure and start the MediaCodec buffer-input encoder.
     */
    @Override
    protected void prepare() throws IOException {
        if (DEBUG) Log.i(TAG, "prepare: ");
        mTrackIndex = -1;
        mMuxerStarted = mIsEOS = false;
        final MediaCodecInfo videoCodecInfo = selectVideoCodec(MIME_TYPE);
        if (videoCodecInfo == null) {
            Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return;
        }
        if (DEBUG) Log.i(TAG, "selected codec: " + videoCodecInfo.getName());
        final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        // mColorFormat was set by selectVideoCodec() above
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, mColorFormat);
        format.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate());
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
        if (DEBUG) Log.i(TAG, "format: " + format);
        mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
        mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mMediaCodec.start();
        if (DEBUG) Log.i(TAG, "prepare finishing");
        if (mListener != null) {
            try {
                mListener.onPrepared(this);
            } catch (final Exception e) {
                Log.e(TAG, "prepare:", e);
            }
        }
    }

    // bits-per-pixel heuristic: BPP x fps x pixel count
    private int calcBitRate() {
        final int bitrate = (int)(BPP * FRAME_RATE * mWidth * mHeight);
        Log.i(TAG, String.format("bitrate=%5.2f[Mbps]", bitrate / 1024f / 1024f));
        return bitrate;
    }

    /**
     * select the first codec that match a specific MIME type
     * (also records the usable color format into mColorFormat as a side effect)
     * @param mimeType
     * @return null if no codec matched
     */
    @SuppressWarnings("deprecation")
    protected final MediaCodecInfo selectVideoCodec(final String mimeType) {
        if (DEBUG) Log.v(TAG, "selectVideoCodec:");
        // get the list of available codecs
        final int numCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < numCodecs; i++) {
            final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
            if (!codecInfo.isEncoder()) {	// skip decoders
                continue;
            }
            // select first codec that match a specific MIME type and color format
            final String[] types = codecInfo.getSupportedTypes();
            for (int j = 0; j < types.length; j++) {
                if (types[j].equalsIgnoreCase(mimeType)) {
                    if (DEBUG) Log.i(TAG, "codec:" + codecInfo.getName() + ",MIME=" + types[j]);
                    final int format = selectColorFormat(codecInfo, mimeType);
                    if (format > 0) {
                        mColorFormat = format;
                        return codecInfo;
                    }
                }
            }
        }
        return null;
    }

    /**
     * select color format available on specific codec and we can use.
     * @return 0 if no colorFormat is matched
     */
    protected static final int selectColorFormat(final MediaCodecInfo codecInfo, final String mimeType) {
        if (DEBUG) Log.i(TAG, "selectColorFormat: ");
        int result = 0;
        final MediaCodecInfo.CodecCapabilities caps;
        try {
            // querying capabilities can be slow; raise priority while doing it
            Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
            caps = codecInfo.getCapabilitiesForType(mimeType);
        } finally {
            Thread.currentThread().setPriority(Thread.NORM_PRIORITY);
        }
        int colorFormat;
        for (int i = 0; i < caps.colorFormats.length; i++) {
            colorFormat = caps.colorFormats[i];
            if (isRecognizedViewoFormat(colorFormat)) {
                if (result == 0)
                    result = colorFormat;
                break;
            }
        }
        if (result == 0)
            Log.e(TAG, "couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
        return result;
    }

    /**
     * color formats that we can use in this class
     */
    protected static int[] recognizedFormats;
    static {
        recognizedFormats = new int[] {
            MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
            MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
        };
    }

    // linear scan over the (tiny) whitelist above
    private static final boolean isRecognizedViewoFormat(final int colorFormat) {
        if (DEBUG) Log.i(TAG, "isRecognizedViewoFormat:colorFormat=" + colorFormat);
        final int n = recognizedFormats != null ? recognizedFormats.length : 0;
        for (int i = 0; i < n; i++) {
            if (recognizedFormats[i] == colorFormat) {
                return true;
            }
        }
        return false;
    }
}

228
libusbcamera/src/main/java/com/serenegiant/usb/encoder/MediaVideoEncoder.java

@ -0,0 +1,228 @@
/*
* UVCCamera
* library and sample to access to UVC web camera on non-rooted Android device
*
* Copyright (c) 2014-2017 saki t_saki@serenegiant.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* All files in the folder are under this Apache License, Version 2.0.
* Files in the libjpeg-turbo, libusb, libuvc, rapidjson folder
* may have a different license, see the respective files.
*/
package com.serenegiant.usb.encoder;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.util.Log;
import android.view.Surface;
import com.serenegiant.glutils.EGLBase;
import com.serenegiant.glutils.RenderHandler;
import java.io.IOException;
/**
* Encode texture images as H.264 video
* using MediaCodec.
* This class render texture images into recording surface
* camera from MediaCodec encoder using Open GL|ES
*/
public class MediaVideoEncoder extends MediaEncoder implements IVideoEncoder {
private static final boolean DEBUG = true; // TODO set false on release
private static final String TAG = "MediaVideoEncoder";
private static final String MIME_TYPE = "video/avc";
// parameters for recording
private final int mWidth, mHeight;
private static final int FRAME_RATE = 15;
private static final float BPP = 0.50f;
private RenderHandler mRenderHandler;
private Surface mSurface;
/**
 * Creates an H.264 surface encoder of the given size and spins up the
 * RenderHandler thread used to draw texture frames into the codec surface.
 * @param muxer muxer receiving the encoded stream
 * @param width frame width in pixels
 * @param height frame height in pixels
 * @param listener prepared/stopped callbacks
 */
public MediaVideoEncoder(final MediaMuxerWrapper muxer, final int width, final int height, final MediaEncoderListener listener) {
    super(muxer, listener);
    if (DEBUG) Log.i(TAG, "MediaVideoEncoder: ");
    mRenderHandler = RenderHandler.createHandler(TAG);
    mWidth = width;
    mHeight = height;
}
/**
 * Request encoding of one frame and render it with the given texture matrix.
 * @param tex_matrix texture transform matrix handed to the render handler
 * @return true when the encoder accepted the frame
 */
public boolean frameAvailableSoon(final float[] tex_matrix) {
    final boolean accepted = super.frameAvailableSoon();
    if (accepted) {
        mRenderHandler.draw(tex_matrix);
    }
    return accepted;
}
/**
 * This method does not work correctly on this class,
 * use #frameAvailableSoon(final float[]) instead
 * @return true when the encoder accepted the frame
 */
@Override
public boolean frameAvailableSoon() {
    final boolean accepted = super.frameAvailableSoon();
    if (accepted) {
        mRenderHandler.draw(null);
    }
    return accepted;
}
/**
 * Create, configure and start the MediaCodec encoder and its input surface.
 */
@Override
protected void prepare() throws IOException {
    if (DEBUG) Log.i(TAG, "prepare: ");
    mTrackIndex = -1;
    mMuxerStarted = mIsEOS = false;
    final MediaCodecInfo videoCodecInfo = selectVideoCodec(MIME_TYPE);
    if (videoCodecInfo == null) {
        Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
        return;
    }
    if (DEBUG) Log.i(TAG, "selected codec: " + videoCodecInfo.getName());
    final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);	// API >= 18
    format.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate());
    format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
    if (DEBUG) Log.i(TAG, "format: " + format);
    mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
    mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    // get Surface for encoder input
    // this method can only be called between #configure and #start
    mSurface = mMediaCodec.createInputSurface();	// API >= 18
    mMediaCodec.start();
    if (DEBUG) Log.i(TAG, "prepare finishing");
    if (mListener != null) {
        try {
            mListener.onPrepared(this);
        } catch (final Exception e) {
            Log.e(TAG, "prepare:", e);
        }
    }
}
/**
 * Attach the shared EGL context and source texture to the internal
 * RenderHandler so frames can be drawn into the codec input surface.
 * Call after prepare() has created the surface.
 * @param sharedContext EGL context shared with the preview renderer
 * @param tex_id OpenGL texture id holding the camera frame
 */
public void setEglContext(final EGLBase.IContext sharedContext, final int tex_id) {
    mRenderHandler.setEglContext(sharedContext, tex_id, mSurface, true);
}
/**
 * Release the input surface and render handler, then run the base teardown.
 */
@Override
protected void release() {
    if (DEBUG) Log.i(TAG, "release:");
    final Surface surface = mSurface;
    mSurface = null;
    if (surface != null) {
        surface.release();
    }
    final RenderHandler handler = mRenderHandler;
    mRenderHandler = null;
    if (handler != null) {
        handler.release();
    }
    super.release();
}
/**
 * Target bit rate from the bits-per-pixel heuristic: BPP x fps x pixel count.
 */
private int calcBitRate() {
    final float raw = BPP * FRAME_RATE * mWidth * mHeight;
    final int bitrate = (int) raw;
    Log.i(TAG, String.format("bitrate=%5.2f[Mbps]", bitrate / 1024f / 1024f));
    return bitrate;
}
/**
 * select the first codec that match a specific MIME type
 * @param mimeType
 * @return null if no codec matched
 */
protected static final MediaCodecInfo selectVideoCodec(final String mimeType) {
    if (DEBUG) Log.v(TAG, "selectVideoCodec:");
    final int numCodecs = MediaCodecList.getCodecCount();
    for (int idx = 0; idx < numCodecs; idx++) {
        final MediaCodecInfo info = MediaCodecList.getCodecInfoAt(idx);
        if (!info.isEncoder()) {
            continue;	// decoders are of no use here
        }
        for (final String type : info.getSupportedTypes()) {
            if (!type.equalsIgnoreCase(mimeType)) {
                continue;
            }
            if (DEBUG) Log.i(TAG, "codec:" + info.getName() + ",MIME=" + type);
            if (selectColorFormat(info, mimeType) > 0) {
                return info;
            }
        }
    }
    return null;
}
/**
* select color format available on specific codec and we can use.
* @return 0 if no colorFormat is matched
*/
protected static final int selectColorFormat(final MediaCodecInfo codecInfo, final String mimeType) {
if (DEBUG) Log.i(TAG, "selectColorFormat: ");
int result = 0;
final MediaCodecInfo.CodecCapabilities caps;
try {
Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
caps = codecInfo.getCapabilitiesForType(mimeType);
} finally {
Thread.currentThread().setPriority(Thread.NORM_PRIORITY);
}
int colorFormat;
for (int i = 0; i < caps.colorFormats.length; i++) {
colorFormat = caps.colorFormats[i];
if (isRecognizedVideoFormat(colorFormat)) {
if (result == 0)
result = colorFormat;
break;
}
}
if (result == 0)
Log.e(TAG, "couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
return result;
}
/**
* color formats that we can use in this class
*/
protected static int[] recognizedFormats;
static {
recognizedFormats = new int[] {
// MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
// MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
// MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface,
};
}
private static final boolean isRecognizedVideoFormat(final int colorFormat) {
if (DEBUG) Log.i(TAG, "isRecognizedVideoFormat:colorFormat=" + colorFormat);
final int n = recognizedFormats != null ? recognizedFormats.length : 0;
for (int i = 0; i < n; i++) {
if (recognizedFormats[i] == colorFormat) {
return true;
}
}
return false;
}
}

3
libusbcamera/src/main/java/com/serenegiant/usb/widget/CameraViewInterface.java

@ -27,6 +27,7 @@ import android.graphics.Bitmap;
import android.graphics.SurfaceTexture; import android.graphics.SurfaceTexture;
import android.view.Surface; import android.view.Surface;
import com.serenegiant.usb.encoder.IVideoEncoder;
import com.serenegiant.widget.IAspectRatioView; import com.serenegiant.widget.IAspectRatioView;
public interface CameraViewInterface extends IAspectRatioView { public interface CameraViewInterface extends IAspectRatioView {
@ -41,6 +42,6 @@ public interface CameraViewInterface extends IAspectRatioView {
public SurfaceTexture getSurfaceTexture(); public SurfaceTexture getSurfaceTexture();
public Surface getSurface(); public Surface getSurface();
public boolean hasSurface(); public boolean hasSurface();
// public void setVideoEncoder(final IVideoEncoder encoder); public void setVideoEncoder(final IVideoEncoder encoder);
public Bitmap captureStillImage(); public Bitmap captureStillImage();
} }

222
libusbcamera/src/main/java/com/serenegiant/usb/widget/UVCCameraTextureView.java

@ -1,3 +1,26 @@
/*
* UVCCamera
* library and sample to access to UVC web camera on non-rooted Android device
*
* Copyright (c) 2014-2017 saki t_saki@serenegiant.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* All files in the folder are under this Apache License, Version 2.0.
* Files in the libjpeg-turbo, libusb, libuvc, rapidjson folder
* may have a different license, see the respective files.
*/
package com.serenegiant.usb.widget; package com.serenegiant.usb.widget;
import android.content.Context; import android.content.Context;
@ -14,8 +37,17 @@ import android.view.TextureView;
import com.serenegiant.glutils.EGLBase; import com.serenegiant.glutils.EGLBase;
import com.serenegiant.glutils.GLDrawer2D; import com.serenegiant.glutils.GLDrawer2D;
import com.serenegiant.glutils.es1.GLHelper; import com.serenegiant.glutils.es1.GLHelper;
import com.serenegiant.usb.encoder.IVideoEncoder;
import com.serenegiant.usb.encoder.MediaEncoder;
import com.serenegiant.usb.encoder.MediaVideoEncoder;
import com.serenegiant.utils.FpsCounter; import com.serenegiant.utils.FpsCounter;
/**
* change the view size with keeping the specified aspect ratio.
* if you set this view with in a FrameLayout and set property "android:layout_gravity="center",
* you can show this view in the center of screen and keep the aspect ratio of content
* XXX it is better that can set the aspect ratio as xml property
*/
public class UVCCameraTextureView extends AspectRatioTextureView // API >= 14 public class UVCCameraTextureView extends AspectRatioTextureView // API >= 14
implements TextureView.SurfaceTextureListener, CameraViewInterface { implements TextureView.SurfaceTextureListener, CameraViewInterface {
@ -28,9 +60,7 @@ public class UVCCameraTextureView extends AspectRatioTextureView // API >= 14
private Bitmap mTempBitmap; private Bitmap mTempBitmap;
private boolean mReqesutCaptureStillImage; private boolean mReqesutCaptureStillImage;
private Callback mCallback; private Callback mCallback;
/** /** for calculation of frame rate */
* for calculation of frame rate
*/
private final FpsCounter mFpsCounter = new FpsCounter(); private final FpsCounter mFpsCounter = new FpsCounter();
public UVCCameraTextureView(final Context context) { public UVCCameraTextureView(final Context context) {
@ -156,7 +186,6 @@ public class UVCCameraTextureView extends AspectRatioTextureView // API >= 14
} }
private Surface mPreviewSurface; private Surface mPreviewSurface;
@Override @Override
public Surface getSurface() { public Surface getSurface() {
if (DEBUG) Log.v(TAG, "getSurface:hasSurface=" + mHasSurface); if (DEBUG) Log.v(TAG, "getSurface:hasSurface=" + mHasSurface);
@ -169,11 +198,11 @@ public class UVCCameraTextureView extends AspectRatioTextureView // API >= 14
return mPreviewSurface; return mPreviewSurface;
} }
// @Override @Override
// public void setVideoEncoder(final IVideoEncoder encoder) { public void setVideoEncoder(final IVideoEncoder encoder) {
// if (mRenderHandler != null) if (mRenderHandler != null)
// mRenderHandler.setVideoEncoder(encoder); mRenderHandler.setVideoEncoder(encoder);
// } }
@Override @Override
public void setCallback(final Callback callback) { public void setCallback(final Callback callback) {
@ -184,16 +213,13 @@ public class UVCCameraTextureView extends AspectRatioTextureView // API >= 14
mFpsCounter.reset(); mFpsCounter.reset();
} }
/** /** update frame rate of image processing */
* update frame rate of image processing
*/
public void updateFps() { public void updateFps() {
mFpsCounter.update(); mFpsCounter.update();
} }
/** /**
* get current frame rate of image processing * get current frame rate of image processing
*
* @return * @return
*/ */
public float getFps() { public float getFps() {
@ -202,7 +228,6 @@ public class UVCCameraTextureView extends AspectRatioTextureView // API >= 14
/** /**
* get total frame rate from start * get total frame rate from start
*
* @return * @return
*/ */
public float getTotalFps() { public float getTotalFps() {
@ -211,8 +236,8 @@ public class UVCCameraTextureView extends AspectRatioTextureView // API >= 14
/** /**
* render camera frames on this view on a private thread * render camera frames on this view on a private thread
*
* @author saki * @author saki
*
*/ */
private static final class RenderHandler extends Handler private static final class RenderHandler extends Handler
implements SurfaceTexture.OnFrameAvailableListener { implements SurfaceTexture.OnFrameAvailableListener {
@ -240,11 +265,11 @@ public class UVCCameraTextureView extends AspectRatioTextureView // API >= 14
mFpsCounter = counter; mFpsCounter = counter;
} }
// public final void setVideoEncoder(final IVideoEncoder encoder) { public final void setVideoEncoder(final IVideoEncoder encoder) {
// if (DEBUG) Log.v(TAG, "setVideoEncoder:"); if (DEBUG) Log.v(TAG, "setVideoEncoder:");
// if (mIsActive) if (mIsActive)
// sendMessage(obtainMessage(MSG_SET_ENCODER, encoder)); sendMessage(obtainMessage(MSG_SET_ENCODER, encoder));
// } }
public final SurfaceTexture getPreviewTexture() { public final SurfaceTexture getPreviewTexture() {
if (DEBUG) Log.v(TAG, "getPreviewTexture:"); if (DEBUG) Log.v(TAG, "getPreviewTexture:");
@ -297,12 +322,12 @@ public class UVCCameraTextureView extends AspectRatioTextureView // API >= 14
public final void handleMessage(final Message msg) { public final void handleMessage(final Message msg) {
if (mThread == null) return; if (mThread == null) return;
switch (msg.what) { switch (msg.what) {
// case MSG_REQUEST_RENDER: case MSG_REQUEST_RENDER:
// mThread.onDrawFrame(); mThread.onDrawFrame();
// break; break;
// case MSG_SET_ENCODER: case MSG_SET_ENCODER:
// mThread.setEncoder((MediaEncoder)msg.obj); mThread.setEncoder((MediaEncoder)msg.obj);
// break; break;
case MSG_CREATE_SURFACE: case MSG_CREATE_SURFACE:
mThread.updatePreviewSurface(); mThread.updatePreviewSurface();
break; break;
@ -323,24 +348,19 @@ public class UVCCameraTextureView extends AspectRatioTextureView // API >= 14
private final SurfaceTexture mSurface; private final SurfaceTexture mSurface;
private RenderHandler mHandler; private RenderHandler mHandler;
private EGLBase mEgl; private EGLBase mEgl;
/** /** IEglSurface instance related to this TextureView */
* IEglSurface instance related to this TextureView
*/
private EGLBase.IEglSurface mEglSurface; private EGLBase.IEglSurface mEglSurface;
private GLDrawer2D mDrawer; private GLDrawer2D mDrawer;
private int mTexId = -1; private int mTexId = -1;
/** /** SurfaceTexture instance to receive video images */
* SurfaceTexture instance to receive video images
*/
private SurfaceTexture mPreviewSurface; private SurfaceTexture mPreviewSurface;
private final float[] mStMatrix = new float[16]; private final float[] mStMatrix = new float[16];
// private MediaEncoder mEncoder; private MediaEncoder mEncoder;
private int mViewWidth, mViewHeight; private int mViewWidth, mViewHeight;
private final FpsCounter mFpsCounter; private final FpsCounter mFpsCounter;
/** /**
* constructor * constructor
*
* @param surface: drawing surface came from TexureView * @param surface: drawing surface came from TexureView
*/ */
public RenderThread(final FpsCounter fpsCounter, final SurfaceTexture surface, final int width, final int height) { public RenderThread(final FpsCounter fpsCounter, final SurfaceTexture surface, final int width, final int height) {
@ -400,24 +420,122 @@ public class UVCCameraTextureView extends AspectRatioTextureView // API >= 14
} }
} }
// public final void onDrawFrame() { public final void setEncoder(final MediaEncoder encoder) {
// mEglSurface.makeCurrent(); if (DEBUG) Log.v(TAG, "RenderThread#setEncoder:encoder=" + encoder);
// // update texture(came from camera) if (encoder != null && (encoder instanceof MediaVideoEncoder)) {
// mPreviewSurface.updateTexImage(); ((MediaVideoEncoder)encoder).setEglContext(mEglSurface.getContext(), mTexId);
// // get texture matrix }
// mPreviewSurface.getTransformMatrix(mStMatrix); mEncoder = encoder;
// // notify video encoder if it exist }
// if (mEncoder != null) {
// // notify to capturing thread that the camera frame is available. /*
// if (mEncoder instanceof MediaVideoEncoder) * Now you can get frame data as ByteBuffer(as YUV/RGB565/RGBX/NV21 pixel format) using IFrameCallback interface
// ((MediaVideoEncoder) mEncoder).frameAvailableSoon(mStMatrix); * with UVCCamera#setFrameCallback instead of using following code samples.
// else */
// mEncoder.frameAvailableSoon(); /* // for part1
// } private static final int BUF_NUM = 1;
// // draw to preview screen private static final int BUF_STRIDE = 640 * 480;
// mDrawer.draw(mTexId, mStMatrix, 0); private static final int BUF_SIZE = BUF_STRIDE * BUF_NUM;
// mEglSurface.swap(); int cnt = 0;
// } int offset = 0;
final int pixels[] = new int[BUF_SIZE];
final IntBuffer buffer = IntBuffer.wrap(pixels); */
/* // for part2
private ByteBuffer buf = ByteBuffer.allocateDirect(640 * 480 * 4);
*/
/**
* draw a frame (and request to draw for video capturing if it is necessary)
*/
public final void onDrawFrame() {
mEglSurface.makeCurrent();
// update texture(came from camera)
mPreviewSurface.updateTexImage();
// get texture matrix
mPreviewSurface.getTransformMatrix(mStMatrix);
// notify video encoder if it exist
if (mEncoder != null) {
// notify to capturing thread that the camera frame is available.
if (mEncoder instanceof MediaVideoEncoder)
((MediaVideoEncoder)mEncoder).frameAvailableSoon(mStMatrix);
else
mEncoder.frameAvailableSoon();
}
// draw to preview screen
mDrawer.draw(mTexId, mStMatrix, 0);
mEglSurface.swap();
/* // sample code to read pixels into Buffer and save as a Bitmap (part1)
buffer.position(offset);
GLES20.glReadPixels(0, 0, 640, 480, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
if (++cnt == 100) { // save as a Bitmap, only once on this sample code
// if you save every frame as a Bitmap, app will crash by Out of Memory exception...
Log.i(TAG, "Capture image using glReadPixels:offset=" + offset);
final Bitmap bitmap = createBitmap(pixels,offset, 640, 480);
final File outputFile = MediaMuxerWrapper.getCaptureFile(Environment.DIRECTORY_DCIM, ".png");
try {
final BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(outputFile));
try {
try {
bitmap.compress(CompressFormat.PNG, 100, os);
os.flush();
bitmap.recycle();
} catch (IOException e) {
}
} finally {
os.close();
}
} catch (FileNotFoundException e) {
} catch (IOException e) {
}
}
offset = (offset + BUF_STRIDE) % BUF_SIZE;
*/
/* // sample code to read pixels into Buffer and save as a Bitmap (part2)
buf.order(ByteOrder.LITTLE_ENDIAN); // it is enough to call this only once.
GLES20.glReadPixels(0, 0, 640, 480, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
buf.rewind();
if (++cnt == 100) { // save as a Bitmap, only once on this sample code
// if you save every frame as a Bitmap, app will crash by Out of Memory exception...
final File outputFile = MediaMuxerWrapper.getCaptureFile(Environment.DIRECTORY_DCIM, ".png");
BufferedOutputStream os = null;
try {
try {
os = new BufferedOutputStream(new FileOutputStream(outputFile));
Bitmap bmp = Bitmap.createBitmap(640, 480, Bitmap.Config.ARGB_8888);
bmp.copyPixelsFromBuffer(buf);
bmp.compress(Bitmap.CompressFormat.PNG, 90, os);
bmp.recycle();
} finally {
if (os != null) os.close();
}
} catch (FileNotFoundException e) {
} catch (IOException e) {
}
}
*/
}
/* // sample code to read pixels into IntBuffer and save as a Bitmap (part1)
private static Bitmap createBitmap(final int[] pixels, final int offset, final int width, final int height) {
final Paint paint = new Paint(Paint.FILTER_BITMAP_FLAG);
paint.setColorFilter(new ColorMatrixColorFilter(new ColorMatrix(new float[] {
0, 0, 1, 0, 0,
0, 1, 0, 0, 0,
1, 0, 0, 0, 0,
0, 0, 0, 1, 0
})));
final Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
final Canvas canvas = new Canvas(bitmap);
final Matrix matrix = new Matrix();
matrix.postScale(1.0f, -1.0f);
matrix.postTranslate(0, height);
canvas.concat(matrix);
canvas.drawBitmap(pixels, offset, width, 0, 0, width, height, false, paint);
return bitmap;
} */
@Override @Override
public final void run() { public final void run() {

BIN
libusbcamera/src/main/res/raw/camera_click.ogg

Binary file not shown.
Loading…
Cancel
Save