Browse Source

xuigai

main
jiangdongguo 7 years ago
parent
commit
6340444eb9
  1. 8
      app/app.iml
  2. 2
      app/build.gradle
  3. 2
      libusbcamera/build.gradle
  4. 24
      libusbcamera/libusbcamera.iml
  5. 811
      libusbcamera/src/main/java/com/serenegiant/usb/common/AbstractUVCCameraHandler.java
  6. 135
      libusbcamera/src/main/java/com/serenegiant/usb/common/UVCCameraHandler.java
  7. 180
      libusbcamera/src/main/java/com/serenegiant/usb/common/UVCCameraHandlerMultiSurface.java
  8. 113
      libusbcamera/src/main/java/com/serenegiant/usb/widget/AspectRatioTextureView.java
  9. 46
      libusbcamera/src/main/java/com/serenegiant/usb/widget/CameraViewInterface.java
  10. 477
      libusbcamera/src/main/java/com/serenegiant/usb/widget/UVCCameraTextureView.java
  11. 6
      local.properties

8
app/app.iml

@@ -82,21 +82,13 @@
<sourceFolder url="file://$MODULE_DIR$/src/test/jni" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/test/rs" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/test/shaders" isTestSource="true" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/assets" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/blame" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/classes" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/incremental" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/incremental-safeguard" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/jniLibs" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/manifests" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/pre-dexed" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/res" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/rs" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/shaders" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/symbols" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/transforms" />
<excludeFolder url="file://$MODULE_DIR$/build/outputs" />
<excludeFolder url="file://$MODULE_DIR$/build/tmp" />
</content>
<orderEntry type="jdk" jdkName="Android API 25 Platform" jdkType="Android SDK" />
<orderEntry type="sourceFolder" forTests="false" />

2
app/build.gradle

@@ -5,7 +5,7 @@ android {
buildToolsVersion "26.0.1"
defaultConfig {
applicationId "com.jiangdg.usbcamera"
minSdkVersion 15
minSdkVersion 18
targetSdkVersion 25
versionCode 1
versionName "1.0"

2
libusbcamera/build.gradle

@@ -5,7 +5,7 @@ android {
buildToolsVersion "26.0.1"
defaultConfig {
minSdkVersion 15
minSdkVersion 18
targetSdkVersion 25
versionCode 1
versionName "1.0"

24
libusbcamera/libusbcamera.iml

@@ -64,17 +64,9 @@
<sourceFolder url="file://$MODULE_DIR$/src/main/assets" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/main/aidl" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/main/java" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/main/jniLibs" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/main/jni" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/main/rs" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/main/shaders" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/res" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/resources" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/assets" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/aidl" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/java" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/jni" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/rs" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/shaders" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/test/res" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/test/resources" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/test/assets" type="java-test-resource" />
@@ -83,22 +75,24 @@
<sourceFolder url="file://$MODULE_DIR$/src/test/jni" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/test/rs" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/test/shaders" isTestSource="true" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/annotations" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/res" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/resources" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/assets" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/aidl" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/java" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/jni" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/rs" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/shaders" isTestSource="true" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/blame" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/bundles" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/classes" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/incremental" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/incremental-safeguard" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/jniLibs" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/lint" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/manifests" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/res" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/rs" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/shaders" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/symbols" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/transforms" />
<excludeFolder url="file://$MODULE_DIR$/build/outputs" />
<excludeFolder url="file://$MODULE_DIR$/build/tmp" />
</content>
<orderEntry type="jdk" jdkName="Android API 25 Platform" jdkType="Android SDK" />
<orderEntry type="sourceFolder" forTests="false" />

811
libusbcamera/src/main/java/com/serenegiant/usb/common/AbstractUVCCameraHandler.java

@@ -0,0 +1,811 @@
package com.serenegiant.usb.common;
import android.app.Activity;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.usb.UsbDevice;
import android.media.AudioManager;
import android.media.MediaScannerConnection;
import android.media.SoundPool;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import com.jiangdg.libusbcamera.R;
import com.serenegiant.usb.IFrameCallback;
import com.serenegiant.usb.USBMonitor;
import com.serenegiant.usb.UVCCamera;
import com.serenegiant.usb.widget.CameraViewInterface;
import java.lang.ref.WeakReference;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.nio.ByteBuffer;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;
abstract class AbstractUVCCameraHandler extends Handler {
private static final boolean DEBUG = true; // TODO set false on release
private static final String TAG = "AbsUVCCameraHandler";
public interface CameraCallback {
public void onOpen();
public void onClose();
public void onStartPreview();
public void onStopPreview();
public void onStartRecording();
public void onStopRecording();
public void onError(final Exception e);
}
private static final int MSG_OPEN = 0;
private static final int MSG_CLOSE = 1;
private static final int MSG_PREVIEW_START = 2;
private static final int MSG_PREVIEW_STOP = 3;
private static final int MSG_CAPTURE_STILL = 4;
private static final int MSG_CAPTURE_START = 5;
private static final int MSG_CAPTURE_STOP = 6;
private static final int MSG_MEDIA_UPDATE = 7;
private static final int MSG_RELEASE = 9;
private final WeakReference<CameraThread> mWeakThread;
private volatile boolean mReleased;
protected AbstractUVCCameraHandler(final CameraThread thread) {
mWeakThread = new WeakReference<CameraThread>(thread);
}
public int getWidth() {
final CameraThread thread = mWeakThread.get();
return thread != null ? thread.getWidth() : 0;
}
public int getHeight() {
final CameraThread thread = mWeakThread.get();
return thread != null ? thread.getHeight() : 0;
}
public boolean isOpened() {
final CameraThread thread = mWeakThread.get();
return thread != null && thread.isCameraOpened();
}
public boolean isPreviewing() {
final CameraThread thread = mWeakThread.get();
return thread != null && thread.isPreviewing();
}
public boolean isRecording() {
final CameraThread thread = mWeakThread.get();
return thread != null && thread.isRecording();
}
public boolean isEqual(final UsbDevice device) {
final CameraThread thread = mWeakThread.get();
return (thread != null) && thread.isEqual(device);
}
protected boolean isCameraThread() {
final CameraThread thread = mWeakThread.get();
return thread != null && (thread.getId() == Thread.currentThread().getId());
}
protected boolean isReleased() {
final CameraThread thread = mWeakThread.get();
return mReleased || (thread == null);
}
protected void checkReleased() {
if (isReleased()) {
throw new IllegalStateException("already released");
}
}
public void open(final USBMonitor.UsbControlBlock ctrlBlock) {
checkReleased();
sendMessage(obtainMessage(MSG_OPEN, ctrlBlock));
}
public void close() {
if (DEBUG) Log.v(TAG, "close:");
if (isOpened()) {
stopPreview();
sendEmptyMessage(MSG_CLOSE);
}
if (DEBUG) Log.v(TAG, "close:finished");
}
public void resize(final int width, final int height) {
checkReleased();
throw new UnsupportedOperationException("does not support now");
}
protected void startPreview(final Object surface) {
checkReleased();
if (!((surface instanceof SurfaceHolder) || (surface instanceof Surface) || (surface instanceof SurfaceTexture))) {
throw new IllegalArgumentException("surface should be one of SurfaceHolder, Surface or SurfaceTexture");
}
sendMessage(obtainMessage(MSG_PREVIEW_START, surface));
}
public void stopPreview() {
if (DEBUG) Log.v(TAG, "stopPreview:");
removeMessages(MSG_PREVIEW_START);
stopRecording();
if (isPreviewing()) {
final CameraThread thread = mWeakThread.get();
if (thread == null) return;
synchronized (thread.mSync) {
sendEmptyMessage(MSG_PREVIEW_STOP);
if (!isCameraThread()) {
// wait for actually preview stopped to avoid releasing Surface/SurfaceTexture
// while preview is still running.
// therefore this method will take a time to execute
try {
thread.mSync.wait();
} catch (final InterruptedException e) {
}
}
}
}
if (DEBUG) Log.v(TAG, "stopPreview:finished");
}
protected void captureStill() {
checkReleased();
sendEmptyMessage(MSG_CAPTURE_STILL);
}
protected void captureStill(final String path) {
checkReleased();
sendMessage(obtainMessage(MSG_CAPTURE_STILL, path));
}
public void startRecording() {
checkReleased();
sendEmptyMessage(MSG_CAPTURE_START);
}
public void stopRecording() {
sendEmptyMessage(MSG_CAPTURE_STOP);
}
public void release() {
mReleased = true;
close();
sendEmptyMessage(MSG_RELEASE);
}
public void addCallback(final CameraCallback callback) {
checkReleased();
if (!mReleased && (callback != null)) {
final CameraThread thread = mWeakThread.get();
if (thread != null) {
thread.mCallbacks.add(callback);
}
}
}
public void removeCallback(final CameraCallback callback) {
if (callback != null) {
final CameraThread thread = mWeakThread.get();
if (thread != null) {
thread.mCallbacks.remove(callback);
}
}
}
protected void updateMedia(final String path) {
sendMessage(obtainMessage(MSG_MEDIA_UPDATE, path));
}
public boolean checkSupportFlag(final long flag) {
checkReleased();
final CameraThread thread = mWeakThread.get();
return thread != null && thread.mUVCCamera != null && thread.mUVCCamera.checkSupportFlag(flag);
}
public int getValue(final int flag) {
checkReleased();
final CameraThread thread = mWeakThread.get();
final UVCCamera camera = thread != null ? thread.mUVCCamera : null;
if (camera != null) {
if (flag == UVCCamera.PU_BRIGHTNESS) {
return camera.getBrightness();
} else if (flag == UVCCamera.PU_CONTRAST) {
return camera.getContrast();
}
}
throw new IllegalStateException();
}
public int setValue(final int flag, final int value) {
checkReleased();
final CameraThread thread = mWeakThread.get();
final UVCCamera camera = thread != null ? thread.mUVCCamera : null;
if (camera != null) {
if (flag == UVCCamera.PU_BRIGHTNESS) {
camera.setBrightness(value);
return camera.getBrightness();
} else if (flag == UVCCamera.PU_CONTRAST) {
camera.setContrast(value);
return camera.getContrast();
}
}
throw new IllegalStateException();
}
public int resetValue(final int flag) {
checkReleased();
final CameraThread thread = mWeakThread.get();
final UVCCamera camera = thread != null ? thread.mUVCCamera : null;
if (camera != null) {
if (flag == UVCCamera.PU_BRIGHTNESS) {
camera.resetBrightness();
return camera.getBrightness();
} else if (flag == UVCCamera.PU_CONTRAST) {
camera.resetContrast();
return camera.getContrast();
}
}
throw new IllegalStateException();
}
@Override
public void handleMessage(final Message msg) {
final CameraThread thread = mWeakThread.get();
if (thread == null) return;
switch (msg.what) {
case MSG_OPEN:
thread.handleOpen((USBMonitor.UsbControlBlock) msg.obj);
break;
case MSG_CLOSE:
thread.handleClose();
break;
case MSG_PREVIEW_START:
thread.handleStartPreview(msg.obj);
break;
case MSG_PREVIEW_STOP:
thread.handleStopPreview();
break;
// case MSG_CAPTURE_STILL:
// thread.handleCaptureStill((String) msg.obj);
// break;
// case MSG_CAPTURE_START:
// thread.handleStartRecording();
// break;
// case MSG_CAPTURE_STOP:
// thread.handleStopRecording();
// break;
case MSG_MEDIA_UPDATE:
thread.handleUpdateMedia((String) msg.obj);
break;
case MSG_RELEASE:
thread.handleRelease();
break;
default:
throw new RuntimeException("unsupported message:what=" + msg.what);
}
}
static final class CameraThread extends Thread {
private static final String TAG_THREAD = "CameraThread";
private final Object mSync = new Object();
private final Class<? extends AbstractUVCCameraHandler> mHandlerClass;
private final WeakReference<Activity> mWeakParent;
private final WeakReference<CameraViewInterface> mWeakCameraView;
private final int mEncoderType;
private final Set<CameraCallback> mCallbacks = new CopyOnWriteArraySet<CameraCallback>();
private int mWidth, mHeight, mPreviewMode;
private float mBandwidthFactor;
private boolean mIsPreviewing;
private boolean mIsRecording;
/**
* shutter sound
*/
private SoundPool mSoundPool;
private int mSoundId;
private AbstractUVCCameraHandler mHandler;
/**
* for accessing UVC camera
*/
private UVCCamera mUVCCamera;
/**
* muxer for audio/video recording
*/
// private MediaMuxerWrapper mMuxer;
// private MediaVideoBufferEncoder mVideoEncoder;
/**
* @param clazz Class extends AbstractUVCCameraHandler
* @param parent parent Activity
* @param cameraView for still capturing
* @param encoderType 0: use MediaSurfaceEncoder, 1: use MediaVideoEncoder, 2: use MediaVideoBufferEncoder
* @param width
* @param height
* @param format either FRAME_FORMAT_YUYV(0) or FRAME_FORMAT_MJPEG(1)
* @param bandwidthFactor
*/
CameraThread(final Class<? extends AbstractUVCCameraHandler> clazz,
final Activity parent, final CameraViewInterface cameraView,
final int encoderType, final int width, final int height, final int format,
final float bandwidthFactor) {
super("CameraThread");
mHandlerClass = clazz;
mEncoderType = encoderType;
mWidth = width;
mHeight = height;
mPreviewMode = format;
mBandwidthFactor = bandwidthFactor;
mWeakParent = new WeakReference<Activity>(parent);
mWeakCameraView = new WeakReference<CameraViewInterface>(cameraView);
loadShutterSound(parent);
}
@Override
protected void finalize() throws Throwable {
Log.i(TAG, "CameraThread#finalize");
super.finalize();
}
public AbstractUVCCameraHandler getHandler() {
if (DEBUG) Log.v(TAG_THREAD, "getHandler:");
synchronized (mSync) {
if (mHandler == null)
try {
mSync.wait();
} catch (final InterruptedException e) {
}
}
return mHandler;
}
public int getWidth() {
synchronized (mSync) {
return mWidth;
}
}
public int getHeight() {
synchronized (mSync) {
return mHeight;
}
}
public boolean isCameraOpened() {
synchronized (mSync) {
return mUVCCamera != null;
}
}
public boolean isPreviewing() {
synchronized (mSync) {
return mUVCCamera != null && mIsPreviewing;
}
}
/**
 * Whether video recording is currently active.
 *
 * FIX: the original compared {@code mMuxer != null}, but the
 * MediaMuxerWrapper field is commented out in this revision, leaving an
 * unresolved symbol. Use the {@code mIsRecording} flag (set by the
 * recording lifecycle callbacks) instead; the visible behavior — "camera
 * open AND a recording in progress" — is preserved.
 */
public boolean isRecording() {
    synchronized (mSync) {
        return (mUVCCamera != null) && mIsRecording;
    }
}
public boolean isEqual(final UsbDevice device) {
return (mUVCCamera != null) && (mUVCCamera.getDevice() != null) && mUVCCamera.getDevice().equals(device);
}
/**
 * Opens the UVC camera over the given USB control block.
 * Any previously opened camera is closed first, so repeated calls are safe.
 * Runs on the camera thread (dispatched via MSG_OPEN).
 *
 * @param ctrlBlock USB control block obtained from USBMonitor on device permission
 */
public void handleOpen(final USBMonitor.UsbControlBlock ctrlBlock) {
    if (DEBUG) Log.v(TAG_THREAD, "handleOpen:");
    handleClose();
    try {
        final UVCCamera camera = new UVCCamera();
        camera.open(ctrlBlock);
        // publish only a fully-opened camera under the lock, so readers
        // (isCameraOpened etc.) never observe a half-initialized instance
        synchronized (mSync) {
            mUVCCamera = camera;
        }
        callOnOpen();
    } catch (final Exception e) {
        // open failed — report to registered callbacks instead of crashing the thread
        callOnError(e);
    }
    if (DEBUG)
        Log.i(TAG, "supportedSize:" + (mUVCCamera != null ? mUVCCamera.getSupportedSize() : null));
}
/**
 * Closes and destroys the current UVC camera, if any.
 * The field is cleared under the lock first; the slow stopPreview/destroy
 * calls then run outside the lock so readers are not blocked.
 */
public void handleClose() {
    if (DEBUG) Log.v(TAG_THREAD, "handleClose:");
    // handleStopRecording();
    final UVCCamera camera;
    synchronized (mSync) {
        camera = mUVCCamera;
        mUVCCamera = null;
    }
    if (camera != null) {
        camera.stopPreview();
        camera.destroy();
        // onClose is only fired when a camera was actually open
        callOnClose();
    }
}
/**
 * Starts the camera preview on the given rendering target.
 * Tries the configured frame format first and falls back to YUV if the
 * device rejects it. Runs on the camera thread (dispatched via
 * MSG_PREVIEW_START).
 *
 * @param surface one of SurfaceHolder, Surface or SurfaceTexture
 *                (type-checked by AbstractUVCCameraHandler#startPreview)
 */
public void handleStartPreview(final Object surface) {
    if (DEBUG) Log.v(TAG_THREAD, "handleStartPreview:");
    if ((mUVCCamera == null) || mIsPreviewing) return;
    try {
        mUVCCamera.setPreviewSize(mWidth, mHeight, 1, 31, mPreviewMode, mBandwidthFactor);
    } catch (final IllegalArgumentException e) {
        try {
            // fallback to YUV mode
            mUVCCamera.setPreviewSize(mWidth, mHeight, 1, 31, UVCCamera.DEFAULT_PREVIEW_MODE, mBandwidthFactor);
        } catch (final IllegalArgumentException e1) {
            callOnError(e1);
            return;
        }
    }
    // FIX: the original used two independent if-statements here, so a
    // SurfaceHolder also fell into the trailing else-branch and was cast to
    // SurfaceTexture, throwing ClassCastException. A single if/else-if/else
    // chain dispatches each supported target exactly once.
    if (surface instanceof SurfaceHolder) {
        mUVCCamera.setPreviewDisplay((SurfaceHolder) surface);
    } else if (surface instanceof Surface) {
        mUVCCamera.setPreviewDisplay((Surface) surface);
    } else {
        mUVCCamera.setPreviewTexture((SurfaceTexture) surface);
    }
    mUVCCamera.startPreview();
    mUVCCamera.updateCameraParams();
    synchronized (mSync) {
        mIsPreviewing = true;
    }
    callOnStartPreview();
}
/**
 * Stops the running preview, if any.
 * The mSync.notifyAll() is the other half of the handshake with
 * AbstractUVCCameraHandler#stopPreview(), which blocks on mSync.wait()
 * until the preview has really stopped before releasing its
 * Surface/SurfaceTexture.
 */
public void handleStopPreview() {
    if (DEBUG) Log.v(TAG_THREAD, "handleStopPreview:");
    if (mIsPreviewing) {
        if (mUVCCamera != null) {
            mUVCCamera.stopPreview();
        }
        synchronized (mSync) {
            mIsPreviewing = false;
            mSync.notifyAll(); // wake any caller blocked in stopPreview()
        }
        callOnStopPreview();
    }
    if (DEBUG) Log.v(TAG_THREAD, "handleStopPreview:finished");
}
// public void handleCaptureStill(final String path) {
// if (DEBUG) Log.v(TAG_THREAD, "handleCaptureStill:");
// final Activity parent = mWeakParent.get();
// if (parent == null) return;
// mSoundPool.play(mSoundId, 0.2f, 0.2f, 0, 0, 1.0f); // play shutter sound
// try {
// final Bitmap bitmap = mWeakCameraView.get().captureStillImage();
// // get buffered output stream for saving a captured still image as a file on external storage.
// // the file name is came from current time.
// // You should use extension name as same as CompressFormat when calling Bitmap#compress.
// final File outputFile = TextUtils.isEmpty(path)
// ? MediaMuxerWrapper.getCaptureFile(Environment.DIRECTORY_DCIM, ".png")
// : new File(path);
// final BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(outputFile));
// try {
// try {
// bitmap.compress(Bitmap.CompressFormat.PNG, 100, os);
// os.flush();
// mHandler.sendMessage(mHandler.obtainMessage(MSG_MEDIA_UPDATE, outputFile.getPath()));
// } catch (final IOException e) {
// }
// } finally {
// os.close();
// }
// } catch (final Exception e) {
// callOnError(e);
// }
// }
// public void handleStartRecording() {
// if (DEBUG) Log.v(TAG_THREAD, "handleStartRecording:");
// try {
// if ((mUVCCamera == null) || (mMuxer != null)) return;
// final MediaMuxerWrapper muxer = new MediaMuxerWrapper(".mp4"); // if you record audio only, ".m4a" is also OK.
// MediaVideoBufferEncoder videoEncoder = null;
// switch (mEncoderType) {
// case 1: // for video capturing using MediaVideoEncoder
// new MediaVideoEncoder(muxer, getWidth(), getHeight(), mMediaEncoderListener);
// break;
// case 2: // for video capturing using MediaVideoBufferEncoder
// videoEncoder = new MediaVideoBufferEncoder(muxer, getWidth(), getHeight(), mMediaEncoderListener);
// break;
// // case 0: // for video capturing using MediaSurfaceEncoder
// default:
// new MediaSurfaceEncoder(muxer, getWidth(), getHeight(), mMediaEncoderListener);
// break;
// }
// if (true) {
// // for audio capturing
// new MediaAudioEncoder(muxer, mMediaEncoderListener);
// }
// muxer.prepare();
// muxer.startRecording();
// if (videoEncoder != null) {
// mUVCCamera.setFrameCallback(mIFrameCallback, UVCCamera.PIXEL_FORMAT_NV21);
// }
// synchronized (mSync) {
// mMuxer = muxer;
// mVideoEncoder = videoEncoder;
// }
// callOnStartRecording();
// } catch (final IOException e) {
// callOnError(e);
// Log.e(TAG, "startCapture:", e);
// }
// }
// public void handleStopRecording() {
// if (DEBUG) Log.v(TAG_THREAD, "handleStopRecording:mMuxer=" + mMuxer);
// final MediaMuxerWrapper muxer;
// synchronized (mSync) {
// muxer = mMuxer;
// mMuxer = null;
// mVideoEncoder = null;
// if (mUVCCamera != null) {
// mUVCCamera.stopCapture();
// }
// }
// try {
// mWeakCameraView.get().setVideoEncoder(null);
// } catch (final Exception e) {
// // ignore
// }
// if (muxer != null) {
// muxer.stopRecording();
// mUVCCamera.setFrameCallback(null, 0);
// // you should not wait here
// callOnStopRecording();
// }
// }
private final IFrameCallback mIFrameCallback = new IFrameCallback() {
@Override
public void onFrame(final ByteBuffer frame) {
// final MediaVideoBufferEncoder videoEncoder;
// synchronized (mSync) {
// videoEncoder = mVideoEncoder;
// }
// if (videoEncoder != null) {
// videoEncoder.frameAvailableSoon();
// videoEncoder.encode(frame);
// }
}
};
/**
 * Registers a newly written media file with the system MediaScanner so it
 * appears in gallery apps, then releases this camera thread if the handler
 * is already released or the parent Activity is gone.
 *
 * @param path absolute path of the captured image / recorded movie
 */
public void handleUpdateMedia(final String path) {
    if (DEBUG) Log.v(TAG_THREAD, "handleUpdateMedia:path=" + path);
    final Activity parent = mWeakParent.get();
    final boolean released = (mHandler == null) || mHandler.mReleased;
    if (parent != null && parent.getApplicationContext() != null) {
        try {
            if (DEBUG) Log.i(TAG, "MediaScannerConnection#scanFile");
            MediaScannerConnection.scanFile(parent.getApplicationContext(), new String[]{path}, null, null);
        } catch (final Exception e) {
            // scanning is best-effort; log and continue
            Log.e(TAG, "handleUpdateMedia:", e);
        }
        // Activity#isDestroyed requires API 17; module minSdkVersion is 18
        if (released || parent.isDestroyed())
            handleRelease();
    } else {
        Log.w(TAG, "MainActivity already destroyed");
        // give up to add this movie to MediaStore now.
        // Seeing this movie on Gallery app etc. will take a lot of time.
        handleRelease();
    }
}
/**
 * Final teardown of the camera thread: closes the camera, clears all
 * callbacks and, when not recording, quits this thread's Looper so run()
 * can finish.
 *
 * FIX: null-guard both mHandler (it stays null if the reflective handler
 * construction in run() failed) and Looper.myLooper() (null when this is
 * ever invoked off a Looper thread) — the original dereferenced both
 * unconditionally.
 */
public void handleRelease() {
    if (DEBUG) Log.v(TAG_THREAD, "handleRelease:mIsRecording=" + mIsRecording);
    handleClose();
    mCallbacks.clear();
    if (!mIsRecording) {
        if (mHandler != null) {
            mHandler.mReleased = true;
        }
        final Looper looper = Looper.myLooper();
        if (looper != null) {
            looper.quit();
        }
    }
    if (DEBUG) Log.v(TAG_THREAD, "handleRelease:finished");
}
// private final MediaEncoder.MediaEncoderListener mMediaEncoderListener = new MediaEncoder.MediaEncoderListener() {
// @Override
// public void onPrepared(final MediaEncoder encoder) {
// if (DEBUG) Log.v(TAG, "onPrepared:encoder=" + encoder);
// mIsRecording = true;
// if (encoder instanceof MediaVideoEncoder)
// try {
// mWeakCameraView.get().setVideoEncoder((MediaVideoEncoder) encoder);
// } catch (final Exception e) {
// Log.e(TAG, "onPrepared:", e);
// }
// if (encoder instanceof MediaSurfaceEncoder)
// try {
// mWeakCameraView.get().setVideoEncoder((MediaSurfaceEncoder) encoder);
// mUVCCamera.startCapture(((MediaSurfaceEncoder) encoder).getInputSurface());
// } catch (final Exception e) {
// Log.e(TAG, "onPrepared:", e);
// }
// }
//
// @Override
// public void onStopped(final MediaEncoder encoder) {
// if (DEBUG) Log.v(TAG_THREAD, "onStopped:encoder=" + encoder);
// if ((encoder instanceof MediaVideoEncoder)
// || (encoder instanceof MediaSurfaceEncoder))
// try {
// mIsRecording = false;
// final Activity parent = mWeakParent.get();
// mWeakCameraView.get().setVideoEncoder(null);
// synchronized (mSync) {
// if (mUVCCamera != null) {
// mUVCCamera.stopCapture();
// }
// }
// final String path = encoder.getOutputPath();
// if (!TextUtils.isEmpty(path)) {
// mHandler.sendMessageDelayed(mHandler.obtainMessage(MSG_MEDIA_UPDATE, path), 1000);
// } else {
// final boolean released = (mHandler == null) || mHandler.mReleased;
// if (released || parent == null || parent.isDestroyed()) {
// handleRelease();
// }
// }
// } catch (final Exception e) {
// Log.e(TAG, "onPrepared:", e);
// }
// }
// };
/**
* prepare and load shutter sound for still image capturing
*/
@SuppressWarnings("deprecation")
private void loadShutterSound(final Context context) {
// get system stream type using reflection
int streamType;
try {
final Class<?> audioSystemClass = Class.forName("android.media.AudioSystem");
final Field sseField = audioSystemClass.getDeclaredField("STREAM_SYSTEM_ENFORCED");
streamType = sseField.getInt(null);
} catch (final Exception e) {
streamType = AudioManager.STREAM_SYSTEM; // set appropriate according to your app policy
}
if (mSoundPool != null) {
try {
mSoundPool.release();
} catch (final Exception e) {
}
mSoundPool = null;
}
// load shutter sound from resource
mSoundPool = new SoundPool(2, streamType, 0);
// mSoundId = mSoundPool.load(context, R.raw.camera_click, 1);
}
@Override
public void run() {
Looper.prepare();
AbstractUVCCameraHandler handler = null;
try {
final Constructor<? extends AbstractUVCCameraHandler> constructor = mHandlerClass.getDeclaredConstructor(CameraThread.class);
handler = constructor.newInstance(this);
} catch (final NoSuchMethodException e) {
Log.w(TAG, e);
} catch (final IllegalAccessException e) {
Log.w(TAG, e);
} catch (final InstantiationException e) {
Log.w(TAG, e);
} catch (final InvocationTargetException e) {
Log.w(TAG, e);
}
if (handler != null) {
synchronized (mSync) {
mHandler = handler;
mSync.notifyAll();
}
Looper.loop();
if (mSoundPool != null) {
mSoundPool.release();
mSoundPool = null;
}
if (mHandler != null) {
mHandler.mReleased = true;
}
}
mCallbacks.clear();
synchronized (mSync) {
mHandler = null;
mSync.notifyAll();
}
}
private void callOnOpen() {
for (final CameraCallback callback : mCallbacks) {
try {
callback.onOpen();
} catch (final Exception e) {
mCallbacks.remove(callback);
Log.w(TAG, e);
}
}
}
private void callOnClose() {
for (final CameraCallback callback : mCallbacks) {
try {
callback.onClose();
} catch (final Exception e) {
mCallbacks.remove(callback);
Log.w(TAG, e);
}
}
}
private void callOnStartPreview() {
for (final CameraCallback callback : mCallbacks) {
try {
callback.onStartPreview();
} catch (final Exception e) {
mCallbacks.remove(callback);
Log.w(TAG, e);
}
}
}
private void callOnStopPreview() {
for (final CameraCallback callback : mCallbacks) {
try {
callback.onStopPreview();
} catch (final Exception e) {
mCallbacks.remove(callback);
Log.w(TAG, e);
}
}
}
private void callOnStartRecording() {
for (final CameraCallback callback : mCallbacks) {
try {
callback.onStartRecording();
} catch (final Exception e) {
mCallbacks.remove(callback);
Log.w(TAG, e);
}
}
}
private void callOnStopRecording() {
for (final CameraCallback callback : mCallbacks) {
try {
callback.onStopRecording();
} catch (final Exception e) {
mCallbacks.remove(callback);
Log.w(TAG, e);
}
}
}
/**
 * Delivers an error to every registered callback.
 * A callback that itself throws is removed so a single bad listener cannot
 * break delivery to the others (CopyOnWriteArraySet tolerates removal
 * during iteration).
 *
 * @param e the camera error being reported
 */
private void callOnError(final Exception e) {
    for (final CameraCallback callback : mCallbacks) {
        try {
            callback.onError(e);
        } catch (final Exception e1) {
            mCallbacks.remove(callback);
            // FIX: log the callback's own failure (e1); the original logged
            // the camera error (e) again and silently swallowed e1, matching
            // none of the sibling callOn* methods' intent.
            Log.w(TAG, e1);
        }
    }
}
}
}

135
libusbcamera/src/main/java/com/serenegiant/usb/common/UVCCameraHandler.java

@@ -0,0 +1,135 @@
/*
* UVCCamera
* library and sample to access to UVC web camera on non-rooted Android device
*
* Copyright (c) 2014-2017 saki t_saki@serenegiant.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* All files in the folder are under this Apache License, Version 2.0.
* Files in the libjpeg-turbo, libusb, libuvc, rapidjson folder
* may have a different license, see the respective files.
*/
package com.serenegiant.usb.common;
import android.app.Activity;
import com.serenegiant.usb.UVCCamera;
import com.serenegiant.usb.widget.CameraViewInterface;
public class UVCCameraHandler extends AbstractUVCCameraHandler {
/**
* create UVCCameraHandler, use MediaVideoEncoder, try MJPEG, default bandwidth
* @param parent
* @param cameraView
* @param width
* @param height
* @return
*/
public static final UVCCameraHandler createHandler(
final Activity parent, final CameraViewInterface cameraView,
final int width, final int height) {
return createHandler(parent, cameraView, 1, width, height, UVCCamera.FRAME_FORMAT_MJPEG, UVCCamera.DEFAULT_BANDWIDTH);
}
/**
* create UVCCameraHandler, use MediaVideoEncoder, try MJPEG
* @param parent
* @param cameraView
* @param width
* @param height
* @param bandwidthFactor
* @return
*/
public static final UVCCameraHandler createHandler(
final Activity parent, final CameraViewInterface cameraView,
final int width, final int height, final float bandwidthFactor) {
return createHandler(parent, cameraView, 1, width, height, UVCCamera.FRAME_FORMAT_MJPEG, bandwidthFactor);
}
/**
* create UVCCameraHandler, try MJPEG, default bandwidth
* @param parent
* @param cameraView
* @param encoderType 0: use MediaSurfaceEncoder, 1: use MediaVideoEncoder, 2: use MediaVideoBufferEncoder
* @param width
* @param height
* @return
*/
public static final UVCCameraHandler createHandler(
final Activity parent, final CameraViewInterface cameraView,
final int encoderType, final int width, final int height) {
return createHandler(parent, cameraView, encoderType, width, height, UVCCamera.FRAME_FORMAT_MJPEG, UVCCamera.DEFAULT_BANDWIDTH);
}
/**
* create UVCCameraHandler, default bandwidth
* @param parent
* @param cameraView
* @param encoderType 0: use MediaSurfaceEncoder, 1: use MediaVideoEncoder, 2: use MediaVideoBufferEncoder
* @param width
* @param height
* @param format either UVCCamera.FRAME_FORMAT_YUYV(0) or UVCCamera.FRAME_FORMAT_MJPEG(1)
* @return
*/
public static final UVCCameraHandler createHandler(
final Activity parent, final CameraViewInterface cameraView,
final int encoderType, final int width, final int height, final int format) {
return createHandler(parent, cameraView, encoderType, width, height, format, UVCCamera.DEFAULT_BANDWIDTH);
}
/**
* create UVCCameraHandler
* @param parent
* @param cameraView
* @param encoderType 0: use MediaSurfaceEncoder, 1: use MediaVideoEncoder, 2: use MediaVideoBufferEncoder
* @param width
* @param height
* @param format either UVCCamera.FRAME_FORMAT_YUYV(0) or UVCCamera.FRAME_FORMAT_MJPEG(1)
* @param bandwidthFactor
* @return
*/
public static final UVCCameraHandler createHandler(
final Activity parent, final CameraViewInterface cameraView,
final int encoderType, final int width, final int height, final int format, final float bandwidthFactor) {
final CameraThread thread = new CameraThread(UVCCameraHandler.class, parent, cameraView, encoderType, width, height, format, bandwidthFactor);
thread.start();
return (UVCCameraHandler)thread.getHandler();
}
/**
 * Protected: instances are obtained through the static createHandler() factories,
 * which pair the handler with its CameraThread.
 * @param thread camera thread this handler posts work to
 */
protected UVCCameraHandler(final CameraThread thread) {
    super(thread);
}
/**
 * Starts the preview on the given surface; pure delegation to the base class
 * (re-declared here to make the public API of this concrete handler explicit).
 * @param surface render target handed through to AbstractUVCCameraHandler
 */
@Override
public void startPreview(final Object surface) {
    super.startPreview(surface);
}
/**
 * Captures a still image to the default location; pure delegation to the base class.
 */
@Override
public void captureStill() {
    super.captureStill();
}
/**
 * Captures a still image to the given path; pure delegation to the base class.
 * @param path destination file path for the captured image
 */
@Override
public void captureStill(final String path) {
    super.captureStill(path);
}
}

180
libusbcamera/src/main/java/com/serenegiant/usb/common/UVCCameraHandlerMultiSurface.java

@ -0,0 +1,180 @@
/*
* UVCCamera
* library and sample to access to UVC web camera on non-rooted Android device
*
* Copyright (c) 2014-2017 saki t_saki@serenegiant.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* All files in the folder are under this Apache License, Version 2.0.
* Files in the libjpeg-turbo, libusb, libuvc, rapidjson folder
* may have a different license, see the respective files.
*/
package com.serenegiant.usb.common;
import android.app.Activity;
import android.view.Surface;
import com.serenegiant.glutils.RendererHolder;
import com.serenegiant.usb.UVCCamera;
import com.serenegiant.usb.widget.CameraViewInterface;
/**
 * Camera handler that distributes preview frames to multiple surfaces through a
 * {@link RendererHolder}: the camera renders into the holder's single input surface
 * and the holder mirrors frames to every surface registered via {@link #addSurface}.
 */
public class UVCCameraHandlerMultiSurface extends AbstractUVCCameraHandler {
    /**
     * create UVCCameraHandlerMultiSurface, use MediaVideoEncoder, try MJPEG, default bandwidth
     * @param parent hosting Activity, handed to the camera thread
     * @param cameraView view supplying the preview surface
     * @param width requested preview width in pixels
     * @param height requested preview height in pixels
     * @return handler bound to a newly started CameraThread
     */
    public static final UVCCameraHandlerMultiSurface createHandler(
            final Activity parent, final CameraViewInterface cameraView,
            final int width, final int height) {
        return createHandler(parent, cameraView, 1, width, height, UVCCamera.FRAME_FORMAT_MJPEG, UVCCamera.DEFAULT_BANDWIDTH);
    }

    /**
     * create UVCCameraHandlerMultiSurface, use MediaVideoEncoder, try MJPEG
     * @param parent hosting Activity, handed to the camera thread
     * @param cameraView view supplying the preview surface
     * @param width requested preview width in pixels
     * @param height requested preview height in pixels
     * @param bandwidthFactor USB bandwidth factor passed through to UVCCamera
     * @return handler bound to a newly started CameraThread
     */
    public static final UVCCameraHandlerMultiSurface createHandler(
            final Activity parent, final CameraViewInterface cameraView,
            final int width, final int height, final float bandwidthFactor) {
        return createHandler(parent, cameraView, 1, width, height, UVCCamera.FRAME_FORMAT_MJPEG, bandwidthFactor);
    }

    /**
     * create UVCCameraHandlerMultiSurface, try MJPEG, default bandwidth
     * @param parent hosting Activity, handed to the camera thread
     * @param cameraView view supplying the preview surface
     * @param encoderType 0: use MediaSurfaceEncoder, 1: use MediaVideoEncoder, 2: use MediaVideoBufferEncoder
     * @param width requested preview width in pixels
     * @param height requested preview height in pixels
     * @return handler bound to a newly started CameraThread
     */
    public static final UVCCameraHandlerMultiSurface createHandler(
            final Activity parent, final CameraViewInterface cameraView,
            final int encoderType, final int width, final int height) {
        return createHandler(parent, cameraView, encoderType, width, height, UVCCamera.FRAME_FORMAT_MJPEG, UVCCamera.DEFAULT_BANDWIDTH);
    }

    /**
     * create UVCCameraHandlerMultiSurface, default bandwidth
     * @param parent hosting Activity, handed to the camera thread
     * @param cameraView view supplying the preview surface
     * @param encoderType 0: use MediaSurfaceEncoder, 1: use MediaVideoEncoder, 2: use MediaVideoBufferEncoder
     * @param width requested preview width in pixels
     * @param height requested preview height in pixels
     * @param format either UVCCamera.FRAME_FORMAT_YUYV(0) or UVCCamera.FRAME_FORMAT_MJPEG(1)
     * @return handler bound to a newly started CameraThread
     */
    public static final UVCCameraHandlerMultiSurface createHandler(
            final Activity parent, final CameraViewInterface cameraView,
            final int encoderType, final int width, final int height, final int format) {
        return createHandler(parent, cameraView, encoderType, width, height, format, UVCCamera.DEFAULT_BANDWIDTH);
    }

    /**
     * create UVCCameraHandlerMultiSurface
     * @param parent hosting Activity, handed to the camera thread
     * @param cameraView view supplying the preview surface
     * @param encoderType 0: use MediaSurfaceEncoder, 1: use MediaVideoEncoder, 2: use MediaVideoBufferEncoder
     * @param width requested preview width in pixels
     * @param height requested preview height in pixels
     * @param format either UVCCamera.FRAME_FORMAT_YUYV(0) or UVCCamera.FRAME_FORMAT_MJPEG(1)
     * @param bandwidthFactor USB bandwidth factor passed through to UVCCamera
     * @return handler bound to a newly started CameraThread
     */
    public static final UVCCameraHandlerMultiSurface createHandler(
            final Activity parent, final CameraViewInterface cameraView,
            final int encoderType, final int width, final int height, final int format, final float bandwidthFactor) {
        final CameraThread thread = new CameraThread(UVCCameraHandlerMultiSurface.class, parent, cameraView, encoderType, width, height, format, bandwidthFactor);
        thread.start();
        return (UVCCameraHandlerMultiSurface)thread.getHandler();
    }

    // fan-out renderer; null once release() has run
    private RendererHolder mRendererHolder;

    protected UVCCameraHandlerMultiSurface(final CameraThread thread) {
        super(thread);
        mRendererHolder = new RendererHolder(thread.getWidth(), thread.getHeight(), null);
    }

    /** Releases the renderer holder before tearing down the base handler. */
    @Override    // fixed: overrides AbstractUVCCameraHandler#release() but lacked the annotation
    public synchronized void release() {
        if (mRendererHolder != null) {
            mRendererHolder.release();
            mRendererHolder = null;
        }
        super.release();
    }

    /** Resizes the camera stream and keeps the renderer holder in sync. */
    @Override    // fixed: overrides AbstractUVCCameraHandler#resize(int,int) but lacked the annotation
    public synchronized void resize(final int width, final int height) {
        super.resize(width, height);
        if (mRendererHolder != null) {
            mRendererHolder.resize(width, height);
        }
    }

    /**
     * Starts the preview into the renderer holder's input surface.
     * @throws IllegalStateException if this handler was already released
     */
    public synchronized void startPreview() {
        checkReleased();
        if (mRendererHolder != null) {
            super.startPreview(mRendererHolder.getSurface());
        } else {
            throw new IllegalStateException();
        }
    }

    /**
     * Registers an output surface that will receive mirrored preview frames.
     * @param surfaceId caller-chosen id used later with removeSurface()
     * @param surface destination surface
     * @param isRecordable whether the surface belongs to a recorder
     * @throws IllegalStateException if this handler was already released
     */
    public synchronized void addSurface(final int surfaceId, final Surface surface, final boolean isRecordable) {
        checkReleased();
        if (mRendererHolder != null) {
            mRendererHolder.addSurface(surfaceId, surface, isRecordable);
        } else {
            // fixed: previously dereferenced mRendererHolder unconditionally (bare NPE);
            // now fails fast like startPreview() does
            throw new IllegalStateException();
        }
    }

    /** Unregisters a previously added output surface; no-op after release(). */
    public synchronized void removeSurface(final int surfaceId) {
        if (mRendererHolder != null) {
            mRendererHolder.removeSurface(surfaceId);
        }
    }

    @Override
    public void captureStill() {
        checkReleased();
        super.captureStill();
    }

    /**
     * Captures a still image through the renderer holder on the handler thread,
     * then registers the file with the media store via updateMedia().
     * @param path destination file path for the captured image
     */
    @Override
    public void captureStill(final String path) {
        checkReleased();
        post(new Runnable() {
            @Override
            public void run() {
                synchronized (UVCCameraHandlerMultiSurface.this) {
                    if (mRendererHolder != null) {
                        mRendererHolder.captureStill(path);
                        updateMedia(path);
                    }
                }
            }
        });
    }
}

113
libusbcamera/src/main/java/com/serenegiant/usb/widget/AspectRatioTextureView.java

@ -0,0 +1,113 @@
/*
* UVCCamera
* library and sample to access to UVC web camera on non-rooted Android device
*
* Copyright (c) 2014-2017 saki t_saki@serenegiant.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* All files in the folder are under this Apache License, Version 2.0.
* Files in the libjpeg-turbo, libusb, libuvc, rapidjson folder
* may have a different license, see the respective files.
*/
package com.serenegiant.usb.widget;
import android.content.Context;
import android.util.AttributeSet;
import android.view.TextureView;
import com.serenegiant.widget.IAspectRatioView;
/**
* change the view size with keeping the specified aspect ratio.
* if you set this view with in a FrameLayout and set property "android:layout_gravity="center",
* you can show this view in the center of screen and keep the aspect ratio of content
* XXX it is better that can set the aspect ratio as xml property
*/
/**
 * change the view size with keeping the specified aspect ratio.
 * if you set this view with in a FrameLayout and set property "android:layout_gravity="center",
 * you can show this view in the center of screen and keep the aspect ratio of content
 * XXX it is better that can set the aspect ratio as xml property
 */
public class AspectRatioTextureView extends TextureView    // API >= 14
        implements IAspectRatioView {

    private static final boolean DEBUG = true;    // TODO set false on release
    // fixed: was the copy-paste leftover "AbstractCameraView", which made logcat filtering misleading
    private static final String TAG = "AspectRatioTextureView";

    // requested width/height ratio; negative means "none", i.e. plain TextureView measuring
    private double mRequestedAspect = -1.0;
    // NOTE(review): never read or written within this class — looks dead; confirm before removing
    private CameraViewInterface.Callback mCallback;

    public AspectRatioTextureView(final Context context) {
        this(context, null, 0);
    }

    public AspectRatioTextureView(final Context context, final AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public AspectRatioTextureView(final Context context, final AttributeSet attrs, final int defStyle) {
        super(context, attrs, defStyle);
    }

    /**
     * Requests a new aspect ratio (width / height) and re-layouts when it changed.
     * @param aspectRatio desired content ratio
     * @throws IllegalArgumentException if aspectRatio is negative
     */
    @Override
    public void setAspectRatio(final double aspectRatio) {
        if (aspectRatio < 0) {
            throw new IllegalArgumentException();
        }
        if (mRequestedAspect != aspectRatio) {
            mRequestedAspect = aspectRatio;
            requestLayout();
        }
    }

    /**
     * Convenience overload taking the content size directly.
     * NOTE(review): height == 0 produces Infinity/NaN here and breaks measuring;
     * callers are expected to pass a valid preview size — confirm upstream.
     */
    @Override
    public void setAspectRatio(final int width, final int height) {
        setAspectRatio(width / (double)height);
    }

    /** @return the currently requested aspect ratio, or a negative value if unset */
    @Override
    public double getAspectRatio() {
        return mRequestedAspect;
    }

    /**
     * Measures so that the content area (excluding padding) keeps mRequestedAspect.
     * Ratio differences below 1% are ignored to avoid layout jitter.
     */
    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        if (mRequestedAspect > 0) {
            int initialWidth = MeasureSpec.getSize(widthMeasureSpec);
            int initialHeight = MeasureSpec.getSize(heightMeasureSpec);
            // work on the content box: padding is removed before the ratio math and re-added after
            final int horizPadding = getPaddingLeft() + getPaddingRight();
            final int vertPadding = getPaddingTop() + getPaddingBottom();
            initialWidth -= horizPadding;
            initialHeight -= vertPadding;
            final double viewAspectRatio = (double)initialWidth / initialHeight;
            final double aspectDiff = mRequestedAspect / viewAspectRatio - 1;
            if (Math.abs(aspectDiff) > 0.01) {
                if (aspectDiff > 0) {
                    // width priority decision: content is wider than the view — shrink height
                    initialHeight = (int) (initialWidth / mRequestedAspect);
                } else {
                    // height priority decision: content is taller than the view — shrink width
                    initialWidth = (int) (initialHeight * mRequestedAspect);
                }
                initialWidth += horizPadding;
                initialHeight += vertPadding;
                widthMeasureSpec = MeasureSpec.makeMeasureSpec(initialWidth, MeasureSpec.EXACTLY);
                heightMeasureSpec = MeasureSpec.makeMeasureSpec(initialHeight, MeasureSpec.EXACTLY);
            }
        }
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
    }
}

46
libusbcamera/src/main/java/com/serenegiant/usb/widget/CameraViewInterface.java

@ -0,0 +1,46 @@
/*
* UVCCamera
* library and sample to access to UVC web camera on non-rooted Android device
*
* Copyright (c) 2014-2017 saki t_saki@serenegiant.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* All files in the folder are under this Apache License, Version 2.0.
* Files in the libjpeg-turbo, libusb, libuvc, rapidjson folder
* may have a different license, see the respective files.
*/
package com.serenegiant.usb.widget;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.view.Surface;
import com.serenegiant.widget.IAspectRatioView;
/**
 * Contract implemented by camera preview views so that camera handlers can drive
 * any concrete view (e.g. UVCCameraTextureView) through a common interface.
 */
public interface CameraViewInterface extends IAspectRatioView {

    /** Lifecycle notifications for the preview {@link Surface}. */
    interface Callback {
        void onSurfaceCreated(CameraViewInterface view, Surface surface);
        void onSurfaceChanged(CameraViewInterface view, Surface surface, int width, int height);
        void onSurfaceDestroy(CameraViewInterface view, Surface surface);
    }

    /** Called from the owner's onPause(); implementations release rendering resources. */
    void onPause();

    /** Called from the owner's onResume(); implementations re-create rendering resources. */
    void onResume();

    /** Registers the listener that receives surface lifecycle events. */
    void setCallback(Callback callback);

    /** @return the SurfaceTexture that receives camera frames, or null if not ready */
    SurfaceTexture getSurfaceTexture();

    /** @return a Surface wrapping the preview texture, or null if unavailable */
    Surface getSurface();

    /** @return true while a drawing surface is available */
    boolean hasSurface();

    /** @return the most recent preview frame as a Bitmap */
    Bitmap captureStillImage();
}

477
libusbcamera/src/main/java/com/serenegiant/usb/widget/UVCCameraTextureView.java

@ -0,0 +1,477 @@
package com.serenegiant.usb.widget;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Surface;
import android.view.TextureView;
import com.serenegiant.glutils.EGLBase;
import com.serenegiant.glutils.GLDrawer2D;
import com.serenegiant.glutils.es1.GLHelper;
import com.serenegiant.utils.FpsCounter;
/**
 * TextureView-based camera preview: receives camera frames through a SurfaceTexture
 * that is created on a private render thread (RenderThread) and driven by a
 * Looper-backed RenderHandler. Implements CameraViewInterface so camera handlers
 * can drive it generically.
 */
public class UVCCameraTextureView extends AspectRatioTextureView    // API >= 14
        implements TextureView.SurfaceTextureListener, CameraViewInterface {
    private static final boolean DEBUG = true;    // TODO set false on release
    private static final String TAG = "UVCCameraTextureView";

    // true while the TextureView's own SurfaceTexture is available
    private boolean mHasSurface;
    // drives the private render thread; null while paused or before the surface exists
    private RenderHandler mRenderHandler;
    // guards the still-capture handshake (mTempBitmap / mReqesutCaptureStillImage)
    private final Object mCaptureSync = new Object();
    // reused bitmap returned by captureStillImage() — same instance on every call (memory saving)
    private Bitmap mTempBitmap;
    private boolean mReqesutCaptureStillImage;
    private Callback mCallback;
    /**
     * for calculation of frame rate
     */
    private final FpsCounter mFpsCounter = new FpsCounter();

    public UVCCameraTextureView(final Context context) {
        this(context, null, 0);
    }

    public UVCCameraTextureView(final Context context, final AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public UVCCameraTextureView(final Context context, final AttributeSet attrs, final int defStyle) {
        super(context, attrs, defStyle);
        // this view listens to its own SurfaceTexture lifecycle
        setSurfaceTextureListener(this);
    }

    /**
     * Re-creates the render thread if the surface survived the pause
     * (onSurfaceTextureAvailable will not fire again in that case).
     */
    @Override
    public void onResume() {
        if (DEBUG) Log.v(TAG, "onResume:");
        if (mHasSurface) {
            mRenderHandler = RenderHandler.createHandler(mFpsCounter, super.getSurfaceTexture(), getWidth(), getHeight());
        }
    }

    /** Tears down the render thread and drops the cached capture bitmap. */
    @Override
    public void onPause() {
        if (DEBUG) Log.v(TAG, "onPause:");
        if (mRenderHandler != null) {
            mRenderHandler.release();
            mRenderHandler = null;
        }
        if (mTempBitmap != null) {
            mTempBitmap.recycle();
            mTempBitmap = null;
        }
    }

    /** Starts (or resizes) the render thread and notifies the callback of the new surface. */
    @Override
    public void onSurfaceTextureAvailable(final SurfaceTexture surface, final int width, final int height) {
        if (DEBUG) Log.v(TAG, "onSurfaceTextureAvailable:" + surface);
        if (mRenderHandler == null) {
            mRenderHandler = RenderHandler.createHandler(mFpsCounter, surface, width, height);
        } else {
            mRenderHandler.resize(width, height);
        }
        mHasSurface = true;
        if (mCallback != null) {
            mCallback.onSurfaceCreated(this, getSurface());
        }
    }

    @Override
    public void onSurfaceTextureSizeChanged(final SurfaceTexture surface, final int width, final int height) {
        if (DEBUG) Log.v(TAG, "onSurfaceTextureSizeChanged:" + surface);
        if (mRenderHandler != null) {
            mRenderHandler.resize(width, height);
        }
        if (mCallback != null) {
            mCallback.onSurfaceChanged(this, getSurface(), width, height);
        }
    }

    /**
     * Releases the render thread and the cached preview Surface.
     * Returning true lets the TextureView release the SurfaceTexture itself.
     */
    @Override
    public boolean onSurfaceTextureDestroyed(final SurfaceTexture surface) {
        if (DEBUG) Log.v(TAG, "onSurfaceTextureDestroyed:" + surface);
        if (mRenderHandler != null) {
            mRenderHandler.release();
            mRenderHandler = null;
        }
        mHasSurface = false;
        if (mCallback != null) {
            mCallback.onSurfaceDestroy(this, getSurface());
        }
        if (mPreviewSurface != null) {
            mPreviewSurface.release();
            mPreviewSurface = null;
        }
        return true;
    }

    /**
     * Called on every new frame; completes a pending captureStillImage() request
     * by grabbing the current frame and waking the waiting thread.
     */
    @Override
    public void onSurfaceTextureUpdated(final SurfaceTexture surface) {
        synchronized (mCaptureSync) {
            if (mReqesutCaptureStillImage) {
                mReqesutCaptureStillImage = false;
                if (mTempBitmap == null)
                    mTempBitmap = getBitmap();
                else
                    getBitmap(mTempBitmap);    // reuse the existing bitmap to avoid reallocation
                mCaptureSync.notifyAll();
            }
        }
    }

    @Override
    public boolean hasSurface() {
        return mHasSurface;
    }

    /**
     * capture preview image as a bitmap
     * this method blocks current thread until bitmap is ready
     * if you call this method at almost same time from different thread,
     * the returned bitmap will be changed while you are processing the bitmap
     * (because we return same instance of bitmap on each call for memory saving)
     * if you need to call this method from multiple thread,
     * you should change this method(copy and return)
     * NOTE(review): wait() has no timeout and no loop guard — if no further frame
     * arrives (preview stopped) or a spurious wakeup occurs, this may return a
     * stale/null bitmap or block for a long time; confirm callers tolerate this.
     */
    @Override
    public Bitmap captureStillImage() {
        synchronized (mCaptureSync) {
            mReqesutCaptureStillImage = true;
            try {
                mCaptureSync.wait();
            } catch (final InterruptedException e) {
            }
            return mTempBitmap;
        }
    }

    /** @return the render thread's input SurfaceTexture (camera target), or null when stopped */
    @Override
    public SurfaceTexture getSurfaceTexture() {
        return mRenderHandler != null ? mRenderHandler.getPreviewTexture() : null;
    }

    // lazily created Surface wrapping the render thread's input texture
    private Surface mPreviewSurface;

    @Override
    public Surface getSurface() {
        if (DEBUG) Log.v(TAG, "getSurface:hasSurface=" + mHasSurface);
        if (mPreviewSurface == null) {
            final SurfaceTexture st = getSurfaceTexture();
            if (st != null) {
                mPreviewSurface = new Surface(st);
            }
        }
        return mPreviewSurface;
    }

//	@Override
//	public void setVideoEncoder(final IVideoEncoder encoder) {
//		if (mRenderHandler != null)
//			mRenderHandler.setVideoEncoder(encoder);
//	}

    @Override
    public void setCallback(final Callback callback) {
        mCallback = callback;
    }

    public void resetFps() {
        mFpsCounter.reset();
    }

    /**
     * update frame rate of image processing
     */
    public void updateFps() {
        mFpsCounter.update();
    }

    /**
     * get current frame rate of image processing
     *
     * @return current fps as measured by FpsCounter
     */
    public float getFps() {
        return mFpsCounter.getFps();
    }

    /**
     * get total frame rate from start
     *
     * @return average fps since the counter started
     */
    public float getTotalFps() {
        return mFpsCounter.getTotalFps();
    }

    /**
     * render camera frames on this view on a private thread
     *
     * @author saki
     */
    private static final class RenderHandler extends Handler
            implements SurfaceTexture.OnFrameAvailableListener {

        private static final int MSG_REQUEST_RENDER = 1;
        private static final int MSG_SET_ENCODER = 2;
        private static final int MSG_CREATE_SURFACE = 3;
        private static final int MSG_RESIZE = 4;
        private static final int MSG_TERMINATE = 9;

        private RenderThread mThread;
        // false once release() ran; messages are no longer accepted
        private boolean mIsActive = true;
        private final FpsCounter mFpsCounter;

        /** Starts the render thread and blocks until its handler exists. */
        public static final RenderHandler createHandler(final FpsCounter counter,
                final SurfaceTexture surface, final int width, final int height) {
            final RenderThread thread = new RenderThread(counter, surface, width, height);
            thread.start();
            return thread.getHandler();
        }

        private RenderHandler(final FpsCounter counter, final RenderThread thread) {
            mThread = thread;
            mFpsCounter = counter;
        }

//		public final void setVideoEncoder(final IVideoEncoder encoder) {
//			if (DEBUG) Log.v(TAG, "setVideoEncoder:");
//			if (mIsActive)
//				sendMessage(obtainMessage(MSG_SET_ENCODER, encoder));
//		}

        /**
         * Asks the render thread to (re)create its input SurfaceTexture and blocks
         * until it is ready.
         * NOTE(review): wait() without a loop — a spurious wakeup would return early.
         */
        public final SurfaceTexture getPreviewTexture() {
            if (DEBUG) Log.v(TAG, "getPreviewTexture:");
            if (mIsActive) {
                synchronized (mThread.mSync) {
                    sendEmptyMessage(MSG_CREATE_SURFACE);
                    try {
                        mThread.mSync.wait();
                    } catch (final InterruptedException e) {
                    }
                    return mThread.mPreviewSurface;
                }
            } else {
                return null;
            }
        }

        /** Asks the render thread to resize and blocks until it acknowledges. */
        public void resize(final int width, final int height) {
            if (DEBUG) Log.v(TAG, "resize:");
            if (mIsActive) {
                synchronized (mThread.mSync) {
                    sendMessage(obtainMessage(MSG_RESIZE, width, height));
                    try {
                        mThread.mSync.wait();
                    } catch (final InterruptedException e) {
                    }
                }
            }
        }

        /** Stops accepting work and asks the render thread's looper to quit. */
        public final void release() {
            if (DEBUG) Log.v(TAG, "release:");
            if (mIsActive) {
                mIsActive = false;
                removeMessages(MSG_REQUEST_RENDER);
                removeMessages(MSG_SET_ENCODER);
                sendEmptyMessage(MSG_TERMINATE);
            }
        }

        @Override
        public final void onFrameAvailable(final SurfaceTexture surfaceTexture) {
            if (mIsActive) {
                mFpsCounter.count();
                // NOTE(review): MSG_REQUEST_RENDER currently has no case in handleMessage
                // (the draw path is commented out below) — frames are counted but not drawn here.
                sendEmptyMessage(MSG_REQUEST_RENDER);
            }
        }

        @Override
        public final void handleMessage(final Message msg) {
            if (mThread == null) return;
            switch (msg.what) {
//			case MSG_REQUEST_RENDER:
//				mThread.onDrawFrame();
//				break;
//			case MSG_SET_ENCODER:
//				mThread.setEncoder((MediaEncoder)msg.obj);
//				break;
            case MSG_CREATE_SURFACE:
                mThread.updatePreviewSurface();
                break;
            case MSG_RESIZE:
                mThread.resize(msg.arg1, msg.arg2);
                break;
            case MSG_TERMINATE:
                // quits the render thread's looper; run() then performs cleanup
                Looper.myLooper().quit();
                mThread = null;
                break;
            default:
                super.handleMessage(msg);
            }
        }

        /**
         * Owns the EGL context and the camera-input SurfaceTexture; all GL work
         * happens on this thread via the RenderHandler's looper.
         */
        private static final class RenderThread extends Thread {
            // guards the handler/preview-surface handshakes with callers
            private final Object mSync = new Object();
            // drawing surface coming from the TextureView
            private final SurfaceTexture mSurface;
            private RenderHandler mHandler;
            private EGLBase mEgl;
            /**
             * IEglSurface instance related to this TextureView
             */
            private EGLBase.IEglSurface mEglSurface;
            private GLDrawer2D mDrawer;
            // GL texture id backing mPreviewSurface; -1 means "not created yet"
            private int mTexId = -1;
            /**
             * SurfaceTexture instance to receive video images
             */
            private SurfaceTexture mPreviewSurface;
            private final float[] mStMatrix = new float[16];
//			private MediaEncoder mEncoder;
            private int mViewWidth, mViewHeight;
            private final FpsCounter mFpsCounter;

            /**
             * constructor
             *
             * @param surface: drawing surface came from TextureView
             */
            public RenderThread(final FpsCounter fpsCounter, final SurfaceTexture surface, final int width, final int height) {
                mFpsCounter = fpsCounter;
                mSurface = surface;
                mViewWidth = width;
                mViewHeight = height;
                setName("RenderThread");
            }

            /**
             * Blocks until run() has created the handler, then returns it.
             * NOTE(review): single wait() without a loop — a spurious wakeup could
             * return before mHandler is set.
             */
            public final RenderHandler getHandler() {
                if (DEBUG) Log.v(TAG, "RenderThread#getHandler:");
                synchronized (mSync) {
                    // create rendering thread
                    if (mHandler == null)
                        try {
                            mSync.wait();
                        } catch (final InterruptedException e) {
                        }
                }
                return mHandler;
            }

            /**
             * Applies a new view size. When nothing changed, still notifies mSync so
             * the caller blocked in RenderHandler.resize() wakes up
             * (updatePreviewSurface() performs the notify in the changed case).
             */
            public void resize(final int width, final int height) {
                if (((width > 0) && (width != mViewWidth)) || ((height > 0) && (height != mViewHeight))) {
                    mViewWidth = width;
                    mViewHeight = height;
                    updatePreviewSurface();
                } else {
                    synchronized (mSync) {
                        mSync.notifyAll();
                    }
                }
            }

            /**
             * Re-creates the GL texture and the camera-input SurfaceTexture at the
             * current view size, then notifies any thread waiting on mSync.
             */
            public final void updatePreviewSurface() {
                if (DEBUG) Log.i(TAG, "RenderThread#updatePreviewSurface:");
                synchronized (mSync) {
                    if (mPreviewSurface != null) {
                        if (DEBUG) Log.d(TAG, "updatePreviewSurface:release mPreviewSurface");
                        mPreviewSurface.setOnFrameAvailableListener(null);
                        mPreviewSurface.release();
                        mPreviewSurface = null;
                    }
                    mEglSurface.makeCurrent();
                    if (mTexId >= 0) {
                        mDrawer.deleteTex(mTexId);
                    }
                    // create texture and SurfaceTexture for input from camera
                    mTexId = mDrawer.initTex();
                    if (DEBUG) Log.v(TAG, "updatePreviewSurface:tex_id=" + mTexId);
                    mPreviewSurface = new SurfaceTexture(mTexId);
                    mPreviewSurface.setDefaultBufferSize(mViewWidth, mViewHeight);
                    mPreviewSurface.setOnFrameAvailableListener(mHandler);
                    // notify to caller thread that previewSurface is ready
                    mSync.notifyAll();
                }
            }

//			public final void onDrawFrame() {
//				mEglSurface.makeCurrent();
//				// update texture(came from camera)
//				mPreviewSurface.updateTexImage();
//				// get texture matrix
//				mPreviewSurface.getTransformMatrix(mStMatrix);
//				// notify video encoder if it exist
//				if (mEncoder != null) {
//					// notify to capturing thread that the camera frame is available.
//					if (mEncoder instanceof MediaVideoEncoder)
//						((MediaVideoEncoder) mEncoder).frameAvailableSoon(mStMatrix);
//					else
//						mEncoder.frameAvailableSoon();
//				}
//				// draw to preview screen
//				mDrawer.draw(mTexId, mStMatrix, 0);
//				mEglSurface.swap();
//			}

            /**
             * Thread body: set up EGL, publish the handler, run the looper until
             * MSG_TERMINATE quits it, then release all GL resources.
             */
            @Override
            public final void run() {
                Log.d(TAG, getName() + " started");
                init();
                Looper.prepare();
                synchronized (mSync) {
                    mHandler = new RenderHandler(mFpsCounter, this);
                    mSync.notify();    // wake getHandler()
                }
                Looper.loop();
                Log.d(TAG, getName() + " finishing");
                release();
                synchronized (mSync) {
                    mHandler = null;
                    mSync.notify();
                }
            }

            /** Creates the EGL context/surface for this thread and the drawer object. */
            private final void init() {
                if (DEBUG) Log.v(TAG, "RenderThread#init:");
                // create EGLContext for this thread
                mEgl = EGLBase.createFrom(null, false, false);
                mEglSurface = mEgl.createFromSurface(mSurface);
                mEglSurface.makeCurrent();
                // create drawing object
                mDrawer = new GLDrawer2D(true);
            }

            /** Releases drawer, preview texture, GL texture, EGL surface and context, in that order. */
            private final void release() {
                if (DEBUG) Log.v(TAG, "RenderThread#release:");
                if (mDrawer != null) {
                    mDrawer.release();
                    mDrawer = null;
                }
                if (mPreviewSurface != null) {
                    mPreviewSurface.release();
                    mPreviewSurface = null;
                }
                if (mTexId >= 0) {
                    GLHelper.deleteTex(mTexId);
                    mTexId = -1;
                }
                if (mEglSurface != null) {
                    mEglSurface.release();
                    mEglSurface = null;
                }
                if (mEgl != null) {
                    mEgl.release();
                    mEgl = null;
                }
            }
        }
    }
}

6
local.properties

@ -1,10 +1,12 @@
## This file is automatically generated by Android Studio.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file should *NOT* be checked into Version Control Systems,
# This file must *NOT* be checked into Version Control Systems,
# as it contains information specific to your local configuration.
#
# Location of the SDK. This is only used by Gradle.
# For customization when using a Version Control System, please read the
# header note.
sdk.dir=E\:\\Environment\\android-sdk-windows
#Fri Sep 29 23:06:03 CST 2017
ndk.dir=E\:\\Android\\Evironment\\android-sdk-windows\\ndk-bundle
sdk.dir=E\:\\Android\\Evironment\\android-sdk-windows

Loading…
Cancel
Save