diff --git a/app/src/main/AndroidManifest.xml b/app/src/main/AndroidManifest.xml
index 5aadf097..3952fde5 100644
--- a/app/src/main/AndroidManifest.xml
+++ b/app/src/main/AndroidManifest.xml
@@ -92,6 +92,10 @@
android:supportsRtl="true"
android:theme="@style/Theme.MicroPhoto"
tools:targetApi="28">
+
(handler);
if (pTerminal == NULL)
@@ -730,7 +730,7 @@ Java_com_xypower_mpapp_MicroPhotoService_recordingFinished(
// camera->Open(pathStr, fileNameStr);
unsigned long photoId = videoId;
- ((CPhoneDevice *)dev)->OnVideoReady(result != JNI_FALSE, pathStr, photoId);
+ ((CPhoneDevice *)dev)->OnVideoReady(photoOrVideo != JNI_FALSE, result != JNI_FALSE, pathStr, photoId);
if (path != NULL)
{
env->ReleaseStringUTFChars(path, pathStr);
diff --git a/app/src/main/cpp/PhoneDevice.cpp b/app/src/main/cpp/PhoneDevice.cpp
index e93b5f7a..d4f2bd41 100644
--- a/app/src/main/cpp/PhoneDevice.cpp
+++ b/app/src/main/cpp/PhoneDevice.cpp
@@ -321,7 +321,7 @@ CPhoneDevice::CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPa
mRegisterHeartbeatMid = env->GetMethodID(classService, "registerHeartbeatTimer", "(IJ)V");
mUpdateTimeMid = env->GetMethodID(classService, "updateTime", "(J)Z");
mUpdateCaptureScheduleMid = env->GetMethodID(classService, "updateCaptureSchedule", "(J)Z");
- mStartRecordingMid = env->GetMethodID(classService, "startRecording", "(IJIIIIILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V");
+ mStartRecordingMid = env->GetMethodID(classService, "startRecording", "(ZIJIIIIILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V");
mRequestWakelockMid = env->GetMethodID(classService, "requestWakelock", "(Ljava/lang/String;J)V");
mReleaseWakelockMid = env->GetMethodID(classService, "releaseWakelock", "(Ljava/lang/String;)V");
@@ -1395,8 +1395,10 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<
}
int orientation = mPhotoInfo.orientation == 0 ? -1 : (mPhotoInfo.orientation - 1) * 90;
- env->CallVoidMethod(m_javaService, mStartRecordingMid, mPhotoInfo.cameraId, (unsigned long)mPhotoInfo.photoId, mPhotoInfo.duration, mPhotoInfo.width, mPhotoInfo.height,
- mPhotoInfo.duration, orientation, leftTopOSD, rightTopOSD, rightBottomOSD, leftBottomOSD);
+ jboolean photoOrVideo = JNI_FALSE;
+ env->CallVoidMethod(m_javaService, mStartRecordingMid, photoOrVideo, mPhotoInfo.cameraId, (unsigned long)mPhotoInfo.photoId,
+ mPhotoInfo.duration, mPhotoInfo.width, mPhotoInfo.height,mPhotoInfo.duration, orientation,
+ leftTopOSD, rightTopOSD, rightBottomOSD, leftBottomOSD);
if (leftTopOSD) env->DeleteLocalRef(leftTopOSD);
if (rightTopOSD) env->DeleteLocalRef(rightTopOSD);
@@ -1786,7 +1788,7 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat)
return res;
}
-bool CPhoneDevice::OnVideoReady(bool result, const char* path, unsigned int photoId)
+bool CPhoneDevice::OnVideoReady(bool photoOrVideo, bool result, const char* path, unsigned int photoId)
{
mPhotoInfo.photoTime = time(NULL);
diff --git a/app/src/main/cpp/PhoneDevice.h b/app/src/main/cpp/PhoneDevice.h
index b293a795..76192b2d 100644
--- a/app/src/main/cpp/PhoneDevice.h
+++ b/app/src/main/cpp/PhoneDevice.h
@@ -219,7 +219,8 @@ public:
bool GetNextScheduleItem(uint32_t tsBasedZero, uint32_t scheduleTime, vector& items);
void UpdatePosition(double lon, double lat, double radius, time_t ts);
- bool OnVideoReady(bool result, const char* path, unsigned int photoId);
+ bool OnVideoReady(bool photoOrVideo, bool result, const char* path, unsigned int photoId);
+
void UpdateSignalLevel(int signalLevel);
void UpdateTfCardPath(const std::string& tfCardPath)
{
diff --git a/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java b/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java
index 40d32edd..c7688005 100644
--- a/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java
+++ b/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java
@@ -391,12 +391,13 @@ public class MicroPhotoService extends Service {
mService.reloadConfigs(mService.mNativeHandle);
}
} else if (TextUtils.equals(ACTION_VIDEO_FINISHED, action)) {
+ boolean photoOrVideo = intent.getBooleanExtra("photoOrVideo", false);
boolean result = intent.getBooleanExtra("result", false);
String path = intent.getStringExtra("path");
long videoId = intent.getLongExtra("videoId", 0);
Log.i(TAG, "Recording received(" + Long.toString(videoId) + "):" + path);
- mService.recordingFinished(mService.mNativeHandle, result, path, videoId);
+ mService.recordingFinished(mService.mNativeHandle, photoOrVideo, result, path, videoId);
} else if (TextUtils.equals(ACTION_STOP, action)) {
mService.stopTerminalService();
} else if (TextUtils.equals(ACTION_IMP_PUBKRY, action)) {
@@ -509,7 +510,7 @@ public class MicroPhotoService extends Service {
registerPhotoTimer(getApplicationContext(), scheduleTime, scheduleTime, timeout, schedules);
}
- public void startRecording(int cameraId, long videoId, int duration, int width, int height, int quality, int orientation, String leftTopOsd, String rightTopOsd, String rightBottomOsd, String leftBottomOsd) {
+ public void startRecording(boolean photoOrVideo, int cameraId, long videoId, int duration, int width, int height, int quality, int orientation, String leftTopOsd, String rightTopOsd, String rightBottomOsd, String leftBottomOsd) {
Context context = getApplicationContext();
// Intent intent = new Intent(this, VideoActivity.class);
Intent intent = makeRecordingIntent(context, cameraId, videoId, duration, width, height, quality, orientation,
@@ -1203,7 +1204,7 @@ cellSignalStrengthGsm.getDbm();
protected native boolean reloadConfigs(long handler);
protected native void updatePosition(long handler, double lon, double lat, double radius, long ts);
protected native boolean uninit(long handler);
- protected native void recordingFinished(long handler, boolean result, String path, long videoId);
+ protected native void recordingFinished(long handler, boolean photoOrVideo, boolean result, String path, long videoId);
public static native long takePhoto(int channel, int preset, boolean photoOrVideo, String configFilePath, String path);
public static native void releaseDeviceHandle(long deviceHandle);
public static native boolean sendExternalPhoto(long deviceHandle, String path);
diff --git a/app/src/main/java/com/xypower/mpapp/v2/Camera2VideoActivity.java b/app/src/main/java/com/xypower/mpapp/v2/Camera2VideoActivity.java
index 793b2902..b0375913 100644
--- a/app/src/main/java/com/xypower/mpapp/v2/Camera2VideoActivity.java
+++ b/app/src/main/java/com/xypower/mpapp/v2/Camera2VideoActivity.java
@@ -838,6 +838,7 @@ public class Camera2VideoActivity extends AppCompatActivity {
Intent intent = new Intent(ACTION_MP_VIDEO_FINISHED);
// intent.setPackage(packageName);
+ intent.putExtra("photoOrVideo", false);
intent.putExtra("result", result);
intent.putExtra("path", path);
intent.putExtra("videoId", mVideoId);
diff --git a/app/src/main/java/com/xypower/mpapp/video/RawActivity.java b/app/src/main/java/com/xypower/mpapp/video/RawActivity.java
new file mode 100644
index 00000000..cdc4b410
--- /dev/null
+++ b/app/src/main/java/com/xypower/mpapp/video/RawActivity.java
@@ -0,0 +1,1633 @@
+package com.xypower.mpapp.video;
+
+import androidx.appcompat.app.AppCompatActivity;
+
+import android.os.Looper;
+import android.os.Message;
+import android.os.SystemClock;
+import android.view.OrientationEventListener;
+
+import java.util.TreeMap;
+
+
+import android.Manifest;
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.graphics.ImageFormat;
+import android.graphics.Matrix;
+import android.graphics.Point;
+import android.graphics.RectF;
+import android.graphics.SurfaceTexture;
+import android.hardware.SensorManager;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureFailure;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.DngCreator;
+import android.hardware.camera2.TotalCaptureResult;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.media.Image;
+import android.media.ImageReader;
+import android.media.MediaScannerConnection;
+import android.net.Uri;
+import android.os.AsyncTask;
+import android.os.Bundle;
+import android.os.Environment;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.util.Log;
+import android.util.Size;
+import android.util.SparseIntArray;
+import android.view.Surface;
+import android.view.TextureView;
+import android.widget.Toast;
+
+import com.xypower.mpapp.R;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Date;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+
+
+public class RawActivity extends AppCompatActivity {
+
+ /**
+ * Conversion from screen rotation to JPEG orientation.
+ */
+ private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
+
+ static {
+ ORIENTATIONS.append(Surface.ROTATION_0, 0);
+ ORIENTATIONS.append(Surface.ROTATION_90, 90);
+ ORIENTATIONS.append(Surface.ROTATION_180, 180);
+ ORIENTATIONS.append(Surface.ROTATION_270, 270);
+ }
+
+ /**
+ * Request code for camera permissions.
+ */
+ private static final int REQUEST_CAMERA_PERMISSIONS = 1;
+
+ /**
+ * Permissions required to take a picture.
+ */
+ private static final String[] CAMERA_PERMISSIONS = {
+ Manifest.permission.CAMERA,
+ Manifest.permission.READ_EXTERNAL_STORAGE,
+ Manifest.permission.WRITE_EXTERNAL_STORAGE,
+ };
+
+ /**
+ * Timeout for the pre-capture sequence.
+ */
+ private static final long PRECAPTURE_TIMEOUT_MS = 1000;
+
+ /**
+ * Tolerance when comparing aspect ratios.
+ */
+ private static final double ASPECT_RATIO_TOLERANCE = 0.005;
+
+ /**
+ * Max preview width that is guaranteed by Camera2 API
+ */
+ private static final int MAX_PREVIEW_WIDTH = 1920;
+
+ /**
+ * Max preview height that is guaranteed by Camera2 API
+ */
+ private static final int MAX_PREVIEW_HEIGHT = 1080;
+
+ /**
+ * Tag for the {@link Log}.
+ */
+ private static final String TAG = "Camera2RawFragment";
+
+ /**
+ * Camera state: Device is closed.
+ */
+ private static final int STATE_CLOSED = 0;
+
+ /**
+ * Camera state: Device is opened, but is not capturing.
+ */
+ private static final int STATE_OPENED = 1;
+
+ /**
+ * Camera state: Showing camera preview.
+ */
+ private static final int STATE_PREVIEW = 2;
+
+ /**
+ * Camera state: Waiting for 3A convergence before capturing a photo.
+ */
+ private static final int STATE_WAITING_FOR_3A_CONVERGENCE = 3;
+
+ /**
+ * An {@link OrientationEventListener} used to determine when device rotation has occurred.
+ * This is mainly necessary for when the device is rotated by 180 degrees, in which case
+ * onCreate or onConfigurationChanged is not called as the view dimensions remain the same,
+ * but the orientation of the has changed, and thus the preview rotation must be updated.
+ */
+ private OrientationEventListener mOrientationListener;
+
+ /**
+ * {@link TextureView.SurfaceTextureListener} handles several lifecycle events of a
+ * {@link TextureView}.
+ */
+ private final TextureView.SurfaceTextureListener mSurfaceTextureListener
+ = new TextureView.SurfaceTextureListener() {
+
+ @Override
+ public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
+ configureTransform(width, height);
+ }
+
+ @Override
+ public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
+ configureTransform(width, height);
+ }
+
+ @Override
+ public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
+ synchronized (mCameraStateLock) {
+ mPreviewSize = null;
+ }
+ return true;
+ }
+
+ @Override
+ public void onSurfaceTextureUpdated(SurfaceTexture texture) {
+ }
+
+ };
+
+ /**
+ * An {@link AutoFitTextureView} for camera preview.
+ */
+ private AutoFitTextureView mTextureView;
+
+ /**
+ * An additional thread for running tasks that shouldn't block the UI. This is used for all
+ * callbacks from the {@link CameraDevice} and {@link CameraCaptureSession}s.
+ */
+ private HandlerThread mBackgroundThread;
+
+ /**
+ * A counter for tracking corresponding {@link CaptureRequest}s and {@link CaptureResult}s
+ * across the {@link CameraCaptureSession} capture callbacks.
+ */
+ private final AtomicInteger mRequestCounter = new AtomicInteger();
+
+ /**
+ * A {@link Semaphore} to prevent the app from exiting before closing the camera.
+ */
+ private final Semaphore mCameraOpenCloseLock = new Semaphore(1);
+
+ /**
+ * A lock protecting camera state.
+ */
+ private final Object mCameraStateLock = new Object();
+
+ // *********************************************************************************************
+ // State protected by mCameraStateLock.
+ //
+ // The following state is used across both the UI and background threads. Methods with "Locked"
+ // in the name expect mCameraStateLock to be held while calling.
+
+ /**
+ * ID of the current {@link CameraDevice}.
+ */
+ private String mCameraId;
+
+ /**
+ * A {@link CameraCaptureSession } for camera preview.
+ */
+ private CameraCaptureSession mCaptureSession;
+
+ /**
+ * A reference to the open {@link CameraDevice}.
+ */
+ private CameraDevice mCameraDevice;
+
+ /**
+ * The {@link Size} of camera preview.
+ */
+ private Size mPreviewSize;
+
+ /**
+ * The {@link CameraCharacteristics} for the currently configured camera device.
+ */
+ private CameraCharacteristics mCharacteristics;
+
+ /**
+ * A {@link Handler} for running tasks in the background.
+ */
+ private Handler mBackgroundHandler;
+
+ /**
+ * A reference counted holder wrapping the {@link ImageReader} that handles JPEG image
+ * captures. This is used to allow us to clean up the {@link ImageReader} when all background
+ * tasks using its {@link Image}s have completed.
+ */
+ private RefCountedAutoCloseable mJpegImageReader;
+
+ /**
+ * A reference counted holder wrapping the {@link ImageReader} that handles RAW image captures.
+ * This is used to allow us to clean up the {@link ImageReader} when all background tasks using
+ * its {@link Image}s have completed.
+ */
+ private RefCountedAutoCloseable mRawImageReader;
+
+ /**
+ * Whether or not the currently configured camera device is fixed-focus.
+ */
+ private boolean mNoAFRun = false;
+
+ /**
+ * Number of pending user requests to capture a photo.
+ */
+ private int mPendingUserCaptures = 0;
+
+ /**
+ * Request ID to {@link ImageSaver.ImageSaverBuilder} mapping for in-progress JPEG captures.
+ */
+ private final TreeMap mJpegResultQueue = new TreeMap<>();
+
+ /**
+ * Request ID to {@link ImageSaver.ImageSaverBuilder} mapping for in-progress RAW captures.
+ */
+ private final TreeMap mRawResultQueue = new TreeMap<>();
+
+ /**
+ * {@link CaptureRequest.Builder} for the camera preview
+ */
+ private CaptureRequest.Builder mPreviewRequestBuilder;
+
+ /**
+ * The state of the camera device.
+ *
+ * @see #mPreCaptureCallback
+ */
+ private int mState = STATE_CLOSED;
+
+ /**
+ * Timer to use with pre-capture sequence to ensure a timely capture if 3A convergence is
+ * taking too long.
+ */
+ private long mCaptureTimer;
+
+ //**********************************************************************************************
+
+ /**
+ * {@link CameraDevice.StateCallback} is called when the currently active {@link CameraDevice}
+ * changes its state.
+ */
+ private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
+
+ @Override
+ public void onOpened(CameraDevice cameraDevice) {
+ // This method is called when the camera is opened. We start camera preview here if
+ // the TextureView displaying this has been set up.
+ synchronized (mCameraStateLock) {
+ mState = STATE_OPENED;
+ mCameraOpenCloseLock.release();
+ mCameraDevice = cameraDevice;
+
+ // Start the preview session if the TextureView has been set up already.
+ if (mPreviewSize != null && mTextureView.isAvailable()) {
+ createCameraPreviewSessionLocked();
+ }
+ }
+ }
+
+ @Override
+ public void onDisconnected(CameraDevice cameraDevice) {
+ synchronized (mCameraStateLock) {
+ mState = STATE_CLOSED;
+ mCameraOpenCloseLock.release();
+ cameraDevice.close();
+ mCameraDevice = null;
+ }
+ }
+
+ @Override
+ public void onError(CameraDevice cameraDevice, int error) {
+ Log.e(TAG, "Received camera device error: " + error);
+ synchronized (mCameraStateLock) {
+ mState = STATE_CLOSED;
+ mCameraOpenCloseLock.release();
+ cameraDevice.close();
+ mCameraDevice = null;
+ }
+ finish();
+ }
+
+ };
+
+ /**
+ * This a callback object for the {@link ImageReader}. "onImageAvailable" will be called when a
+ * JPEG image is ready to be saved.
+ */
+ private final ImageReader.OnImageAvailableListener mOnJpegImageAvailableListener
+ = new ImageReader.OnImageAvailableListener() {
+
+ @Override
+ public void onImageAvailable(ImageReader reader) {
+ dequeueAndSaveImage(mJpegResultQueue, mJpegImageReader);
+ }
+
+ };
+
+ /**
+ * This a callback object for the {@link ImageReader}. "onImageAvailable" will be called when a
+ * RAW image is ready to be saved.
+ */
+ private final ImageReader.OnImageAvailableListener mOnRawImageAvailableListener
+ = new ImageReader.OnImageAvailableListener() {
+
+ @Override
+ public void onImageAvailable(ImageReader reader) {
+ dequeueAndSaveImage(mRawResultQueue, mRawImageReader);
+ }
+
+ };
+
+ /**
+ * A {@link CameraCaptureSession.CaptureCallback} that handles events for the preview and
+ * pre-capture sequence.
+ */
+ private CameraCaptureSession.CaptureCallback mPreCaptureCallback
+ = new CameraCaptureSession.CaptureCallback() {
+
+ private void process(CaptureResult result) {
+ synchronized (mCameraStateLock) {
+ switch (mState) {
+ case STATE_PREVIEW: {
+ // We have nothing to do when the camera preview is running normally.
+ break;
+ }
+ case STATE_WAITING_FOR_3A_CONVERGENCE: {
+ boolean readyToCapture = true;
+ if (!mNoAFRun) {
+ Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
+ if (afState == null) {
+ break;
+ }
+
+ // If auto-focus has reached locked state, we are ready to capture
+ readyToCapture =
+ (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED ||
+ afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
+ }
+
+ // If we are running on an non-legacy device, we should also wait until
+ // auto-exposure and auto-white-balance have converged as well before
+ // taking a picture.
+ if (!isLegacyLocked()) {
+ Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
+ Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
+ if (aeState == null || awbState == null) {
+ break;
+ }
+
+ readyToCapture = readyToCapture &&
+ aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED &&
+ awbState == CaptureResult.CONTROL_AWB_STATE_CONVERGED;
+ }
+
+ // If we haven't finished the pre-capture sequence but have hit our maximum
+ // wait timeout, too bad! Begin capture anyway.
+ if (!readyToCapture && hitTimeoutLocked()) {
+ Log.w(TAG, "Timed out waiting for pre-capture sequence to complete.");
+ readyToCapture = true;
+ }
+
+ if (readyToCapture && mPendingUserCaptures > 0) {
+ // Capture once for each user tap of the "Picture" button.
+ while (mPendingUserCaptures > 0) {
+ captureStillPictureLocked();
+ mPendingUserCaptures--;
+ }
+ // After this, the camera will go back to the normal state of preview.
+ mState = STATE_PREVIEW;
+ }
+ }
+ }
+ }
+ }
+
+ @Override
+ public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
+ CaptureResult partialResult) {
+ process(partialResult);
+ }
+
+ @Override
+ public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
+ TotalCaptureResult result) {
+ process(result);
+ }
+
+ };
+
+ /**
+ * A {@link CameraCaptureSession.CaptureCallback} that handles the still JPEG and RAW capture
+ * request.
+ */
+ private final CameraCaptureSession.CaptureCallback mCaptureCallback
+ = new CameraCaptureSession.CaptureCallback() {
+ @Override
+ public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
+ long timestamp, long frameNumber) {
+ String currentDateTime = generateTimestamp();
+ File rawFile = new File(Environment.
+ getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM),
+ "RAW_" + currentDateTime + ".dng");
+ File jpegFile = new File(Environment.
+ getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM),
+ "JPEG_" + currentDateTime + ".jpg");
+
+ // Look up the ImageSaverBuilder for this request and update it with the file name
+ // based on the capture start time.
+ ImageSaver.ImageSaverBuilder jpegBuilder;
+ ImageSaver.ImageSaverBuilder rawBuilder;
+ int requestId = (int) request.getTag();
+ synchronized (mCameraStateLock) {
+ jpegBuilder = mJpegResultQueue.get(requestId);
+ rawBuilder = mRawResultQueue.get(requestId);
+ }
+
+ if (jpegBuilder != null) jpegBuilder.setFile(jpegFile);
+ if (rawBuilder != null) rawBuilder.setFile(rawFile);
+ }
+
+ @Override
+ public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
+ TotalCaptureResult result) {
+ int requestId = (int) request.getTag();
+ ImageSaver.ImageSaverBuilder jpegBuilder;
+ ImageSaver.ImageSaverBuilder rawBuilder;
+ StringBuilder sb = new StringBuilder();
+
+ // Look up the ImageSaverBuilder for this request and update it with the CaptureResult
+ synchronized (mCameraStateLock) {
+ jpegBuilder = mJpegResultQueue.get(requestId);
+ rawBuilder = mRawResultQueue.get(requestId);
+
+ if (jpegBuilder != null) {
+ jpegBuilder.setResult(result);
+ sb.append("Saving JPEG as: ");
+ sb.append(jpegBuilder.getSaveLocation());
+ }
+ if (rawBuilder != null) {
+ rawBuilder.setResult(result);
+ if (jpegBuilder != null) sb.append(", ");
+ sb.append("Saving RAW as: ");
+ sb.append(rawBuilder.getSaveLocation());
+ }
+
+ // If we have all the results necessary, save the image to a file in the background.
+ handleCompletionLocked(requestId, jpegBuilder, mJpegResultQueue);
+ handleCompletionLocked(requestId, rawBuilder, mRawResultQueue);
+
+ finishedCaptureLocked();
+ }
+ }
+
+ @Override
+ public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
+ CaptureFailure failure) {
+ int requestId = (int) request.getTag();
+ synchronized (mCameraStateLock) {
+ mJpegResultQueue.remove(requestId);
+ mRawResultQueue.remove(requestId);
+ finishedCaptureLocked();
+ }
+ // showToast("Capture failed!");
+ }
+
+ };
+
+ /**
+ * A {@link Handler} for showing {@link Toast}s on the UI thread.
+ */
+ private final Handler mMessageHandler = new Handler(Looper.getMainLooper()) {
+ @Override
+ public void handleMessage(Message msg) {
+
+ }
+ };
+
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ setContentView(R.layout.activity_raw);
+
+ mTextureView = (AutoFitTextureView) findViewById(R.id.texture);
+
+ // Setup a new OrientationEventListener. This is used to handle rotation events like a
+ // 180 degree rotation that do not normally trigger a call to onCreate to do view re-layout
+ // or otherwise cause the preview TextureView's size to change.
+ mOrientationListener = new OrientationEventListener(this,
+ SensorManager.SENSOR_DELAY_NORMAL) {
+ @Override
+ public void onOrientationChanged(int orientation) {
+ if (mTextureView != null && mTextureView.isAvailable()) {
+ configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
+ }
+ }
+ };
+
+ /*
+ mCameraId = intent.getIntExtra("cameraId", 0);
+ mDuration = intent.getIntExtra("duration", 0);
+ mVideoWidth = intent.getIntExtra("width", 0);
+ mVideoHeight = intent.getIntExtra("height", 0);
+ mOrientation = intent.getIntExtra("orientation", -1);
+*/
+
+ mMessageHandler.postDelayed(new Runnable() {
+ @Override
+ public void run() {
+ takePicture();
+ }
+ }, 400);
+ }
+
+
+ @Override
+ public void onResume() {
+ super.onResume();
+ startBackgroundThread();
+ openCamera();
+
+ // When the screen is turned off and turned back on, the SurfaceTexture is already
+ // available, and "onSurfaceTextureAvailable" will not be called. In that case, we should
+ // configure the preview bounds here (otherwise, we wait until the surface is ready in
+ // the SurfaceTextureListener).
+ if (mTextureView.isAvailable()) {
+ configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
+ } else {
+ mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
+ }
+ if (mOrientationListener != null && mOrientationListener.canDetectOrientation()) {
+ mOrientationListener.enable();
+ }
+ }
+
+ @Override
+ public void onPause() {
+ if (mOrientationListener != null) {
+ mOrientationListener.disable();
+ }
+ closeCamera();
+ stopBackgroundThread();
+ super.onPause();
+ }
+
+ /**
+ * Sets up state related to camera that is needed before opening a {@link CameraDevice}.
+ */
+ private boolean setUpCameraOutputs() {
+ CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
+ if (manager == null) {
+ // ErrorDialog.buildErrorDialog("This device doesn't support Camera2 API.").
+ // show(getFragmentManager(), "dialog");
+ return false;
+ }
+ try {
+ String[] cameraIds = manager.getCameraIdList();
+ // Find a CameraDevice that supports RAW captures, and configure state.
+ for (String cameraId : cameraIds) {
+ CameraCharacteristics characteristics
+ = manager.getCameraCharacteristics(cameraId);
+
+ int[] capabilities = characteristics.get(
+ CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
+ // We only use a camera that supports RAW in this sample.
+ if (!contains(capabilities,
+ CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
+ continue;
+ }
+
+ Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
+ if (facing != CameraMetadata.LENS_FACING_FRONT) {
+ continue;
+ }
+
+ StreamConfigurationMap map = characteristics.get(
+ CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+
+ // For still image captures, we use the largest available size.
+ Size largestJpeg = Collections.max(
+ Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
+ new CompareSizesByArea());
+
+ Size largestRaw = Collections.max(
+ Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)),
+ new CompareSizesByArea());
+
+ synchronized (mCameraStateLock) {
+ // Set up ImageReaders for JPEG and RAW outputs. Place these in a reference
+ // counted wrapper to ensure they are only closed when all background tasks
+ // using them are finished.
+ if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) {
+ mJpegImageReader = new RefCountedAutoCloseable<>(
+ ImageReader.newInstance(largestJpeg.getWidth(),
+ largestJpeg.getHeight(), ImageFormat.JPEG, /*maxImages*/5));
+ }
+ mJpegImageReader.get().setOnImageAvailableListener(
+ mOnJpegImageAvailableListener, mBackgroundHandler);
+
+ if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) {
+ mRawImageReader = new RefCountedAutoCloseable<>(
+ ImageReader.newInstance(largestRaw.getWidth(),
+ largestRaw.getHeight(), ImageFormat.RAW_SENSOR, /*maxImages*/ 5));
+ }
+ mRawImageReader.get().setOnImageAvailableListener(
+ mOnRawImageAvailableListener, mBackgroundHandler);
+
+ mCharacteristics = characteristics;
+ mCameraId = cameraId;
+ }
+ return true;
+ }
+ } catch (CameraAccessException e) {
+ e.printStackTrace();
+ }
+
+ // If we found no suitable cameras for capturing RAW, warn the user.
+ // ErrorDialog.buildErrorDialog("This device doesn't support capturing RAW photos").
+ // show(getFragmentManager(), "dialog");
+ return false;
+ }
+
+ /**
+ * Opens the camera specified by {@link #mCameraId}.
+ */
+ @SuppressWarnings("MissingPermission")
+ private void openCamera() {
+ if (!setUpCameraOutputs()) {
+ return;
+ }
+
+ CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
+ try {
+ // Wait for any previously running session to finish.
+ if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
+ throw new RuntimeException("Time out waiting to lock camera opening.");
+ }
+
+ String cameraId;
+ Handler backgroundHandler;
+ synchronized (mCameraStateLock) {
+ cameraId = mCameraId;
+ backgroundHandler = mBackgroundHandler;
+ }
+
+ // Attempt to open the camera. mStateCallback will be called on the background handler's
+ // thread when this succeeds or fails.
+ manager.openCamera(cameraId, mStateCallback, backgroundHandler);
+ } catch (CameraAccessException e) {
+ e.printStackTrace();
+ } catch (InterruptedException e) {
+ throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
+ }
+ }
+
+ /**
+ * Closes the current {@link CameraDevice}.
+ */
+ private void closeCamera() {
+ try {
+ mCameraOpenCloseLock.acquire();
+ synchronized (mCameraStateLock) {
+
+ // Reset state and clean up resources used by the camera.
+ // Note: After calling this, the ImageReaders will be closed after any background
+ // tasks saving Images from these readers have been completed.
+ mPendingUserCaptures = 0;
+ mState = STATE_CLOSED;
+ if (null != mCaptureSession) {
+ mCaptureSession.close();
+ mCaptureSession = null;
+ }
+ if (null != mCameraDevice) {
+ mCameraDevice.close();
+ mCameraDevice = null;
+ }
+ if (null != mJpegImageReader) {
+ mJpegImageReader.close();
+ mJpegImageReader = null;
+ }
+ if (null != mRawImageReader) {
+ mRawImageReader.close();
+ mRawImageReader = null;
+ }
+ }
+ } catch (InterruptedException e) {
+ throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
+ } finally {
+ mCameraOpenCloseLock.release();
+ }
+ }
+
+ /**
+ * Starts a background thread and its {@link Handler}.
+ */
+ private void startBackgroundThread() {
+ mBackgroundThread = new HandlerThread("CameraBackground");
+ mBackgroundThread.start();
+ synchronized (mCameraStateLock) {
+ mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
+ }
+ }
+
+ /**
+ * Stops the background thread and its {@link Handler}.
+ */
+ private void stopBackgroundThread() {
+ mBackgroundThread.quitSafely();
+ try {
+ mBackgroundThread.join();
+ mBackgroundThread = null;
+ synchronized (mCameraStateLock) {
+ mBackgroundHandler = null;
+ }
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+  * Creates a new {@link CameraCaptureSession} for camera preview.
+  *
+  * The session is configured with three outputs — the preview Surface plus the JPEG and
+  * RAW ImageReader surfaces — so later still captures can reuse this same session.
+  *
+  * Call this only with {@link #mCameraStateLock} held.
+  */
+ private void createCameraPreviewSessionLocked() {
+ try {
+ SurfaceTexture texture = mTextureView.getSurfaceTexture();
+ // We configure the size of default buffer to be the size of camera preview we want.
+ texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
+
+ // This is the output Surface we need to start preview.
+ Surface surface = new Surface(texture);
+
+ // We set up a CaptureRequest.Builder with the output Surface.
+ mPreviewRequestBuilder
+ = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+ mPreviewRequestBuilder.addTarget(surface);
+
+ // Here, we create a CameraCaptureSession for camera preview.
+ mCameraDevice.createCaptureSession(Arrays.asList(surface,
+ mJpegImageReader.get().getSurface(),
+ mRawImageReader.get().getSurface()), new CameraCaptureSession.StateCallback() {
+ @Override
+ public void onConfigured(CameraCaptureSession cameraCaptureSession) {
+ // Callback runs on mBackgroundHandler; re-acquire the state lock here.
+ synchronized (mCameraStateLock) {
+ // The camera is already closed
+ if (null == mCameraDevice) {
+ return;
+ }
+
+ try {
+ setup3AControlsLocked(mPreviewRequestBuilder);
+ // Finally, we start displaying the camera preview.
+ cameraCaptureSession.setRepeatingRequest(
+ mPreviewRequestBuilder.build(),
+ mPreCaptureCallback, mBackgroundHandler);
+ mState = STATE_PREVIEW;
+ } catch (CameraAccessException | IllegalStateException e) {
+ e.printStackTrace();
+ return;
+ }
+ // When the session is ready, we start displaying the preview.
+ mCaptureSession = cameraCaptureSession;
+ }
+ }
+
+ @Override
+ public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
+ // NOTE(review): configuration failure is currently swallowed; the original
+ // sample surfaced it via a toast.
+ // showToast("Failed to configure camera.");
+ }
+ }, mBackgroundHandler
+ );
+ } catch (CameraAccessException e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+  * Configure the given {@link CaptureRequest.Builder} to use auto-focus, auto-exposure, and
+  * auto-white-balance controls if available.
+  *
+  * Also updates {@code mNoAFRun} as a side effect, which the capture state machine reads
+  * later (e.g. in takePicture() and finishedCaptureLocked()).
+  *
+  * Call this only with {@link #mCameraStateLock} held.
+  *
+  * @param builder the builder to configure.
+  */
+ private void setup3AControlsLocked(CaptureRequest.Builder builder) {
+ // Enable auto-magical 3A run by camera device
+ builder.set(CaptureRequest.CONTROL_MODE,
+ CaptureRequest.CONTROL_MODE_AUTO);
+
+ Float minFocusDist =
+ mCharacteristics.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
+
+ // If MINIMUM_FOCUS_DISTANCE is 0, lens is fixed-focus and we need to skip the AF run.
+ // A null value (key not reported) is treated the same way.
+ mNoAFRun = (minFocusDist == null || minFocusDist == 0);
+
+ if (!mNoAFRun) {
+ // If there is a "continuous picture" mode available, use it, otherwise default to AUTO.
+ if (contains(mCharacteristics.get(
+ CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES),
+ CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE)) {
+ builder.set(CaptureRequest.CONTROL_AF_MODE,
+ CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
+ } else {
+ builder.set(CaptureRequest.CONTROL_AF_MODE,
+ CaptureRequest.CONTROL_AF_MODE_AUTO);
+ }
+ }
+
+ // If there is an auto-magical flash control mode available, use it, otherwise default to
+ // the "on" mode, which is guaranteed to always be available.
+ if (contains(mCharacteristics.get(
+ CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES),
+ CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH)) {
+ builder.set(CaptureRequest.CONTROL_AE_MODE,
+ CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
+ } else {
+ builder.set(CaptureRequest.CONTROL_AE_MODE,
+ CaptureRequest.CONTROL_AE_MODE_ON);
+ }
+
+ // If there is an auto-magical white balance control mode available, use it.
+ if (contains(mCharacteristics.get(
+ CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES),
+ CaptureRequest.CONTROL_AWB_MODE_AUTO)) {
+ // Allow AWB to run auto-magically if this device supports this
+ builder.set(CaptureRequest.CONTROL_AWB_MODE,
+ CaptureRequest.CONTROL_AWB_MODE_AUTO);
+ }
+ }
+
+ /**
+  * Configure the necessary {@link android.graphics.Matrix} transformation to `mTextureView`,
+  * and start/restart the preview capture session if necessary.
+  *
+  * This method should be called after the camera state has been initialized in
+  * setUpCameraOutputs.
+  *
+  * Acquires {@link #mCameraStateLock} itself, so it is safe to call from any thread.
+  *
+  * @param viewWidth The width of `mTextureView`
+  * @param viewHeight The height of `mTextureView`
+  */
+ private void configureTransform(int viewWidth, int viewHeight) {
+ synchronized (mCameraStateLock) {
+ if (null == mTextureView) {
+ return;
+ }
+
+ StreamConfigurationMap map = mCharacteristics.get(
+ CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+
+ // For still image captures, we always use the largest available size.
+ Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
+ new CompareSizesByArea());
+
+ // Find the rotation of the device relative to the native device orientation.
+ int deviceRotation = getWindowManager().getDefaultDisplay().getRotation();
+ Point displaySize = new Point();
+ getWindowManager().getDefaultDisplay().getSize(displaySize);
+
+ // Find the rotation of the device relative to the camera sensor's orientation.
+ int totalRotation = sensorToDeviceRotation(mCharacteristics, deviceRotation);
+
+ // Swap the view dimensions for calculation as needed if they are rotated relative to
+ // the sensor.
+ boolean swappedDimensions = totalRotation == 90 || totalRotation == 270;
+ int rotatedViewWidth = viewWidth;
+ int rotatedViewHeight = viewHeight;
+ int maxPreviewWidth = displaySize.x;
+ int maxPreviewHeight = displaySize.y;
+
+ if (swappedDimensions) {
+ rotatedViewWidth = viewHeight;
+ rotatedViewHeight = viewWidth;
+ maxPreviewWidth = displaySize.y;
+ maxPreviewHeight = displaySize.x;
+ }
+
+ // Preview should not be larger than display size and 1080p.
+ if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
+ maxPreviewWidth = MAX_PREVIEW_WIDTH;
+ }
+
+ if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
+ maxPreviewHeight = MAX_PREVIEW_HEIGHT;
+ }
+
+ // Find the best preview size for these view dimensions and configured JPEG size.
+ Size previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
+ rotatedViewWidth, rotatedViewHeight, maxPreviewWidth, maxPreviewHeight,
+ largestJpeg);
+
+ if (swappedDimensions) {
+ mTextureView.setAspectRatio(
+ previewSize.getHeight(), previewSize.getWidth());
+ } else {
+ mTextureView.setAspectRatio(
+ previewSize.getWidth(), previewSize.getHeight());
+ }
+
+ // Find rotation of device in degrees (reverse device orientation for front-facing
+ // cameras).
+ int rotation = (mCharacteristics.get(CameraCharacteristics.LENS_FACING) ==
+ CameraCharacteristics.LENS_FACING_FRONT) ?
+ (360 + ORIENTATIONS.get(deviceRotation)) % 360 :
+ (360 - ORIENTATIONS.get(deviceRotation)) % 360;
+
+ Matrix matrix = new Matrix();
+ RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
+ // Buffer dimensions are intentionally swapped: the buffer is in sensor coordinates.
+ RectF bufferRect = new RectF(0, 0, previewSize.getHeight(), previewSize.getWidth());
+ float centerX = viewRect.centerX();
+ float centerY = viewRect.centerY();
+
+ // Initially, output stream images from the Camera2 API will be rotated to the native
+ // device orientation from the sensor's orientation, and the TextureView will default to
+ // scaling these buffers to fill it's view bounds. If the aspect ratios and relative
+ // orientations are correct, this is fine.
+ //
+ // However, if the device orientation has been rotated relative to its native
+ // orientation so that the TextureView's dimensions are swapped relative to the
+ // native device orientation, we must do the following to ensure the output stream
+ // images are not incorrectly scaled by the TextureView:
+ // - Undo the scale-to-fill from the output buffer's dimensions (i.e. its dimensions
+ // in the native device orientation) to the TextureView's dimension.
+ // - Apply a scale-to-fill from the output buffer's rotated dimensions
+ // (i.e. its dimensions in the current device orientation) to the TextureView's
+ // dimensions.
+ // - Apply the rotation from the native device orientation to the current device
+ // rotation.
+ if (Surface.ROTATION_90 == deviceRotation || Surface.ROTATION_270 == deviceRotation) {
+ bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
+ matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
+ float scale = Math.max(
+ (float) viewHeight / previewSize.getHeight(),
+ (float) viewWidth / previewSize.getWidth());
+ matrix.postScale(scale, scale, centerX, centerY);
+
+ }
+ matrix.postRotate(rotation, centerX, centerY);
+
+ mTextureView.setTransform(matrix);
+
+ // Start or restart the active capture session if the preview was initialized or
+ // if its aspect ratio changed significantly.
+ if (mPreviewSize == null || !checkAspectsEqual(previewSize, mPreviewSize)) {
+ mPreviewSize = previewSize;
+ if (mState != STATE_CLOSED) {
+ createCameraPreviewSessionLocked();
+ }
+ }
+ }
+ }
+
+ /**
+  * Initiate a still image capture.
+  *
+  * This function sends a capture request that initiates a pre-capture sequence in our state
+  * machine that waits for auto-focus to finish, ending in a "locked" state where the lens is no
+  * longer moving, waits for auto-exposure to choose a good exposure value, and waits for
+  * auto-white-balance to converge.
+  */
+ private void takePicture() {
+ synchronized (mCameraStateLock) {
+ // Count the user request even if a pre-capture sequence is already in flight; the
+ // pending count is consumed once 3A has converged.
+ mPendingUserCaptures++;
+
+ // If we already triggered a pre-capture sequence, or are in a state where we cannot
+ // do this, return immediately.
+ if (mState != STATE_PREVIEW) {
+ return;
+ }
+
+ try {
+ // Trigger an auto-focus run if camera is capable. If the camera is already focused,
+ // this should do nothing.
+ if (!mNoAFRun) {
+ mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
+ CameraMetadata.CONTROL_AF_TRIGGER_START);
+ }
+
+ // If this is not a legacy device, we can also trigger an auto-exposure metering
+ // run.
+ if (!isLegacyLocked()) {
+ // Tell the camera to lock focus.
+ mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
+ CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
+ }
+
+ // Update state machine to wait for auto-focus, auto-exposure, and
+ // auto-white-balance (aka. "3A") to converge.
+ mState = STATE_WAITING_FOR_3A_CONVERGENCE;
+
+ // Start a timer for the pre-capture sequence.
+ startTimerLocked();
+
+ // Replace the existing repeating request with one with updated 3A triggers.
+ mCaptureSession.capture(mPreviewRequestBuilder.build(), mPreCaptureCallback,
+ mBackgroundHandler);
+ } catch (CameraAccessException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+ /**
+  * Send a capture request to the camera device that initiates a capture targeting the JPEG and
+  * RAW outputs.
+  *
+  * A builder for each output format is queued under the request's tag so the asynchronous
+  * capture-result and image callbacks can later pair up results with images.
+  *
+  * Call this only with {@link #mCameraStateLock} held.
+  */
+ private void captureStillPictureLocked() {
+ try {
+ if (null == mCameraDevice) {
+ return;
+ }
+ // This is the CaptureRequest.Builder that we use to take a picture.
+ final CaptureRequest.Builder captureBuilder =
+ mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
+
+ captureBuilder.addTarget(mJpegImageReader.get().getSurface());
+ captureBuilder.addTarget(mRawImageReader.get().getSurface());
+
+ // Use the same AE and AF modes as the preview.
+ setup3AControlsLocked(captureBuilder);
+
+ // Set orientation.
+ int rotation = getWindowManager().getDefaultDisplay().getRotation();
+ captureBuilder.set(CaptureRequest.JPEG_ORIENTATION,
+ sensorToDeviceRotation(mCharacteristics, rotation));
+
+ // Set request tag to easily track results in callbacks.
+ captureBuilder.setTag(mRequestCounter.getAndIncrement());
+
+ CaptureRequest request = captureBuilder.build();
+
+ // Create an ImageSaverBuilder in which to collect results, and add it to the queue
+ // of active requests.
+ ImageSaver.ImageSaverBuilder jpegBuilder = new ImageSaver.ImageSaverBuilder(this)
+ .setCharacteristics(mCharacteristics);
+ ImageSaver.ImageSaverBuilder rawBuilder = new ImageSaver.ImageSaverBuilder(this)
+ .setCharacteristics(mCharacteristics);
+
+ mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
+ mRawResultQueue.put((int) request.getTag(), rawBuilder);
+
+ mCaptureSession.capture(request, mCaptureCallback, mBackgroundHandler);
+
+ } catch (CameraAccessException e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+  * Called after a RAW/JPEG capture has completed; resets the AF trigger state for the
+  * pre-capture sequence.
+  *
+  * Call this only with {@link #mCameraStateLock} held.
+  */
+ private void finishedCaptureLocked() {
+ try {
+ // Reset the auto-focus trigger in case AF didn't run quickly enough.
+ if (!mNoAFRun) {
+ // First issue one capture with AF_TRIGGER_CANCEL to abort any in-flight AF scan...
+ mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
+ CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
+
+ mCaptureSession.capture(mPreviewRequestBuilder.build(), mPreCaptureCallback,
+ mBackgroundHandler);
+
+ // ...then restore the builder to IDLE so subsequent repeating requests carry no
+ // trigger.
+ mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
+ CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
+ }
+ } catch (CameraAccessException e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+  * Retrieve the next {@link Image} from a reference counted {@link ImageReader}, retaining
+  * that {@link ImageReader} until that {@link Image} is no longer in use, and set this
+  * {@link Image} as the result for the next request in the queue of pending requests. If
+  * all necessary information is available, begin saving the image to a file in a background
+  * thread.
+  *
+  * @param pendingQueue the currently active requests.
+  * @param reader a reference counted wrapper containing an {@link ImageReader} from which
+  * to acquire an image.
+  */
+ private void dequeueAndSaveImage(TreeMap<Integer, ImageSaver.ImageSaverBuilder> pendingQueue,
+ RefCountedAutoCloseable<ImageReader> reader) {
+ synchronized (mCameraStateLock) {
+ // Generic type arguments restored here (they had been stripped to raw types): with a
+ // raw Map.Entry, getValue() returns Object and the assignment below does not compile.
+ Map.Entry<Integer, ImageSaver.ImageSaverBuilder> entry =
+ pendingQueue.firstEntry();
+ ImageSaver.ImageSaverBuilder builder = entry.getValue();
+
+ // Increment reference count to prevent ImageReader from being closed while we
+ // are saving its Images in a background thread (otherwise their resources may
+ // be freed while we are writing to a file).
+ if (reader == null || reader.getAndRetain() == null) {
+ Log.e(TAG, "Paused the activity before we could save the image," +
+ " ImageReader already closed.");
+ pendingQueue.remove(entry.getKey());
+ return;
+ }
+
+ Image image;
+ try {
+ image = reader.get().acquireNextImage();
+ } catch (IllegalStateException e) {
+ Log.e(TAG, "Too many images queued for saving, dropping image for request: " +
+ entry.getKey());
+ pendingQueue.remove(entry.getKey());
+ return;
+ }
+
+ builder.setRefCountedReader(reader).setImage(image);
+
+ // If the builder now has image + result + file, this dispatches an ImageSaver.
+ handleCompletionLocked(entry.getKey(), builder, pendingQueue);
+ }
+ }
+
+ /**
+ * Runnable that saves an {@link Image} into the specified {@link File}, and updates
+ * {@link android.provider.MediaStore} to include the resulting file.
+ *
+ * This can be constructed through an {@link ImageSaverBuilder} as the necessary image and
+ * result information becomes available.
+ */
+ private static class ImageSaver implements Runnable {
+
+ /**
+ * The image to save.
+ */
+ private final Image mImage;
+ /**
+ * The file we save the image into.
+ */
+ private final File mFile;
+
+ /**
+ * The CaptureResult for this image capture.
+ */
+ private final CaptureResult mCaptureResult;
+
+ /**
+ * The CameraCharacteristics for this camera device.
+ */
+ private final CameraCharacteristics mCharacteristics;
+
+ /**
+ * The Context to use when updating MediaStore with the saved images.
+ */
+ private final Context mContext;
+
+ /**
+ * A reference counted wrapper for the ImageReader that owns the given image.
+ */
+ private final RefCountedAutoCloseable mReader;
+
+ private ImageSaver(Image image, File file, CaptureResult result,
+ CameraCharacteristics characteristics, Context context,
+ RefCountedAutoCloseable reader) {
+ mImage = image;
+ mFile = file;
+ mCaptureResult = result;
+ mCharacteristics = characteristics;
+ mContext = context;
+ mReader = reader;
+ }
+
+ @Override
+ public void run() {
+ boolean success = false;
+ int format = mImage.getFormat();
+ switch (format) {
+ case ImageFormat.JPEG: {
+ ByteBuffer buffer = mImage.getPlanes()[0].getBuffer();
+ byte[] bytes = new byte[buffer.remaining()];
+ buffer.get(bytes);
+ FileOutputStream output = null;
+ try {
+ output = new FileOutputStream(mFile);
+ output.write(bytes);
+ success = true;
+ } catch (IOException e) {
+ e.printStackTrace();
+ } finally {
+ mImage.close();
+ closeOutput(output);
+ }
+ break;
+ }
+ case ImageFormat.RAW_SENSOR: {
+ DngCreator dngCreator = new DngCreator(mCharacteristics, mCaptureResult);
+ FileOutputStream output = null;
+ try {
+ output = new FileOutputStream(mFile);
+ dngCreator.writeImage(output, mImage);
+ success = true;
+ } catch (IOException e) {
+ e.printStackTrace();
+ } finally {
+ mImage.close();
+ closeOutput(output);
+ }
+ break;
+ }
+ default: {
+ Log.e(TAG, "Cannot save image, unexpected image format:" + format);
+ break;
+ }
+ }
+
+ // Decrement reference count to allow ImageReader to be closed to free up resources.
+ mReader.close();
+
+ // If saving the file succeeded, update MediaStore.
+ if (success) {
+ MediaScannerConnection.scanFile(mContext, new String[]{mFile.getPath()},
+ /*mimeTypes*/null, new MediaScannerConnection.MediaScannerConnectionClient() {
+ @Override
+ public void onMediaScannerConnected() {
+ // Do nothing
+ }
+
+ @Override
+ public void onScanCompleted(String path, Uri uri) {
+ Log.i(TAG, "Scanned " + path + ":");
+ Log.i(TAG, "-> uri=" + uri);
+ }
+ });
+ }
+ }
+
+ /**
+ * Builder class for constructing {@link ImageSaver}s.
+ *
+ * This class is thread safe.
+ */
+ public static class ImageSaverBuilder {
+ private Image mImage;
+ private File mFile;
+ private CaptureResult mCaptureResult;
+ private CameraCharacteristics mCharacteristics;
+ private Context mContext;
+ private RefCountedAutoCloseable mReader;
+
+ /**
+ * Construct a new ImageSaverBuilder using the given {@link Context}.
+ *
+ * @param context a {@link Context} to for accessing the
+ * {@link android.provider.MediaStore}.
+ */
+ public ImageSaverBuilder(final Context context) {
+ mContext = context;
+ }
+
+ public synchronized ImageSaverBuilder setRefCountedReader(
+ RefCountedAutoCloseable reader) {
+ if (reader == null) throw new NullPointerException();
+
+ mReader = reader;
+ return this;
+ }
+
+ public synchronized ImageSaverBuilder setImage(final Image image) {
+ if (image == null) throw new NullPointerException();
+ mImage = image;
+ return this;
+ }
+
+ public synchronized ImageSaverBuilder setFile(final File file) {
+ if (file == null) throw new NullPointerException();
+ mFile = file;
+ return this;
+ }
+
+ public synchronized ImageSaverBuilder setResult(final CaptureResult result) {
+ if (result == null) throw new NullPointerException();
+ mCaptureResult = result;
+ return this;
+ }
+
+ public synchronized ImageSaverBuilder setCharacteristics(
+ final CameraCharacteristics characteristics) {
+ if (characteristics == null) throw new NullPointerException();
+ mCharacteristics = characteristics;
+ return this;
+ }
+
+ public synchronized ImageSaver buildIfComplete() {
+ if (!isComplete()) {
+ return null;
+ }
+ return new ImageSaver(mImage, mFile, mCaptureResult, mCharacteristics, mContext,
+ mReader);
+ }
+
+ public synchronized String getSaveLocation() {
+ return (mFile == null) ? "Unknown" : mFile.toString();
+ }
+
+ private boolean isComplete() {
+ return mImage != null && mFile != null && mCaptureResult != null
+ && mCharacteristics != null;
+ }
+ }
+ }
+
+ // Utility classes and methods:
+ // *********************************************************************************************
+
+ /**
+  * Comparator based on area of the given {@link Size} objects.
+  *
+  * The {@code Comparator<Size>} type argument is restored here: with the raw
+  * {@code Comparator}, the {@code @Override compare(Size, Size)} below would not implement
+  * {@code compare(Object, Object)} and the class would fail to compile.
+  */
+ static class CompareSizesByArea implements Comparator<Size> {
+
+ @Override
+ public int compare(Size lhs, Size rhs) {
+ // We cast here to ensure the multiplications won't overflow
+ return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
+ (long) rhs.getWidth() * rhs.getHeight());
+ }
+
+ }
+
+ /**
+  * A wrapper for an {@link AutoCloseable} object that implements reference counting to allow
+  * for resource management.
+  *
+  * The {@code <T extends AutoCloseable>} type parameter is restored here: the class body
+  * uses {@code T}, which was otherwise undeclared and would not compile.
+  */
+ public static class RefCountedAutoCloseable<T extends AutoCloseable> implements AutoCloseable {
+ private T mObject;
+ // -1 marks "released"; 0 means one live reference (the wrapper itself).
+ private long mRefCount = 0;
+
+ /**
+  * Wrap the given object.
+  *
+  * @param object an object to wrap.
+  */
+ public RefCountedAutoCloseable(T object) {
+ if (object == null) throw new NullPointerException();
+ mObject = object;
+ }
+
+ /**
+  * Increment the reference count and return the wrapped object.
+  *
+  * @return the wrapped object, or null if the object has been released.
+  */
+ public synchronized T getAndRetain() {
+ if (mRefCount < 0) {
+ return null;
+ }
+ mRefCount++;
+ return mObject;
+ }
+
+ /**
+  * Return the wrapped object.
+  *
+  * @return the wrapped object, or null if the object has been released.
+  */
+ public synchronized T get() {
+ return mObject;
+ }
+
+ /**
+  * Decrement the reference count and release the wrapped object if there are no other
+  * users retaining this object.
+  */
+ @Override
+ public synchronized void close() {
+ if (mRefCount >= 0) {
+ mRefCount--;
+ if (mRefCount < 0) {
+ try {
+ mObject.close();
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ } finally {
+ mObject = null;
+ }
+ }
+ }
+ }
+ }
+
+ /**
+  * Given {@code choices} of {@code Size}s supported by a camera, choose the smallest one that
+  * is at least as large as the respective texture view size, and that is at most as large as the
+  * respective max size, and whose aspect ratio matches with the specified value. If such size
+  * doesn't exist, choose the largest one that is at most as large as the respective max size,
+  * and whose aspect ratio matches with the specified value.
+  *
+  * @param choices The list of sizes that the camera supports for the intended output
+  * class
+  * @param textureViewWidth The width of the texture view relative to sensor coordinate
+  * @param textureViewHeight The height of the texture view relative to sensor coordinate
+  * @param maxWidth The maximum width that can be chosen
+  * @param maxHeight The maximum height that can be chosen
+  * @param aspectRatio The aspect ratio
+  * @return The optimal {@code Size}, or an arbitrary one if none were big enough
+  */
+ private static Size chooseOptimalSize(Size[] choices, int textureViewWidth,
+ int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) {
+ // Collect the supported resolutions that are at least as big as the preview Surface.
+ // (List<Size> type arguments restored; they had been stripped to raw types.)
+ List<Size> bigEnough = new ArrayList<>();
+ // Collect the supported resolutions that are smaller than the preview Surface
+ List<Size> notBigEnough = new ArrayList<>();
+ int w = aspectRatio.getWidth();
+ int h = aspectRatio.getHeight();
+ for (Size option : choices) {
+ // Integer arithmetic: option matches the aspect ratio exactly (up to truncation).
+ if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight &&
+ option.getHeight() == option.getWidth() * h / w) {
+ if (option.getWidth() >= textureViewWidth &&
+ option.getHeight() >= textureViewHeight) {
+ bigEnough.add(option);
+ } else {
+ notBigEnough.add(option);
+ }
+ }
+ }
+
+ // Pick the smallest of those big enough. If there is no one big enough, pick the
+ // largest of those not big enough.
+ if (bigEnough.size() > 0) {
+ return Collections.min(bigEnough, new CompareSizesByArea());
+ } else if (notBigEnough.size() > 0) {
+ return Collections.max(notBigEnough, new CompareSizesByArea());
+ } else {
+ Log.e(TAG, "Couldn't find any suitable preview size");
+ return choices[0];
+ }
+ }
+
+ /**
+  * Produce a timestamp string for the current moment.
+  *
+  * @return the current date and time formatted as "yyyy_MM_dd_HH_mm_ss_SSS" (US locale).
+  */
+ private static String generateTimestamp() {
+ return new SimpleDateFormat("yyyy_MM_dd_HH_mm_ss_SSS", Locale.US).format(new Date());
+ }
+
+ /**
+  * Cleanup the given {@link OutputStream}, ignoring a null argument and swallowing any
+  * {@link IOException} raised while closing (it is only logged via printStackTrace).
+  *
+  * @param outputStream the stream to close.
+  */
+ private static void closeOutput(OutputStream outputStream) {
+ if (outputStream == null) {
+ return;
+ }
+ try {
+ outputStream.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+  * Return true if the given array contains the given integer.
+  *
+  * @param modes array to check; may be null, in which case the result is false.
+  * @param mode integer to get for.
+  * @return true if the array contains the given integer, otherwise false.
+  */
+ private static boolean contains(int[] modes, int mode) {
+ if (modes != null) {
+ for (int index = 0; index < modes.length; index++) {
+ if (modes[index] == mode) {
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Return true if the two given {@link Size}s have the same aspect ratio.
+ *
+ * @param a first {@link Size} to compare.
+ * @param b second {@link Size} to compare.
+ * @return true if the sizes have the same aspect ratio, otherwise false.
+ */
+ private static boolean checkAspectsEqual(Size a, Size b) {
+ double aAspect = a.getWidth() / (double) a.getHeight();
+ double bAspect = b.getWidth() / (double) b.getHeight();
+ return Math.abs(aAspect - bAspect) <= ASPECT_RATIO_TOLERANCE;
+ }
+
+ /**
+  * Rotation need to transform from the camera sensor orientation to the device's current
+  * orientation.
+  *
+  * @param c the {@link CameraCharacteristics} to query for the camera sensor
+  * orientation.
+  * @param deviceOrientation the current device orientation relative to the native device
+  * orientation.
+  * @return the total rotation from the sensor orientation to the current device orientation.
+  */
+ private static int sensorToDeviceRotation(CameraCharacteristics c, int deviceOrientation) {
+ int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
+
+ // Get device orientation in degrees
+ deviceOrientation = ORIENTATIONS.get(deviceOrientation);
+
+ // Reverse device orientation for front-facing cameras
+ if (c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) {
+ deviceOrientation = -deviceOrientation;
+ }
+
+ // Calculate desired JPEG orientation relative to camera orientation to make
+ // the image upright relative to the device orientation.
+ // The "+ 360" keeps the left operand of % non-negative so the result is in [0, 360).
+ return (sensorOrientation - deviceOrientation + 360) % 360;
+ }
+
+ /**
+  * If the given request has been completed, remove it from the queue of active requests and
+  * send an {@link ImageSaver} with the results from this request to a background thread to
+  * save a file.
+  *
+  * Call this only with {@link #mCameraStateLock} held.
+  *
+  * @param requestId the ID of the {@link CaptureRequest} to handle.
+  * @param builder the {@link ImageSaver.ImageSaverBuilder} for this request.
+  * @param queue the queue to remove this request from, if completed.
+  */
+ private void handleCompletionLocked(int requestId, ImageSaver.ImageSaverBuilder builder,
+ TreeMap<Integer, ImageSaver.ImageSaverBuilder> queue) {
+ if (builder == null) return;
+ // buildIfComplete() returns null until image, file, result and characteristics are all set.
+ ImageSaver saver = builder.buildIfComplete();
+ if (saver != null) {
+ queue.remove(requestId);
+ // File I/O runs on the shared AsyncTask pool, not on the camera background handler.
+ AsyncTask.THREAD_POOL_EXECUTOR.execute(saver);
+ }
+ }
+
+ /**
+  * Check if we are using a device that only supports the LEGACY hardware level.
+  *
+  * Call this only with {@link #mCameraStateLock} held.
+  *
+  * @return true if this is a legacy device.
+  */
+ private boolean isLegacyLocked() {
+ // takePicture() consults this before issuing the AE pre-capture trigger, which is
+ // skipped on LEGACY devices.
+ return mCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL) ==
+ CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
+ }
+
+ /**
+  * Start the timer for the pre-capture sequence.
+  *
+  * The recorded start time is later read by hitTimeoutLocked().
+  *
+  * Call this only with {@link #mCameraStateLock} held.
+  */
+ private void startTimerLocked() {
+ // elapsedRealtime() is monotonic, so this is robust against wall-clock changes.
+ mCaptureTimer = SystemClock.elapsedRealtime();
+ }
+
+ /**
+  * Check if the timer for the pre-capture sequence has been hit.
+  *
+  * Call this only with {@link #mCameraStateLock} held.
+  *
+  * @return true if the timeout occurred.
+  */
+ private boolean hitTimeoutLocked() {
+ // Compares elapsed time since startTimerLocked() against PRECAPTURE_TIMEOUT_MS.
+ return (SystemClock.elapsedRealtime() - mCaptureTimer) > PRECAPTURE_TIMEOUT_MS;
+ }
+
+
+}
\ No newline at end of file
diff --git a/app/src/main/res/layout/activity_raw.xml b/app/src/main/res/layout/activity_raw.xml
new file mode 100644
index 00000000..42414527
--- /dev/null
+++ b/app/src/main/res/layout/activity_raw.xml
@@ -0,0 +1,17 @@
+
+
+
+
+
+
\ No newline at end of file