Implement RAW format photo capture

TempBranch
Matthew 9 months ago
parent bba323a08a
commit 54f89afc4f

@@ -92,8 +92,9 @@
         android:supportsRtl="true"
         android:theme="@style/Theme.MicroPhoto"
         tools:targetApi="28">
         <activity
-            android:name=".RawActivity"
+            android:name=".video.RawActivity"
             android:exported="false"
             android:screenOrientation="landscape" />
         <activity

@ -18,9 +18,11 @@
#include <android/log.h> #include <android/log.h>
#include <android/thermal.h> #include <android/thermal.h>
#include <android/imagedecoder.h>
#include <sys/system_properties.h> #include <sys/system_properties.h>
#include <mat.h> #include <mat.h>
#include <fcntl.h>
#include <filesystem> #include <filesystem>
namespace fs = std::filesystem; namespace fs = std::filesystem;
@@ -1333,7 +1335,7 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<
     TurnOnCameraPower(NULL);
     res = true;
-    if (mPhotoInfo.mediaType == 0)
+    if (mPhotoInfo.mediaType == 0 && mPhotoInfo.usingRawFormat == 0)
     {
         mCamera = new CPhoneCamera(this, photoInfo.width, photoInfo.height, params);
         // mCamera = new CJpegCamera(this, photoInfo.width, photoInfo.height, mPath, params);
@@ -1395,9 +1397,9 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<
     }

     int orientation = mPhotoInfo.orientation == 0 ? -1 : (mPhotoInfo.orientation - 1) * 90;
-    jboolean photoOrVideo = JNI_FALSE;
+    jboolean photoOrVideo = mPhotoInfo.mediaType == 0 ? JNI_TRUE : JNI_FALSE;
     env->CallVoidMethod(m_javaService, mStartRecordingMid, photoOrVideo, mPhotoInfo.cameraId, (unsigned long)mPhotoInfo.photoId,
-        mPhotoInfo.duration, mPhotoInfo.width, mPhotoInfo.height,mPhotoInfo.duration, orientation,
+        mPhotoInfo.duration, mPhotoInfo.width, mPhotoInfo.height, mPhotoInfo.duration, orientation,
         leftTopOSD, rightTopOSD, rightBottomOSD, leftBottomOSD);

     if (leftTopOSD) env->DeleteLocalRef(leftTopOSD);
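Taken together, the two TakePhoto hunks above change how a capture request is dispatched: plain JPEG stills keep using the in-process NdkCamera pipeline (CPhoneCamera), while RAW stills and videos are handed off to the Java service through the startRecording JNI call, whose photoOrVideo argument now tells the service which activity to launch. A minimal sketch of that routing, using the field names from the diff; the helper function itself is illustrative and not part of the commit:

    // Sketch only -- the commit writes this condition inline in CPhoneDevice::TakePhoto().
    // mediaType == 0 means a still photo; usingRawFormat != 0 requests RAW/DNG output.
    static bool UsesNativeCameraPipeline(const IDevice::PHOTO_INFO& photoInfo)
    {
        return photoInfo.mediaType == 0 && photoInfo.usingRawFormat == 0;
    }

    // Everything else goes through Java: photoOrVideo = JNI_TRUE selects RawActivity
    // (RAW still), JNI_FALSE selects Camera2VideoActivity (video), as wired up in
    // MicroPhotoService.makeRecordingIntent() later in this commit.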
@@ -1479,12 +1481,6 @@ void DrawOutlineText(cv::Ptr<cv::ft::FreeType2> ft2, cv::Mat& mat, const std::st
 bool CPhoneDevice::OnImageReady(cv::Mat& mat)
 {
-    if (mCamera == NULL)
-    {
-        // int aa = 0;
-        return false;
-    }
-
     time_t takingTime = time(NULL);
     if (mPhotoInfo.remedy != 0)
     {
@@ -1623,49 +1619,51 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat)
 // #ifdef OUTPUT_CAMERA_DBG_INFO
+    if (mCamera != NULL)
+    {
         NdkCamera::CAPTURE_RESULT captureResult = mCamera->getCaptureResult();
         if (mPhotoInfo.outputDbgInfo != 0)
         {
             cv::Scalar scalarRed(0, 0, 255); // red

             char extimeunit[4] = { 0 };
             unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000));
             strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs");
             char str[128] = { 0 };
             snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld",
                 captureResult.autoExposure, captureResult.autoFocus,
                 extime, extimeunit, captureResult.compensation, captureResult.sensitivity,
                 // isnan(captureResult.FocusDistance) ? 0 : captureResult.FocusDistance,
                 (unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState,
                 captureResult.sceneMode, GpioControl::getLightAdc(), (unsigned int)captureResult.avgY, captureResult.zoomRatio,
                 (uint32_t)captureResult.duration, captureResult.frameDuration);
             // cv::putText(mat, str, cv::Point(0, mat.rows - 20), cv::FONT_HERSHEY_COMPLEX, fontScale, scalarWhite, thickness1, cv::LINE_AA);

             int fs = fontSize * 2 / 3;
             textSize = ft2->getTextSize(str, fs, -1, &baseline);
             cv::Point lt(0, mat.rows - fs - 20 * ratio);
             cv::Point lt2(0, lt.y - 2 * ratio);
             cv::Point rb(0 + textSize.width + 2 * ratio, lt2.y + textSize.height + 8 * ratio);

             if (rb.x > (int)width - 1)
             {
                 rb.x = (int)width - 1;
             }
             if (rb.y > (int)height - 1)
             {
                 rb.y = (int)height - 1;
             }
             cv::Mat roi = mat(cv::Rect(lt2, rb));
             cv::Mat clrMat(roi.size(), CV_8UC3, scalarWhite);
             double alpha = 0.5;
             cv::addWeighted(clrMat, alpha, roi, 1.0 - alpha, 0.0, roi);
             // cv::rectangle(mat, lt2, rb,cv::Scalar(255, 255, 255), -1);
             ft2->putText(mat, str, lt, fs, scalarRed, -1, cv::LINE_AA, false);
             // DrawOutlineText(ft2, mat, str, cv::Point(0, mat.rows - fs - 20 * ratio), fs, scalarWhite, 1);
         }
+    }
 // #endif // OUTPUT_CAMERA_DBG_INFO

     for (vector<OSD_INFO>::const_iterator it = mOsds.cbegin(); it != mOsds.cend(); ++it)
@@ -1722,6 +1720,7 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat)
     bool shouldRetry = false;

 #if 0
     if (captureResult.avgY > MAX_LIGHT_Y || captureResult.avgY < MIN_LIGHT_Y)
     {
         if (mPhotoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
@@ -1790,20 +1789,72 @@
 bool CPhoneDevice::OnVideoReady(bool photoOrVideo, bool result, const char* path, unsigned int photoId)
 {
-    mPhotoInfo.photoTime = time(NULL);
-    CPhoneCamera* pCamera = NULL;
-    std::vector<IDevice::RECOG_OBJECT> objs;
-    std::string fullPath = mPath + CTerminal::BuildPhotoFileName(mPhotoInfo);
-    if (result)
-    {
-        std::rename(path, fullPath.c_str());
-    }
-    TakePhotoCb(result, mPhotoInfo, fullPath, time(NULL), objs);
-
-    bool turnOffOtg = (mPhotoInfo.usbCamera != 0);
-    std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg);
-    m_threadClose.swap(closeThread);
+    if (photoOrVideo)
+    {
+        AImageDecoder* decoder = NULL;
+        int fd = open(path, O_RDONLY);
+        if (fd == -1)
+        {
+        }
+        int result = AImageDecoder_createFromFd(fd, &decoder);
+        if (result != ANDROID_IMAGE_DECODER_SUCCESS)
+        {
+        }
+        auto decoder_cleanup = [&decoder] () {
+            AImageDecoder_delete(decoder);
+        };
+
+        const AImageDecoderHeaderInfo* header_info = AImageDecoder_getHeaderInfo(decoder);
+        int bitmap_format = AImageDecoderHeaderInfo_getAndroidBitmapFormat(header_info);
+        // This is just for example. I don't want to handle other cases in this
+        // example, but that should be easy enough to do.
+        if (bitmap_format != ANDROID_BITMAP_FORMAT_RGBA_8888)
+        {
+            decoder_cleanup();
+        }
+
+        constexpr int kChannels = 4;
+        int width = AImageDecoderHeaderInfo_getWidth(header_info);
+        int height = AImageDecoderHeaderInfo_getHeight(header_info);
+        size_t stride = AImageDecoder_getMinimumStride(decoder);
+        size_t size = stride * height;
+        std::vector<uint8_t> pixels;
+        pixels.resize(size, 0);
+
+        int decode_result = AImageDecoder_decodeImage(decoder, &pixels[0], stride, size);
+        if (decode_result != ANDROID_IMAGE_DECODER_SUCCESS)
+        {
+            decoder_cleanup();
+            return false;
+        }
+
+        cv::Mat mat(height, width, CV_8UC4, (void*)&pixels[0]);
+        OnImageReady(mat);
+
+        decoder_cleanup();
+        close(fd);
+    }
+    else
+    {
+        mPhotoInfo.photoTime = time(NULL);
+        CPhoneCamera* pCamera = NULL;
+        std::vector<IDevice::RECOG_OBJECT> objs;
+        std::string fullPath = mPath + CTerminal::BuildPhotoFileName(mPhotoInfo);
+        if (result)
+        {
+            std::rename(path, fullPath.c_str());
+        }
+        TakePhotoCb(result, mPhotoInfo, fullPath, time(NULL), objs);
+
+        bool turnOffOtg = (mPhotoInfo.usbCamera != 0);
+        std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg);
+        m_threadClose.swap(closeThread);
+    }
     return result;
 }
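The new photoOrVideo branch in OnVideoReady above decodes the image file written by RawActivity with the NDK AImageDecoder API (available from API level 30, which matches the COMPILE_MIN_SDK_VERSION bump at the end of this commit) and feeds the pixels back into OnImageReady. The committed code leaves the open() and AImageDecoder_createFromFd() failure branches empty; below is a minimal sketch of the same decode step with the error paths closed out. The helper name DecodeImageFile and the RGBA-to-BGR conversion are illustrative assumptions, not part of the commit:

    // Sketch, not the committed implementation: decode an image file into a cv::Mat,
    // releasing the decoder and file descriptor on every exit path.
    #include <android/imagedecoder.h>
    #include <android/bitmap.h>
    #include <fcntl.h>
    #include <unistd.h>
    #include <vector>
    #include <opencv2/core.hpp>
    #include <opencv2/imgproc.hpp>

    static bool DecodeImageFile(const char* path, cv::Mat& outBgr)
    {
        int fd = open(path, O_RDONLY);
        if (fd == -1)
        {
            return false;                     // report failure instead of falling through
        }
        AImageDecoder* decoder = NULL;
        if (AImageDecoder_createFromFd(fd, &decoder) != ANDROID_IMAGE_DECODER_SUCCESS)
        {
            close(fd);
            return false;
        }
        const AImageDecoderHeaderInfo* info = AImageDecoder_getHeaderInfo(decoder);
        bool ok = false;
        if (AImageDecoderHeaderInfo_getAndroidBitmapFormat(info) == ANDROID_BITMAP_FORMAT_RGBA_8888)
        {
            int32_t width = AImageDecoderHeaderInfo_getWidth(info);
            int32_t height = AImageDecoderHeaderInfo_getHeight(info);
            size_t stride = AImageDecoder_getMinimumStride(decoder);
            std::vector<uint8_t> pixels(stride * (size_t)height);
            if (AImageDecoder_decodeImage(decoder, pixels.data(), stride, pixels.size()) == ANDROID_IMAGE_DECODER_SUCCESS)
            {
                // Wrap the RGBA buffer, then convert so the result owns its own memory.
                // Converting to 3-channel BGR is an assumption about what the OSD code
                // in OnImageReady() expects; keep CV_8UC4 if the caller wants RGBA.
                cv::Mat rgba((int)height, (int)width, CV_8UC4, pixels.data(), stride);
                cv::cvtColor(rgba, outBgr, cv::COLOR_RGBA2BGR);
                ok = true;
            }
        }
        AImageDecoder_delete(decoder);
        close(fd);
        return ok;
    }

OnVideoReady() could then call this helper, pass the resulting Mat to OnImageReady() on success, and report a failed capture through TakePhotoCb() when it returns false.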

@@ -229,7 +229,7 @@ public class BridgeActivity extends AppCompatActivity {
         }
         Intent recordingIntent = MicroPhotoService.makeRecordingIntent(getApplicationContext(),
-                cameraId, videoId, duration, width, height, quality, orientation, false,
+                cameraId, videoId, duration, width, height, quality, orientation,
                 leftTopOsd, rightTopOsd, rightBottomOsd, leftBottomOsd);

         mVideoFilePath = path;

@@ -327,7 +327,7 @@ public class BridgeProvider extends ContentProvider {
         Context context = getContext();
         Intent recordingIntent = MicroPhotoService.makeRecordingIntent(context,
-                cameraId, videoId, duration, width, height, quality, orientation, false,
+                cameraId, videoId, duration, width, height, quality, orientation,
                 leftTopOsd, rightTopOsd, rightBottomOsd, leftBottomOsd);

         recordingIntent.putExtra("ActivityResult", false);

@@ -223,9 +223,9 @@ public class MainActivity extends AppCompatActivity {
         binding.logs.setText("");
         MicroPhotoContext.AppConfig curAppConfig = retrieveAndSaveAppConfig();
-        TakeAndThrowPhoto(2, 0xFF);
+        // TakeAndThrowPhoto(2, 0xFF);
         try {
-            Thread.sleep(20);
+            // Thread.sleep(20);
         } catch (Exception ex) {
             ex.printStackTrace();
         }

@@ -55,6 +55,7 @@ import com.xypower.common.MicroPhotoContext;
 import com.xypower.mpapp.adb.CameraAdb;
 import com.xypower.mpapp.utils.DeviceUtil;
 import com.xypower.mpapp.v2.Camera2VideoActivity;
+import com.xypower.mpapp.video.RawActivity;

 import java.io.File;
 import java.lang.reflect.Method;
@@ -512,8 +513,8 @@ public class MicroPhotoService extends Service {
     public void startRecording(boolean photoOrVideo, int cameraId, long videoId, int duration, int width, int height, int quality, int orientation, String leftTopOsd, String rightTopOsd, String rightBottomOsd, String leftBottomOsd) {
         Context context = getApplicationContext();
         // Intent intent = new Intent(this, VideoActivity.class);
-        Intent intent = makeRecordingIntent(context, cameraId, videoId, duration, width, height, quality, orientation,
+        Intent intent = makeRecordingIntent(context, photoOrVideo, cameraId, videoId, duration, width, height, quality, orientation,
                 leftTopOsd, rightTopOsd, rightBottomOsd, leftBottomOsd);

         intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
@@ -521,9 +522,9 @@ public class MicroPhotoService extends Service {
         context.startActivity(intent);
     }

-    public static Intent makeRecordingIntent(Context context, int cameraId, long videoId, int duration, int width, int height, int quality, int orientation, String leftTopOsd, String rightTopOsd, String rightBottomOsd, String leftBottomOsd) {
+    public static Intent makeRecordingIntent(Context context, boolean photoOrVideo, int cameraId, long videoId, int duration, int width, int height, int quality, int orientation, String leftTopOsd, String rightTopOsd, String rightBottomOsd, String leftBottomOsd) {
         // Intent intent = new Intent(this, VideoActivity.class);
-        Intent intent = new Intent(context, Camera2VideoActivity.class);
+        Intent intent = photoOrVideo ? new Intent(context, RawActivity.class) : new Intent(context, Camera2VideoActivity.class);
         intent.putExtra("cameraId", cameraId);
         intent.putExtra("videoId", videoId);

@@ -1,7 +1,11 @@
 package com.xypower.mpapp.video;

 import androidx.appcompat.app.AppCompatActivity;
+import androidx.localbroadcastmanager.content.LocalBroadcastManager;

+import android.content.Intent;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
 import android.os.Looper;
 import android.os.Message;
 import android.os.SystemClock;
@@ -48,6 +52,7 @@ import android.view.Surface;
 import android.view.TextureView;
 import android.widget.Toast;

+import com.xypower.mpapp.MicroPhotoService;
 import com.xypower.mpapp.R;

 import java.io.File;
@@ -71,6 +76,9 @@ import java.util.concurrent.atomic.AtomicInteger;
 public class RawActivity extends AppCompatActivity {

+    public static final String ACTION_FINISH = "com.xypower.mvapp.ACT_FINISH";
+    public static final String ACTION_MP_VIDEO_FINISHED = "com.xypower.mpapp.ACT_V_FINISHED";
+
     /**
      * Conversion from screen rotation to JPEG orientation.
      */
@@ -219,6 +227,14 @@ public class RawActivity extends AppCompatActivity {
      */
     private String mCameraId;

+    private boolean mActivityResult = false;
+
+    private long mPhotoId = 0;
+
+    private int mOrientation = -1;
+
     /**
      * A {@link CameraCaptureSession } for camera preview.
      */
@@ -234,6 +250,8 @@ public class RawActivity extends AppCompatActivity {
      */
     private Size mPreviewSize;

+    private Size mImageSize;
+
     /**
      * The {@link CameraCharacteristics} for the currently configured camera device.
      */
@@ -339,6 +357,7 @@ public class RawActivity extends AppCompatActivity {
             cameraDevice.close();
             mCameraDevice = null;
         }
+        broadcastPhotoFile(false, "");
         finish();
     }
@@ -559,13 +578,17 @@
         }
     };

-/*
-        mCameraId = intent.getIntExtra("cameraId", 0);
-        mDuration = intent.getIntExtra("duration", 0);
-        mVideoWidth = intent.getIntExtra("width", 0);
-        mVideoHeight = intent.getIntExtra("height", 0);
-        mOrientation = intent.getIntExtra("orientation", -1);
-*/
+        Intent intent = getIntent();
+        mCameraId = Integer.toString(intent.getIntExtra("cameraId", 0));
+        mOrientation = intent.getIntExtra("orientation", -1);
+
+        int width = intent.getIntExtra("width", 0);
+        int height = intent.getIntExtra("height", 0);
+        mImageSize = new Size(width, height);
+        mPhotoId = intent.getLongExtra("videoId", 0);

         mMessageHandler.postDelayed(new Runnable() {
             @Override
@@ -1197,6 +1220,8 @@
          */
         private final File mFile;

+        private final long mPhotoId;
+
         /**
          * The CaptureResult for this image capture.
          */
@@ -1212,25 +1237,43 @@
          */
         private final Context mContext;

+        private boolean mResult = false;
+
+        private int mFormat;
+
         /**
          * A reference counted wrapper for the ImageReader that owns the given image.
          */
         private final RefCountedAutoCloseable<ImageReader> mReader;

-        private ImageSaver(Image image, File file, CaptureResult result,
+        private ImageSaver(Image image, File file, CaptureResult result, long photoId,
                            CameraCharacteristics characteristics, Context context,
                            RefCountedAutoCloseable<ImageReader> reader) {
             mImage = image;
             mFile = file;
+            mPhotoId = photoId;
             mCaptureResult = result;
             mCharacteristics = characteristics;
             mContext = context;
             mReader = reader;
+            mFormat = mImage.getFormat();
+        }
+
+        public String getPath() {
+            return mFile.getAbsolutePath();
+        }
+
+        public boolean getResult() {
+            return mResult;
+        }
+
+        public int getFormat() {
+            return mFormat;
         }

         @Override
         public void run() {
-            boolean success = false;
+            mResult = false;
             int format = mImage.getFormat();
             switch (format) {
                 case ImageFormat.JPEG: {
@@ -1241,7 +1284,7 @@ public class RawActivity extends AppCompatActivity {
                     try {
                         output = new FileOutputStream(mFile);
                         output.write(bytes);
-                        success = true;
+                        mResult = true;
                     } catch (IOException e) {
                         e.printStackTrace();
                     } finally {
@@ -1256,7 +1299,8 @@ public class RawActivity extends AppCompatActivity {
                     try {
                         output = new FileOutputStream(mFile);
                         dngCreator.writeImage(output, mImage);
-                        success = true;
+
+                        mResult = true;
                     } catch (IOException e) {
                         e.printStackTrace();
                     } finally {
@@ -1275,21 +1319,7 @@ public class RawActivity extends AppCompatActivity {
             mReader.close();

             // If saving the file succeeded, update MediaStore.
-            if (success) {
-                MediaScannerConnection.scanFile(mContext, new String[]{mFile.getPath()},
-                        /*mimeTypes*/null, new MediaScannerConnection.MediaScannerConnectionClient() {
-                            @Override
-                            public void onMediaScannerConnected() {
-                                // Do nothing
-                            }
-
-                            @Override
-                            public void onScanCompleted(String path, Uri uri) {
-                                Log.i(TAG, "Scanned " + path + ":");
-                                Log.i(TAG, "-> uri=" + uri);
-                            }
-                        });
-            }
         }

         /**
@@ -1300,6 +1330,7 @@ public class RawActivity extends AppCompatActivity {
     public static class ImageSaverBuilder {
         private Image mImage;
         private File mFile;
+        private long mImageId;
         private CaptureResult mCaptureResult;
         private CameraCharacteristics mCharacteristics;
         private Context mContext;
@@ -1329,6 +1360,11 @@ public class RawActivity extends AppCompatActivity {
             return this;
         }

+        public synchronized ImageSaverBuilder setImageId(final long imageId) {
+            mImageId = imageId;
+            return this;
+        }
+
         public synchronized ImageSaverBuilder setFile(final File file) {
             if (file == null) throw new NullPointerException();
             mFile = file;
@@ -1352,7 +1388,7 @@ public class RawActivity extends AppCompatActivity {
             if (!isComplete()) {
                 return null;
             }
-            return new ImageSaver(mImage, mFile, mCaptureResult, mCharacteristics, mContext,
+            return new ImageSaver(mImage, mFile, mCaptureResult, mImageId, mCharacteristics, mContext,
                     mReader);
         }
@@ -1590,10 +1626,25 @@ public class RawActivity extends AppCompatActivity {
     private void handleCompletionLocked(int requestId, ImageSaver.ImageSaverBuilder builder,
                                         TreeMap<Integer, ImageSaver.ImageSaverBuilder> queue) {
         if (builder == null) return;
-        ImageSaver saver = builder.buildIfComplete();
+        final ImageSaver saver = builder.buildIfComplete();
         if (saver != null) {
             queue.remove(requestId);
-            AsyncTask.THREAD_POOL_EXECUTOR.execute(saver);
+            AsyncTask.THREAD_POOL_EXECUTOR.execute(new Runnable() {
+                @Override
+                public void run() {
+                    saver.run();
+
+                    if (saver.getFormat() == ImageFormat.RAW_SENSOR) {
+                        runOnUiThread(new Runnable() {
+                            @Override
+                            public void run() {
+                                broadcastPhotoFile(saver.getResult(), saver.getPath());
+                                finish();
+                            }
+                        });
+                    }
+                }
+            });
         }
     }
@@ -1629,5 +1680,51 @@ public class RawActivity extends AppCompatActivity {
         return (SystemClock.elapsedRealtime() - mCaptureTimer) > PRECAPTURE_TIMEOUT_MS;
     }

+    private void broadcastPhotoFile(boolean result, String path) {
+        Context context = getApplicationContext();
+        String receiverName = MicroPhotoService.AlarmReceiver.class.getName();
+        String packageName = context.getPackageName();
+
+        Intent intent = new Intent(ACTION_MP_VIDEO_FINISHED);
+        // intent.setPackage(packageName);
+        intent.putExtra("photoOrVideo", true);
+        intent.putExtra("result", result);
+        intent.putExtra("path", path);
+        intent.putExtra("videoId", mPhotoId);
+
+        // intent.setComponent(new ComponentName(packageName, receiverName));
+        if (mActivityResult) {
+            setResult(RESULT_OK, intent);
+        } else {
+            // Log.i(TAG, "Notify recording videoId=" + Long.toString(mVideoId) + " " + path);
+            LocalBroadcastManager localBroadcastManager = LocalBroadcastManager.getInstance(getApplicationContext());
+            localBroadcastManager.sendBroadcast(intent);
+            context.sendBroadcast(intent);
+        }
+    }
+
+    private static void broadcastPhotoFile(Context context, boolean result, String path, long photoId) {
+        String receiverName = MicroPhotoService.AlarmReceiver.class.getName();
+        String packageName = context.getPackageName();
+
+        Intent intent = new Intent(ACTION_MP_VIDEO_FINISHED);
+        // intent.setPackage(packageName);
+        intent.putExtra("photoOrVideo", true);
+        intent.putExtra("result", result);
+        intent.putExtra("path", path);
+        intent.putExtra("videoId", photoId);
+
+        // intent.setComponent(new ComponentName(packageName, receiverName));
+        // Log.i(TAG, "Notify recording videoId=" + Long.toString(mVideoId) + " " + path);
+        LocalBroadcastManager localBroadcastManager = LocalBroadcastManager.getInstance(context);
+        localBroadcastManager.sendBroadcast(intent);
+        context.sendBroadcast(intent);
+    }
 }

@ -20,8 +20,8 @@ android.enableJetifier=true
BUILD_TOOLS_VERSION=33.0.3 BUILD_TOOLS_VERSION=33.0.3
COMPILE_SDK_VERSION=33 COMPILE_SDK_VERSION=33
TARGET_SDK_VERSION=28 TARGET_SDK_VERSION=30
COMPILE_MIN_SDK_VERSION=25 COMPILE_MIN_SDK_VERSION=30
opencvsdk=D:/Workspace/deps/opencv-mobile-4.9.0-android opencvsdk=D:/Workspace/deps/opencv-mobile-4.9.0-android
# opencvsdk=D:/Workspace/deps/opencv-mobile-3.4.20-android # opencvsdk=D:/Workspace/deps/opencv-mobile-3.4.20-android
