diff --git a/app/build.gradle b/app/build.gradle
index 133b5ba6..ffb698ef 100644
--- a/app/build.gradle
+++ b/app/build.gradle
@@ -4,8 +4,8 @@ plugins {
// versionCode layout: major * 100000 + minor * 1000 + build
def AppMajorVersion = 1
-def AppMinorVersion = 0
-def AppBuildNumber = 190
+def AppMinorVersion = 1
+def AppBuildNumber = 1
def AppVersionName = AppMajorVersion + "." + AppMinorVersion + "." + AppBuildNumber
def AppVersionCode = AppMajorVersion * 100000 + AppMinorVersion * 1000 + AppBuildNumber
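// e.g. major=1, minor=1, build=1 -> versionName "1.1.1", versionCode 101001 (assumes minor < 100, build < 1000)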
@@ -26,7 +26,7 @@ android {
applicationId "com.xypower.mpapp"
minSdk COMPILE_MIN_SDK_VERSION as int
//noinspection ExpiredTargetSdkVersion
- targetSdk 28
+ targetSdk TARGET_SDK_VERSION as int
versionCode AppVersionCode
versionName AppVersionName
@@ -39,8 +39,8 @@ android {
cppFlags '-std=c++17 -fexceptions -Wno-error=format-security'
// cppFlags '-std=c++17 -Wno-error=format-security'
// arguments "-DANDROID_STL=c++_shared"
- arguments "-DNCNN_DISABLE_EXCEPTION=OFF", "-DTERM_CORE_ROOT=" + coreroot, "-DOpenCV_DIR=" + opencvsdk + "/sdk/native/jni", "-DASIO_ROOT=" + asioroot, "-DEVPP_ROOT=" + evpproot, "-DNCNN_ROOT=" + ncnnroot
- abiFilters 'arm64-v8a'
+ arguments "-DNCNN_DISABLE_EXCEPTION=OFF", "-DTERM_CORE_ROOT=" + coreroot, "-DOpenCV_DIR=" + opencvsdk + "/sdk/native/jni", "-DHDRPLUS_ROOT=" + hdrplusroot, "-DNCNN_ROOT=" + ncnnroot
+ abiFilters 'arm64-v8a', 'armeabi-v7a'
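+ // 32-bit ARM is built alongside 64-bit so the ABI splits configured below each get native libs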
// setAbiFilters(['arm64-v8a'])
}
}
@@ -52,6 +52,7 @@ android {
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
debug {
+ minifyEnabled false
jniDebuggable true
testCoverageEnabled false
}
@@ -74,6 +75,17 @@ android {
}
}
+ splits {
+ boolean isReleaseTask = gradle.startParameter.taskNames.any { it.contains("Release") }
+ // only enable per-ABI splits for release builds
+ abi {
+ enable isReleaseTask
+ reset()
+ include "armeabi-v7a", "arm64-v8a"
+ universalApk false
+ }
+ }
+
android.applicationVariants.all { variant ->
variant.outputs.all { output ->
if (outputFileName.endsWith('.apk')) {
@@ -81,7 +93,9 @@ android {
if(variant.buildType.name.equals('release')) {
buildTypeFlag = "rel"
}
- def fileName = "mpapp_v${defaultConfig.versionName}_${buildTypeFlag}_${new Date(System.currentTimeMillis()).format("yyyyMMdd")}.apk"
+ def abi = output.getFilter(com.android.build.OutputFile.ABI)
+ if (abi == null) abi = "all"
+ def fileName = "mpapp_v${defaultConfig.versionName}_${buildTypeFlag}_${new Date(System.currentTimeMillis()).format("yyyyMMdd")}_${abi}.apk"
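+ // e.g. mpapp_v1.1.1_rel_20240315_arm64-v8a.apk (abi falls back to "all" when splits are off)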
outputFileName = fileName
}
}
@@ -97,6 +111,13 @@ android {
exclude 'META-INF/INDEX.LIST'
exclude 'META-INF/io.netty.versions.properties'
exclude 'META-INF/DEPENDENCIES'
+ exclude 'META-INF/LICENSE-notice.md'
+ exclude 'META-INF/LICENSE.md'
+
+ jniLibs {
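+ // legacy packaging keeps .so files compressed in the APK and extracts them at install time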
+ useLegacyPackaging true
+ }
+
}
}
@@ -119,9 +140,13 @@ dependencies {
// implementation 'com.tencent:mmkv-static:1.3.0'
// implementation project(path: ':opencv')
implementation files('libs/devapi.aar')
- debugImplementation files('libs/rtmp-client-debug.aar')
- releaseImplementation files('libs/rtmp-client.aar')
- implementation project(':gpuv')
+ // debugImplementation files('libs/rtmp-client-debug.aar')
+ implementation files('libs/android-openGL-canvas-1.5.4.0.aar')
+ implementation files('libs/rtmp-client.aar')
+ api project(':gpuv')
+ implementation project(':stream')
+
+ implementation 'dev.mobile:dadb:1.2.7'
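+ // dadb is a JVM ADB client: lets the app talk to adbd directly, without the adb binary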
// implementation group: 'io.netty', name: 'netty-all', version: '4.1.96.Final'
// implementation 'io.netty:netty-all:4.1.23.Final'
diff --git a/app/libs/android-openGL-canvas-1.5.4.0.aar b/app/libs/android-openGL-canvas-1.5.4.0.aar
new file mode 100644
index 00000000..ee4506b7
Binary files /dev/null and b/app/libs/android-openGL-canvas-1.5.4.0.aar differ
diff --git a/app/src/main/AndroidManifest.xml b/app/src/main/AndroidManifest.xml
index 5355f78d..c4df279f 100644
--- a/app/src/main/AndroidManifest.xml
+++ b/app/src/main/AndroidManifest.xml
@@ -72,6 +72,15 @@
+ <!-- NOTE: the XML element markup in this hunk was lost in extraction; the surviving
+ attributes indicate a newly added exported provider (android:exported="true",
+ android:grantUriPermissions="true") and an exported landscape component that
+ runs in a separate android:process=":bridge_proc". -->
\ No newline at end of file
diff --git a/app/src/main/cpp/CMakeLists.txt b/app/src/main/cpp/CMakeLists.txt
index e6913cd3..d9aaec2a 100644
--- a/app/src/main/cpp/CMakeLists.txt
+++ b/app/src/main/cpp/CMakeLists.txt
@@ -28,15 +28,21 @@ add_definitions(-DASIO_STANDALONE)
add_definitions(-DUSING_XY_EXTENSION)
# add_definitions(-DUSING_BREAK_PAD)
add_definitions(-DSQLITE_THREADSAFE=1)
+add_definitions(-DLIBRAW_NO_MEMPOOL_CHECK=1)
+# add_definitions(-DHDRPLUS_NO_DETAILED_OUTPUT=1)
add_definitions(-DHAVE_STRING_H) # for memcpy in md5.c
add_definitions(-DUSING_NRSEC)
+add_definitions(-DUSING_NRSEC_VPN)
+# add_definitions(-DUSING_CERT)
# add_definitions(-DUSING_DOWSE)
# OUTPUT_CAMERA_DBG_INFO: print CAMERA-related debug info onto photos
-add_definitions(-DOUTPUT_CAMERA_DBG_INFO)
+# add_definitions(-DOUTPUT_CAMERA_DBG_INFO)
add_definitions(-DALIGN_HB_TIMER_TO_PHOTO)
+add_definitions(-DENABLE_3V3_ALWAYS)
-# set(OpenCV_DIR D:/Workspace/deps/OpenCV-android-sdk/sdk/native/jni/)
-set(OPENCV_EXTRA_MODULES_PATH D:/Workspace/Github/opencv_contrib/modules)
+add_definitions(-DUSING_HDRPLUS)
+
+add_definitions(-DUSING_N938)
# include_directories(${OpenCV_DIR}/include)
# add_library( lib_opencv SHARED IMPORTED )
@@ -46,10 +52,7 @@ set(OPENCV_EXTRA_MODULES_PATH D:/Workspace/Github/opencv_contrib/modules)
project("microphoto")
-# message(FATAL_ERROR "OpenCV ${OpenCV_DIR}")
-
-
-find_package(OpenCV REQUIRED core imgproc highgui)
+find_package(OpenCV REQUIRED core imgproc highgui photo)
# find_package(OpenCV REQUIRED core imgproc)
if(OpenCV_FOUND)
include_directories(${OpenCV_INCLUDE_DIRS})
@@ -69,7 +72,67 @@ endif(OpenCV_FOUND)
set(ncnn_DIR ${NCNN_ROOT}/${ANDROID_ABI}/lib/cmake/ncnn)
find_package(ncnn REQUIRED)
-# include(mars/src/CMakeUtils.txt)
+
+include_directories(${CMAKE_CURRENT_SOURCE_DIR}/libcutils/include)
+include_directories(${CMAKE_CURRENT_SOURCE_DIR}/libutils/include)
+include_directories(${CMAKE_CURRENT_SOURCE_DIR}/img_utils/include)
+
+SET( IMG_UTILS_SRCS
+ "img_utils/src/EndianUtils.cpp"
+ #"img_utils/src/FileInput.cpp"
+ #"img_utils/src/FileOutput.cpp"
+ #"img_utils/src/SortedEntryVector.cpp"
+ "img_utils/src/Input.cpp"
+ "img_utils/src/Output.cpp"
+ "img_utils/src/Orderable.cpp"
+ "img_utils/src/TiffIfd.cpp"
+ "img_utils/src/TiffWritable.cpp"
+ "img_utils/src/TiffWriter.cpp"
+ "img_utils/src/TiffEntry.cpp"
+ "img_utils/src/TiffEntryImpl.cpp"
+ "img_utils/src/ByteArrayOutput.cpp"
+ "img_utils/src/DngUtils.cpp"
+ "img_utils/src/StripSource.cpp"
+
+ libutils/SharedBuffer.cpp
+ libutils/StrongPointer.cpp
+
+ DngCreator.cpp
+ )
+
+message(WARNING "include_directories ${HDRPLUS_ROOT}/${ANDROID_ABI}/include")
+
+include_directories(${HDRPLUS_ROOT}/${ANDROID_ABI}/include)
+link_directories(${HDRPLUS_ROOT}/${ANDROID_ABI}/lib)
+
+# message(WARNING "exiv2_DIR=${HDRPLUS_ROOT}/${ANDROID_ABI}/lib/cmake/exiv2")
+# SET(exiv2_DIR ${HDRPLUS_ROOT}/${ANDROID_ABI}/lib/cmake/exiv2)
+# list(APPEND CMAKE_PREFIX_PATH ${HDRPLUS_ROOT}/${ANDROID_ABI}/lib/cmake/exiv2)
+
+# find_package(exiv2 REQUIRED CONFIG NAMES exiv2)
+# message(STATUS "Found Exiv2 and linked")
+
+# OpenMP
+find_package(OpenMP REQUIRED)
+
+
+# library
+include_directories( ${CMAKE_CURRENT_SOURCE_DIR}/hdrplus/include )
+
+
+SET(HDRPLUS_LIBS raw exiv2 exiv2-xmp expat lcms2 OpenMP::OpenMP_CXX)
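+# raw/exiv2/exiv2-xmp/expat/lcms2 are prebuilt per-ABI libraries resolved via link_directories() above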
+
+SET(HDRPLUS_SOURCES
+
+ hdrplus/src/align.cpp
+ hdrplus/src/bayer_image.cpp
+ hdrplus/src/burst.cpp
+ hdrplus/src/finish.cpp
+ hdrplus/src/hdrplus_pipeline.cpp
+ hdrplus/src/merge.cpp
+ hdrplus/src/params.cpp
+
+ )
SET(YAMC_INC_DIR ${CMAKE_SOURCE_DIR})
@@ -236,31 +299,6 @@ include_directories(${FREETYPE_ROOT}/include)
include_directories(${TERM_CORE_ROOT})
# include_directories(${PROJECT_SOURCE_DIR}/../../../../../libs/inc/)
-#[[
-add_library( # Sets the name of the library.
- evpp_lite
-
- # Sets the library as a shared library.
- STATIC
-
- # Provides a relative path to your source file(s).
- ${EVPP_SOURCES} )
-]]
-
-IF (CMAKE_HOST_SYSTEM_NAME MATCHES "Windows")
- # set(BOOST_ROOT C:/ProgramData/boost_1_82_0/)
- # set(BOOST_INCLUDEDIR C:/ProgramData/boost_1_82_0/)
- include_directories(C:/ProgramData/boost_1_82_0/)
-ELSE()
- # find_package(Boost 1.58.0 COMPONENTS)
- find_package(Boost 1.58.0)
- if(Boost_FOUND)
- include_directories(${Boost_INCLUDE_DIRS})
- else()
- message(FATAL_ERROR "Boost Not Found")
- endif()
-endif()
-
# Creates and names a library, sets it as either STATIC
# or SHARED, and provides the relative paths to its source code.
# You can define multiple libraries, and CMake builds them for you.
@@ -304,7 +342,7 @@ add_library( # Sets the name of the library.
SerialPort.cpp
#WeatherComm.cpp
SensorsProtocol.cpp
- serialComm.cpp
+ SerialComm.cpp
ncnn/yolov5ncnn.cpp
@@ -312,8 +350,11 @@ add_library( # Sets the name of the library.
# camera2/OpenCVFont.cpp
+ ${HDRPLUS_SOURCES}
${CAMERA2_SOURCES}
+ ${IMG_UTILS_SRCS}
+
${TERM_CORE_ROOT}/Factory.cpp
${TERM_CORE_ROOT}/FilePoster.cpp
${TERM_CORE_ROOT}/LogThread.cpp
@@ -350,6 +391,7 @@ add_library( # Sets the name of the library.
${TERM_CORE_ROOT}/Client/Terminal_NW.cpp
${TERM_CORE_ROOT}/Client/UpgradeReceiver.cpp
${TERM_CORE_ROOT}/Client/Database.cpp
+ ${TERM_CORE_ROOT}/Client/SimulatorDevice.cpp
)
@@ -370,9 +412,8 @@ find_library( # Sets the name of the path variable.
# can link multiple libraries, such as libraries you define in this
# build script, prebuilt third-party libraries, or system libraries.
-
target_link_libraries( # Specifies the target library.
- microphoto
+ ${PROJECT_NAME}
jsoncpp
@@ -385,7 +426,7 @@ target_link_libraries( # Specifies the target library.
android camera2ndk mediandk z
- ncnn ${OpenCV_LIBS} sqlite3
+ ncnn ${OpenCV_LIBS} sqlite3 ${HDRPLUS_LIBS}
)
diff --git a/app/src/main/cpp/CvText.cpp b/app/src/main/cpp/CvText.cpp
index 01dbcdd5..39be43a9 100644
--- a/app/src/main/cpp/CvText.cpp
+++ b/app/src/main/cpp/CvText.cpp
@@ -354,7 +354,18 @@ namespace cv {
mvFn(NULL, (void*)userData);
// Update current position ( in FreeType coordinates )
+#if defined(USING_HB)
currentPos.x += mFace->glyph->advance.x;
+#else
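+ // with USING_HB off, give the space glyph double advance (FreeType 26.6 fixed-point units here)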
+ if (wstr[i] == ' ')
+ {
+ currentPos.x += mFace->glyph->advance.x << 1;
+ }
+ else
+ {
+ currentPos.x += mFace->glyph->advance.x;
+ }
+#endif
currentPos.y += mFace->glyph->advance.y;
}
delete userData;
@@ -1036,7 +1047,19 @@ namespace cv {
(this->*putPixel)(dst, gPos.y + row, gPos.x + col, _colorUC8n, cl);
}
}
- _org.x += (mFace->glyph->advance.x) >> 6;
+
+#if defined(USING_HB)
+ _org.x += (mFace->glyph->advance.x) >> 6;
+#else
+ if (wstr[i] == ' ')
+ {
+ _org.x += ((mFace->glyph->advance.x) >> 6) << 1;
+ }
+ else
+ {
+ _org.x += (mFace->glyph->advance.x) >> 6;
+ }
+#endif
_org.y += (mFace->glyph->advance.y) >> 6;
}
@@ -1045,7 +1068,6 @@ namespace cv {
#endif
}
-
int FreeType2Impl::mvFn(const FT_Vector *to, void * user)
{
if (user == NULL) { return 1; }
diff --git a/app/src/main/cpp/DngCreator.cpp b/app/src/main/cpp/DngCreator.cpp
new file mode 100644
index 00000000..0c1727fb
--- /dev/null
+++ b/app/src/main/cpp/DngCreator.cpp
@@ -0,0 +1,2639 @@
+//#define LOG_NDEBUG 0
+#define LOG_TAG "DngCreator_JNI"
+// include list reconstructed (the bracketed header names were lost); chosen to cover what this file uses
+#include <inttypes.h>
+#include <string.h>
+#include <time.h>
+#include <sys/system_properties.h>
+#include <algorithm>
+#include <array>
+#include <cmath>
+#include <string>
+#include <vector>
+
+#include <utils/Errors.h>
+#include <utils/Log.h>
+#include <utils/RefBase.h>
+#include <utils/StrongPointer.h>
+#include <img_utils/DngUtils.h>
+#include <img_utils/TagDefinitions.h>
+#include <img_utils/TiffIfd.h>
+#include <img_utils/TiffWriter.h>
+#include <img_utils/StripSource.h>
+
+#include "DngCreator.h"
+
+// #include "core_jni_helpers.h"
+
+// #include "android_runtime/AndroidRuntime.h"
+// #include "android_runtime/android_hardware_camera2_CameraMetadata.h"
+
+#include <camera/NdkCameraMetadata.h>
+// #include
+
+using namespace android;
+using namespace img_utils;
+// using android::base::GetProperty;
+
+
+ByteVectorOutput::ByteVectorOutput(std::vector<uint8_t>& buf) : m_buf(buf)
+{
+}
+ByteVectorOutput::~ByteVectorOutput()
+{
+}
+status_t ByteVectorOutput::open()
+{
+ return OK;
+}
+status_t ByteVectorOutput::close()
+{
+ return OK;
+}
+
+status_t ByteVectorOutput::write(const uint8_t* buf, size_t offset, size_t count)
+{
+ m_buf.insert(m_buf.end(), buf + offset, buf + offset + count);
+ return OK;
+}
+
+ByteVectorInput::ByteVectorInput(const std::vector<uint8_t>& buf) : m_buf(buf), m_offset(0)
+{
+}
+
+ByteVectorInput::~ByteVectorInput()
+{
+}
+
+status_t ByteVectorInput::open()
+{
+ return OK;
+}
+ssize_t ByteVectorInput::read(uint8_t* buf, size_t offset, size_t count)
+{
+ if (m_buf.empty() || m_offset >= m_buf.size())
+ {
+ return NOT_ENOUGH_DATA;
+ }
+
+ size_t left = m_buf.size() - m_offset;
+ if (left >= count)
+ {
+ memcpy(buf + offset, &m_buf[m_offset], count);
+ m_offset += count;
+ return count;
+ }
+ else
+ {
+ memcpy(buf + offset, &m_buf[m_offset], left);
+ m_offset += left;
+ return left;
+ }
+}
+/**
+ * Skips bytes in the input.
+ *
+ * Returns the number of bytes skipped, or NOT_ENOUGH_DATA if at the end of the file. If an
+ * error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA.
+ */
+ssize_t ByteVectorInput::skip(size_t count)
+{
+ size_t left = m_buf.size() - m_offset;
+ if (left >= count)
+ {
+ m_offset += count;
+ return count;
+ }
+ else
+ {
+ m_offset += left;
+ return left;
+ }
+}
+
+/**
+ * Close the Input. It is not valid to call open on a previously closed Input.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+status_t ByteVectorInput::close()
+{
+ return OK;
+}
+
+
+ByteBufferInput::ByteBufferInput(const uint8_t* buf, size_t len) : m_buf(buf), m_len(len), m_offset(0)
+{
+}
+
+ByteBufferInput::~ByteBufferInput()
+{
+}
+
+status_t ByteBufferInput::open()
+{
+ return OK;
+}
+ssize_t ByteBufferInput::read(uint8_t* buf, size_t offset, size_t count)
+{
+ if (m_buf == NULL || m_offset >= m_len)
+ {
+ return NOT_ENOUGH_DATA;
+ }
+
+ size_t left = m_len - m_offset;
+ if (left >= count)
+ {
+ memcpy(buf + offset, m_buf + m_offset, count);
+ m_offset += count;
+ return count;
+ }
+ else
+ {
+ memcpy(buf + offset, m_buf + m_offset, left);
+ m_offset += left;
+ return left;
+ }
+}
+/**
+ * Skips bytes in the input.
+ *
+ * Returns the number of bytes skipped, or NOT_ENOUGH_DATA if at the end of the file. If an
+ * error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA.
+ */
+ssize_t ByteBufferInput::skip(size_t count)
+{
+ size_t left = m_len - m_offset;
+ if (left >= count)
+ {
+ m_offset += count;
+ return count;
+ }
+ else
+ {
+ m_offset += left;
+ return left;
+ }
+}
+
+status_t ByteBufferInput::close()
+{
+ return OK;
+}
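+
+// Usage sketch (hypothetical, assuming the AOSP img_utils TiffWriter API): these adapters
+// let a TiffWriter stream a DNG straight into a std::vector<uint8_t>:
+//   std::vector<uint8_t> dng;
+//   ByteVectorOutput out(dng);
+//   sp<TiffWriter> writer = creator->setup(width, height);
+//   writer->write(&out, sources, sourceCount);  // sources is a StripSource*[]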
+
+
+/**
+ * Convert a single YUV pixel to RGB.
+ */
+static void yuvToRgb(const uint8_t yuvData[3], int outOffset, /*out*/uint8_t rgbOut[3]) {
+ const int COLOR_MAX = 255;
+
+ float y = yuvData[0] & 0xFF; // Y channel
+ float cb = yuvData[1] & 0xFF; // U channel
+ float cr = yuvData[2] & 0xFF; // V channel
+
+ // convert YUV -> RGB (from JFIF's "Conversion to and from RGB" section)
+ float r = y + 1.402f * (cr - 128);
+ float g = y - 0.34414f * (cb - 128) - 0.71414f * (cr - 128);
+ float b = y + 1.772f * (cb - 128);
+
+ // clamp to [0,255]
+ rgbOut[outOffset] = (uint8_t) std::max(0, std::min(COLOR_MAX, (int)r));
+ rgbOut[outOffset + 1] = (uint8_t) std::max(0, std::min(COLOR_MAX, (int)g));
+ rgbOut[outOffset + 2] = (uint8_t) std::max(0, std::min(COLOR_MAX, (int)b));
+}
+
+/**
+ * Convert a single {@link Color} pixel to RGB.
+ */
+static void colorToRgb(int color, int outOffset, /*out*/uint8_t rgbOut[3]) {
+ rgbOut[outOffset] = (uint8_t)(color >> 16) & 0xFF;
+ rgbOut[outOffset + 1] = (uint8_t)(color >> 8) & 0xFF;
+ rgbOut[outOffset + 2] = (uint8_t) color & 0xFF;
+ // Discards Alpha
+}
+
+/**
+ * Generate a direct RGB {@link ByteBuffer} from a YUV420_888 {@link Image}.
+ */
+#if 0
+static ByteBuffer convertToRGB(Image yuvImage) {
+ // TODO: Optimize this with renderscript intrinsic.
+ int width = yuvImage.getWidth();
+ int height = yuvImage.getHeight();
+ ByteBuffer buf = ByteBuffer.allocateDirect(BYTES_PER_RGB_PIX * width * height);
+
+ Image.Plane yPlane = yuvImage.getPlanes()[0];
+ Image.Plane uPlane = yuvImage.getPlanes()[1];
+ Image.Plane vPlane = yuvImage.getPlanes()[2];
+
+ ByteBuffer yBuf = yPlane.getBuffer();
+ ByteBuffer uBuf = uPlane.getBuffer();
+ ByteBuffer vBuf = vPlane.getBuffer();
+
+ yBuf.rewind();
+ uBuf.rewind();
+ vBuf.rewind();
+
+ int yRowStride = yPlane.getRowStride();
+ int vRowStride = vPlane.getRowStride();
+ int uRowStride = uPlane.getRowStride();
+
+ int yPixStride = yPlane.getPixelStride();
+ int vPixStride = vPlane.getPixelStride();
+ int uPixStride = uPlane.getPixelStride();
+
+ byte[] yuvPixel = { 0, 0, 0 };
+ byte[] yFullRow = new byte[yPixStride * (width - 1) + 1];
+ byte[] uFullRow = new byte[uPixStride * (width / 2 - 1) + 1];
+ byte[] vFullRow = new byte[vPixStride * (width / 2 - 1) + 1];
+ byte[] finalRow = new byte[BYTES_PER_RGB_PIX * width];
+ for (int i = 0; i < height; i++) {
+ int halfH = i / 2;
+ yBuf.position(yRowStride * i);
+ yBuf.get(yFullRow);
+ uBuf.position(uRowStride * halfH);
+ uBuf.get(uFullRow);
+ vBuf.position(vRowStride * halfH);
+ vBuf.get(vFullRow);
+ for (int j = 0; j < width; j++) {
+ int halfW = j / 2;
+ yuvPixel[0] = yFullRow[yPixStride * j];
+ yuvPixel[1] = uFullRow[uPixStride * halfW];
+ yuvPixel[2] = vFullRow[vPixStride * halfW];
+ yuvToRgb(yuvPixel, j * BYTES_PER_RGB_PIX, /*out*/finalRow);
+ }
+ buf.put(finalRow);
+ }
+
+ yBuf.rewind();
+ uBuf.rewind();
+ vBuf.rewind();
+ buf.rewind();
+ return buf;
+ }
+#endif
+
+
+
+ DngCreator::DngCreator(ACameraMetadata* characteristics, ACameraMetadata* result) : NativeContext(characteristics, result)
+ {
+ // Find current time
+ time_t ts = time(NULL);
+
+ // Find boot time
+ // long bootTimeMillis = currentTime - SystemClock.elapsedRealtime();
+
+ // Find capture time (nanos since boot)
+#if 0
+ Long timestamp = metadata.get(CaptureResult.SENSOR_TIMESTAMP);
+ long captureTime = currentTime;
+ if (timestamp != null) {
+ captureTime = timestamp / 1000000 + bootTimeMillis;
+ }
+
+ // Format for metadata
+ String formattedCaptureTime = sDateTimeStampFormat.format(captureTime);
+#endif
+
+ std::string formattedCaptureTime;
+ init(characteristics, result, formattedCaptureTime);
+ }
+
+
+#if 0
+ void DngCreator::setLocation(Location location)
+ {
+ double latitude = location.getLatitude();
+ double longitude = location.getLongitude();
+ long time = location.getTime();
+
+ int[] latTag = toExifLatLong(latitude);
+ int[] longTag = toExifLatLong(longitude);
+ String latRef = latitude >= 0 ? GPS_LAT_REF_NORTH : GPS_LAT_REF_SOUTH;
+ String longRef = longitude >= 0 ? GPS_LONG_REF_EAST : GPS_LONG_REF_WEST;
+
+ String dateTag = sExifGPSDateStamp.format(time);
+ mGPSTimeStampCalendar.setTimeInMillis(time);
+ int[] timeTag = new int[] { mGPSTimeStampCalendar.get(Calendar.HOUR_OF_DAY), 1,
+ mGPSTimeStampCalendar.get(Calendar.MINUTE), 1,
+ mGPSTimeStampCalendar.get(Calendar.SECOND), 1 };
+ nativeSetGpsTags(latTag, latRef, longTag, longRef, dateTag, timeTag);
+ }
+#endif
+
+ void DngCreator::writeInputStream(std::vector<uint8_t>& dngOutput, SIZE size, const std::vector<uint8_t>& pixels, long offset)
+ {
+ int width = size.width;
+ int height = size.height;
+ if (width <= 0 || height <= 0) {
+#if 0
+ throw new IllegalArgumentException("Size with invalid width, height: (" + width + "," +
+ height + ") passed to writeInputStream");
+#endif
+ }
+ writeInputStream(dngOutput, pixels, width, height, offset);
+ }
+
+ void DngCreator::writeByteBuffer(std::vector<uint8_t>& dngOutput, SIZE size, const std::vector<uint8_t>& pixels, long offset)
+ {
+ int width = size.width;
+ int height = size.height;
+
+ writeByteBuffer(width, height, pixels, dngOutput, DEFAULT_PIXEL_STRIDE,
+ width * DEFAULT_PIXEL_STRIDE, offset);
+ }
+
+#if 0
+ void DngCreator::writeImage(OutputStream& dngOutput, AImage& pixels)
+ {
+ int format = pixels.getFormat();
+ if (format != ImageFormat.RAW_SENSOR) {
+
+ }
+
+ Image.Plane[] planes = pixels.getPlanes();
+ if (planes == null || planes.length <= 0) {
+
+ }
+
+ ByteBuffer buf = planes[0].getBuffer();
+ writeByteBuffer(pixels.getWidth(), pixels.getHeight(), buf, dngOutput,
+ planes[0].getPixelStride(), planes[0].getRowStride(), 0);
+ }
+#endif
+
+ void DngCreator::close() {
+
+ }
+
+ // private static final DateFormat sExifGPSDateStamp = new SimpleDateFormat(GPS_DATE_FORMAT_STR);
+ // private static final DateFormat sDateTimeStampFormat = new SimpleDateFormat(TIFF_DATETIME_FORMAT);
+#if 0
+ static {
+ sDateTimeStampFormat.setTimeZone(TimeZone.getDefault());
+ sExifGPSDateStamp.setTimeZone(TimeZone.getTimeZone("UTC"));
+ }
+#endif
+
+ /**
+ * Offset, rowStride, and pixelStride are given in bytes. Height and width are given in pixels.
+ */
+ void DngCreator::writeByteBuffer(int width, int height, const std::vector<uint8_t>& pixels, std::vector<uint8_t>& dngOutput, int pixelStride, int rowStride, long offset)
+ {
+ if (width <= 0 || height <= 0) {
+ }
+ long capacity = static_cast<long>(pixels.size()); // bytes actually available (size, not capacity)
+ long totalSize = ((long) rowStride) * height + offset;
+ if (capacity < totalSize) {
+#if 0
+ throw new IllegalArgumentException("Image size " + capacity +
+ " is too small (must be larger than " + totalSize + ")");
+#endif
+ }
+ int minRowStride = pixelStride * width;
+ if (minRowStride > rowStride) {
+#if 0
+ throw new IllegalArgumentException("Invalid image pixel stride, row byte width " +
+ minRowStride + " is too large, expecting " + rowStride);
+#endif
+ }
+ // pixels.clear(); // Reset mark and limit
+ writeImage(dngOutput, width, height, pixels, rowStride, pixelStride, offset, true);
+ // pixels.clear();
+ }
+
+
+ /**
+ * Generate a direct RGB {@link ByteBuffer} from a {@link Bitmap}.
+ */
+#if 0
+ static ByteBuffer DngCreator::convertToRGB(Bitmap argbBitmap) {
+ // TODO: Optimize this.
+ int width = argbBitmap.getWidth();
+ int height = argbBitmap.getHeight();
+ ByteBuffer buf = ByteBuffer.allocateDirect(BYTES_PER_RGB_PIX * width * height);
+
+ int[] pixelRow = new int[width];
+ byte[] finalRow = new byte[BYTES_PER_RGB_PIX * width];
+ for (int i = 0; i < height; i++) {
+ argbBitmap.getPixels(pixelRow, /*offset*/0, /*stride*/width, /*x*/0, /*y*/i,
+ /*width*/width, /*height*/1);
+ for (int j = 0; j < width; j++) {
+ colorToRgb(pixelRow[j], j * BYTES_PER_RGB_PIX, /*out*/finalRow);
+ }
+ buf.put(finalRow);
+ }
+
+ buf.rewind();
+ return buf;
+ }
+#endif
+
+ /**
+ * Convert coordinate to EXIF GPS tag format.
+ */
+ void DngCreator::toExifLatLong(double value, int data[6])
+ {
+ // convert to the format dd/1 mm/1 ssss/100
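+ // e.g. 12.3456 -> {12, 1, 20, 1, 4416, 100}, i.e. 12 deg 20 min 44.16 sec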
+ value = std::abs(value);
+ data[0] = (int) value;
+ data[1] = 1;
+ value = (value - data[0]) * 60;
+ data[2] = (int) value;
+ data[3] = 1;
+ value = (value - data[2]) * 6000;
+ data[4] = (int) value;
+ data[5] = 100;
+ }
+
+
+
+NativeContext::NativeContext(ACameraMetadata* characteristics, ACameraMetadata* result) :
+ mCharacteristics(characteristics), mResult(result), mThumbnailWidth(0),
+ mThumbnailHeight(0), mOrientation(TAG_ORIENTATION_UNKNOWN), mThumbnailSet(false),
+ mGpsSet(false), mDescriptionSet(false), mCaptureTimeSet(false) {}
+
+NativeContext::~NativeContext() {}
+
+TiffWriter* NativeContext::getWriter() {
+ return &mWriter;
+}
+
+ACameraMetadata* NativeContext::getCharacteristics() const {
+ return mCharacteristics;
+}
+
+ACameraMetadata* NativeContext::getResult() const {
+ return mResult;
+}
+
+uint32_t NativeContext::getThumbnailWidth() const {
+ return mThumbnailWidth;
+}
+
+uint32_t NativeContext::getThumbnailHeight() const {
+ return mThumbnailHeight;
+}
+
+const uint8_t* NativeContext::getThumbnail() const {
+ return &mCurrentThumbnail[0];
+}
+
+bool NativeContext::hasThumbnail() const {
+ return mThumbnailSet;
+}
+
+bool NativeContext::setThumbnail(const std::vector<uint8_t>& buffer, uint32_t width, uint32_t height) {
+ mThumbnailWidth = width;
+ mThumbnailHeight = height;
+
+ size_t size = BYTES_PER_RGB_PIXEL * width * height;
+ mCurrentThumbnail.resize(size);
+ //if (mCurrentThumbnail.resize(size) < 0) {
+ // ALOGE("%s: Could not resize thumbnail buffer.", __FUNCTION__);
+ // return false;
+ //}
+
+ // uint8_t* thumb = mCurrentThumbnail.editArray();
+ memcpy(&mCurrentThumbnail[0], &buffer[0], size);
+ mThumbnailSet = true;
+ return true;
+}
+
+void NativeContext::setOrientation(uint16_t orientation) {
+ mOrientation = orientation;
+}
+
+uint16_t NativeContext::getOrientation() const {
+ return mOrientation;
+}
+
+void NativeContext::setDescription(const std::string& desc) {
+ mDescription = desc;
+ mDescriptionSet = true;
+}
+
+std::string NativeContext::getDescription() const {
+ return mDescription;
+}
+
+bool NativeContext::hasDescription() const {
+ return mDescriptionSet;
+}
+
+void NativeContext::setGpsData(const GpsData& data) {
+ mGpsData = data;
+ mGpsSet = true;
+}
+
+GpsData NativeContext::getGpsData() const {
+ return mGpsData;
+}
+
+bool NativeContext::hasGpsData() const {
+ return mGpsSet;
+}
+
+void NativeContext::setCaptureTime(const std::string& formattedCaptureTime) {
+ mFormattedCaptureTime = formattedCaptureTime;
+ mCaptureTimeSet = true;
+}
+
+std::string NativeContext::getCaptureTime() const {
+ return mFormattedCaptureTime;
+}
+
+bool NativeContext::hasCaptureTime() const {
+ return mCaptureTimeSet;
+}
+
+// End of NativeContext
+// ----------------------------------------------------------------------------
+
+
+/**
+ * StripSource subclass for Input types.
+ *
+ * This class is not intended to be used across JNI calls.
+ */
+
+class InputStripSource : public StripSource, public LightRefBase<InputStripSource> {
+public:
+ InputStripSource(Input& input, uint32_t ifd, uint32_t width, uint32_t height,
+ uint32_t pixStride, uint32_t rowStride, uint64_t offset, uint32_t bytesPerSample,
+ uint32_t samplesPerPixel);
+
+ virtual ~InputStripSource();
+
+ virtual status_t writeToStream(Output& stream, uint32_t count);
+
+ virtual uint32_t getIfd() const;
+protected:
+ uint32_t mIfd;
+ Input* mInput;
+ uint32_t mWidth;
+ uint32_t mHeight;
+ uint32_t mPixStride;
+ uint32_t mRowStride;
+ uint64_t mOffset;
+ uint32_t mBytesPerSample;
+ uint32_t mSamplesPerPixel;
+};
+
+InputStripSource::InputStripSource(Input& input, uint32_t ifd, uint32_t width,
+ uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset,
+ uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd), mInput(&input),
+ mWidth(width), mHeight(height), mPixStride(pixStride), mRowStride(rowStride),
+ mOffset(offset), mBytesPerSample(bytesPerSample),
+ mSamplesPerPixel(samplesPerPixel) {}
+
+InputStripSource::~InputStripSource() {}
+
+status_t InputStripSource::writeToStream(Output& stream, uint32_t count) {
+ uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel;
+ int64_t offset = mOffset;
+
+ if (fullSize != count) {
+ ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count,
+ fullSize);
+ // jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write");
+ return BAD_VALUE;
+ }
+
+ // Skip offset
+ while (offset > 0) {
+ ssize_t skipped = mInput->skip(offset);
+ if (skipped <= 0) {
+ if (skipped == NOT_ENOUGH_DATA || skipped == 0) {
+#if 0
+ jniThrowExceptionFmt(mEnv, "java/io/IOException",
+ "Early EOF encountered in skip, not enough pixel data for image of size %u",
+ fullSize);
+#endif
+ skipped = NOT_ENOUGH_DATA;
+ } else {
+#if 0
+ if (!mEnv->ExceptionCheck()) {
+
+ jniThrowException(mEnv, "java/io/IOException",
+ "Error encountered while skip bytes in input stream.");
+ }
+#endif
+ }
+
+ return skipped;
+ }
+ offset -= skipped;
+ }
+
+ std::vector<uint8_t> row;
+ row.resize(mRowStride);
+#if 0
+ if (row.resize(mRowStride) < 0) {
+ jniThrowException(mEnv, "java/lang/OutOfMemoryError", "Could not allocate row vector.");
+ return BAD_VALUE;
+ }
+#endif
+
+ uint8_t* rowBytes = &row[0];
+
+ for (uint32_t i = 0; i < mHeight; ++i) {
+ size_t rowFillAmt = 0;
+ size_t rowSize = mRowStride;
+
+ while (rowFillAmt < mRowStride) {
+ ssize_t bytesRead = mInput->read(rowBytes, rowFillAmt, rowSize);
+ if (bytesRead <= 0) {
+ if (bytesRead == NOT_ENOUGH_DATA || bytesRead == 0) {
+ ALOGE("%s: Early EOF on row %" PRIu32 ", received bytesRead %zd",
+ __FUNCTION__, i, bytesRead);
+#if 0
+ jniThrowExceptionFmt(mEnv, "java/io/IOException",
+ "Early EOF encountered, not enough pixel data for image of size %"
+ PRIu32, fullSize);
+#endif
+ bytesRead = NOT_ENOUGH_DATA;
+ } else {
+#if 0
+ if (!mEnv->ExceptionCheck()) {
+ jniThrowException(mEnv, "java/io/IOException",
+ "Error encountered while reading");
+ }
+#endif
+ }
+ return bytesRead;
+ }
+ rowFillAmt += bytesRead;
+ rowSize -= bytesRead;
+ }
+
+ if (mPixStride == mBytesPerSample * mSamplesPerPixel) {
+ ALOGV("%s: Using stream per-row write for strip.", __FUNCTION__);
+
+ if (stream.write(rowBytes, 0, mBytesPerSample * mSamplesPerPixel * mWidth) != OK) {
+#if 0
+ if (!mEnv->ExceptionCheck()) {
+ jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
+ }
+#endif
+ return BAD_VALUE;
+ }
+ } else {
+ ALOGV("%s: Using stream per-pixel write for strip.", __FUNCTION__);
+#if 0
+ jniThrowException(mEnv, "java/lang/IllegalStateException",
+ "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous");
+#endif
+ return BAD_VALUE;
+
+ // TODO: Add support for non-contiguous pixels if needed.
+ }
+ }
+ return OK;
+}
+
+uint32_t InputStripSource::getIfd() const {
+ return mIfd;
+}
+
+// End of InputStripSource
+// ----------------------------------------------------------------------------
+
+/**
+ * StripSource subclass for direct buffer types.
+ *
+ * This class is not intended to be used across JNI calls.
+ */
+
+class DirectStripSource : public StripSource, public LightRefBase<DirectStripSource> {
+public:
+ DirectStripSource(const uint8_t* pixelBytes, uint32_t ifd, uint32_t width,
+ uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset,
+ uint32_t bytesPerSample, uint32_t samplesPerPixel);
+
+ virtual ~DirectStripSource();
+
+ virtual status_t writeToStream(Output& stream, uint32_t count);
+
+ virtual uint32_t getIfd() const;
+protected:
+ uint32_t mIfd;
+ const uint8_t* mPixelBytes;
+ uint32_t mWidth;
+ uint32_t mHeight;
+ uint32_t mPixStride;
+ uint32_t mRowStride;
+ uint64_t mOffset; // widened from uint16_t, which would truncate the 64-bit ctor argument
+ uint32_t mBytesPerSample;
+ uint32_t mSamplesPerPixel;
+};
+
+DirectStripSource::DirectStripSource(const uint8_t* pixelBytes, uint32_t ifd,
+ uint32_t width, uint32_t height, uint32_t pixStride, uint32_t rowStride,
+ uint64_t offset, uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd),
+ mPixelBytes(pixelBytes), mWidth(width), mHeight(height), mPixStride(pixStride),
+ mRowStride(rowStride), mOffset(offset), mBytesPerSample(bytesPerSample),
+ mSamplesPerPixel(samplesPerPixel) {}
+
+DirectStripSource::~DirectStripSource() {}
+
+status_t DirectStripSource::writeToStream(Output& stream, uint32_t count) {
+ uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel;
+
+ if (fullSize != count) {
+ ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count,
+ fullSize);
+#if 0
+ jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write");
+#endif
+ return BAD_VALUE;
+ }
+
+
+ if (mPixStride == mBytesPerSample * mSamplesPerPixel
+ && mRowStride == mWidth * mBytesPerSample * mSamplesPerPixel) {
+ ALOGV("%s: Using direct single-pass write for strip.", __FUNCTION__);
+
+ if (stream.write(mPixelBytes, mOffset, fullSize) != OK) {
+#if 0
+ if (!mEnv->ExceptionCheck()) {
+ jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
+ }
+#endif
+ return BAD_VALUE;
+ }
+ } else if (mPixStride == mBytesPerSample * mSamplesPerPixel) {
+ ALOGV("%s: Using direct per-row write for strip.", __FUNCTION__);
+
+ for (size_t i = 0; i < mHeight; ++i) {
+ if (stream.write(mPixelBytes, mOffset + i * mRowStride, mPixStride * mWidth) != OK/* ||
+ mEnv->ExceptionCheck()*/) {
+#if 0
+ if (!mEnv->ExceptionCheck()) {
+ jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
+ }
+#endif
+ return BAD_VALUE;
+ }
+ }
+ } else {
+ ALOGV("%s: Using direct per-pixel write for strip.", __FUNCTION__);
+#if 0
+ jniThrowException(mEnv, "java/lang/IllegalStateException",
+ "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous");
+#endif
+ return BAD_VALUE;
+
+ // TODO: Add support for non-contiguous pixels if needed.
+ }
+ return OK;
+
+}
+
+uint32_t DirectStripSource::getIfd() const {
+ return mIfd;
+}
+
+// End of DirectStripSource
+// ----------------------------------------------------------------------------
+
+/**
+ * Calculate the default crop relative to the "active area" of the image sensor (this active area
+ * will always be the pre-correction active area rectangle), and set this.
+ */
+static status_t calculateAndSetCrop(ACameraMetadata* characteristics,
+ sp<TiffWriter> writer) {
+
+ ACameraMetadata_const_entry entry = { 0 };
+ // ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+ // ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+ camera_status_t status = ACameraMetadata_getConstEntry(characteristics,
+ ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, &entry);
+ uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
+ uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
+
+ const uint32_t margin = 8; // Default margin recommended by Adobe for interpolation.
+
+ if (width < margin * 2 || height < margin * 2) {
+ ALOGE("%s: Cannot calculate default crop for image, pre-correction active area is too"
+ "small: h=%" PRIu32 ", w=%" PRIu32, __FUNCTION__, height, width);
+#if 0
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Pre-correction active area is too small.");
+#endif
+ return BAD_VALUE;
+ }
+
+ uint32_t defaultCropOrigin[] = {margin, margin};
+ uint32_t defaultCropSize[] = {width - defaultCropOrigin[0] - margin,
+ height - defaultCropOrigin[1] - margin};
+
+ BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPORIGIN, 2, defaultCropOrigin,
+ TIFF_IFD_0), env, TAG_DEFAULTCROPORIGIN, writer);
+ BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPSIZE, 2, defaultCropSize,
+ TIFF_IFD_0), env, TAG_DEFAULTCROPSIZE, writer);
+
+ return OK;
+}
+
+static bool validateDngHeader(sp<TiffWriter> writer, ACameraMetadata* characteristics, uint32_t width, uint32_t height)
+{
+ if (width <= 0 || height <= 0) {
+#if 0
+ jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
+ "Image width %d is invalid", width);
+#endif
+ return false;
+ }
+
+ ACameraMetadata_const_entry preCorrectionEntry = { 0 };
+ camera_status_t status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, &preCorrectionEntry);
+ ACameraMetadata_const_entry pixelArrayEntry = { 0 };
+ status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE, &pixelArrayEntry);
+
+ int pWidth = static_cast<int>(pixelArrayEntry.data.i32[0]);
+ int pHeight = static_cast<int>(pixelArrayEntry.data.i32[1]);
+ int cWidth = static_cast<int>(preCorrectionEntry.data.i32[2]);
+ int cHeight = static_cast<int>(preCorrectionEntry.data.i32[3]);
+
+ bool matchesPixelArray = (pWidth == width && pHeight == height);
+ bool matchesPreCorrectionArray = (cWidth == width && cHeight == height);
+
+ if (!(matchesPixelArray || matchesPreCorrectionArray)) {
+#if 0
+ jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
+ "Image dimensions (w=%d,h=%d) are invalid, must match either the pixel "
+ "array size (w=%d, h=%d) or the pre-correction array size (w=%d, h=%d)",
+ width, height, pWidth, pHeight, cWidth, cHeight);
+#endif
+ return false;
+ }
+
+ return true;
+}
+
+static status_t moveEntries(sp<TiffWriter> writer, uint32_t ifdFrom, uint32_t ifdTo,
+ const std::vector<uint16_t>& entries) {
+ for (size_t i = 0; i < entries.size(); ++i) {
+ uint16_t tagId = entries[i];
+ sp<TiffEntry> entry = writer->getEntry(tagId, ifdFrom);
+ if (entry.get() == nullptr) {
+ ALOGE("%s: moveEntries failed, entry %u not found in IFD %u", __FUNCTION__, tagId,
+ ifdFrom);
+ return BAD_VALUE;
+ }
+ if (writer->addEntry(entry, ifdTo) != OK) {
+ ALOGE("%s: moveEntries failed, could not add entry %u to IFD %u", __FUNCTION__, tagId,
+ ifdFrom);
+ return BAD_VALUE;
+ }
+ writer->removeEntry(tagId, ifdFrom);
+ }
+ return OK;
+}
+
+/**
+ * Write CFA pattern for given CFA enum into cfaOut. cfaOut must have length >= 4.
+ * Returns OK on success, or a negative error code if the CFA enum was invalid.
+ */
+static status_t convertCFA(uint8_t cfaEnum, /*out*/uint8_t* cfaOut) {
+ acamera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa =
+ static_cast<acamera_metadata_enum_android_sensor_info_color_filter_arrangement_t>(
+ cfaEnum);
+ switch(cfa) {
+ case ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: {
+ cfaOut[0] = 0;
+ cfaOut[1] = 1;
+ cfaOut[2] = 1;
+ cfaOut[3] = 2;
+ break;
+ }
+ case ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: {
+ cfaOut[0] = 1;
+ cfaOut[1] = 0;
+ cfaOut[2] = 2;
+ cfaOut[3] = 1;
+ break;
+ }
+ case ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: {
+ cfaOut[0] = 1;
+ cfaOut[1] = 2;
+ cfaOut[2] = 0;
+ cfaOut[3] = 1;
+ break;
+ }
+ case ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: {
+ cfaOut[0] = 2;
+ cfaOut[1] = 1;
+ cfaOut[2] = 1;
+ cfaOut[3] = 0;
+ break;
+ }
+ // MONO and NIR are degenerate case of RGGB pattern: only Red channel
+ // will be used.
+ case ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO:
+ case ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR: {
+ cfaOut[0] = 0;
+ break;
+ }
+ default: {
+ return BAD_VALUE;
+ }
+ }
+ return OK;
+}
+
+/**
+ * Convert the CFA layout enum to an OpcodeListBuilder::CfaLayout enum, defaults to
+ * RGGB for an unknown enum.
+ */
+static OpcodeListBuilder::CfaLayout convertCFAEnumToOpcodeLayout(uint8_t cfaEnum) {
+ acamera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa =
+ static_cast<acamera_metadata_enum_android_sensor_info_color_filter_arrangement_t>(
+ cfaEnum);
+ switch(cfa) {
+ case ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: {
+ return OpcodeListBuilder::CFA_RGGB;
+ }
+ case ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: {
+ return OpcodeListBuilder::CFA_GRBG;
+ }
+ case ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: {
+ return OpcodeListBuilder::CFA_GBRG;
+ }
+ case ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: {
+ return OpcodeListBuilder::CFA_BGGR;
+ }
+ default: {
+ return OpcodeListBuilder::CFA_RGGB;
+ }
+ }
+}
+
+/**
+ * For each color plane, find the corresponding noise profile coefficients given in the
+ * per-channel noise profile. If multiple channels in the CFA correspond to a color in the color
+ * plane, this method takes the pair of noise profile coefficients with the higher S coefficient.
+ *
+ * perChannelNoiseProfile - numChannels * 2 noise profile coefficients.
+ * cfa - numChannels color channels corresponding to each of the per-channel noise profile
+ * coefficients.
+ * numChannels - the number of noise profile coefficient pairs and color channels given in
+ * the perChannelNoiseProfile and cfa arguments, respectively.
+ * planeColors - the color planes in the noise profile output.
+ * numPlanes - the number of planes in planeColors and pairs of coefficients in noiseProfile.
+ * noiseProfile - 2 * numPlanes doubles containing numPlanes pairs of noise profile coefficients.
+ *
+ * returns OK, or a negative error code on failure.
+ */
+static status_t generateNoiseProfile(const double* perChannelNoiseProfile, uint8_t* cfa,
+ size_t numChannels, const uint8_t* planeColors, size_t numPlanes,
+ /*out*/double* noiseProfile) {
+
+ for (size_t p = 0; p < numPlanes; ++p) {
+ size_t S = p * 2;
+ size_t O = p * 2 + 1;
+
+ noiseProfile[S] = 0;
+ noiseProfile[O] = 0;
+ bool uninitialized = true;
+ for (size_t c = 0; c < numChannels; ++c) {
+ if (cfa[c] == planeColors[p] && perChannelNoiseProfile[c * 2] > noiseProfile[S]) {
+ noiseProfile[S] = perChannelNoiseProfile[c * 2];
+ noiseProfile[O] = perChannelNoiseProfile[c * 2 + 1];
+ uninitialized = false;
+ }
+ }
+ if (uninitialized) {
+ ALOGE("%s: No valid NoiseProfile coefficients for color plane %zu",
+ __FUNCTION__, p);
+ return BAD_VALUE;
+ }
+ }
+ return OK;
+}
+
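+// Forward lens-distortion mapping around the optical center (cx, cy) with focal length f:
+// radial terms k0..k3 and tangential terms p1, p2 (a Brown-Conrady style model).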
+static void undistort(/*inout*/double& x, /*inout*/double& y,
+ const std::array<float, 6>& distortion,
+ const float cx, const float cy, const float f) {
+ double xp = (x - cx) / f;
+ double yp = (y - cy) / f;
+
+ double x2 = xp * xp;
+ double y2 = yp * yp;
+ double r2 = x2 + y2;
+ double xy2 = 2.0 * xp * yp;
+
+ const float k0 = distortion[0];
+ const float k1 = distortion[1];
+ const float k2 = distortion[2];
+ const float k3 = distortion[3];
+ const float p1 = distortion[4];
+ const float p2 = distortion[5];
+
+ double kr = k0 + ((k3 * r2 + k2) * r2 + k1) * r2;
+ double xpp = xp * kr + p1 * xy2 + p2 * (r2 + 2.0 * x2);
+ double ypp = yp * kr + p1 * (r2 + 2.0 * y2) + p2 * xy2;
+
+ x = xpp * f + cx;
+ y = ypp * f + cy;
+ return;
+}
+
+static inline bool unDistortWithinPreCorrArray(
+ double x, double y,
+ const std::array<float, 6>& distortion,
+ const float cx, const float cy, const float f,
+ const int preCorrW, const int preCorrH, const int xMin, const int yMin) {
+ undistort(x, y, distortion, cx, cy, f);
+ // xMin and yMin are inclusive, and xMax and yMax are exclusive.
+ int xMax = xMin + preCorrW;
+ int yMax = yMin + preCorrH;
+ if (x < xMin || y < yMin || x >= xMax || y >= yMax) {
+ return false;
+ }
+ return true;
+}
+
+static inline bool boxWithinPrecorrectionArray(
+ int left, int top, int right, int bottom,
+ const std::array<float, 6>& distortion,
+ const float cx, const float cy, const float f,
+ const int preCorrW, const int preCorrH, const int xMin, const int yMin){
+ // Top row
+ if (!unDistortWithinPreCorrArray(left, top,
+ distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
+ return false;
+ }
+
+ if (!unDistortWithinPreCorrArray(cx, top,
+ distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
+ return false;
+ }
+
+ if (!unDistortWithinPreCorrArray(right, top,
+ distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
+ return false;
+ }
+
+ // Middle row
+ if (!unDistortWithinPreCorrArray(left, cy,
+ distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
+ return false;
+ }
+
+ if (!unDistortWithinPreCorrArray(right, cy,
+ distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
+ return false;
+ }
+
+ // Bottom row
+ if (!unDistortWithinPreCorrArray(left, bottom,
+ distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
+ return false;
+ }
+
+ if (!unDistortWithinPreCorrArray(cx, bottom,
+ distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
+ return false;
+ }
+
+ if (!unDistortWithinPreCorrArray(right, bottom,
+ distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
+ return false;
+ }
+ return true;
+}
+
+static inline bool scaledBoxWithinPrecorrectionArray(
+ double scale/*must be <= 1.0*/,
+ const std::array<float, 6>& distortion,
+ const float cx, const float cy, const float f,
+ const int preCorrW, const int preCorrH,
+ const int xMin, const int yMin){
+
+ double left = cx * (1.0 - scale);
+ double right = (preCorrW - 1) * scale + cx * (1.0 - scale);
+ double top = cy * (1.0 - scale);
+ double bottom = (preCorrH - 1) * scale + cy * (1.0 - scale);
+
+ return boxWithinPrecorrectionArray(left, top, right, bottom,
+ distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin);
+}
+
+static status_t findPostCorrectionScale(
+ double stepSize, double minScale,
+ const std::array<float, 6>& distortion,
+ const float cx, const float cy, const float f,
+ const int preCorrW, const int preCorrH, const int xMin, const int yMin,
+ /*out*/ double* outScale) {
+ if (outScale == nullptr) {
+ ALOGE("%s: outScale must not be null", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ for (double scale = 1.0; scale > minScale; scale -= stepSize) {
+ if (scaledBoxWithinPrecorrectionArray(
+ scale, distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
+ *outScale = scale;
+ return OK;
+ }
+ }
+ ALOGE("%s: cannot find cropping scale for lens distortion: stepSize %f, minScale %f",
+ __FUNCTION__, stepSize, minScale);
+ return BAD_VALUE;
+}
+
+// Apply a scale factor to distortion coefficients so that the image is zoomed out and all pixels
+// are sampled within the precorrection array
+static void normalizeLensDistortion(
+ /*inout*/std::array<float, 6>& distortion,
+ float cx, float cy, float f, int preCorrW, int preCorrH, int xMin = 0, int yMin = 0) {
+ ALOGV("%s: distortion [%f, %f, %f, %f, %f, %f], (cx,cy) (%f, %f), f %f, (W,H) (%d, %d)"
+ ", (xmin, ymin, xmax, ymax) (%d, %d, %d, %d)",
+ __FUNCTION__, distortion[0], distortion[1], distortion[2],
+ distortion[3], distortion[4], distortion[5],
+ cx, cy, f, preCorrW, preCorrH,
+ xMin, yMin, xMin + preCorrW - 1, yMin + preCorrH - 1);
+
+ // Only update distortion coeffients if we can find a good bounding box
+ double scale = 1.0;
+ if (OK == findPostCorrectionScale(0.002, 0.5,
+ distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin,
+ /*out*/&scale)) {
+ ALOGV("%s: scaling distortion coefficients by %f", __FUNCTION__, scale);
+ // The formula:
+ // xc = xi * (k0 + k1*r^2 + k2*r^4 + k3*r^6) + k4 * (2*xi*yi) + k5 * (r^2 + 2*xi^2)
+ // To create effective zoom we want to replace xi by xi *m, yi by yi*m and r^2 by r^2*m^2
+ // Factor the extra m power terms into k0~k6
+ std::array<float, 6> scalePowers = {1, 3, 5, 7, 2, 2};
+ for (size_t i = 0; i < 6; i++) {
+ distortion[i] *= pow(scale, scalePowers[i]);
+ }
+ }
+ return;
+}
+
+// ----------------------------------------------------------------------------
+#if 0
+static NativeContext* DngCreator_getNativeContext(JNIEnv* env, jobject thiz) {
+ ALOGV("%s:", __FUNCTION__);
+ return reinterpret_cast<NativeContext*>(env->GetLongField(thiz,
+ gDngCreatorClassInfo.mNativeContext));
+}
+
+static void DngCreator_setNativeContext(JNIEnv* env, jobject thiz, sp<NativeContext> context) {
+ ALOGV("%s:", __FUNCTION__);
+ NativeContext* current = DngCreator_getNativeContext(env, thiz);
+
+ if (context != nullptr) {
+ context->incStrong((void*) DngCreator_setNativeContext);
+ }
+
+ if (current) {
+ current->decStrong((void*) DngCreator_setNativeContext);
+ }
+
+ env->SetLongField(thiz, gDngCreatorClassInfo.mNativeContext,
+ reinterpret_cast<jlong>(context.get()));
+}
+
+static void DngCreator_nativeClassInit(JNIEnv* env, jclass clazz) {
+ ALOGV("%s:", __FUNCTION__);
+
+ gDngCreatorClassInfo.mNativeContext = GetFieldIDOrDie(env,
+ clazz, ANDROID_DNGCREATOR_CTX_JNI_ID, "J");
+
+ jclass outputStreamClazz = FindClassOrDie(env, "java/io/OutputStream");
+ gOutputStreamClassInfo.mWriteMethod = GetMethodIDOrDie(env,
+ outputStreamClazz, "write", "([BII)V");
+
+ jclass inputStreamClazz = FindClassOrDie(env, "java/io/InputStream");
+ gInputStreamClassInfo.mReadMethod = GetMethodIDOrDie(env, inputStreamClazz, "read", "([BII)I");
+ gInputStreamClassInfo.mSkipMethod = GetMethodIDOrDie(env, inputStreamClazz, "skip", "(J)J");
+
+ jclass inputBufferClazz = FindClassOrDie(env, "java/nio/ByteBuffer");
+ gInputByteBufferClassInfo.mGetMethod = GetMethodIDOrDie(env,
+ inputBufferClazz, "get", "([BII)Ljava/nio/ByteBuffer;");
+}
+#endif
+
+void DngCreator::init(ACameraMetadata* characteristics,
+ ACameraMetadata* results, const std::string& captureTime) {
+ ALOGV("%s:", __FUNCTION__);
+
+ sp<NativeContext> nativeContext = new NativeContext(characteristics, results);
+
+ size_t len = captureTime.size() + 1;
+ if (len != NativeContext::DATETIME_COUNT) {
+#if 0
+ jniThrowException(env, "java/lang/IllegalArgumentException",
+ "Formatted capture time string length is not required 20 characters");
+#endif
+ return;
+ }
+
+ nativeContext->setCaptureTime(captureTime);
+
+ // DngCreator_setNativeContext(env, thiz, nativeContext);
+}
+
+sp<TiffWriter> DngCreator::setup(uint32_t imageWidth, uint32_t imageHeight)
+{
+ ACameraMetadata* characteristics = getCharacteristics();
+ ACameraMetadata* results = getResult();
+
+ sp<TiffWriter> writer = new TiffWriter();
+
+ uint32_t preXMin = 0;
+ uint32_t preYMin = 0;
+ uint32_t preWidth = 0;
+ uint32_t preHeight = 0;
+ uint8_t colorFilter = 0;
+ camera_status_t status;
+ bool isBayer = true;
+ {
+ // Check dimensions
+ ACameraMetadata_const_entry entry = { 0 };
+ status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, &entry);
+ BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer);
+ preXMin = static_cast<uint32_t>(entry.data.i32[0]);
+ preYMin = static_cast<uint32_t>(entry.data.i32[1]);
+ preWidth = static_cast<uint32_t>(entry.data.i32[2]);
+ preHeight = static_cast<uint32_t>(entry.data.i32[3]);
+
+ ACameraMetadata_const_entry pixelArrayEntry = { 0 };
+ status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE, &pixelArrayEntry);
+ uint32_t pixWidth = static_cast<uint32_t>(pixelArrayEntry.data.i32[0]);
+ uint32_t pixHeight = static_cast<uint32_t>(pixelArrayEntry.data.i32[1]);
+
+ if (!((imageWidth == preWidth && imageHeight == preHeight) ||
+ (imageWidth == pixWidth && imageHeight == pixHeight))) {
+#if 0
+ jniThrowException(env, "java/lang/AssertionError",
+ "Height and width of image buffer did not match height and width of"
+ "either the preCorrectionActiveArraySize or the pixelArraySize.");
+#endif
+ return nullptr;
+ }
+
+ ACameraMetadata_const_entry colorFilterEntry = { 0 };
+ status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, &colorFilterEntry);
+ colorFilter = colorFilterEntry.data.u8[0];
+ ACameraMetadata_const_entry capabilitiesEntry = { 0 };
+ status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_REQUEST_AVAILABLE_CAPABILITIES, & capabilitiesEntry);
+ size_t capsCount = capabilitiesEntry.count;
+ const uint8_t* caps = capabilitiesEntry.data.u8;
+
+ if (std::find(caps, caps+capsCount, ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME)
+ != caps+capsCount) {
+ isBayer = false;
+ } else if (colorFilter == ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO ||
+ colorFilter == ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR) {
+#if 0
+ jniThrowException(env, "java/lang/AssertionError",
+ "A camera device with MONO/NIR color filter must have MONOCHROME capability.");
+#endif
+ return nullptr;
+ }
+ }
+
+ writer->addIfd(TIFF_IFD_0);
+
+ status_t err = OK;
+
+ const uint32_t samplesPerPixel = 1;
+ const uint32_t bitsPerSample = BITS_PER_SAMPLE;
+
+ OpcodeListBuilder::CfaLayout opcodeCfaLayout = OpcodeListBuilder::CFA_NONE;
+ uint8_t cfaPlaneColor[3] = {0, 1, 2};
+ ACameraMetadata_const_entry cfaEntry = { 0 };
+ status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, &cfaEntry);
+ BAIL_IF_EMPTY_RET_NULL_SP(cfaEntry, env, TAG_CFAPATTERN, writer);
+ uint8_t cfaEnum = cfaEntry.data.u8[0];
+
+ // TODO: Greensplit.
+ // TODO: Add remaining non-essential tags
+
+ // Setup main image tags
+
+ {
+ // Set orientation
+ uint16_t orientation = TAG_ORIENTATION_NORMAL;
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
+ env, TAG_ORIENTATION, writer);
+ }
+
+ {
+ // Set subfiletype
+ uint32_t subfileType = 0; // Main image
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType,
+ TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer);
+ }
+
+ {
+ // Set bits per sample
+ uint16_t bits = static_cast<uint16_t>(bitsPerSample);
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BITSPERSAMPLE, 1, &bits, TIFF_IFD_0), env,
+ TAG_BITSPERSAMPLE, writer);
+ }
+
+ {
+ // Set compression
+ uint16_t compression = 1; // None
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression,
+ TIFF_IFD_0), env, TAG_COMPRESSION, writer);
+ }
+
+ {
+ // Set dimensions
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &imageWidth, TIFF_IFD_0),
+ env, TAG_IMAGEWIDTH, writer);
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &imageHeight, TIFF_IFD_0),
+ env, TAG_IMAGELENGTH, writer);
+ }
+
+ {
+ // Set photometric interpretation
+ uint16_t interpretation = isBayer ? 32803 /* CFA */ :
+ 34892 /* Linear Raw */;
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
+ &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer);
+ }
+
+ {
+ uint16_t repeatDim[2] = {2, 2};
+ if (!isBayer) {
+ repeatDim[0] = repeatDim[1] = 1;
+ }
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVELREPEATDIM, 2, repeatDim,
+ TIFF_IFD_0), env, TAG_BLACKLEVELREPEATDIM, writer);
+
+ // Set blacklevel tags, using dynamic black level if available
+ ACameraMetadata_const_entry entry = { 0 };
+ camera_status_t status = ACameraMetadata_getConstEntry(results, ACAMERA_SENSOR_DYNAMIC_BLACK_LEVEL, &entry);
+ uint32_t blackLevelRational[8] = {0};
+ if (entry.count != 0) {
+ BAIL_IF_EXPR_RET_NULL_SP(entry.count != 4, env, TAG_BLACKLEVEL, writer);
+ for (size_t i = 0; i < entry.count; i++) {
+ blackLevelRational[i * 2] = static_cast<uint32_t>(entry.data.f[i] * 100);
+ blackLevelRational[i * 2 + 1] = 100;
+ }
+ } else {
+ // Fall back to static black level which is guaranteed
+ status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_BLACK_LEVEL_PATTERN, &entry);
+ BAIL_IF_EXPR_RET_NULL_SP(entry.count != 4, env, TAG_BLACKLEVEL, writer);
+ for (size_t i = 0; i < entry.count; i++) {
+ blackLevelRational[i * 2] = static_cast<uint32_t>(entry.data.i32[i]);
+ blackLevelRational[i * 2 + 1] = 1;
+ }
+ }
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVEL, repeatDim[0]*repeatDim[1],
+ blackLevelRational, TIFF_IFD_0), env, TAG_BLACKLEVEL, writer);
+ }
+
+ {
+ // Set samples per pixel
+ uint16_t samples = static_cast<uint16_t>(samplesPerPixel);
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples, TIFF_IFD_0),
+ env, TAG_SAMPLESPERPIXEL, writer);
+ }
+
+ {
+ // Set planar configuration
+ uint16_t config = 1; // Chunky
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config,
+ TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer);
+ }
+
+ // All CFA pattern tags are not necessary for monochrome cameras.
+ if (isBayer) {
+ // Set CFA pattern dimensions
+ uint16_t repeatDim[2] = {2, 2};
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAREPEATPATTERNDIM, 2, repeatDim,
+ TIFF_IFD_0), env, TAG_CFAREPEATPATTERNDIM, writer);
+
+ // Set CFA pattern
+ const int cfaLength = 4;
+ uint8_t cfa[cfaLength];
+ if ((err = convertCFA(cfaEnum, /*out*/cfa)) != OK) {
+#if 0
+ jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
+ "Invalid metadata for tag %d", TAG_CFAPATTERN);
+#endif
+ }
+
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPATTERN, cfaLength, cfa, TIFF_IFD_0),
+ env, TAG_CFAPATTERN, writer);
+
+ opcodeCfaLayout = convertCFAEnumToOpcodeLayout(cfaEnum);
+
+ // Set CFA plane color
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPLANECOLOR, 3, cfaPlaneColor,
+ TIFF_IFD_0), env, TAG_CFAPLANECOLOR, writer);
+
+ // Set CFA layout
+ uint16_t cfaLayout = 1;
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFALAYOUT, 1, &cfaLayout, TIFF_IFD_0),
+ env, TAG_CFALAYOUT, writer);
+ }
+
+ {
+ // image description
+ uint8_t imageDescription = '\0'; // empty
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEDESCRIPTION, 1, &imageDescription,
+ TIFF_IFD_0), env, TAG_IMAGEDESCRIPTION, writer);
+ }
+
+ {
+ // make
+ // Use "" to represent unknown make as suggested in TIFF/EP spec.
+ char manufacturer[PROP_VALUE_MAX] = { 0 };
+ __system_property_get("ro.product.manufacturer", manufacturer);
+ uint32_t count = static_cast<uint32_t>(strlen(manufacturer)) + 1;
+
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MAKE, count,
+ reinterpret_cast<const uint8_t*>(manufacturer), TIFF_IFD_0), env, TAG_MAKE,
+ writer);
+ }
+
+ {
+ // model
+ // Use "" to represent unknown model as suggested in TIFF/EP spec.
+ char model[PROP_VALUE_MAX] = { 0 };
+ __system_property_get("ro.product.model", model);
+ uint32_t count = static_cast<uint32_t>(strlen(model)) + 1;
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MODEL, count,
+ reinterpret_cast<const uint8_t*>(model), TIFF_IFD_0), env, TAG_MODEL,
+ writer);
+ }
+
+ {
+ // x resolution
+ uint32_t xres[] = { 72, 1 }; // default 72 ppi
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0),
+ env, TAG_XRESOLUTION, writer);
+
+ // y resolution
+ uint32_t yres[] = { 72, 1 }; // default 72 ppi
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0),
+ env, TAG_YRESOLUTION, writer);
+
+ uint16_t unit = 2; // inches
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0),
+ env, TAG_RESOLUTIONUNIT, writer);
+ }
+
+ {
+ // software
+ char software[PROP_VALUE_MAX] = { 0 };
+ __system_property_get("ro.build.fingerprint", software);
+ uint32_t count = static_cast<uint32_t>(strlen(software)) + 1;
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SOFTWARE, count,
+ reinterpret_cast<const uint8_t*>(software), TIFF_IFD_0), env, TAG_SOFTWARE,
+ writer);
+ }
+
+ if (hasCaptureTime()) {
+ // datetime
+ std::string captureTime = getCaptureTime();
+
+ if (writer->addEntry(TAG_DATETIME, NativeContext::DATETIME_COUNT,
+ reinterpret_cast<const uint8_t*>(captureTime.c_str()), TIFF_IFD_0) != OK) {
+#if 0
+ jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
+ "Invalid metadata for tag %x", TAG_DATETIME);
+#endif
+ return nullptr;
+ }
+
+ // datetime original
+ if (writer->addEntry(TAG_DATETIMEORIGINAL, NativeContext::DATETIME_COUNT,
+ reinterpret_cast<const uint8_t*>(captureTime.c_str()), TIFF_IFD_0) != OK) {
+#if 0
+ jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
+ "Invalid metadata for tag %x", TAG_DATETIMEORIGINAL);
+#endif
+ return nullptr;
+ }
+ }
+
+ {
+ // TIFF/EP standard id
+ uint8_t standardId[] = { 1, 0, 0, 0 };
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_TIFFEPSTANDARDID, 4, standardId,
+ TIFF_IFD_0), env, TAG_TIFFEPSTANDARDID, writer);
+ }
+
+ {
+ // copyright
+ uint8_t copyright = '\0'; // empty
+        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COPYRIGHT, 1, &copyright,
+ TIFF_IFD_0), env, TAG_COPYRIGHT, writer);
+ }
+
+ {
+ // exposure time
+ ACameraMetadata_const_entry entry = { 0 };
+ status = ACameraMetadata_getConstEntry(results, ACAMERA_SENSOR_EXPOSURE_TIME, &entry);
+ BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_EXPOSURETIME, writer);
+
+ int64_t exposureTime = *(entry.data.i64);
+
+ if (exposureTime < 0) {
+ // Should be unreachable
+#if 0
+ jniThrowException(env, "java/lang/IllegalArgumentException",
+ "Negative exposure time in metadata");
+#endif
+ return nullptr;
+ }
+
+ // Ensure exposure time doesn't overflow (for exposures > 4s)
+ uint32_t denominator = 1000000000;
+ while (exposureTime > UINT32_MAX) {
+ exposureTime >>= 1;
+ denominator >>= 1;
+ if (denominator == 0) {
+ // Should be unreachable
+#if 0
+ jniThrowException(env, "java/lang/IllegalArgumentException",
+ "Exposure time too long");
+#endif
+ return nullptr;
+ }
+ }
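+        // Worked example: a 5 s exposure is 5'000'000'000 ns > UINT32_MAX, so one
+        // halving of both terms gives 2'500'000'000 / 500'000'000, still exactly 5 s.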
+
+        uint32_t exposure[] = { static_cast<uint32_t>(exposureTime), denominator };
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_EXPOSURETIME, 1, exposure,
+ TIFF_IFD_0), env, TAG_EXPOSURETIME, writer);
+
+ }
+
+ {
+ // ISO speed ratings
+ ACameraMetadata_const_entry entry = { 0 };
+ status = ACameraMetadata_getConstEntry(results, ACAMERA_SENSOR_SENSITIVITY, &entry);
+ BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ISOSPEEDRATINGS, writer);
+
+ int32_t tempIso = *(entry.data.i32);
+ if (tempIso < 0) {
+#if 0
+ jniThrowException(env, "java/lang/IllegalArgumentException",
+ "Negative ISO value");
+#endif
+ return nullptr;
+ }
+
+ if (tempIso > UINT16_MAX) {
+ ALOGW("%s: ISO value overflows UINT16_MAX, clamping to max", __FUNCTION__);
+ tempIso = UINT16_MAX;
+ }
+
+        uint16_t iso = static_cast<uint16_t>(tempIso);
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ISOSPEEDRATINGS, 1, &iso,
+ TIFF_IFD_0), env, TAG_ISOSPEEDRATINGS, writer);
+ }
+
+ {
+ // Baseline exposure
+ ACameraMetadata_const_entry entry = { 0 };
+ status = ACameraMetadata_getConstEntry(results, ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST, &entry);
+ BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_BASELINEEXPOSURE, writer);
+
+ // post RAW gain should be boostValue / 100
+        double postRAWGain = static_cast<double>(entry.data.i32[0]) / 100.f;
+ // Baseline exposure should be in EV units so log2(gain) =
+ // log10(gain)/log10(2)
+ double baselineExposure = std::log(postRAWGain) / std::log(2.0f);
+        int32_t baseExposureSRat[] = { static_cast<int32_t>(baselineExposure * 100),
+                100 };
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BASELINEEXPOSURE, 1,
+ baseExposureSRat, TIFF_IFD_0), env, TAG_BASELINEEXPOSURE, writer);
+ }
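+    // e.g. a post-RAW sensitivity boost of 200 is a gain of 2.0, so the baseline
+    // exposure above is log2(2.0) = 1.0 EV, stored as the SRATIONAL 100/100.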
+
+ {
+ // focal length
+ ACameraMetadata_const_entry entry = { 0 };
+ status = ACameraMetadata_getConstEntry(results, ACAMERA_LENS_FOCAL_LENGTH, &entry);
+ BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FOCALLENGTH, writer);
+
+        uint32_t focalLength[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 };
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FOCALLENGTH, 1, focalLength,
+ TIFF_IFD_0), env, TAG_FOCALLENGTH, writer);
+ }
+
+ {
+ // f number
+ ACameraMetadata_const_entry entry = { 0 };
+ status = ACameraMetadata_getConstEntry(results, ACAMERA_LENS_APERTURE, &entry);
+ BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FNUMBER, writer);
+
+        uint32_t fnum[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 };
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FNUMBER, 1, fnum,
+ TIFF_IFD_0), env, TAG_FNUMBER, writer);
+ }
+
+ {
+ // Set DNG version information
+ uint8_t version[4] = {1, 4, 0, 0};
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGVERSION, 4, version, TIFF_IFD_0),
+ env, TAG_DNGVERSION, writer);
+
+ uint8_t backwardVersion[4] = {1, 1, 0, 0};
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGBACKWARDVERSION, 4, backwardVersion,
+ TIFF_IFD_0), env, TAG_DNGBACKWARDVERSION, writer);
+ }
+
+ {
+ // Set whitelevel
+ ACameraMetadata_const_entry entry = { 0 };
+ status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_INFO_WHITE_LEVEL, &entry);
+ BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_WHITELEVEL, writer);
+        uint32_t whiteLevel = static_cast<uint32_t>(entry.data.i32[0]);
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_WHITELEVEL, 1, &whiteLevel, TIFF_IFD_0),
+ env, TAG_WHITELEVEL, writer);
+ }
+
+ {
+ // Set default scale
+ uint32_t defaultScale[4] = {1, 1, 1, 1};
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DEFAULTSCALE, 2, defaultScale,
+ TIFF_IFD_0), env, TAG_DEFAULTSCALE, writer);
+ }
+
+ bool singleIlluminant = false;
+ if (isBayer) {
+ // Set calibration illuminants
+ ACameraMetadata_const_entry entry1 = { 0 };
+ status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_REFERENCE_ILLUMINANT1, &entry1);
+ BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CALIBRATIONILLUMINANT1, writer);
+ ACameraMetadata_const_entry entry2 = { 0 };
+ status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_REFERENCE_ILLUMINANT2, &entry2);
+ if (entry2.count == 0) {
+ singleIlluminant = true;
+ }
+ uint16_t ref1 = entry1.data.u8[0];
+
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT1, 1, &ref1,
+ TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT1, writer);
+
+ if (!singleIlluminant) {
+ uint16_t ref2 = entry2.data.u8[0];
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT2, 1, &ref2,
+ TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT2, writer);
+ }
+ }
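+    // The illuminant values written above are EXIF LightSource codes (e.g.
+    // 17 = Standard Light A, 21 = D65); each one pairs with the corresponding
+    // ColorMatrix/CameraCalibration/ForwardMatrix set written below.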
+
+ if (isBayer) {
+ // Set color transforms
+ ACameraMetadata_const_entry entry1 = { 0 };
+ status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_COLOR_TRANSFORM1, &entry1);
+ BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_COLORMATRIX1, writer);
+
+ int32_t colorTransform1[entry1.count * 2];
+
+ size_t ctr = 0;
+ for(size_t i = 0; i < entry1.count; ++i) {
+ colorTransform1[ctr++] = entry1.data.r[i].numerator;
+ colorTransform1[ctr++] = entry1.data.r[i].denominator;
+ }
+
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX1, entry1.count,
+ colorTransform1, TIFF_IFD_0), env, TAG_COLORMATRIX1, writer);
+
+ if (!singleIlluminant) {
+ ACameraMetadata_const_entry entry2 = { 0 };
+ status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_COLOR_TRANSFORM2, &entry2);
+ BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_COLORMATRIX2, writer);
+ int32_t colorTransform2[entry2.count * 2];
+
+ ctr = 0;
+ for(size_t i = 0; i < entry2.count; ++i) {
+ colorTransform2[ctr++] = entry2.data.r[i].numerator;
+ colorTransform2[ctr++] = entry2.data.r[i].denominator;
+ }
+
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX2, entry2.count,
+ colorTransform2, TIFF_IFD_0), env, TAG_COLORMATRIX2, writer);
+ }
+ }
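+    // ColorMatrix1/2 are the XYZ-to-camera-space transforms. Each 3x3 matrix is
+    // written as SRATIONAL pairs, so the 9 entries become 18 int32 values; the
+    // calibration and forward matrices below use the same packing.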
+
+ if (isBayer) {
+ // Set calibration transforms
+ ACameraMetadata_const_entry entry1 = { 0 };
+ status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_CALIBRATION_TRANSFORM1, &entry1);
+ BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CAMERACALIBRATION1, writer);
+
+ int32_t calibrationTransform1[entry1.count * 2];
+
+ size_t ctr = 0;
+ for(size_t i = 0; i < entry1.count; ++i) {
+ calibrationTransform1[ctr++] = entry1.data.r[i].numerator;
+ calibrationTransform1[ctr++] = entry1.data.r[i].denominator;
+ }
+
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION1, entry1.count,
+ calibrationTransform1, TIFF_IFD_0), env, TAG_CAMERACALIBRATION1, writer);
+
+ if (!singleIlluminant) {
+ ACameraMetadata_const_entry entry2 = { 0 };
+ status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_CALIBRATION_TRANSFORM2, &entry2);
+ BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_CAMERACALIBRATION2, writer);
+ int32_t calibrationTransform2[entry2.count * 2];
+
+ ctr = 0;
+ for(size_t i = 0; i < entry2.count; ++i) {
+ calibrationTransform2[ctr++] = entry2.data.r[i].numerator;
+ calibrationTransform2[ctr++] = entry2.data.r[i].denominator;
+ }
+
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION2, entry2.count,
+ calibrationTransform2, TIFF_IFD_0), env, TAG_CAMERACALIBRATION2, writer);
+ }
+ }
+
+ if (isBayer) {
+ // Set forward transforms
+ ACameraMetadata_const_entry entry1 = { 0 };
+ status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_FORWARD_MATRIX1, &entry1);
+ BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_FORWARDMATRIX1, writer);
+
+ int32_t forwardTransform1[entry1.count * 2];
+
+ size_t ctr = 0;
+ for(size_t i = 0; i < entry1.count; ++i) {
+ forwardTransform1[ctr++] = entry1.data.r[i].numerator;
+ forwardTransform1[ctr++] = entry1.data.r[i].denominator;
+ }
+
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX1, entry1.count,
+ forwardTransform1, TIFF_IFD_0), env, TAG_FORWARDMATRIX1, writer);
+
+ if (!singleIlluminant) {
+ ACameraMetadata_const_entry entry2 = { 0 };
+ status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_FORWARD_MATRIX2, &entry2);
+ BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_FORWARDMATRIX2, writer);
+ int32_t forwardTransform2[entry2.count * 2];
+
+ ctr = 0;
+ for(size_t i = 0; i < entry2.count; ++i) {
+ forwardTransform2[ctr++] = entry2.data.r[i].numerator;
+ forwardTransform2[ctr++] = entry2.data.r[i].denominator;
+ }
+
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX2, entry2.count,
+ forwardTransform2, TIFF_IFD_0), env, TAG_FORWARDMATRIX2, writer);
+ }
+ }
+
+ if (isBayer) {
+ // Set camera neutral
+ ACameraMetadata_const_entry entry = { 0 };
+ status = ACameraMetadata_getConstEntry(results, ACAMERA_SENSOR_NEUTRAL_COLOR_POINT, &entry);
+ BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ASSHOTNEUTRAL, writer);
+ uint32_t cameraNeutral[entry.count * 2];
+
+ size_t ctr = 0;
+ for(size_t i = 0; i < entry.count; ++i) {
+            cameraNeutral[ctr++] =
+                    static_cast<uint32_t>(entry.data.r[i].numerator);
+            cameraNeutral[ctr++] =
+                    static_cast<uint32_t>(entry.data.r[i].denominator);
+ }
+
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ASSHOTNEUTRAL, entry.count, cameraNeutral,
+ TIFF_IFD_0), env, TAG_ASSHOTNEUTRAL, writer);
+ }
+
+ {
+ // Set dimensions
+ if (calculateAndSetCrop(characteristics, writer) != OK) {
+ return nullptr;
+ }
+ ACameraMetadata_const_entry entry = { 0 };
+ status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, &entry);
+ BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ACTIVEAREA, writer);
+        uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]);
+        uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]);
+        uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
+        uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
+
+ // If we only have a buffer containing the pre-correction rectangle, ignore the offset
+ // relative to the pixel array.
+ if (imageWidth == width && imageHeight == height) {
+ xmin = 0;
+ ymin = 0;
+ }
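+        // DNG ActiveArea is ordered {top, left, bottom, right} in pixel-array
+        // coordinates, hence the {ymin, xmin, ...} arrangement below.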
+
+ uint32_t activeArea[] = {ymin, xmin, ymin + height, xmin + width};
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ACTIVEAREA, 4, activeArea, TIFF_IFD_0),
+ env, TAG_ACTIVEAREA, writer);
+ }
+
+ {
+ // Setup unique camera model tag
+ char model[PROP_VALUE_MAX] = { 0 };
+ __system_property_get("ro.product.model", model);
+ char manufacturer[PROP_VALUE_MAX] = { 0 };
+ __system_property_get("ro.product.manufacturer", manufacturer);
+ char brand[PROP_VALUE_MAX] = { 0 };
+ __system_property_get("ro.product.brand", brand);
+
+ std::string cameraModel = model;
+ cameraModel += "-";
+ cameraModel += manufacturer;
+ cameraModel += "-";
+ cameraModel += brand;
+
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_UNIQUECAMERAMODEL, cameraModel.size() + 1,
+                reinterpret_cast<const uint8_t*>(cameraModel.c_str()), TIFF_IFD_0), env,
+ TAG_UNIQUECAMERAMODEL, writer);
+ }
+
+ {
+ // Setup sensor noise model
+ ACameraMetadata_const_entry entry = { 0 };
+ status = ACameraMetadata_getConstEntry(results, ACAMERA_SENSOR_NOISE_PROFILE, &entry);
+
+ const status_t numPlaneColors = isBayer ? 3 : 1;
+ const status_t numCfaChannels = isBayer ? 4 : 1;
+
+ uint8_t cfaOut[numCfaChannels];
+ if ((err = convertCFA(cfaEnum, /*out*/cfaOut)) != OK) {
+#if 0
+ jniThrowException(env, "java/lang/IllegalArgumentException",
+ "Invalid CFA from camera characteristics");
+#endif
+ return nullptr;
+ }
+
+ double noiseProfile[numPlaneColors * 2];
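+        // DNG NoiseProfile holds an (S, O) pair per color plane, modelling the
+        // noise variance of a normalized sample x as S * x + O.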
+
+ if (entry.count > 0) {
+ if (entry.count != numCfaChannels * 2) {
+ ALOGW("%s: Invalid entry count %zu for noise profile returned "
+ "in characteristics, no noise profile tag written...",
+ __FUNCTION__, entry.count);
+ } else {
+ if ((err = generateNoiseProfile(entry.data.d, cfaOut, numCfaChannels,
+ cfaPlaneColor, numPlaneColors, /*out*/ noiseProfile)) == OK) {
+
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NOISEPROFILE,
+ numPlaneColors * 2, noiseProfile, TIFF_IFD_0), env, TAG_NOISEPROFILE,
+ writer);
+ } else {
+ ALOGW("%s: Error converting coefficients for noise profile, no noise profile"
+ " tag written...", __FUNCTION__);
+ }
+ }
+ } else {
+ ALOGW("%s: No noise profile found in result metadata. Image quality may be reduced.",
+ __FUNCTION__);
+ }
+ }
+
+ {
+ // Set up opcode List 2
+ OpcodeListBuilder builder;
+ status_t err = OK;
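+        // Per the DNG spec, OpcodeList2 runs after the raw values are linearized
+        // but before demosaicing; the lens shading gain map and the hot-pixel
+        // fixes below belong to this stage (OpcodeList3 further down is post-demosaic).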
+
+ // Set up lens shading map
+ ACameraMetadata_const_entry entry1 = { 0 };
+ status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_LENS_INFO_SHADING_MAP_SIZE, &entry1);
+
+ uint32_t lsmWidth = 0;
+ uint32_t lsmHeight = 0;
+
+ if (entry1.count != 0) {
+            lsmWidth = static_cast<uint32_t>(entry1.data.i32[0]);
+            lsmHeight = static_cast<uint32_t>(entry1.data.i32[1]);
+ }
+
+ ACameraMetadata_const_entry entry2 = { 0 };
+ status = ACameraMetadata_getConstEntry(results, ACAMERA_STATISTICS_LENS_SHADING_MAP, &entry2);
+
+ ACameraMetadata_const_entry entry = { 0 };
+ status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, &entry);
+ BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer);
+        uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]);
+        uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]);
+        uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
+        uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
+ if (entry2.count > 0 && entry2.count == lsmWidth * lsmHeight * 4) {
+ // GainMap rectangle is relative to the active area origin.
+ err = builder.addGainMapsForMetadata(lsmWidth,
+ lsmHeight,
+ 0,
+ 0,
+ height,
+ width,
+ opcodeCfaLayout,
+ entry2.data.f);
+ if (err != OK) {
+ ALOGE("%s: Could not add Lens shading map.", __FUNCTION__);
+#if 0
+ jniThrowRuntimeException(env, "failed to add lens shading map.");
+#endif
+ return nullptr;
+ }
+ }
+
+ // Hot pixel map is specific to bayer camera per DNG spec.
+ if (isBayer) {
+ // Set up bad pixel correction list
+ ACameraMetadata_const_entry entry3 = { 0 };
+ status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_STATISTICS_HOT_PIXEL_MAP, &entry3);
+
+ if ((entry3.count % 2) != 0) {
+ ALOGE("%s: Hot pixel map contains odd number of values, cannot map to pairs!",
+ __FUNCTION__);
+#if 0
+ jniThrowRuntimeException(env, "failed to add hotpixel map.");
+#endif
+ return nullptr;
+ }
+
+ // Adjust the bad pixel coordinates to be relative to the origin of the active area DNG tag
+            std::vector<uint32_t> v;
+ for (size_t i = 0; i < entry3.count; i += 2) {
+ int32_t x = entry3.data.i32[i];
+ int32_t y = entry3.data.i32[i + 1];
+                x -= static_cast<int32_t>(xmin);
+                y -= static_cast<int32_t>(ymin);
+                if (x < 0 || y < 0 || static_cast<uint32_t>(x) >= width ||
+                        static_cast<uint32_t>(y) >= height) {
+ continue;
+ }
+ v.push_back(x);
+ v.push_back(y);
+ }
+            const uint32_t* badPixels = v.data(); // safe even when v is empty
+ uint32_t badPixelCount = v.size();
+
+ if (badPixelCount > 0) {
+ err = builder.addBadPixelListForMetadata(badPixels, badPixelCount, opcodeCfaLayout);
+
+ if (err != OK) {
+ ALOGE("%s: Could not add hotpixel map.", __FUNCTION__);
+#if 0
+ jniThrowRuntimeException(env, "failed to add hotpixel map.");
+#endif
+ return nullptr;
+ }
+ }
+ }
+
+ if (builder.getCount() > 0) {
+ size_t listSize = builder.getSize();
+ uint8_t opcodeListBuf[listSize];
+ err = builder.buildOpList(opcodeListBuf);
+ if (err == OK) {
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST2, listSize,
+ opcodeListBuf, TIFF_IFD_0), env, TAG_OPCODELIST2, writer);
+ } else {
+ ALOGE("%s: Could not build list of opcodes for lens shading map and bad pixel "
+ "correction.", __FUNCTION__);
+#if 0
+ jniThrowRuntimeException(env, "failed to construct opcode list for lens shading "
+ "map and bad pixel correction");
+#endif
+ return nullptr;
+ }
+ }
+ }
+
+ {
+ // Set up opcode List 3
+ OpcodeListBuilder builder;
+ status_t err = OK;
+
+ // Set up rectilinear distortion correction
+        std::array<float, 6> distortion = {1.f, 0.f, 0.f, 0.f, 0.f, 0.f};
+ bool gotDistortion = false;
+
+ ACameraMetadata_const_entry entry4 = { 0 };
+ status = ACameraMetadata_getConstEntry(results, ACAMERA_LENS_INTRINSIC_CALIBRATION, &entry4);
+
+ if (entry4.count == 5) {
+ float cx = entry4.data.f[/*c_x*/2];
+ float cy = entry4.data.f[/*c_y*/3];
+ // Assuming f_x = f_y, or at least close enough.
+ // Also assuming s = 0, or at least close enough.
+ float f = entry4.data.f[/*f_x*/0];
+
+ ACameraMetadata_const_entry entry3 = { 0 };
+ status = ACameraMetadata_getConstEntry(results, ACAMERA_LENS_DISTORTION, &entry3);
+ if (entry3.count == 5) {
+ gotDistortion = true;
+
+                // Scale the distortion coefficients so the warped image zooms in far
+                // enough that every output pixel samples from within the input image.
+ for (size_t i = 0; i < entry3.count; i++) {
+ distortion[i+1] = entry3.data.f[i];
+ }
+
+ if (preWidth == imageWidth && preHeight == imageHeight) {
+ normalizeLensDistortion(distortion, cx, cy, f, preWidth, preHeight);
+ } else {
+ // image size == pixel array size (contains optical black pixels)
+ // cx/cy is defined in preCorrArray so adding the offset
+ // Also changes default xmin/ymin so that pixels are only
+ // sampled within preCorrection array
+ normalizeLensDistortion(
+ distortion, cx + preXMin, cy + preYMin, f, preWidth, preHeight,
+ preXMin, preYMin);
+ }
+
+ float m_x = std::fmaxf(preWidth - cx, cx);
+ float m_y = std::fmaxf(preHeight - cy, cy);
+ float m_sq = m_x*m_x + m_y*m_y;
+ float m = sqrtf(m_sq); // distance to farthest corner from optical center
+ float f_sq = f * f;
+ // Conversion factors from Camera2 K factors for new LENS_DISTORTION field
+ // to DNG spec.
+ //
+ // Camera2 / OpenCV assume distortion is applied in a space where focal length
+ // is factored out, while DNG assumes a normalized space where the distance
+ // from optical center to the farthest corner is 1.
+ // Scale from camera2 to DNG spec accordingly.
+ // distortion[0] is always 1 with the new LENS_DISTORTION field.
+ const double convCoeff[5] = {
+ m_sq / f_sq,
+ pow(m_sq, 2) / pow(f_sq, 2),
+ pow(m_sq, 3) / pow(f_sq, 3),
+ m / f,
+ m / f
+ };
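+                // Derivation: with r_px the radius in pixels, Camera2 normalizes
+                // by focal length (r_c = r_px / f) while DNG normalizes by the
+                // corner distance (r_d = r_px / m). Substituting r_c = (m/f)*r_d
+                // into the radial terms k_i * r_c^(2i) yields the (m_sq/f_sq)^i
+                // factors above; the two tangential terms scale linearly by m/f.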
+ for (size_t i = 0; i < entry3.count; i++) {
+ distortion[i+1] *= convCoeff[i];
+ }
+ } else {
+ status = ACameraMetadata_getConstEntry(results, ACAMERA_LENS_RADIAL_DISTORTION, &entry3);
+ if (entry3.count == 6) {
+ gotDistortion = true;
+ // Conversion factors from Camera2 K factors to DNG spec. K factors:
+ //
+ // Note: these are necessary because our unit system assumes a
+ // normalized max radius of sqrt(2), whereas the DNG spec's
+ // WarpRectilinear opcode assumes a normalized max radius of 1.
+ // Thus, each K coefficient must include the domain scaling
+ // factor (the DNG domain is scaled by sqrt(2) to emulate the
+ // domain used by the Camera2 specification).
+ const double convCoeff[6] = {
+ sqrt(2),
+ 2 * sqrt(2),
+ 4 * sqrt(2),
+ 8 * sqrt(2),
+ 2,
+ 2
+ };
+ for (size_t i = 0; i < entry3.count; i++) {
+ distortion[i] = entry3.data.f[i] * convCoeff[i];
+ }
+ }
+ }
+ if (gotDistortion) {
+ err = builder.addWarpRectilinearForMetadata(
+ distortion.data(), preWidth, preHeight, cx, cy);
+ if (err != OK) {
+ ALOGE("%s: Could not add distortion correction.", __FUNCTION__);
+#if 0
+ jniThrowRuntimeException(env, "failed to add distortion correction.");
+#endif
+ return nullptr;
+ }
+ }
+ }
+
+ if (builder.getCount() > 0) {
+ size_t listSize = builder.getSize();
+ uint8_t opcodeListBuf[listSize];
+ err = builder.buildOpList(opcodeListBuf);
+ if (err == OK) {
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST3, listSize,
+ opcodeListBuf, TIFF_IFD_0), env, TAG_OPCODELIST3, writer);
+ } else {
+ ALOGE("%s: Could not build list of opcodes for distortion correction.",
+ __FUNCTION__);
+#if 0
+ jniThrowRuntimeException(env, "failed to construct opcode list for distortion"
+ " correction");
+#endif
+ return nullptr;
+ }
+ }
+ }
+
+ {
+ // Set up orientation tags.
+ // Note: There's only one orientation field for the whole file, in IFD0
+ // The main image and any thumbnails therefore have the same orientation.
+ uint16_t orientation = getOrientation();
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
+ env, TAG_ORIENTATION, writer);
+
+ }
+
+    if (hasDescription()) {
+ // Set Description
+ std::string description = getDescription();
+ size_t len = description.size() + 1;
+ if (writer->addEntry(TAG_IMAGEDESCRIPTION, len,
+                reinterpret_cast<const uint8_t*>(description.c_str()), TIFF_IFD_0) != OK) {
+#if 0
+ jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
+ "Invalid metadata for tag %x", TAG_IMAGEDESCRIPTION);
+#endif
+ }
+ }
+
+ if (hasGpsData()) {
+ // Set GPS tags
+ GpsData gpsData = getGpsData();
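+        // Each coordinate/timestamp tag is three RATIONALs flattened into six
+        // uint32 values (GPS_VALUE_LENGTH); the refs are 2-byte strings ("N\0" etc.).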
+ if (!writer->hasIfd(TIFF_IFD_GPSINFO)) {
+ if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_GPSINFO, TiffWriter::GPSINFO) != OK) {
+ ALOGE("%s: Failed to add GpsInfo IFD %u to IFD %u", __FUNCTION__, TIFF_IFD_GPSINFO,
+ TIFF_IFD_0);
+#if 0
+ jniThrowException(env, "java/lang/IllegalStateException", "Failed to add GPSINFO");
+#endif
+ return nullptr;
+ }
+ }
+
+ {
+ uint8_t version[] = {2, 3, 0, 0};
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSVERSIONID, 4, version,
+ TIFF_IFD_GPSINFO), env, TAG_GPSVERSIONID, writer);
+ }
+
+ {
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDEREF,
+ GpsData::GPS_REF_LENGTH, gpsData.mLatitudeRef, TIFF_IFD_GPSINFO), env,
+ TAG_GPSLATITUDEREF, writer);
+ }
+
+ {
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDEREF,
+ GpsData::GPS_REF_LENGTH, gpsData.mLongitudeRef, TIFF_IFD_GPSINFO), env,
+ TAG_GPSLONGITUDEREF, writer);
+ }
+
+ {
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDE, 3, gpsData.mLatitude,
+ TIFF_IFD_GPSINFO), env, TAG_GPSLATITUDE, writer);
+ }
+
+ {
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDE, 3, gpsData.mLongitude,
+ TIFF_IFD_GPSINFO), env, TAG_GPSLONGITUDE, writer);
+ }
+
+ {
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSTIMESTAMP, 3, gpsData.mTimestamp,
+ TIFF_IFD_GPSINFO), env, TAG_GPSTIMESTAMP, writer);
+ }
+
+ {
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSDATESTAMP,
+ GpsData::GPS_DATE_LENGTH, gpsData.mDate, TIFF_IFD_GPSINFO), env,
+ TAG_GPSDATESTAMP, writer);
+ }
+ }
+
+
+ if (hasThumbnail()) {
+ if (!writer->hasIfd(TIFF_IFD_SUB1)) {
+ if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_SUB1) != OK) {
+ ALOGE("%s: Failed to add SubIFD %u to IFD %u", __FUNCTION__, TIFF_IFD_SUB1,
+ TIFF_IFD_0);
+#if 0
+ jniThrowException(env, "java/lang/IllegalStateException", "Failed to add SubIFD");
+#endif
+ return nullptr;
+ }
+ }
+
+        std::vector<uint16_t> tagsToMove;
+ tagsToMove.push_back(TAG_NEWSUBFILETYPE);
+ tagsToMove.push_back(TAG_ACTIVEAREA);
+ tagsToMove.push_back(TAG_BITSPERSAMPLE);
+ tagsToMove.push_back(TAG_COMPRESSION);
+ tagsToMove.push_back(TAG_IMAGEWIDTH);
+ tagsToMove.push_back(TAG_IMAGELENGTH);
+ tagsToMove.push_back(TAG_PHOTOMETRICINTERPRETATION);
+ tagsToMove.push_back(TAG_BLACKLEVEL);
+ tagsToMove.push_back(TAG_BLACKLEVELREPEATDIM);
+ tagsToMove.push_back(TAG_SAMPLESPERPIXEL);
+ tagsToMove.push_back(TAG_PLANARCONFIGURATION);
+ if (isBayer) {
+ tagsToMove.push_back(TAG_CFAREPEATPATTERNDIM);
+ tagsToMove.push_back(TAG_CFAPATTERN);
+ tagsToMove.push_back(TAG_CFAPLANECOLOR);
+ tagsToMove.push_back(TAG_CFALAYOUT);
+ }
+ tagsToMove.push_back(TAG_XRESOLUTION);
+ tagsToMove.push_back(TAG_YRESOLUTION);
+ tagsToMove.push_back(TAG_RESOLUTIONUNIT);
+ tagsToMove.push_back(TAG_WHITELEVEL);
+ tagsToMove.push_back(TAG_DEFAULTSCALE);
+ tagsToMove.push_back(TAG_DEFAULTCROPORIGIN);
+ tagsToMove.push_back(TAG_DEFAULTCROPSIZE);
+
+ if (nullptr != writer->getEntry(TAG_OPCODELIST2, TIFF_IFD_0).get()) {
+ tagsToMove.push_back(TAG_OPCODELIST2);
+ }
+
+ if (nullptr != writer->getEntry(TAG_OPCODELIST3, TIFF_IFD_0).get()) {
+ tagsToMove.push_back(TAG_OPCODELIST3);
+ }
+
+ if (moveEntries(writer, TIFF_IFD_0, TIFF_IFD_SUB1, tagsToMove) != OK) {
+#if 0
+ jniThrowException(env, "java/lang/IllegalStateException", "Failed to move entries");
+#endif
+ return nullptr;
+ }
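+        // From here on IFD0 describes the RGB thumbnail while SubIFD1 carries the
+        // full-resolution raw image, which is the layout DNG readers expect.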
+
+ // Setup thumbnail tags
+
+ {
+ // Set photometric interpretation
+ uint16_t interpretation = 2; // RGB
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
+ &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer);
+ }
+
+ {
+ // Set planar configuration
+ uint16_t config = 1; // Chunky
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config,
+ TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer);
+ }
+
+ {
+ // Set samples per pixel
+ uint16_t samples = SAMPLES_PER_RGB_PIXEL;
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples,
+ TIFF_IFD_0), env, TAG_SAMPLESPERPIXEL, writer);
+ }
+
+ {
+ // Set bits per sample
+ uint16_t bits[SAMPLES_PER_RGB_PIXEL];
+ for (int i = 0; i < SAMPLES_PER_RGB_PIXEL; i++) bits[i] = BITS_PER_RGB_SAMPLE;
+ BAIL_IF_INVALID_RET_NULL_SP(
+ writer->addEntry(TAG_BITSPERSAMPLE, SAMPLES_PER_RGB_PIXEL, bits, TIFF_IFD_0),
+ env, TAG_BITSPERSAMPLE, writer);
+ }
+
+ {
+ // Set subfiletype
+ uint32_t subfileType = 1; // Thumbnail image
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType,
+ TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer);
+ }
+
+ {
+ // Set compression
+ uint16_t compression = 1; // None
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression,
+ TIFF_IFD_0), env, TAG_COMPRESSION, writer);
+ }
+
+ {
+ // Set dimensions
+ uint32_t uWidth = getThumbnailWidth();
+ uint32_t uHeight = getThumbnailHeight();
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &uWidth, TIFF_IFD_0),
+ env, TAG_IMAGEWIDTH, writer);
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &uHeight, TIFF_IFD_0),
+ env, TAG_IMAGELENGTH, writer);
+ }
+
+ {
+ // x resolution
+ uint32_t xres[] = { 72, 1 }; // default 72 ppi
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0),
+ env, TAG_XRESOLUTION, writer);
+
+ // y resolution
+ uint32_t yres[] = { 72, 1 }; // default 72 ppi
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0),
+ env, TAG_YRESOLUTION, writer);
+
+ uint16_t unit = 2; // inches
+ BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0),
+ env, TAG_RESOLUTIONUNIT, writer);
+ }
+ }
+
+ if (writer->addStrip(TIFF_IFD_0) != OK) {
+ ALOGE("%s: Could not setup thumbnail strip tags.", __FUNCTION__);
+#if 0
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to setup thumbnail strip tags.");
+#endif
+ return nullptr;
+ }
+
+ if (writer->hasIfd(TIFF_IFD_SUB1)) {
+ if (writer->addStrip(TIFF_IFD_SUB1) != OK) {
+ ALOGE("%s: Could not main image strip tags.", __FUNCTION__);
+#if 0
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to setup main image strip tags.");
+#endif
+ return nullptr;
+ }
+ }
+ return writer;
+}
+
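+// Packs caller-supplied GPS values into the GpsData POD consumed by setup().
+// Each coordinate arrives as three rationals flattened to six ints, e.g.
+// 40 deg 26' 46.2" -> { 40, 1, 26, 1, 462, 10 }.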
+void DngCreator::setGpsTags(const std::vector<int>& latTag,
+        const std::string& latRef, const std::vector<int>& longTag, const std::string& longRef,
+        const std::string& dateTag, const std::vector<int>& timeTag) {
+ ALOGV("%s:", __FUNCTION__);
+
+ GpsData data;
+
+ size_t latLen = latTag.size();
+ size_t longLen = longTag.size();
+ size_t timeLen = timeTag.size();
+ if (latLen != GpsData::GPS_VALUE_LENGTH) {
+#if 0
+ jniThrowException(env, "java/lang/IllegalArgumentException",
+ "invalid latitude tag length");
+#endif
+ return;
+ } else if (longLen != GpsData::GPS_VALUE_LENGTH) {
+#if 0
+ jniThrowException(env, "java/lang/IllegalArgumentException",
+ "invalid longitude tag length");
+#endif
+ return;
+ } else if (timeLen != GpsData::GPS_VALUE_LENGTH) {
+#if 0
+ jniThrowException(env, "java/lang/IllegalArgumentException",
+ "invalid time tag length");
+#endif
+ return;
+ }
+
+ memcpy(&data.mLatitude, &latTag[0], sizeof(int) * GpsData::GPS_VALUE_LENGTH);
+ memcpy(&data.mLongitude, &longTag[0], sizeof(int) * GpsData::GPS_VALUE_LENGTH);
+ memcpy(&data.mTimestamp, &timeTag[0], sizeof(int) * GpsData::GPS_VALUE_LENGTH);
+
+ memcpy(&data.mLatitudeRef, latRef.c_str(), 1);
+ data.mLatitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0';
+ memcpy(&data.mLongitudeRef, longRef.c_str(), 1);
+ data.mLongitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0';
+ memcpy(&data.mDate, dateTag.c_str(), GpsData::GPS_DATE_LENGTH - 1);
+ data.mDate[GpsData::GPS_DATE_LENGTH - 1] = '\0';
+
+ setGpsData(data);
+}
+
+// TODO: Refactor out common preamble for the two nativeWrite methods.
+void DngCreator::writeImage(std::vector<uint8_t>& outStream, uint32_t uWidth,
+        uint32_t uHeight, const std::vector<uint8_t>& inBuffer, int rowStride,
+        int pixStride, uint64_t uOffset, bool isDirect) {
+ ALOGV("%s:", __FUNCTION__);
+ ALOGV("%s: nativeWriteImage called with: width=%d, height=%d, "
+ "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, uWidth,
+ uHeight, rowStride, pixStride, uOffset);
+    uint32_t rStride = static_cast<uint32_t>(rowStride);
+    uint32_t pStride = static_cast<uint32_t>(pixStride);
+
+    std::vector<uint8_t>& out = outStream;
+
+ // sp out = new JniOutputStream(env, outStream);
+ // if(env->ExceptionCheck()) {
+ // ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__);
+ // return;
+ // }
+
+    sp<TiffWriter> writer = setup(uWidth, uHeight);
+
+ if (writer.get() == nullptr) {
+ return;
+ }
+
+ // Validate DNG size
+ if (!validateDngHeader(writer, getCharacteristics(), uWidth, uHeight)) {
+ return;
+ }
+
+ // sp inBuf;
+    std::vector<StripSource*> sources;
+    sp<DirectStripSource> thumbnailSource;
+ uint32_t targetIfd = TIFF_IFD_0;
+
+ bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1);
+
+ if (hasThumbnail) {
+#if 0
+ ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__);
+ uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE;
+ uint32_t thumbWidth = getThumbnailWidth();
+ thumbnailSource = new DirectStripSource(env, getThumbnail(), TIFF_IFD_0,
+ thumbWidth, context->getThumbnailHeight(), bytesPerPixel,
+ bytesPerPixel * thumbWidth, /*offset*/0, BYTES_PER_RGB_SAMPLE,
+ SAMPLES_PER_RGB_PIXEL);
+ sources.push_back(thumbnailSource.get());
+ targetIfd = TIFF_IFD_SUB1;
+#endif
+ }
+
+ if (isDirect) {
+ size_t fullSize = rStride * uHeight;
+        int64_t capacity = static_cast<int64_t>(inBuffer.size());
+        if (capacity < 0 || fullSize + uOffset > static_cast<uint64_t>(capacity)) {
+#if 0
+ jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
+ "Invalid size %d for Image, size given in metadata is %d at current stride",
+ capacity, fullSize);
+#endif
+ return;
+ }
+
+ uint8_t* pixelBytes = (uint8_t*)&inBuffer[0];
+
+ ALOGV("%s: Using direct-type strip source.", __FUNCTION__);
+ DirectStripSource stripSource(pixelBytes, targetIfd, uWidth, uHeight, pStride,
+ rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
+ sources.push_back(&stripSource);
+
+ status_t ret = OK;
+ ByteVectorOutput byteVectorOutput(outStream);
+
+ if ((ret = writer->write(&byteVectorOutput, &sources[0], sources.size())) != OK) {
+ ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
+#if 0
+ if (!env->ExceptionCheck()) {
+ jniThrowExceptionFmt(env, "java/io/IOException",
+ "Encountered error %d while writing file.", ret);
+ }
+#endif
+ return;
+ }
+ } else {
+        // Non-direct buffers are not supported yet; the original JNI path is
+        // kept below for reference.
+ // inBuf = new JniInputByteBuffer(env, inBuffer);
+#if 0
+ ALOGV("%s: Using input-type strip source.", __FUNCTION__);
+ InputStripSource stripSource(*inBuf, targetIfd, uWidth, uHeight, pStride,
+ rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
+ sources.push_back(&stripSource);
+
+ status_t ret = OK;
+ if ((ret = writer->write(out.get(), &sources[0], sources.size())) != OK) {
+ ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
+#if 0
+ if (!env->ExceptionCheck()) {
+ jniThrowExceptionFmt(env, "java/io/IOException",
+ "Encountered error %d while writing file.", ret);
+ }
+#endif
+ return;
+ }
+#endif
+ }
+}
+
+void DngCreator::writeInputStream(std::vector<uint8_t>& outStream,
+        const std::vector<uint8_t>& inStream, uint32_t uWidth, uint32_t uHeight, long offset) {
+ ALOGV("%s:", __FUNCTION__);
+
+ uint32_t rowStride = uWidth * BYTES_PER_SAMPLE;
+ uint32_t pixStride = BYTES_PER_SAMPLE;
+
+    uint64_t uOffset = static_cast<uint64_t>(offset);
+
+ ALOGV("%s: nativeWriteInputStream called with: width=%u, height=%u, "
+ "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, uWidth,
+ uHeight, rowStride, pixStride, offset);
+
+ ByteVectorOutput out(outStream);
+ // std::vector& out = outStream;
+
+    sp<TiffWriter> writer = setup(uWidth, uHeight);
+
+ if (writer.get() == nullptr) {
+ return;
+ }
+
+ // Validate DNG size
+ if (!validateDngHeader(writer, getCharacteristics(), uWidth, uHeight)) {
+ return;
+ }
+
+    sp<DirectStripSource> thumbnailSource;
+ uint32_t targetIfd = TIFF_IFD_0;
+ bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1);
+    std::vector<StripSource*> sources;
+
+ if (hasThumbnail)
+ {
+#if 0
+ ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__);
+ uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE;
+ uint32_t width = getThumbnailWidth();
+ thumbnailSource = new DirectStripSource(getThumbnail(), TIFF_IFD_0,
+ width, getThumbnailHeight(), bytesPerPixel,
+ bytesPerPixel * width, /*offset*/0, BYTES_PER_RGB_SAMPLE,
+ SAMPLES_PER_RGB_PIXEL);
+        sources.push_back(thumbnailSource.get());
+ targetIfd = TIFF_IFD_SUB1;
+#endif
+ }
+
+ // sp in = new JniInputStream(env, inStream);
+
+ ByteVectorInput in(inStream);
+
+ ALOGV("%s: Using input-type strip source.", __FUNCTION__);
+ InputStripSource stripSource(in, targetIfd, uWidth, uHeight, pixStride,
+ rowStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
+ sources.push_back(&stripSource);
+
+ status_t ret = OK;
+ if ((ret = writer->write(&out, &sources[0], sources.size())) != OK) {
+ ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
+#if 0
+ if (!env->ExceptionCheck()) {
+ jniThrowExceptionFmt(env, "java/io/IOException",
+ "Encountered error %d while writing file.", ret);
+ }
+#endif
+ return;
+ }
+}
+
+void DngCreator::writeInputBuffer(std::vector<uint8_t>& outStream,
+        const uint8_t* inBuffer, size_t bufferLength, uint32_t uWidth, uint32_t uHeight, long offset) {
+ ALOGV("%s:", __FUNCTION__);
+
+ uint32_t rowStride = uWidth * BYTES_PER_SAMPLE;
+ uint32_t pixStride = BYTES_PER_SAMPLE;
+
+    uint64_t uOffset = static_cast<uint64_t>(offset);
+
+ ALOGV("%s: nativeWriteInputStream called with: width=%u, height=%u, "
+ "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, uWidth,
+ uHeight, rowStride, pixStride, offset);
+
+ ByteVectorOutput out(outStream);
+ // std::vector& out = outStream;
+
+    sp<TiffWriter> writer = setup(uWidth, uHeight);
+
+ if (writer.get() == nullptr) {
+ return;
+ }
+
+
+ // Validate DNG size
+ if (!validateDngHeader(writer, getCharacteristics(), uWidth, uHeight)) {
+ return;
+ }
+
+    sp<DirectStripSource> thumbnailSource;
+ uint32_t targetIfd = TIFF_IFD_0;
+ bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1);
+    std::vector<StripSource*> sources;
+
+ if (hasThumbnail)
+ {
+#if 0
+ ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__);
+ uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE;
+ uint32_t width = getThumbnailWidth();
+ thumbnailSource = new DirectStripSource(getThumbnail(), TIFF_IFD_0,
+ width, getThumbnailHeight(), bytesPerPixel,
+ bytesPerPixel * width, /*offset*/0, BYTES_PER_RGB_SAMPLE,
+ SAMPLES_PER_RGB_PIXEL);
+ sources.push_back(thumbnailSource.get());
+ targetIfd = TIFF_IFD_SUB1;
+#endif
+ }
+
+ // sp in = new JniInputStream(env, inStream);
+
+ ByteBufferInput in(inBuffer, bufferLength);
+
+ ALOGV("%s: Using input-type strip source.", __FUNCTION__);
+ InputStripSource stripSource(in, targetIfd, uWidth, uHeight, pixStride,
+ rowStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
+ sources.push_back(&stripSource);
+
+ status_t ret = OK;
+ if ((ret = writer->write(&out, &sources[0], sources.size())) != OK) {
+ ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
+#if 0
+ if (!env->ExceptionCheck()) {
+ jniThrowExceptionFmt(env, "java/io/IOException",
+ "Encountered error %d while writing file.", ret);
+ }
+#endif
+ return;
+ }
+}
diff --git a/app/src/main/cpp/DngCreator.h b/app/src/main/cpp/DngCreator.h
new file mode 100644
index 00000000..a19c6374
--- /dev/null
+++ b/app/src/main/cpp/DngCreator.h
@@ -0,0 +1,332 @@
+//#define LOG_NDEBUG 0
+#define LOG_TAG "DngCreator_JNI"
+// NOTE: the include targets below were reconstructed from what this header and
+// DngCreator.cpp actually use; verify them against the project tree.
+#include <inttypes.h>
+#include <string.h>
+#include <algorithm>
+#include <memory>
+#include <vector>
+#include <string>
+#include <cmath>
+#include <sys/system_properties.h>
+
+#include <img_utils/DngUtils.h>
+#include <img_utils/TagDefinitions.h>
+#include <img_utils/TiffIfd.h>
+#include <img_utils/TiffWriter.h>
+#include <img_utils/Output.h>
+#include <img_utils/Input.h>
+#include <img_utils/StripSource.h>
+#include <utils/StrongPointer.h>
+#include <utils/RefBase.h>
+
+// #include "core_jni_helpers.h"
+
+// #include "android_runtime/AndroidRuntime.h"
+// #include "android_runtime/android_hardware_camera2_CameraMetadata.h"
+
+#include <camera/NdkCameraMetadata.h>
+// #include
+
+using namespace android;
+using namespace img_utils;
+// using android::base::GetProperty;
+
+
+/**
+ * Max width or height dimension for thumbnails.
+ */
+// max pixel dimension for TIFF/EP
+#define MAX_THUMBNAIL_DIMENSION 256
+
+
+// bytes per sample
+#define DEFAULT_PIXEL_STRIDE 2
+// bytes per pixel
+#define BYTES_PER_RGB_PIX 3
+
+
+#define GPS_LAT_REF_NORTH "N"
+#define GPS_LAT_REF_SOUTH "S"
+#define GPS_LONG_REF_EAST "E"
+#define GPS_LONG_REF_WEST "W"
+
+#define GPS_DATE_FORMAT_STR "yyyy:MM:dd"
+#define TIFF_DATETIME_FORMAT "yyyy:MM:dd kk:mm:ss"
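+// The TIFF datetime layout above is 19 characters, so with the trailing NUL it
+// matches NativeContext::DATETIME_COUNT (20) declared further down.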
+
+class ByteVectorOutput : public Output {
+public:
+    ByteVectorOutput(std::vector<uint8_t>& buf);
+ virtual ~ByteVectorOutput();
+
+ virtual status_t open();
+
+ virtual status_t write(const uint8_t* buf, size_t offset, size_t count);
+
+ virtual status_t close();
+
+protected:
+    std::vector<uint8_t>& m_buf;
+};
+
+class ByteVectorInput : public Input {
+public:
+    ByteVectorInput(const std::vector<uint8_t>& buf);
+ virtual ~ByteVectorInput();
+
+ /**
+ * Open this Input.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ status_t open();
+
+ /**
+ * Read bytes into the given buffer. At most, the number of bytes given in the
+ * count argument will be read. Bytes will be written into the given buffer starting
+ * at the index given in the offset argument.
+ *
+ * Returns the number of bytes read, or NOT_ENOUGH_DATA if at the end of the file. If an
+ * error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA.
+ */
+ ssize_t read(uint8_t* buf, size_t offset, size_t count);
+
+ /**
+ * Skips bytes in the input.
+ *
+ * Returns the number of bytes skipped, or NOT_ENOUGH_DATA if at the end of the file. If an
+ * error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA.
+ */
+ ssize_t skip(size_t count);
+
+ /**
+ * Close the Input. It is not valid to call open on a previously closed Input.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ status_t close();
+
+protected:
+    const std::vector<uint8_t>& m_buf;
+ size_t m_offset;
+};
+
+class ByteBufferInput : public Input {
+public:
+ ByteBufferInput(const uint8_t* buf, size_t len);
+ virtual ~ByteBufferInput();
+
+ /**
+ * Open this Input.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ status_t open();
+
+ /**
+ * Read bytes into the given buffer. At most, the number of bytes given in the
+ * count argument will be read. Bytes will be written into the given buffer starting
+ * at the index given in the offset argument.
+ *
+ * Returns the number of bytes read, or NOT_ENOUGH_DATA if at the end of the file. If an
+ * error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA.
+ */
+ ssize_t read(uint8_t* buf, size_t offset, size_t count);
+
+ /**
+ * Skips bytes in the input.
+ *
+ * Returns the number of bytes skipped, or NOT_ENOUGH_DATA if at the end of the file. If an
+ * error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA.
+ */
+ ssize_t skip(size_t count);
+
+ /**
+ * Close the Input. It is not valid to call open on a previously closed Input.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ status_t close();
+
+protected:
+ const uint8_t* m_buf;
+ size_t m_len;
+ size_t m_offset;
+};
+
+struct SIZE
+{
+ int width;
+ int height;
+};
+
+#define BAIL_IF_INVALID_RET_BOOL(expr, jnienv, tagId, writer) \
+ if ((expr) != OK) { \
+ return false; \
+ }
+
+#define BAIL_IF_INVALID_RET_NULL_SP(expr, jnienv, tagId, writer) \
+ if ((expr) != OK) { \
+ return nullptr; \
+ }
+
+#define BAIL_IF_INVALID_R(expr, jnienv, tagId, writer) \
+ if ((expr) != OK) { \
+ return -1; \
+ }
+
+#define BAIL_IF_EMPTY_RET_NULL_SP(entry, jnienv, tagId, writer) \
+ if ((entry).count == 0) { \
+ return nullptr; \
+ }
+
+#define BAIL_IF_EXPR_RET_NULL_SP(expr, jnienv, tagId, writer) \
+ if (expr) { \
+ return nullptr; \
+ }
+
+#define ANDROID_DNGCREATOR_CTX_JNI_ID "mNativeContext"
+
+enum {
+ BITS_PER_SAMPLE = 16,
+ BYTES_PER_SAMPLE = 2,
+ BYTES_PER_RGB_PIXEL = 3,
+ BITS_PER_RGB_SAMPLE = 8,
+ BYTES_PER_RGB_SAMPLE = 1,
+ SAMPLES_PER_RGB_PIXEL = 3,
+ SAMPLES_PER_RAW_PIXEL = 1,
+ TIFF_IFD_0 = 0,
+ TIFF_IFD_SUB1 = 1,
+ TIFF_IFD_GPSINFO = 2,
+};
+
+/**
+ * POD container class for GPS tag data.
+ */
+class GpsData {
+public:
+ enum {
+ GPS_VALUE_LENGTH = 6,
+ GPS_REF_LENGTH = 2,
+ GPS_DATE_LENGTH = 11,
+ };
+
+ uint32_t mLatitude[GPS_VALUE_LENGTH];
+ uint32_t mLongitude[GPS_VALUE_LENGTH];
+ uint32_t mTimestamp[GPS_VALUE_LENGTH];
+ uint8_t mLatitudeRef[GPS_REF_LENGTH];
+ uint8_t mLongitudeRef[GPS_REF_LENGTH];
+ uint8_t mDate[GPS_DATE_LENGTH];
+};
+
+// ----------------------------------------------------------------------------
+
+/**
+ * Container class for the persistent native context.
+ */
+
+class NativeContext : public LightRefBase<NativeContext> {
+public:
+ enum {
+ DATETIME_COUNT = 20,
+ };
+
+ NativeContext(ACameraMetadata* characteristics, ACameraMetadata* result);
+ virtual ~NativeContext();
+
+ TiffWriter* getWriter();
+
+ ACameraMetadata* getCharacteristics() const;
+ ACameraMetadata* getResult() const;
+
+ uint32_t getThumbnailWidth() const;
+ uint32_t getThumbnailHeight() const;
+ const uint8_t* getThumbnail() const;
+ bool hasThumbnail() const;
+
+    bool setThumbnail(const std::vector<uint8_t>& buffer, uint32_t width, uint32_t height);
+
+ void setOrientation(uint16_t orientation);
+ uint16_t getOrientation() const;
+
+ void setDescription(const std::string& desc);
+ std::string getDescription() const;
+ bool hasDescription() const;
+
+ void setGpsData(const GpsData& data);
+ GpsData getGpsData() const;
+ bool hasGpsData() const;
+
+ void setCaptureTime(const std::string& formattedCaptureTime);
+ std::string getCaptureTime() const;
+ bool hasCaptureTime() const;
+
+protected:
+    std::vector<uint8_t> mCurrentThumbnail;
+ TiffWriter mWriter;
+ ACameraMetadata* mCharacteristics;
+ ACameraMetadata* mResult;
+ uint32_t mThumbnailWidth;
+ uint32_t mThumbnailHeight;
+ uint16_t mOrientation;
+ bool mThumbnailSet;
+ bool mGpsSet;
+ bool mDescriptionSet;
+ bool mCaptureTimeSet;
+ std::string mDescription;
+ GpsData mGpsData;
+ std::string mFormattedCaptureTime;
+};
+
+class DngCreator : public NativeContext
+{
+
+public:
+ DngCreator(ACameraMetadata* characteristics, ACameraMetadata* result);
+
+#if 0
+ void setLocation(Location location);
+#endif
+
+    void writeInputStream(std::vector<uint8_t>& dngOutput, SIZE size, const std::vector<uint8_t>& pixels, long offset);
+    void writeByteBuffer(std::vector<uint8_t>& dngOutput, SIZE size, const std::vector<uint8_t>& pixels, long offset);
+
+#if 0
+ void writeImage(OutputStream& dngOutput, AImage& pixels);
+#endif
+
+ void close();
+
+ // private static final DateFormat sExifGPSDateStamp = new SimpleDateFormat(GPS_DATE_FORMAT_STR);
+ // private static final DateFormat sDateTimeStampFormat = new SimpleDateFormat(TIFF_DATETIME_FORMAT);
+#if 0
+ static {
+ sDateTimeStampFormat.setTimeZone(TimeZone.getDefault());
+ sExifGPSDateStamp.setTimeZone(TimeZone.getTimeZone("UTC"));
+ }
+#endif
+
+ /**
+ * Offset, rowStride, and pixelStride are given in bytes. Height and width are given in pixels.
+ */
+    void writeByteBuffer(int width, int height, const std::vector<uint8_t>& pixels, std::vector<uint8_t>& dngOutput, int pixelStride, int rowStride, long offset);
+
+
+ /**
+ * Convert coordinate to EXIF GPS tag format.
+ */
+ void toExifLatLong(double value, int data[6]);
+
+ void init(ACameraMetadata* characteristics, ACameraMetadata* result, const std::string& captureTime);
+    sp<TiffWriter> setup(uint32_t imageWidth, uint32_t imageHeight);
+ void destroy();
+    void setGpsTags(const std::vector<int>& latTag, const std::string& latRef, const std::vector<int>& longTag, const std::string& longRef, const std::string& dateTag, const std::vector<int>& timeTag);
+    void writeImage(std::vector<uint8_t>& out, uint32_t width, uint32_t height, const std::vector<uint8_t>& rawBuffer, int rowStride, int pixStride, uint64_t offset, bool isDirect);
+
+    void writeInputStream(std::vector<uint8_t>& out, const std::vector<uint8_t>& rawStream, uint32_t width, uint32_t height, long offset);
+
+    void writeInputBuffer(std::vector<uint8_t>& out, const uint8_t* rawBuffer, size_t bufferLen, uint32_t width, uint32_t height, long offset);
+
+};
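+
+// Usage sketch (assumes valid ACameraMetadata handles for the camera
+// characteristics and the capture result; error handling elided):
+//
+//     sp<DngCreator> creator = new DngCreator(characteristics, result);
+//     creator->setCaptureTime(formattedTime);   // "yyyy:MM:dd kk:mm:ss"
+//     std::vector<uint8_t> dng;
+//     creator->writeInputBuffer(dng, rawData, rawLen, width, height, /*offset*/0);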
diff --git a/app/src/main/cpp/GPIOControl.cpp b/app/src/main/cpp/GPIOControl.cpp
index 783025af..38768843 100644
--- a/app/src/main/cpp/GPIOControl.cpp
+++ b/app/src/main/cpp/GPIOControl.cpp
@@ -31,19 +31,9 @@ typedef struct
char str[MAX_STRING_LEN];
}IOT_PARAM;
-typedef struct{
- float airtemp; /* 空气温度*/
- float RH; /* 相对湿度*/
- float atmos; /* 大气压*/
- float windspeed; /* 风速*/
- float winddirection; /* 风向*/
- float rainfall; /* 雨量*/
- float sunshine; /* 日照*/
-}Weather;
-
void GpioControl::setInt(int cmd, int value)
{
- int fd = open("/dev/mtkgpioctrl", O_RDONLY);
+ int fd = open(GPIO_NODE_MP, O_RDONLY);
IOT_PARAM param;
param.cmd = cmd;
param.value = value;
@@ -59,7 +49,7 @@ void GpioControl::setInt(int cmd, int value)
int GpioControl::getInt(int cmd)
{
- int fd = open("/dev/mtkgpioctrl", O_RDONLY);
+ int fd = open(GPIO_NODE_MP, O_RDONLY);
// LOGE("get_int fd=%d,cmd=%d\r\n",fd, cmd);
if( fd > 0 )
{
@@ -77,7 +67,7 @@ int GpioControl::getInt(int cmd)
void GpioControl::setLong(int cmd, long value)
{
- int fd = open("/dev/mtkgpioctrl", O_RDONLY);
+ int fd = open(GPIO_NODE_MP, O_RDONLY);
IOT_PARAM param;
param.cmd = cmd;
param.value2 = value;
@@ -93,7 +83,7 @@ void GpioControl::setLong(int cmd, long value)
long GpioControl::getLong(int cmd)
{
- int fd = open("/dev/mtkgpioctrl", O_RDONLY);
+ int fd = open(GPIO_NODE_MP, O_RDONLY);
// LOGE("get_long fd=%d,cmd=%d\r\n",fd, cmd);
if( fd > 0 )
{
@@ -110,8 +100,7 @@ long GpioControl::getLong(int cmd)
void GpioControl::setString(int cmd, const std::string& value)
{
IOT_PARAM param;
- // char *pval = jstringToChars(env, value);
- int fd = open("/dev/mtkgpioctrl", O_RDONLY);
+ int fd = open(GPIO_NODE_MP, O_RDONLY);
int len = MAX_STRING_LEN < value.size() ? MAX_STRING_LEN : value.size();
param.cmd = cmd;
@@ -129,7 +118,7 @@ void GpioControl::setString(int cmd, const std::string& value)
std::string GpioControl::getString(int cmd)
{
- int fd = open("/dev/mtkgpioctrl", O_RDONLY);
+ int fd = open(GPIO_NODE_MP, O_RDONLY);
// LOGE("get_string fd=%d,cmd=%d\r\n",fd, cmd);
if( fd > 0 )
{
@@ -142,3 +131,65 @@ std::string GpioControl::getString(int cmd)
}
return "";
}
+
+#ifdef USING_N938
+
+#if 0
+bool GpioControl::SetN938Cmd(int cmd, int val)
+{
+ char buf[32] = { 0 };
+    snprintf(buf, sizeof(buf), "out %d %d", cmd, val);
+
+ IOT_PARAM param;
+ int len = MAX_STRING_LEN < strlen(buf) ? MAX_STRING_LEN : strlen(buf);
+
+ param.cmd = cmd;
+ memset(param.str, 0, MAX_STRING_LEN);
+    memcpy(param.str, buf, len);
+
+ int fd = open(GPIO_NODE_MP, O_RDONLY);
+ if( fd > 0 )
+ {
+        ioctl(fd, IOT_PARAM_WRITE, &param);
+ close(fd);
+ }
+    return true;
+}
+#endif
+
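+// Powers every sensor rail and RS-485 transceiver enable used on the N938
+// board. The getInt() readbacks presumably confirm each write took effect;
+// their results are only inspected under a debugger.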
+bool GpioControl::OpenSensors()
+{
+
+ GpioControl::setCam3V3Enable(true);
+    GpioControl::setInt(CMD_SET_485_EN_STATE, 1);
+ int igpio;
+ GpioControl::setInt(CMD_SET_WTH_POWER, 1);
+ GpioControl::setInt(CMD_SET_PULL_POWER, 1);
+ GpioControl::setInt(CMD_SET_ANGLE_POWER, 1);
+ GpioControl::setInt(CMD_SET_OTHER_POWER, 1);
+ GpioControl::setInt(CMD_SET_PIC1_POWER, 1);
+
+ igpio = GpioControl::getInt(CMD_SET_WTH_POWER);
+ igpio = GpioControl::getInt(CMD_SET_PULL_POWER);
+ igpio = GpioControl::getInt(CMD_SET_ANGLE_POWER);
+ igpio = GpioControl::getInt(CMD_SET_OTHER_POWER);
+ igpio = GpioControl::getInt(CMD_SET_PIC1_POWER);
+
+ GpioControl::setInt(CMD_SET_SPI_POWER, 1);
+ GpioControl::setInt(CMD_SET_485_en0, 1);
+ GpioControl::setInt(CMD_SET_485_en1, 1);
+ GpioControl::setInt(CMD_SET_485_en2, 1);
+ GpioControl::setInt(CMD_SET_485_en3, 1);
+ GpioControl::setInt(CMD_SET_485_en4, 1);
+
+ igpio = GpioControl::getInt(CMD_SET_SPI_POWER);
+ igpio = GpioControl::getInt(CMD_SET_485_en0);
+ igpio = GpioControl::getInt(CMD_SET_485_en1);
+ igpio = GpioControl::getInt(CMD_SET_485_en2);
+ igpio = GpioControl::getInt(CMD_SET_485_en3);
+ igpio = GpioControl::getInt(CMD_SET_485_en4);
+    return true;
+
+}
+
+#endif
diff --git a/app/src/main/cpp/GPIOControl.h b/app/src/main/cpp/GPIOControl.h
index 70339636..908a8947 100644
--- a/app/src/main/cpp/GPIOControl.h
+++ b/app/src/main/cpp/GPIOControl.h
@@ -6,6 +6,8 @@
#define MICROPHOTO_GPIOCONTROL_H
#include <string>
+#include <thread>
+#include <chrono>
#define CMD_GET_LIGHT_ADC 101
#define CMD_SET_LIGHT_ADC 102
@@ -36,16 +38,86 @@
#define CMD_SET_CAM_3V3_EN_STATE 132
#define CMD_SET_12V_EN_STATE 133
#define CMD_SET_SYSTEM_RESET 202
-#define CMD_SET_WTH_POWER 490
-#define CMD_SET_PULL_POWER 491
-#define CMD_SET_ANGLE_POWER 492
-#define CMD_SET_OTHER_POWER 493
-#define CMD_SET_PIC1_POWER 494
-#define CMD_SET_485_en0 301
-#define CMD_SET_485_en1 302
-#define CMD_SET_485_en2 303
-#define CMD_SET_485_en3 304
-#define CMD_SET_485_en4 305
+
+
+#ifdef USING_N938
+
+
+#define CMD_SET_485_EN_STATE 131
+#define CMD_SET_CAM_3V3_EN_STATE 132
+#define CMD_SET_12V_EN_STATE 133
+#define CMD_SET_485_STATE 121
+#define CMD_SET_SPI_MODE 123
+#define CMD_SET_SPI_BITS_PER_WORD 124
+#define CMD_SET_SPI_MAXSPEEDHZ 125
+#define CMD_SET_SPI_POWER 129
+#define CMD_SET_WTH_POWER 490
+#define CMD_SET_PULL_POWER 491
+#define CMD_SET_ANGLE_POWER 492
+#define CMD_SET_OTHER_POWER 493
+#define CMD_SET_PIC1_POWER 494
+#define CMD_SET_GPIO157_POWER 510
+#define CMD_SET_GPIO5_POWER 511
+#define CMD_SET_PWM_BEE_STATE 126
+#define CMD_SET_ALM_MODE 128
+#define CMD_SET_485_en0 301
+#define CMD_SET_485_en1 302
+#define CMD_SET_485_en2 303
+#define CMD_SET_485_en3 304
+#define CMD_SET_485_en4 305
+#define CMD_SET_OTG_STATE 107
+#define CMD_GET_OTG_STATE 108
+
+#if 0
+
+#define CMD_485_0_DE 156 // 485_0 DE signal
+#define CMD_485_0_PWR_EN 157 // 485_0 power enable
+#define CMD_485_0_1_DE_EN 171 // enable for the 485_0/485_1 DE level-shifter
+#define CMD_485_1_DE 172 // 485_1 DE signal
+
+#define CMD_SET_CAM_3V3_EN_STATE 72 // board-wide 3V3 power-on enable
+#define CMD_3V3_SWITCH_EN 45 // power enable for the board's RS-485 3V3 level shifting
+
+#define CMD_UART0_EN 73 // reserved UART0 level-shifter enable
+#define CMD_485_1_PWR_EN 5 // 485_1 power enable
+
+#define CMD_485_3_DE 6 // 485_3 DE signal
+#define CMD_485_2_DE 7 // 485_2 DE signal
+#define CMD_485_4_DE 13 // 485_4 DE signal
+#define CMD_NETWORK_PWR_EN 94 // 100M Ethernet power enable
+
+#define CMD_485_2_PWR_EN 92 // 485_2 power enable
+#define CMD_485_3_PWR_EN 91 // 485_3 power enable
+#define CMD_485_4_PWR_EN 90 // 485_4 power enable
+
+#define CMD_SEC_EN 27 // security (encryption) chip power-on enable
+
+#define CMD_485_2_3_DE_EN 26 // enable for the 485_2/485_3 DE level-shifter
+
+#define CMD_5V_PWR_EN 14 // board-wide 5V0 power-on enable
+#define CMD_SD_CARD_DECT 15 // SD card detect
+#define CMD_PIC1_EN 16
+
+#define CMD_OTHER_EN 21
+#define CMD_ANGLE_EN 22
+#define CMD_PULL_EN 23
+#define CMD_WEATHER_EN 24
+
+#define CMD_LED_CTRL 46
+#define CMD_BD_EN 47
+#define CMD_ADC_EN 44
+
+#define CMD_SPI_PWR_EN 43 // power enable for the SPI-to-UART bridge
+
+#endif
+
+#endif // USING_N938
+
+#ifdef USING_N938
+#define GPIO_NODE_MP "/sys/devices/platform/1000b000.pinctrl/mt_gpio"
+#else
+#define GPIO_NODE_MP "/dev/mtkgpioctrl"
+#endif // USING_N938
class GpioControl
@@ -71,7 +143,11 @@ public:
static void setCam3V3Enable(bool enabled)
{
+#ifdef ENABLE_3V3_ALWAYS
+ setInt(CMD_SET_CAM_3V3_EN_STATE, 1);
+#else
setInt(CMD_SET_CAM_3V3_EN_STATE, enabled ? 1 : 0);
+#endif
}
static void reboot()
@@ -161,6 +237,10 @@ public:
static void setSpiPower(bool on) {
setInt(CMD_SET_SPI_POWER, on ? 1 : 0);
+ if (on)
+ {
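+            // Assumption: the 40 ms pause lets the SPI peripheral's power rail
+            // settle before the first transaction after power-up.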
+ std::this_thread::sleep_for(std::chrono::milliseconds(40));
+ }
}
static void setRS485Enable(bool z) {
@@ -172,6 +252,12 @@ public:
setInt(CMD_SET_12V_EN_STATE, z ? 1 : 0);
}
+#ifdef USING_N938
+ static bool SetN938Cmd(int cmd, int val);
+ static bool OpenSensors();
+ static bool CloseSensors();
+#endif
+
};
diff --git a/app/src/main/cpp/MicroPhoto.cpp b/app/src/main/cpp/MicroPhoto.cpp
index 0b31ffef..3d5a67b6 100644
--- a/app/src/main/cpp/MicroPhoto.cpp
+++ b/app/src/main/cpp/MicroPhoto.cpp
@@ -10,9 +10,12 @@
#include
#include
#include
+#include "ncnn/yolov5ncnn.h"
#include
+
+
#define NRSEC_PATH "/dev/spidev0.0"
#ifdef USING_BREAK_PAD
@@ -326,7 +329,7 @@ Java_com_xypower_mpapp_MicroPhotoService_init(
pTerminal->InitServerInfo(MakeString(appPathStr), MakeString(cmdidStr), MakeString(ipStr), port, udpOrTcp, encryptData);
// pTerminal->SetPacketSize(1 * 1024); // 1K
-#ifdef USING_NRSEC
+#if defined(USING_NRSEC) && !defined(USING_NRSEC_VPN)
pTerminal->InitEncryptionInfo(simcardStr, "/dev/spidev0.0", "");
#endif
bool res = pTerminal->Startup(device);
@@ -394,9 +397,9 @@ Java_com_xypower_mpapp_MicroPhotoService_takePhoto(
if (photoInfo.usbCamera)
{
- device->TurnOnOtg(NULL);
+ CPhoneDevice::TurnOnOtg(NULL);
}
- device->TurnOnCameraPower(NULL);
+ CPhoneDevice::TurnOnCameraPower(NULL);
std::vector osds;
osds.resize(4);
@@ -415,11 +418,11 @@ Java_com_xypower_mpapp_MicroPhotoService_takePhoto(
env->ReleaseStringUTFChars(path, pathStr);
- device->TurnOffCameraPower(NULL);
- if (photoInfo.usbCamera)
- {
- device->TurnOffOtg(NULL);
- }
+ // device->TurnOffCameraPower(NULL);
+ // if (photoInfo.usbCamera)
+ //{
+ // device->TurnOffOtg(NULL);
+ //}
return reinterpret_cast(device);
}
@@ -497,6 +500,10 @@ Java_com_xypower_mpapp_MicroPhotoService_uninit(
}
pTerminal->SignalExit();
pTerminal->Shutdown();
+ if (dev != NULL)
+ {
+ delete dev;
+ }
delete pTerminal;
@@ -636,6 +643,90 @@ Java_com_xypower_mpapp_MicroPhotoService_getPhotoTimeData(
return data;
}
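+
+// Runs the YoloV5 ncnn detector on the image at picPath and returns a flat
+// jintArray with six ints per detection: x, y, w, h, label, (int)(prob * 100).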
+extern "C" JNIEXPORT jintArray JNICALL
+Java_com_xypower_mpapp_MicroPhotoService_recoganizePicture(
+ JNIEnv* env,
+ jclass cls, jstring paramPath, jstring binPath, jstring blobName8, jstring blobName16, jstring blobName32, jstring picPath) {
+
+ const char* pParamPathStr = env->GetStringUTFChars(paramPath, 0);
+ std::string paramPathStr = MakeString(pParamPathStr);
+ env->ReleaseStringUTFChars(paramPath, pParamPathStr);
+
+ const char* pBinPathStr = env->GetStringUTFChars(binPath, 0);
+ std::string binPathStr = MakeString(pBinPathStr);
+ env->ReleaseStringUTFChars(binPath, pBinPathStr);
+
+ const char* pBlobName8Str = env->GetStringUTFChars(blobName8, 0);
+ std::string blobName8Str = MakeString(pBlobName8Str);
+ env->ReleaseStringUTFChars(blobName8, pBlobName8Str);
+
+ const char* pBlobName16Str = env->GetStringUTFChars(blobName16, 0);
+ std::string blobName16Str = MakeString(pBlobName16Str);
+ env->ReleaseStringUTFChars(blobName16, pBlobName16Str);
+
+ const char* pBlobName32Str = env->GetStringUTFChars(blobName32, 0);
+ std::string blobName32Str = MakeString(pBlobName32Str);
+ env->ReleaseStringUTFChars(blobName32, pBlobName32Str);
+
+ const char* pPicPathStr = env->GetStringUTFChars(picPath, 0);
+ std::string picPathStr = MakeString(pPicPathStr);
+ env->ReleaseStringUTFChars(picPath, pPicPathStr);
+
+ cv::Mat mat = cv::imread(picPathStr);
+
+ if (mat.empty())
+ {
+ return NULL;
+ }
+
+    std::vector<jint> dataArray;
+ ncnn_init();
+
+ ncnn::Net net;
+
+ bool res = YoloV5Ncnn_Init(net, paramPathStr, binPathStr);
+ if (res)
+ {
+        // "Object" is the detection struct from ncnn/yolov5ncnn.h (fields x, y,
+        // w, h, label, prob); the exact type name was lost here and is assumed.
+        std::vector<Object> objs;
+ res = YoloV5NcnnDetect(net, mat, true, blobName8Str, blobName16Str, blobName32Str, objs);
+ if (res && !objs.empty())
+ {
+            for (std::vector<Object>::const_iterator it = objs.cbegin(); it != objs.cend(); ++it)
+ {
+ // float x;
+ // float y;
+ // float w;
+ // float h;
+ // int label;
+ // float prob;
+ dataArray.push_back(it->x);
+ dataArray.push_back(it->y);
+ dataArray.push_back(it->w);
+ dataArray.push_back(it->h);
+ dataArray.push_back(it->label);
+ dataArray.push_back((int)(it->prob * 100.0f));
+ }
+ }
+ }
+
+ // ncnn_uninit();
+
+ if (dataArray.empty())
+ {
+ return NULL;
+ }
+
+ jintArray data = env->NewIntArray(dataArray.size());
+ if (data == NULL) {
+ return NULL;
+ }
+
+ env->SetIntArrayRegion(data, 0, dataArray.size(), &dataArray[0]);
+
+ return data;
+}
+
+
/*
extern "C" JNIEXPORT jlongArray JNICALL
Java_com_xypower_mpapp_MicroPhotoService_getNextScheduleItem(
@@ -704,10 +795,106 @@ Java_com_xypower_mpapp_MicroPhotoService_getNextScheduleItem(
*/
+extern "C" JNIEXPORT void JNICALL
+Java_com_xypower_mpapp_MicroPhotoService_captureFinished(
+ JNIEnv* env,
+ jobject pThis, jlong handler, jboolean photoOrVideo, jboolean result, jobject bitmap, jlong photoId) {
+
+ CTerminal* pTerminal = reinterpret_cast<CTerminal*>(handler);
+ if (pTerminal == NULL)
+ {
+ return;
+ }
+
+ IDevice* dev = pTerminal->GetDevice();
+ if (dev != NULL)
+ {
+ if (result == JNI_FALSE || bitmap == NULL)
+ {
+ cv::Mat mat;
+ ((CPhoneDevice *)dev)->OnCaptureReady(photoOrVideo != JNI_FALSE, result != JNI_FALSE, mat, (unsigned long)photoId);
+ return;
+ }
+ AndroidBitmapInfo info = { 0 };
+ int res = AndroidBitmap_getInfo(env, bitmap, &info);
+ if (res < 0 || info.format != ANDROID_BITMAP_FORMAT_RGBA_8888)
+ {
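+ // The result/format check is currently informational only; the code
+ // below still assumes RGBA_8888 (an early return here may have been
+ // intended).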
+ }
+
+ bool hardwareBitmap = (info.flags & ANDROID_BITMAP_FLAGS_IS_HARDWARE) != 0;
+ void* pixels = NULL;
+ AHardwareBuffer* hardwareBuffer = NULL;
+ if (hardwareBitmap)
+ {
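+ // Hardware bitmaps cannot be locked with AndroidBitmap_lockPixels;
+ // the AHardwareBuffer path is stubbed out below (#if 0), so such
+ // frames are currently dropped.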
+#if 0
+ res = AndroidBitmap_getHardwareBuffer(env, bitmap, &hardwareBuffer);
+ int32_t fence = -1;
+ res = AHardwareBuffer_lock(hardwareBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, fence, NULL, &pixels);
+
+ cv::Mat mat(info.height, info.width, CV_8UC4, pixels);
+ AHardwareBuffer_unlock(hardwareBuffer, &fence);
+
+ cv::cvtColor(mat, mat, cv::COLOR_RGB2BGR);
+
+ ((CPhoneDevice *)dev)->OnCaptureReady(photoOrVideo != JNI_FALSE, result != JNI_FALSE, mat, (unsigned long)photoId);
+#endif // 0
+ } else
+ {
+ res = AndroidBitmap_lockPixels(env, bitmap, &pixels);
+ cv::Mat tmp(info.height, info.width, CV_8UC4, pixels);
+
+ cv::Mat raw(info.height, info.width, CV_8UC4);
+
+ // tmp.copyTo(mat);
+ cv::cvtColor(tmp, raw, cv::COLOR_RGBA2BGR);
+
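+ // Denoise before hand-off: h=13 for both luminance and color with a
+ // 7x7 template and 21x21 search window, which is fairly aggressive
+ // smoothing at the cost of CPU time.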
+ cv::Mat mat;
+ cv::fastNlMeansDenoisingColored(raw, mat, 13, 13, 7, 21);
+
+ AndroidBitmap_unlockPixels(env, bitmap);
+
+ ((CPhoneDevice *)dev)->OnCaptureReady(photoOrVideo != JNI_FALSE, result != JNI_FALSE, mat, (unsigned long)photoId);
+ }
+ }
+}
+
+extern "C" JNIEXPORT void JNICALL
+Java_com_xypower_mpapp_MicroPhotoService_burstCaptureFinished(
+ JNIEnv* env,
+ jobject pThis, jlong handler, jboolean result, jint numberOfCaptures,
+ jstring pathsJoinedByTab, jboolean frontCamera, jint rotation, jlong photoId) {
+
+ CTerminal* pTerminal = reinterpret_cast<CTerminal*>(handler);
+ if (pTerminal == NULL)
+ {
+ return;
+ }
+
+ /// HDRPlus
+#ifdef USING_HDRPLUS
+
+
+#endif
+ IDevice* dev = pTerminal->GetDevice();
+ if (dev != NULL)
+ {
+ if (result == JNI_FALSE)
+ {
+ cv::Mat mat;
+ ((CPhoneDevice *)dev)->OnCaptureReady(true, false, mat, (unsigned long)photoId);
+ return;
+ }
+
+ const char* pathsStr = env->GetStringUTFChars(pathsJoinedByTab, 0);
+ ((CPhoneDevice *)dev)->ProcessRawCapture(result != JNI_FALSE, numberOfCaptures, MakeString(pathsStr), frontCamera != JNI_FALSE, rotation, photoId);
+ env->ReleaseStringUTFChars(pathsJoinedByTab, pathsStr);
+ }
+}
+
extern "C" JNIEXPORT void JNICALL
Java_com_xypower_mpapp_MicroPhotoService_recordingFinished(
JNIEnv* env,
- jobject pThis, jlong handler, jboolean result, jstring path, jlong videoId) {
+ jobject pThis, jlong handler, jboolean photoOrVideo, jboolean result, jstring path, jlong videoId) {
CTerminal* pTerminal = reinterpret_cast<CTerminal*>(handler);
if (pTerminal == NULL)
@@ -726,7 +913,7 @@ Java_com_xypower_mpapp_MicroPhotoService_recordingFinished(
// camera->Open(pathStr, fileNameStr);
unsigned long photoId = videoId;
- ((CPhoneDevice *)dev)->OnVideoReady(result != JNI_FALSE, pathStr, photoId);
+ ((CPhoneDevice *)dev)->OnVideoReady(photoOrVideo != JNI_FALSE, result != JNI_FALSE, pathStr, photoId);
if (path != NULL)
{
env->ReleaseStringUTFChars(path, pathStr);
@@ -735,7 +922,7 @@ Java_com_xypower_mpapp_MicroPhotoService_recordingFinished(
}
-extern "C" JNIEXPORT void JNICALL
+extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_reloadConfigs(
JNIEnv* env,
jobject pThis, jlong handler) {
@@ -743,13 +930,36 @@ Java_com_xypower_mpapp_MicroPhotoService_reloadConfigs(
CTerminal* pTerminal = reinterpret_cast<CTerminal*>(handler);
if (pTerminal == NULL)
{
- return;
+ return JNI_FALSE;
}
- pTerminal->LoadAppConfigs();
+ bool res = pTerminal->LoadAppConfigs();
+ return res ? JNI_TRUE : JNI_FALSE;
}
+extern "C" JNIEXPORT jboolean JNICALL
+Java_com_xypower_mpapp_MicroPhotoService_sendExternalPhoto(
+ JNIEnv* env, jclass cls, jlong handler, jstring path) {
+
+ CTerminal* pTerminal = reinterpret_cast<CTerminal*>(handler);
+ if (pTerminal == NULL)
+ {
+ return JNI_FALSE;
+ }
+
+ if (env->GetStringUTFLength(path) <= 0)
+ {
+ return JNI_FALSE;
+ }
+
+ const char *pathStr = env->GetStringUTFChars(path, 0);
+ bool res = pTerminal->SendExternalPhoto(pathStr);
+ env->ReleaseStringUTFChars(path, pathStr);
+
+ return res ? JNI_TRUE : JNI_FALSE;
+}
+
extern "C" JNIEXPORT void JNICALL
Java_com_xypower_mpapp_MicroPhotoService_infoLog(
JNIEnv* env, jclass cls, jstring msg) {
@@ -814,7 +1024,8 @@ Java_com_xypower_mpapp_MicroPhotoService_importPublicKeyFile(
const char *md5Str = env->GetStringUTFChars(md5, 0);
- GpioControl::setCam3V3Enable(true);
+ GpioControl::setSpiPower(false);
+ CPhoneDevice::TurnOnCameraPower(NULL);
GpioControl::setSpiPower(true);
NrsecPort nrsec;
@@ -827,7 +1038,7 @@ Java_com_xypower_mpapp_MicroPhotoService_importPublicKeyFile(
}
GpioControl::setSpiPower(false);
- GpioControl::setCam3V3Enable(false);
+ CPhoneDevice::TurnOffCameraPower(NULL);
env->ReleaseStringUTFChars(md5, md5Str);
@@ -849,7 +1060,8 @@ Java_com_xypower_mpapp_MicroPhotoService_importPublicKey(
return JNI_FALSE;
}
- GpioControl::setCam3V3Enable(true);
+ GpioControl::setSpiPower(false);
+ CPhoneDevice::TurnOnCameraPower(NULL);
GpioControl::setSpiPower(true);
NrsecPort nrsec;
@@ -864,7 +1076,44 @@ Java_com_xypower_mpapp_MicroPhotoService_importPublicKey(
}
GpioControl::setSpiPower(false);
- GpioControl::setCam3V3Enable(false);
+ CPhoneDevice::TurnOffCameraPower(NULL);
+
+ return res ? JNI_TRUE : JNI_FALSE;
+#else
+ return JNI_FALSE;
+#endif
+}
+
+
+extern "C" JNIEXPORT jboolean JNICALL
+Java_com_xypower_mpapp_MicroPhotoService_importPrivateKey(
+ JNIEnv* env, jclass cls, jint index, jbyteArray cert) {
+
+#ifdef USING_NRSEC
+
+ int byteCertLen = env->GetArrayLength(cert);
+ if (byteCertLen <= 0)
+ {
+ return JNI_FALSE;
+ }
+
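+ // NRSEC power sequencing: drop SPI power first (presumably to reset
+ // the bus), raise the reference-counted camera 3V3 rail, then
+ // re-enable SPI.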
+ GpioControl::setSpiPower(false);
+ CPhoneDevice::TurnOnCameraPower(NULL);
+ GpioControl::setSpiPower(true);
+
+ NrsecPort nrsec;
+ const char *path = NRSEC_PATH;
+ bool res = nrsec.Open(path);
+ if (res)
+ {
+ jbyte* byteCert = env->GetByteArrayElements(cert, 0);
+ res = nrsec.SM2ImportPrivateKey(index, (const uint8_t*)byteCert) == 0;
+ nrsec.Close();
+ env->ReleaseByteArrayElements(cert, byteCert, JNI_ABORT);
+ }
+
+ GpioControl::setSpiPower(false);
+ CPhoneDevice::TurnOffCameraPower(NULL);
return res ? JNI_TRUE : JNI_FALSE;
#else
@@ -886,7 +1135,9 @@ Java_com_xypower_mpapp_MicroPhotoService_genKeys(
jclass cls, jint index) {
#ifdef USING_NRSEC
- GpioControl::setCam3V3Enable(true);
+
+ GpioControl::setSpiPower(false);
+ CPhoneDevice::TurnOnCameraPower(NULL);
GpioControl::setSpiPower(true);
const char *path = NRSEC_PATH;
@@ -900,7 +1151,7 @@ Java_com_xypower_mpapp_MicroPhotoService_genKeys(
}
GpioControl::setSpiPower(false);
- GpioControl::setCam3V3Enable(false);
+ CPhoneDevice::TurnOffCameraPower(NULL);
return res ? JNI_TRUE : JNI_FALSE;
#else
@@ -915,7 +1166,8 @@ Java_com_xypower_mpapp_MicroPhotoService_querySecVersion(
std::string version;
#ifdef USING_NRSEC
- GpioControl::setCam3V3Enable(true);
+ GpioControl::setSpiPower(false);
+ CPhoneDevice::TurnOnCameraPower(NULL);
GpioControl::setSpiPower(true);
const char *path = NRSEC_PATH;
@@ -929,7 +1181,7 @@ Java_com_xypower_mpapp_MicroPhotoService_querySecVersion(
}
GpioControl::setSpiPower(false);
- GpioControl::setCam3V3Enable(false);
+ CPhoneDevice::TurnOffCameraPower(NULL);
#endif
return env->NewStringUTF(version.c_str());
}
@@ -945,7 +1197,8 @@ Java_com_xypower_mpapp_MicroPhotoService_genCertRequest(
}
const char *path = NRSEC_PATH;
- GpioControl::setCam3V3Enable(true);
+ GpioControl::setSpiPower(false);
+ CPhoneDevice::TurnOnCameraPower(NULL);
GpioControl::setSpiPower(true);
uint8_t output[1024] = { 0 };
@@ -962,6 +1215,9 @@ Java_com_xypower_mpapp_MicroPhotoService_genCertRequest(
env->ReleaseStringUTFChars(subject, subjectStr);
}
+ GpioControl::setSpiPower(false);
+ CPhoneDevice::TurnOffCameraPower(NULL);
+
if (res)
{
const char* outputPathStr = env->GetStringUTFChars(outputPath, 0);
@@ -999,7 +1255,8 @@ Java_com_xypower_mpapp_MicroPhotoService_importPrivateKeyFile(
const char *path = NRSEC_PATH;
- GpioControl::setCam3V3Enable(true);
+ GpioControl::setSpiPower(false);
+ CPhoneDevice::TurnOnCameraPower(NULL);
GpioControl::setSpiPower(true);
NrsecPort nrsec;
@@ -1011,7 +1268,7 @@ Java_com_xypower_mpapp_MicroPhotoService_importPrivateKeyFile(
}
GpioControl::setSpiPower(false);
- GpioControl::setCam3V3Enable(false);
+ CPhoneDevice::TurnOffCameraPower(NULL);
// const char *md5Str = env->GetStringUTFChars(md5, 0);
// env->ReleaseStringUTFChars(md5, md5Str);
@@ -1038,7 +1295,8 @@ Java_com_xypower_mpapp_MicroPhotoService_exportPublicKeyFile(
uint8_t len = 0;
std::vector data(64, 0);
- GpioControl::setCam3V3Enable(true);
+ GpioControl::setSpiPower(false);
+ CPhoneDevice::TurnOnCameraPower(NULL);
GpioControl::setSpiPower(true);
NrsecPort nrsec;
@@ -1050,7 +1308,7 @@ Java_com_xypower_mpapp_MicroPhotoService_exportPublicKeyFile(
}
GpioControl::setSpiPower(false);
- GpioControl::setCam3V3Enable(false);
+ CPhoneDevice::TurnOffCameraPower(NULL);
if (res)
{
@@ -1077,7 +1335,8 @@ Java_com_xypower_mpapp_MicroPhotoService_exportPrivateFile(
const char *path = NRSEC_PATH;
- GpioControl::setCam3V3Enable(true);
+ GpioControl::setSpiPower(false);
+ CPhoneDevice::TurnOnCameraPower(NULL);
GpioControl::setSpiPower(true);
NrsecPort nrsec;
@@ -1093,7 +1352,7 @@ Java_com_xypower_mpapp_MicroPhotoService_exportPrivateFile(
nrsec.Close();
GpioControl::setSpiPower(false);
- GpioControl::setCam3V3Enable(false);
+ CPhoneDevice::TurnOffCameraPower(NULL);
if (res)
{
diff --git a/app/src/main/cpp/PhoneDevice.cpp b/app/src/main/cpp/PhoneDevice.cpp
index b160d412..27de01f1 100644
--- a/app/src/main/cpp/PhoneDevice.cpp
+++ b/app/src/main/cpp/PhoneDevice.cpp
@@ -7,6 +7,8 @@
#include "ncnn/yolov5ncnn.h"
#include "GPIOControl.h"
#include "CvText.h"
+#include "PositionHelper.h"
+#include "DngCreator.h"
#include
#include
@@ -18,10 +20,18 @@
#include
#include
+#include
#include
-#include
+#include
+#include
+#ifdef USING_HDRPLUS
+#include
+#endif
+
+#include
#include
+#include
namespace fs = std::filesystem;
#define CMD_SET_485_EN_STATE 131
@@ -36,6 +46,33 @@ extern bool GetJniEnv(JavaVM *vm, JNIEnv **env, bool& didAttachThread);
// are normalized to eight bits.
static const int kMaxChannelValue = 262143;
+
+cv::Mat convert16bit2_8bit_(cv::Mat ans){
+ if(ans.type()==CV_16UC3){
+ cv::MatIterator_<cv::Vec3w> it, end;
+ for( it = ans.begin<cv::Vec3w>(), end = ans.end<cv::Vec3w>(); it != end; ++it)
+ {
+ // scale each 16-bit channel down into the 8-bit range
+ (*it)[0] *= (255.0 / USHRT_MAX);
+ (*it)[1] *= (255.0 / USHRT_MAX);
+ (*it)[2] *= (255.0 / USHRT_MAX);
+ }
+ ans.convertTo(ans, CV_8UC3);
+ }
+ return ans;
+}
+
+bool CPhoneDevice::CPhoneCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames)
+{
+ if (m_dev != NULL)
+ {
+ return m_dev->onBurstCapture(characteristics, results, ldr, frames);
+ }
+ return false;
+}
+
void CPhoneDevice::CPhoneCamera::on_error(const std::string& msg)
{
if (m_dev != NULL)
@@ -160,7 +206,123 @@ void CPhoneDevice::CPhoneCamera::onDisconnected(ACameraDevice* device)
}
}
-CPhoneDevice::CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPath, unsigned int netId, unsigned int versionCode) : mCameraPowerCount(0), mOtgCount(0), mVersionCode(versionCode)
+
+CPhoneDevice::CJpegCamera::CJpegCamera(CPhoneDevice* dev, int32_t width, int32_t height, const std::string& path, const NdkCamera::CAMERA_PARAMS& params) : CPhoneDevice::CPhoneCamera(dev, width, height, params), m_path(path)
+{
+}
+
+bool CPhoneDevice::CJpegCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames)
+{
+ if (m_dev != NULL)
+ {
+ m_dev->onBurstCapture(characteristics, results, ldr, frames);
+ }
+ return true;
+}
+
+void CPhoneDevice::CJpegCamera::onImageAvailable(AImageReader* reader)
+{
+ ALOGD("onImageAvailable %p", reader);
+
+ AImage* image = 0;
+ media_status_t mstatus = AImageReader_acquireLatestImage(reader, &image);
+
+ if (mstatus != AMEDIA_OK)
+ {
+ // error
+ // https://stackoverflow.com/questions/67063562
+ if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
+ {
+ XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireLatestImage error: %d", mstatus);
+ }
+ return;
+ }
+
+ uint8_t* y_data = 0;
+ int y_len = 0;
+#if 0
+ if (!lightDetected)
+ {
+ AImage_getPlaneData(image, 0, &y_data, &y_len);
+
+ lightDetected = true;
+
+#if __cplusplus >= 201703L
+ uint64_t avgY = std::reduce(y_data, y_data + y_len, 0);
+#else
+ uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0);
+#endif
+ avgY = avgY / (uint64_t)y_len;
+ mLdr = avgY;
+#if 1
+ if (avgY < 50)
+ {
+ if (m_params.autoExposure)
+ {
+ uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF;
+ camera_status_t status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);
+
+ int32_t sensitivity = (avgY < 5) ? 2000 : (mResult.sensitivity * 60.0 / avgY);
+ status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity);
+
+ int64_t exposureTime = (avgY < 5) ? 200 * 1000000 : (mResult.exposureTime * 120.0 / avgY);
+ status = ACaptureRequest_setEntry_i64(capture_request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime);
+
+ XYLOG(XYLOG_SEVERITY_WARNING, "YUV Light: %u EXPO:%lld => %lld ISO: %u => %u", (uint32_t)avgY,
+ mResult.exposureTime, exposureTime, mResult.sensitivity, sensitivity);
+ }
+ AImage_delete(image);
+ return;
+ }
+#endif
+ }
+#endif
+
+ int32_t format;
+ AImage_getFormat(image, &format);
+
+ if (format == AIMAGE_FORMAT_JPEG)
+ {
+ int planeCount;
+ media_status_t status = AImage_getNumberOfPlanes(image, &planeCount);
+
+ // LOGI("Info: getNumberOfPlanes() planeCount = %d", planeCount);
+ if (!(status == AMEDIA_OK && planeCount == 1))
+ {
+ // LOGE("Error: getNumberOfPlanes() planeCount = %d", planeCount);
+ AImage_delete(image); // release the acquired image before bailing out
+ return;
+ }
+
+ uint8_t *data = nullptr;
+ int len = 0;
+ AImage_getPlaneData(image, 0, &data, &len);
+
+ FILE *file = fopen(m_path.c_str(), "wb");
+ if (file && data && len)
+ {
+ fwrite(data, 1, len, file);
+ fclose(file);
+ }
+ else
+ {
+ if (file)
+ fclose(file);
+ }
+ }
+
+ AImage_delete(image);
+}
+
+int32_t CPhoneDevice::CJpegCamera::getOutputFormat() const
+{
+ return AIMAGE_FORMAT_JPEG;
+}
+
+std::mutex CPhoneDevice::m_powerLocker;
+long CPhoneDevice::mCameraPowerCount = 0;
+long CPhoneDevice::mOtgCount = 0;
+
+CPhoneDevice::CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPath, unsigned int netId, unsigned int versionCode) : mVersionCode(versionCode)
{
mCamera = NULL;
m_listener = NULL;
@@ -195,7 +357,7 @@ CPhoneDevice::CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPa
mRegisterHeartbeatMid = env->GetMethodID(classService, "registerHeartbeatTimer", "(IJ)V");
mUpdateTimeMid = env->GetMethodID(classService, "updateTime", "(J)Z");
mUpdateCaptureScheduleMid = env->GetMethodID(classService, "updateCaptureSchedule", "(J)Z");
- mStartRecordingMid = env->GetMethodID(classService, "startRecording", "(IJIIIIILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V");
+ mStartRecordingMid = env->GetMethodID(classService, "startRecording", "(ZIJIIIIILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V");
mRequestWakelockMid = env->GetMethodID(classService, "requestWakelock", "(Ljava/lang/String;J)V");
mReleaseWakelockMid = env->GetMethodID(classService, "releaseWakelock", "(Ljava/lang/String;)V");
@@ -308,16 +470,18 @@ bool CPhoneDevice::SelfTest(std::string& result)
{
result.clear();
+ const char* ITEM_SEP = "\t"; //
unsigned int numberOfChannels = 0;
- result += "设备自检 版本:" + GetVersion() + NEW_LINE_TAG;
+ result += "设备自检 版本:" + GetVersion() + ITEM_SEP;
Json::Value appConfig = Json::objectValue;
std::vector content;
std::string filePath = m_appPath + (APP_DATA_DIR DIR_SEP_STR APP_FILE_NAME_APP_CONF);
if (!readFile(filePath, content))
{
- result += ("读取系统配置文件App.json失败" NEW_LINE_TAG);
+ result += "读取系统配置文件App.json失败";
+ result += ITEM_SEP;
}
else
{
@@ -331,16 +495,16 @@ bool CPhoneDevice::SelfTest(std::string& result)
if (GetJSONUInt32Value(appConfig, "channels", val) && (val > 0 && val <= 255))
{
numberOfChannels = val;
- result += "通道数:" + std::to_string(numberOfChannels) + NEW_LINE_TAG;
+ result += "通道数:" + std::to_string(numberOfChannels) + ITEM_SEP;
}
else
{
- result += "通道数未定义或者无效" NEW_LINE_TAG;
+ result += "通道数未定义或者无效" + std::string(ITEM_SEP);
}
}
else
{
- result += "解析系统配置文件App.json失败" NEW_LINE_TAG;
+ result += "解析系统配置文件App.json失败" + std::string(ITEM_SEP);
}
}
@@ -355,7 +519,7 @@ bool CPhoneDevice::SelfTest(std::string& result)
filePath = m_appPath + (APP_DATA_DIR DIR_SEP_STR APP_FILE_NAME_APP_CONF);
if (!readFile(filePath, content))
{
- result += "读取通道" + std::to_string(channel) + "配置文件失败" NEW_LINE_TAG;
+ result += "读取通道" + std::to_string(channel) + "配置文件失败" + std::string(ITEM_SEP);
}
else
{
@@ -368,17 +532,17 @@ bool CPhoneDevice::SelfTest(std::string& result)
GetJSONUInt8Value(channelCfg, "usbCamera", usbCamera);
if (GetJSONUInt8Value(channelCfg, "cameraId", cameraId))
{
- result += "通道" + std::to_string(channel) + " Camera ID为 " + std::to_string(cameraId) + NEW_LINE_TAG;
+ result += "通道" + std::to_string(channel) + " Camera ID为 " + std::to_string(cameraId) + ITEM_SEP;
}
else
{
cameraId = channel - 1;
- result += "通道" + std::to_string(channel) + "未定义Camera ID, 使用默认值 " + std::to_string(cameraId) + NEW_LINE_TAG;
+ result += "通道" + std::to_string(channel) + "未定义Camera ID, 使用默认值 " + std::to_string(cameraId) + ITEM_SEP;
}
}
else
{
- result += "解析通道" + std::to_string(channel) + "配置文件App.json失败" NEW_LINE_TAG;
+ result += "解析通道" + std::to_string(channel) + "配置文件App.json失败" + std::string(ITEM_SEP);
}
}
@@ -400,11 +564,11 @@ bool CPhoneDevice::SelfTest(std::string& result)
}
if (res == 0)
{
- result += "通道" + std::to_string(channel) + "正常:最大分辨率:" + std::to_string(width) + "x" + std::to_string(height) + NEW_LINE_TAG;
+ result += "通道" + std::to_string(channel) + "正常:最大分辨率:" + std::to_string(width) + "x" + std::to_string(height) + ITEM_SEP;
}
else
{
- result += "通道" + std::to_string(channel) + " 异常 err=" + std::to_string(res) + NEW_LINE_TAG;
+ result += "通道" + std::to_string(channel) + " 异常 err=" + std::to_string(res) + ITEM_SEP;
}
}
@@ -412,19 +576,19 @@ bool CPhoneDevice::SelfTest(std::string& result)
if (bv > 0)
{
bv -= bv % 100;
- result += std::string("电池电压:") + std::to_string(bv / 1000) + std::string(".") + std::to_string((bv % 1000) / 100) + NEW_LINE_TAG;
+ result += std::string("电池电压:") + std::to_string(bv / 1000) + std::string(".") + std::to_string((bv % 1000) / 100) + ITEM_SEP;
}
fs::space_info si = fs::space("/data");
double fr = ((double)si.available * 100.0f) / ((double)si.capacity);
result += "可用存储:";
result += std::to_string((int)fr);
- result += "%%" NEW_LINE_TAG;
+ result += "%%" + std::string(ITEM_SEP);
long fm = android_os_Process_getFreeMemory();
long tm = android_os_Process_getTotalMemory();
double fmp = ((double)fm * 100.0f) / ((double)tm);
- result += std::string("可用内存:") + std::to_string((int)fmp) + std::string("%%" NEW_LINE_TAG);
+ result += std::string("可用内存:") + std::to_string((int)fmp) + std::string("%%") + ITEM_SEP;
if (!m_tfCardPath.empty())
{
@@ -432,12 +596,12 @@ bool CPhoneDevice::SelfTest(std::string& result)
double fr2 = ((double)si2.available * 100.0f) / ((double)si2.capacity);
result += "TF卡可用空间:";
result += std::to_string((int)fr2);
- result += "%%" NEW_LINE_TAG;
+ result += "%%" + std::string(ITEM_SEP);
}
result += "4G信号强度:";
result += std::to_string(m_signalLevel);
- result += NEW_LINE_TAG;
+ result += ITEM_SEP;
result += "网络接口:";
std::vector devices;
@@ -447,7 +611,7 @@ bool CPhoneDevice::SelfTest(std::string& result)
result += (*it);
result += " ";
}
- // result += NEW_LINE_TAG;
+ // result += ITEM_SEP;
return true;
}
@@ -530,19 +694,19 @@ bool CPhoneDevice::QuerySystemProperties(std::map& pro
else if (it->first == PROP_MODEL)
{
__system_property_get("ro.product.model", value);
- it->second = value;
+ it->second = std::string(value);
}
else if (it->first == PROP_BS_MANU)
{
__system_property_get("ro.product.manufacturer", value);
- it->second = value;
+ it->second = std::string(value);
}
else if (it->first == PROP_VERSION)
{
// FOR Protocol
snprintf(value, sizeof(value), "%u.%03u", (mVersionCode / 1000), (mVersionCode % 1000));
// __system_property_get("ro.build.version.release", value);
- it->second = value;
+ it->second = std::string(value);
}
else if (it->first == (PROP_VERSION_ABBR))
{
@@ -560,19 +724,19 @@ bool CPhoneDevice::QuerySystemProperties(std::map& pro
else if (it->first == PROP_PROD_DATE)
{
__system_property_get("ro.build.date.utc", value);
- it->second = value;
+ it->second = std::string(value);
}
else if (it->first == PROP_SN || it->first == PROP_BS_ID)
{
__system_property_get("ro.serialno", value);
- it->second = value;
+ it->second = std::string(value);
}
else if (it->first == PROP_IMEI)
{
if (m_simcard.empty())
{
__system_property_get("phone.imei", value);
- it->second = value;
+ it->second = std::string(value);
}
else
{
@@ -592,9 +756,8 @@ bool CPhoneDevice::QuerySystemProperties(std::map& pro
{
fs::space_info si = fs::space("/data");
double fr = ((double)si.available * 100.0f) / ((double)si.capacity);
- char buf[12] = { 0 };
- snprintf(buf, sizeof(buf), "%d%%", (int)fr);
- it->second = buf;
+ snprintf(value, sizeof(value), "%d%%", (int)fr);
+ it->second = std::string(value);
}
else if (it->first == PROP_TOTAL_ROM)
{
@@ -610,9 +773,8 @@ bool CPhoneDevice::QuerySystemProperties(std::map& pro
long fm = android_os_Process_getFreeMemory();
long tm = android_os_Process_getTotalMemory();
double fmp = ((double)fm * 100.0f) / ((double)tm);
- char buf[12] = { 0 };
- snprintf(buf, sizeof(buf), "%d%%", (int)fmp);
- it->second = buf; // Unit: M
+ snprintf(value, sizeof(value), "%d%%", (int)fmp);
+ it->second = std::string(value); // Unit: M
}
else if (it->first == PROP_TOTAL_MEMORY)
{
@@ -643,7 +805,7 @@ bool CPhoneDevice::QuerySystemProperties(std::map& pro
continue;
}
snprintf(str, sizeof(str), "%.1f", (val / 1000.0));
- it->second = str;
+ it->second = std::string(str);
break;
}
}
@@ -657,9 +819,8 @@ bool CPhoneDevice::QuerySystemProperties(std::map& pro
if (val > 0)
{
bv = val;
- char str[32] = { 0 };
- snprintf(str, sizeof(str), "%.1f", val / 1000.0);
- it->second = str;
+ snprintf(value, sizeof(value), "%.1f", val / 1000.0);
+ it->second = std::string(value);
}
else
{
@@ -701,7 +862,7 @@ bool CPhoneDevice::QuerySystemProperties(std::map& pro
char str[32] = { 0 };
float batteryCurrent = STANDARD_CURRENT_64V / ((float)bv / 1000.0f / STANDARD_VOLTAGE_64V);
snprintf(str, sizeof(str), "%d", (int)batteryCurrent);
- it->second = str;
+ it->second = std::string(str);
}
}
// __system_property_get("ro.telephony.default_network", value);
@@ -1179,6 +1340,8 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<
params.requestTemplate = mPhotoInfo.requestTemplate;
params.awbMode = mPhotoInfo.awbMode;
params.wait3ALocked = mPhotoInfo.wait3ALocked;
+ params.burstRawCapture = mPhotoInfo.usingRawFormat;
+ params.burstCaptures = mPhotoInfo.burstCaptures;
if (params.requestTemplate <= 0 || params.requestTemplate > 5)
{
params.requestTemplate = 2;
@@ -1196,7 +1359,6 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<
}
#endif
- // GpioControl::EnableGpio(CMD_SET_CAM_3V3_EN_STATE, true);
bool res = false;
if (photoInfo.usbCamera)
@@ -1206,9 +1368,10 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<
TurnOnCameraPower(NULL);
res = true;
- if (mPhotoInfo.mediaType == 0)
+ if (mPhotoInfo.mediaType == 0/* && mPhotoInfo.usingRawFormat == 0*/)
{
mCamera = new CPhoneCamera(this, photoInfo.width, photoInfo.height, params);
+ // mCamera = new CJpegCamera(this, photoInfo.width, photoInfo.height, mPath, params);
if (mCamera->open(to_string(mPhotoInfo.cameraId)) == 0)
{
XYLOG(XYLOG_SEVERITY_DEBUG, "TP: Succeeded to OpenCamera CH=%u PR=%X PHOTOID=%u", (unsigned int)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
@@ -1267,8 +1430,10 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<
}
int orientation = mPhotoInfo.orientation == 0 ? -1 : (mPhotoInfo.orientation - 1) * 90;
- env->CallVoidMethod(m_javaService, mStartRecordingMid, mPhotoInfo.cameraId, (unsigned long)mPhotoInfo.photoId, mPhotoInfo.duration, mPhotoInfo.width, mPhotoInfo.height,
- mPhotoInfo.duration, orientation, leftTopOSD, rightTopOSD, rightBottomOSD, leftBottomOSD);
+ jboolean photoOrVideo = mPhotoInfo.mediaType == 0 ? JNI_TRUE : JNI_FALSE;
+ env->CallVoidMethod(m_javaService, mStartRecordingMid, photoOrVideo, mPhotoInfo.cameraId, (unsigned long)mPhotoInfo.photoId,
+ mPhotoInfo.duration, mPhotoInfo.width, mPhotoInfo.height, mPhotoInfo.duration, orientation,
+ leftTopOSD, rightTopOSD, rightBottomOSD, leftBottomOSD);
if (leftTopOSD) env->DeleteLocalRef(leftTopOSD);
if (rightTopOSD) env->DeleteLocalRef(rightTopOSD);
@@ -1307,13 +1472,13 @@ void CPhoneDevice::CloseCamera2(CPhoneDevice::CPhoneCamera* camera, unsigned int
delete camera;
}
- XYLOG(XYLOG_SEVERITY_DEBUG, "TP: Will Turn Off Power=%u", photoId);
+ XYLOG(XYLOG_SEVERITY_DEBUG, "TP: Will Turn Off Power PHOTOID=%u", photoId);
if (turnOffOtg)
{
TurnOffOtg(NULL);
}
TurnOffCameraPower(NULL);
- XYLOG(XYLOG_SEVERITY_DEBUG, "TP: End Turn Off Power=%u", photoId);
+ XYLOG(XYLOG_SEVERITY_DEBUG, "TP: End Turn Off Power PHOTOID=%u", photoId);
XYLOG(XYLOG_SEVERITY_DEBUG, "TP: CloseCamera PHOTOID=%u", photoId);
@@ -1347,14 +1512,315 @@ void DrawOutlineText(cv::Ptr ft2, cv::Mat& mat, const std::st
}
}
-bool CPhoneDevice::OnImageReady(cv::Mat& mat)
+bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics,
+ std::vector<std::shared_ptr<ACameraMetadata> >& results,
+ uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames)
{
- if (mCamera == NULL)
+ time_t takingTime = time(NULL);
+ if (mPhotoInfo.remedy != 0)
{
- // int aa = 0;
- return false;
+ if ((takingTime - mPhotoInfo.scheduleTime) > 30)
+ {
+ takingTime = mPhotoInfo.scheduleTime + mPhotoInfo.channel * 2;
+ }
+ }
+ mPhotoInfo.photoTime = takingTime;
+
+ vector osds;
+ osds.swap(mOsds);
+ PHOTO_INFO photoInfo = mPhotoInfo;
+ std::string path;
+ path.swap(mPath);
+
+ std::string tmpPath = m_appPath + std::string(APP_DIR_TMP DIR_SEP_STR) + std::to_string(photoInfo.photoId);
+
+ acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT;
+ ACameraMetadata_const_entry e = { 0 };
+ camera_status_t status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_LENS_FACING, &e);
+ if (status == ACAMERA_OK)
+ {
+ facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0];
+ }
+
+ int sensorOrientation = 0;
+ {
+ ACameraMetadata_const_entry e = { 0 };
+ status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_SENSOR_ORIENTATION, &e);
+ if (status == ACAMERA_OK)
+ {
+ sensorOrientation = (int)e.data.i32[0];
+ }
}
+ bool turnOffOtg = (photoInfo.usbCamera != 0);
+ CPhoneCamera* pCamera = mCamera;
+ mCamera = NULL;
+
+ std::thread th([=]()mutable
+ {
+ cv::Mat rgb;
+ std::vector > rawFiles;
+
+ media_status_t mstatus;
+ std::string cameraInfo;
+ if (photoInfo.usingRawFormat != 0)
+ {
+ //
+ for (int idx = 0; idx < frames.size(); idx++)
+ {
+ std::shared_ptr<AImage> spImage = frames[idx];
+ std::shared_ptr<ACameraMetadata> result = results[idx];
+
+ auto it = rawFiles.insert(rawFiles.end(), std::vector<uint8_t>());
+
+ int32_t width;
+ int32_t height;
+ AImage_getWidth(spImage.get(), &width);
+ AImage_getHeight(spImage.get(), &height);
+
+ int planeCount;
+ media_status_t status = AImage_getNumberOfPlanes(spImage.get(), &planeCount);
+ AASSERT(status == AMEDIA_OK && planeCount == 1, "Error: getNumberOfPlanes() planeCount = %d", planeCount);
+
+ uint8_t *planeData = NULL;
+ int planeDataLen = 0;
+ mstatus = AImage_getPlaneData(spImage.get(), 0, &planeData, &planeDataLen);
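+ // Wrap the RAW plane into an in-memory DNG, one byte vector per
+ // frame; the HDR+ pipeline consumes these buffers after the loop.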
+ DngCreator dngCreator(characteristics.get(), result.get());
+ dngCreator.writeInputBuffer(*it, planeData, planeDataLen, width, height, 0);
+ }
+ }
+ else
+ {
+ if (results.size() == 1 && frames.size() == 1)
+ {
+ std::shared_ptr<ACameraMetadata> result = results[0];
+ std::shared_ptr<AImage> frame = frames[0];
+
+ if (photoInfo.outputDbgInfo != 0)
+ {
+ NdkCamera::CAPTURE_RESULT captureResult = { 0 };
+ NdkCamera::EnumCameraResult(result.get(), captureResult);
+
+ char extimeunit[4] = { 0 };
+ unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000));
+ strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs");
+ char str[128] = { 0 };
+ snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld",
+ captureResult.autoExposure, captureResult.autoFocus,
+ extime, extimeunit, captureResult.compensation, captureResult.sensitivity,
+ // isnan(captureResult.FocusDistance) ? 0 : captureResult.FocusDistance,
+ (unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState,
+ captureResult.sceneMode, GpioControl::getLightAdc(), ldr, captureResult.zoomRatio,
+ (uint32_t)captureResult.duration, captureResult.frameDuration);
+ cameraInfo = str;
+ }
+
+ int32_t format;
+ mstatus = AImage_getFormat(frame.get(), &format);
+
+ if (format == AIMAGE_FORMAT_YUV_420_888)
+ {
+ int32_t width;
+ int32_t height;
+ mstatus = AImage_getWidth(frame.get(), &width);
+ mstatus = AImage_getHeight(frame.get(), &height);
+
+ int32_t y_pixelStride = 0;
+ int32_t u_pixelStride = 0;
+ int32_t v_pixelStride = 0;
+ AImage_getPlanePixelStride(frame.get(), 0, &y_pixelStride);
+ AImage_getPlanePixelStride(frame.get(), 1, &u_pixelStride);
+ AImage_getPlanePixelStride(frame.get(), 2, &v_pixelStride);
+
+ int32_t y_rowStride = 0;
+ int32_t u_rowStride = 0;
+ int32_t v_rowStride = 0;
+ AImage_getPlaneRowStride(frame.get(), 0, &y_rowStride);
+ AImage_getPlaneRowStride(frame.get(), 1, &u_rowStride);
+ AImage_getPlaneRowStride(frame.get(), 2, &v_rowStride);
+
+ uint8_t* y_data = 0;
+ uint8_t* u_data = 0;
+ uint8_t* v_data = 0;
+ int y_len = 0;
+ int u_len = 0;
+ int v_len = 0;
+ AImage_getPlaneData(frame.get(), 0, &y_data, &y_len);
+ AImage_getPlaneData(frame.get(), 1, &u_data, &u_len);
+ AImage_getPlaneData(frame.get(), 2, &v_data, &v_len);
+
+ if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width)
+ {
+ // already nv21
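+ // Plane layout matches NV21 exactly (Y plane followed by an
+ // interleaved VU plane, no row padding), so the Y pointer can be
+ // passed straight through without repacking.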
+ ConvertYUV21ToMat(y_data, width, height, photoInfo.width, photoInfo.height, sensorOrientation, facing == ACAMERA_LENS_FACING_FRONT, photoInfo.orientation, rgb);
+ }
+ else
+ {
+ // construct nv21
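+ // Planes are padded or fully planar, so copy Y row by row and
+ // interleave V/U into a scratch NV21 buffer first.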
+ uint8_t* nv21 = new uint8_t[width * height + width * height / 2];
+ {
+ // Y
+ uint8_t* yptr = nv21;
+ for (int y = 0; y < height; y++)
+ {
+ const uint8_t* y_data_ptr = y_data + y_rowStride * y;
+ for (int x = 0; x < width; x++)
+ {
+ yptr[0] = y_data_ptr[0];
+ yptr++;
+ y_data_ptr += y_pixelStride;
+ }
+ }
+
+ // UV
+ uint8_t* uvptr = nv21 + width * height;
+ for (int y = 0; y < height / 2; y++)
+ {
+ const uint8_t* v_data_ptr = v_data + v_rowStride * y;
+ const uint8_t* u_data_ptr = u_data + u_rowStride * y;
+ for (int x = 0; x < width / 2; x++)
+ {
+ uvptr[0] = v_data_ptr[0];
+ uvptr[1] = u_data_ptr[0];
+ uvptr += 2;
+ v_data_ptr += v_pixelStride;
+ u_data_ptr += u_pixelStride;
+ }
+ }
+ }
+
+ ConvertYUV21ToMat(nv21, width, height, photoInfo.width, photoInfo.height, sensorOrientation, facing == ACAMERA_LENS_FACING_FRONT, photoInfo.orientation, rgb);
+
+ delete[] nv21;
+ }
+
+ if (photoInfo.outputDbgInfo != 0)
+ {
+
+ }
+ }
+ }
+ }
+
+ frames.clear();
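+ // Camera teardown runs on a background thread: after the swap,
+ // closeThread holds the previous close-thread (if any), which is
+ // detached so it can finish independently.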
+ std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, photoInfo.photoId, turnOffOtg);
+ m_threadClose.swap(closeThread);
+ if (closeThread.joinable())
+ {
+ closeThread.detach();
+ }
+
+#ifdef OUTPUT_CAMERA_DBG_INFO
+#if 0
+ bool shouldRetry = false;
+ if (ldr != ~0)
+ {
+ if (ldr < MIN_LIGHT_Y)
+ {
+ if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
+ {
+ shouldRetry = true;
+ char presetBuf[16] = {0};
+ snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
+ // replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
+ replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
+ XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
+ (uint32_t) captureResult.avgY);
+
+ // photoInfo.usingRawFormat = 1;
+ }
+ }
+ else if (ldr > MAX_LIGHT_Y)
+ {
+ if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
+ {
+ shouldRetry = true;
+ char presetBuf[16] = {0};
+ snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
+ // replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
+ replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
+ XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
+ (uint32_t) captureResult.avgY);
+ }
+
+ photoInfo.compensation = -2 * ((int16_t) ((uint16_t) captureResult.avgY));
+ }
+ }
+#endif // 0
+#endif // OUTPUT_CAMERA_DBG_INFO
+
+ // Notify to take next photo
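+ // (status 1 appears to mean "captured, file not yet written";
+ // states 2/3 are reported once the JPEG has been saved)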
+ TakePhotoCb(1, photoInfo, "", takingTime);
+
+ if (photoInfo.usingRawFormat != 0)
+ {
+ XYLOG(XYLOG_SEVERITY_ERROR, "Start HDR CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId);
+ hdrplus::hdrplus_pipeline pipeline;
+ pipeline.run_pipeline(rawFiles, 0, rgb);
+ XYLOG(XYLOG_SEVERITY_ERROR, "Finish HDR CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId);
+
+#ifdef NDEBUG
+ // rawFiles holds in-memory DNG buffers rather than temporary files,
+ // so there is nothing to delete here; the old rawFilePaths cleanup
+ // loop no longer applies.
+#endif
+ {
+ cv::Mat tempPic = convert16bit2_8bit_(rgb);
+ rgb = tempPic;
+ }
+
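+ // orientation is encoded 1..4 for 0/90/180/270 degrees; front-facing
+ // sensors additionally need mirroring, handled per case below.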
+ if (photoInfo.orientation > 0)
+ {
+ if (photoInfo.orientation == 1)
+ {
+ if (facing == ACAMERA_LENS_FACING_FRONT)
+ {
+ cv::flip(rgb, rgb, 1);
+ }
+ } else if (photoInfo.orientation == 2)
+ {
+ cv::Mat tempPic;
+ cv::transpose(rgb, tempPic);
+ cv::flip(tempPic, rgb, 1);
+ }
+ else if (photoInfo.orientation == 3)
+ {
+ if (facing == ACAMERA_LENS_FACING_FRONT)
+ {
+ flip(rgb, rgb, 0);
+ }
+ else
+ {
+ cv::flip(rgb, rgb, -1);
+ }
+ }
+ else if (photoInfo.orientation == 4)
+ {
+ cv::Mat tempPic;
+ cv::transpose(rgb, tempPic);
+ cv::flip(tempPic, rgb, 0);
+ }
+
+ XYLOG(XYLOG_SEVERITY_ERROR, "Finish rotation CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
+ }
+ cv::cvtColor(rgb, rgb, cv::COLOR_RGB2BGR);
+ }
+
+ bool res = PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb);
+ if (res)
+ {
+ // TakePhotoCb(2, photoInfo, path, takingTime);
+ }
+ });
+
+ th.detach();
+
+ return true;
+}
+
+bool CPhoneDevice::OnImageReady(cv::Mat& mat)
+{
time_t takingTime = time(NULL);
if (mPhotoInfo.remedy != 0)
{
@@ -1429,8 +1895,12 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat)
"hair drier", "toothbrush"
};
#endif
+
cv::Scalar borderColor(m_pRecognizationCfg->borderColor & 0xFF, (m_pRecognizationCfg->borderColor & 0xFF00) >> 8, (m_pRecognizationCfg->borderColor & 0xFF0000) >> 16);
cv::Scalar textColor(m_pRecognizationCfg->textColor & 0xFF, (m_pRecognizationCfg->textColor & 0xFF00) >> 8, (m_pRecognizationCfg->textColor & 0xFF0000) >> 16);
+ float minSizeW = m_pRecognizationCfg->minSize > 0 ? (mPhotoInfo.width * m_pRecognizationCfg->minSize / 100) : 0;
+ float minSizeH = m_pRecognizationCfg->minSize > 0 ? (mPhotoInfo.height * m_pRecognizationCfg->minSize / 100) : 0;
+
for (auto it = objs.cbegin(); it != objs.cend();)
{
if (it->label >= m_pRecognizationCfg->items.size())
@@ -1446,6 +1916,15 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat)
continue;
}
+ if (m_pRecognizationCfg->minSize > 0)
+ {
+ if (it->w < minSizeW || it->h < minSizeH)
+ {
+ it = objs.erase(it);
+ continue;
+ }
+ }
+
if ((mPhotoInfo.recognization & 0x2) != 0)
{
cv::Rect rc(it->x, it->y, it->w, it->h);
@@ -1491,102 +1970,43 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat)
XYLOG(XYLOG_SEVERITY_WARNING, "Channel AI Disabled");
}
-#ifdef OUTPUT_CAMERA_DBG_INFO
-
- cv::Scalar scalarRed(0, 0, 255); // red
-
- NdkCamera::CAPTURE_RESULT captureResult = mCamera->getCaptureResult();
+// #ifdef OUTPUT_CAMERA_DBG_INFO
-#if 0
- if (captureResult.avgY < 25 && mPhotoInfo.autoExposure != 0)
+ if (mCamera != NULL)
{
- // Take another photo
- CPhoneDevice* pThis = this;
- std::string path = mPath;
- IDevice::PHOTO_INFO photoInfo = mPhotoInfo;
- std::vector osds = mOsds;
- photoInfo.photoId += 1;
- photoInfo.autoExposure = 0;
- if (captureResult.avgY == 0)
- {
- photoInfo.exposureTime = 600000000;
- photoInfo.sensitivity = 2500;
- }
- else if (captureResult.avgY <= 6)
- {
- photoInfo.exposureTime = captureResult.exposureTime * 150 / captureResult.avgY;
- photoInfo.sensitivity = photoInfo.sensitivity * 80 / captureResult.avgY;
- if (photoInfo.sensitivity < captureResult.sensitivity)
- {
- photoInfo.sensitivity = captureResult.sensitivity;
- }
- else if (photoInfo.sensitivity > 3000)
- {
- photoInfo.sensitivity = 3000;
- }
- }
- else
+ if (mPhotoInfo.outputDbgInfo != 0)
{
- photoInfo.exposureTime = captureResult.exposureTime * 120 / captureResult.avgY;
- photoInfo.sensitivity = photoInfo.sensitivity * 60 / captureResult.avgY;
- if (photoInfo.sensitivity < captureResult.sensitivity)
+ cv::Scalar scalarRed(0, 0, 255); // red
+
+ char extimeunit[4] = { 0 };
+ char str[128] = { 0 };
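+ // NOTE: str is never filled in this branch; the capture-result text
+ // is now produced in onBurstCapture and drawn by PostProcessPhoto
+ // via its cameraInfo argument.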
+
+ int fs = fontSize * 2 / 3;
+ textSize = ft2->getTextSize(str, fs, -1, &baseline);
+ cv::Point lt(0, mat.rows - fs - 20 * ratio);
+ cv::Point lt2(0, lt.y - 2 * ratio);
+ cv::Point rb(0 + textSize.width + 2 * ratio, lt2.y + textSize.height + 8 * ratio);
+
+ if (rb.x > (int)width - 1)
{
- photoInfo.sensitivity = captureResult.sensitivity;
+ rb.x = (int)width - 1;
}
- else if (photoInfo.sensitivity > 3000)
+ if (rb.y > (int)height - 1)
{
- photoInfo.sensitivity = 3000;
+ rb.y = (int)height - 1;
}
- }
-
- std::thread t([=]
- {
- std::this_thread::sleep_for(std::chrono::milliseconds(5000));
- pThis->TakePhoto(photoInfo, osds, path);
- });
-
- t.detach();
- }
-#endif // 0
-
- char extimeunit[4] = { 0 };
- unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000));
- strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs");
- char str[128] = { 0 };
- snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld",
- captureResult.autoExposure, captureResult.autoFocus,
- extime, extimeunit, captureResult.compensation, captureResult.sensitivity,
- // isnan(captureResult.FocusDistance) ? 0 : captureResult.FocusDistance,
- (unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState,
- captureResult.sceneMode, GpioControl::getLightAdc(), (unsigned int)captureResult.avgY, captureResult.zoomRatio,
- (uint32_t)captureResult.duration, captureResult.frameDuration);
- // cv::putText(mat, str, cv::Point(0, mat.rows - 20), cv::FONT_HERSHEY_COMPLEX, fontScale, scalarWhite, thickness1, cv::LINE_AA);
+ cv::Mat roi = mat(cv::Rect(lt2, rb));
+ cv::Mat clrMat(roi.size(), CV_8UC3, scalarWhite);
+ double alpha = 0.5;
+ cv::addWeighted(clrMat, alpha, roi, 1.0 - alpha, 0.0, roi);
- int fs = fontSize * 2 / 3;
- textSize = ft2->getTextSize(str, fs, -1, &baseline);
- cv::Point lt(0, mat.rows - fs - 20 * ratio);
- cv::Point lt2(0, lt.y - 2 * ratio);
- cv::Point rb(0 + textSize.width, lt2.y + textSize.height + 8 * ratio);
-
- if (rb.x > (int)width - 1)
- {
- rb.x = (int)width - 1;
- }
- if (rb.y > (int)height - 1)
- {
- rb.y = (int)height - 1;
+ // cv::rectangle(mat, lt2, rb,cv::Scalar(255, 255, 255), -1);
+ ft2->putText(mat, str, lt, fs, scalarRed, -1, cv::LINE_AA, false);
+ // DrawOutlineText(ft2, mat, str, cv::Point(0, mat.rows - fs - 20 * ratio), fs, scalarWhite, 1);
+ }
}
- cv::Mat roi = mat(cv::Rect(lt2, rb));
- cv::Mat clrMat(roi.size(), CV_8UC3, scalarWhite);
- double alpha = 0.2;
- cv::addWeighted(clrMat, alpha, roi, 1.0 - alpha, 0.0, roi);
-
- // cv::rectangle(mat, lt2, rb,cv::Scalar(255, 255, 255), -1);
- ft2->putText(mat, str, lt, fs, scalarRed, -1, cv::LINE_AA, false);
- // DrawOutlineText(ft2, mat, str, cv::Point(0, mat.rows - fs - 20 * ratio), fs, scalarWhite, 1);
-
-#endif // OUTPUT_CAMERA_DBG_INFO
+// #endif // OUTPUT_CAMERA_DBG_INFO
for (auto it = mOsds.cbegin(); it != mOsds.cend(); ++it)
{
@@ -1631,26 +2051,53 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat)
DrawOutlineText(ft2, mat, it->text, pt, fontSize, scalarWhite, thickness);
}
- vector params;
+ std::vector<int> params;
params.push_back(cv::IMWRITE_JPEG_QUALITY);
- params.push_back(mPhotoInfo.quality);
+ params.push_back((int)((uint32_t)mPhotoInfo.quality));
bool res = false;
std::string fullPath = endsWith(mPath, ".jpg") ? mPath : (mPath + CTerminal::BuildPhotoFileName(mPhotoInfo));
#ifdef OUTPUT_CAMERA_DBG_INFO
+
bool shouldRetry = false;
- if (captureResult.avgY > 245 || captureResult.avgY < 10)
- {
- if (mPhotoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
+#if 0
+ if (mCamera != NULL) {
+ NdkCamera::CAPTURE_RESULT captureResult = mCamera->getCaptureResult();
+
+ if (captureResult.avgY < MIN_LIGHT_Y)
+ {
+ if (mPhotoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
+ {
+ shouldRetry = true;
+ char presetBuf[16] = {0};
+ snprintf(presetBuf, sizeof(presetBuf), "%02X", mPhotoInfo.retries);
+ // replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(mPhotoInfo.retries) + ".jpg");
+ replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
+ XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
+ (uint32_t) captureResult.avgY);
+
+ // mPhotoInfo.usingRawFormat = 1;
+ }
+ }
+ else if (captureResult.avgY > MAX_LIGHT_Y)
{
- shouldRetry = true;
- replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(mPhotoInfo.retries) + ".jpg");
- replaceAll(fullPath, "/photos/", "/sentPhotos/");
+ if (mPhotoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
+ {
+ shouldRetry = true;
+ char presetBuf[16] = {0};
+ snprintf(presetBuf, sizeof(presetBuf), "%02X", mPhotoInfo.retries);
+ // replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(mPhotoInfo.retries) + ".jpg");
+ replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
+ XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
+ (uint32_t) captureResult.avgY);
+ }
- XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it", (uint32_t)captureResult.avgY);
+ mPhotoInfo.compensation = -2 * ((int16_t) ((uint16_t) captureResult.avgY));
}
}
+#endif
+
#endif // OUTPUT_CAMERA_DBG_INFO
if (!std::filesystem::exists(std::filesystem::path(fullPath)))
@@ -1667,52 +2114,318 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat)
#ifdef OUTPUT_CAMERA_DBG_INFO
if (shouldRetry)
{
- TakePhotoCb(false, mPhotoInfo, fullPath, takingTime, objs);
+ TakePhotoCb(0, mPhotoInfo, fullPath, takingTime, objs);
}
else
{
- TakePhotoCb(res, mPhotoInfo, fullPath, takingTime, objs);
+ TakePhotoCb(res ? 3 : 0, mPhotoInfo, fullPath, takingTime, objs);
}
#else
- TakePhotoCb(res, mPhotoInfo, fullPath, takingTime, objs);
+ TakePhotoCb(res ? 3 : 0, mPhotoInfo, fullPath, takingTime, objs);
#endif
}
else
{
ALOGI("Photo file exists: %s", mPath.c_str());
}
- CPhoneCamera* pCamera = mCamera;
- mCamera = NULL;
-
- bool turnOffOtg = (mPhotoInfo.usbCamera != 0);
- std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg);
- m_threadClose.swap(closeThread);
- if (closeThread.joinable())
- {
- closeThread.detach();
- }
return res;
}
-bool CPhoneDevice::OnVideoReady(bool result, const char* path, unsigned int photoId)
+bool CPhoneDevice::PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector& osds, const std::string& path, const std::string& cameraInfo, cv::Mat& mat)
{
- mPhotoInfo.photoTime = time(NULL);
+ int baseline = 0;
+ cv::Size textSize;
+ double height = mat.rows;
+ double width = mat.cols;
+ // double ratio = std::min(height / 1024, width / 1920);
+ double ratio = height / 1024.0;
+ int thickness = round(1.4 * ratio);
+ if (thickness < 1) thickness = 1;
+ else if (thickness > 5) thickness = 5;
+ cv::Scalar scalarWhite(255, 255, 255); // white
+ int fontSize = (int)(28.0 * ratio);
+ cv::Point pt;
- CPhoneCamera* pCamera = NULL;
- std::vector objs;
- std::string fullPath = mPath + CTerminal::BuildPhotoFileName(mPhotoInfo);
- if (result)
+ std::string fontPath;
+ if (existsFile("/system/fonts/NotoSansCJK-Regular.ttc"))
{
- std::rename(path, fullPath.c_str());
+ fontPath = "/system/fonts/NotoSansCJK-Regular.ttc";
}
- TakePhotoCb(result, mPhotoInfo, fullPath, time(NULL), objs);
-
- bool turnOffOtg = (mPhotoInfo.usbCamera != 0);
- std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg);
- m_threadClose.swap(closeThread);
-
- return result;
+ else if (existsFile("/system/fonts/NotoSerifCJK-Regular.ttc"))
+ {
+ fontPath = "/system/fonts/NotoSerifCJK-Regular.ttc";
+ }
+ else
+ {
+ fontPath = m_appPath + "fonts/Noto.otf";
+ }
+ cv::Ptr<cv::ft::FreeType2> ft2 = cv::ft::createFreeType2();
+ ft2->loadFontData(fontPath.c_str(), 0);
+ // cv::Rect rc(0, 0, mat.cols, mat.rows);
+ // cv::rectangle (mat, rc, cv::Scalar(255, 255, 255), cv::FILLED);
+ std::vector objs;
+
+ if ((m_pRecognizationCfg != NULL) && (m_pRecognizationCfg->enabled != 0) && (photoInfo.recognization != 0))
+ {
+ XYLOG(XYLOG_SEVERITY_INFO, "Channel AI Enabled");
+
+ // visualize(ncnnPath.c_str(), in);
+#ifdef _DEBUG
+ double startTime = ncnn::get_current_time();
+#endif // _DEBUG
+
+ bool detected = YoloV5NcnnDetect(mat, true, m_pRecognizationCfg->blobName8, m_pRecognizationCfg->blobName16, m_pRecognizationCfg->blobName32, objs);
+#ifdef _DEBUG
+ double elasped = ncnn::get_current_time() - startTime;
+ // __android_log_print(ANDROID_LOG_DEBUG, "YoloV5Ncnn", "%.2fms detect", elasped);
+#endif // _DEBUG
+#ifdef _DEBUG
+ ALOGI( "NCNN recognization: %.2fms res=%d", elasped, ((detected && !objs.empty()) ? 1 : 0));
+#endif
+ if (detected && !objs.empty())
+ {
+ cv::Scalar borderColor(m_pRecognizationCfg->borderColor & 0xFF, (m_pRecognizationCfg->borderColor & 0xFF00) >> 8, (m_pRecognizationCfg->borderColor & 0xFF0000) >> 16);
+ cv::Scalar textColor(m_pRecognizationCfg->textColor & 0xFF, (m_pRecognizationCfg->textColor & 0xFF00) >> 8, (m_pRecognizationCfg->textColor & 0xFF0000) >> 16);
+ float minSizeW = m_pRecognizationCfg->minSize > 0 ? (photoInfo.width * m_pRecognizationCfg->minSize / 100) : 0;
+ float minSizeH = m_pRecognizationCfg->minSize > 0 ? (photoInfo.height * m_pRecognizationCfg->minSize / 100) : 0;
+
+ for (auto it = objs.cbegin(); it != objs.cend();)
+ {
+ if (it->label >= m_pRecognizationCfg->items.size())
+ {
+ it = objs.erase(it);
+ continue;
+ }
+
+ const IDevice::CFG_RECOGNIZATION::ITEM& item = m_pRecognizationCfg->items[it->label];
+ if (item.enabled == 0 || it->prob < item.prob)
+ {
+ it = objs.erase(it);
+ continue;
+ }
+
+ if (m_pRecognizationCfg->minSize > 0)
+ {
+ if (it->w < minSizeW || it->h < minSizeH)
+ {
+ it = objs.erase(it);
+ continue;
+ }
+ }
+
+ if ((photoInfo.recognization & 0x2) != 0)
+ {
+ cv::Rect rc(it->x, it->y, it->w, it->h);
+ cv::rectangle(mat, rc, borderColor, m_pRecognizationCfg->thickness);
+ textSize = ft2->getTextSize(item.name, fontSize, thickness, &baseline);
+ textSize.height += baseline;
+ if (it->y > textSize.height)
+ {
+ pt.y = it->y - textSize.height - 4 - m_pRecognizationCfg->thickness;
+ }
+ else if (mat.rows - it->y - it->h > textSize.height)
+ {
+ pt.y = it->y + it->h + 4 + m_pRecognizationCfg->thickness;
+ }
+ else
+ {
+ // Inner
+ pt.y = it->y + 4 + m_pRecognizationCfg->thickness;
+ }
+ if (mat.cols - it->x > textSize.width)
+ {
+ pt.x = it->x;
+ }
+ else
+ {
+ pt.x = it->x + it->w - textSize.width;
+ }
+
+#ifdef OUTPUT_CAMERA_DBG_INFO
+ char buf[128];
+ snprintf(buf, sizeof(buf), "AI: %d=%s (%f,%f)-(%f,%f) Text:(%d,%d)-(%d,%d)",
+ it->label, item.name.c_str(), it->x, it->y, it->w, it->h, pt.x, pt.y, textSize.width, textSize.height);
+ XYLOG(XYLOG_SEVERITY_DEBUG, "%s", buf);
+#endif
+ ft2->putText(mat, item.name + std::to_string((int)(it->prob * 100.0)) + "%", pt, fontSize, textColor, thickness, cv::LINE_AA, false, true);
+ }
+ ++it;
+ }
+ }
+ }
+ else
+ {
+ XYLOG(XYLOG_SEVERITY_WARNING, "Channel AI Disabled");
+ }
+
+// #ifdef OUTPUT_CAMERA_DBG_INFO
+
+ if (!cameraInfo.empty())
+ {
+ // NdkCamera::CAPTURE_RESULT captureResult = mCamera->getCaptureResult();
+
+ if (photoInfo.outputDbgInfo != 0)
+ {
+ cv::Scalar scalarRed(0, 0, 255); // red
+
+ int fs = fontSize * 2 / 3;
+ textSize = ft2->getTextSize(cameraInfo, fs, -1, &baseline);
+ cv::Point lt(0, mat.rows - fs - 20 * ratio);
+ cv::Point lt2(0, lt.y - 2 * ratio);
+ cv::Point rb(0 + textSize.width + 2 * ratio, lt2.y + textSize.height + 8 * ratio);
+
+ if (rb.x > (int)width - 1)
+ {
+ rb.x = (int)width - 1;
+ }
+ if (rb.y > (int)height - 1)
+ {
+ rb.y = (int)height - 1;
+ }
+ cv::Mat roi = mat(cv::Rect(lt2, rb));
+ cv::Mat clrMat(roi.size(), CV_8UC3, scalarWhite);
+ double alpha = 0.5;
+ cv::addWeighted(clrMat, alpha, roi, 1.0 - alpha, 0.0, roi);
+
+ // cv::rectangle(mat, lt2, rb,cv::Scalar(255, 255, 255), -1);
+ ft2->putText(mat, cameraInfo, lt, fs, scalarRed, -1, cv::LINE_AA, false);
+
+ // DrawOutlineText(ft2, mat, str, cv::Point(0, mat.rows - fs - 20 * ratio), fs, scalarWhite, 1);
+ }
+ }
+// #endif // OUTPUT_CAMERA_DBG_INFO
+
+ for (auto it = osds.cbegin(); it != osds.cend(); ++it)
+ {
+ if (it->text.empty())
+ {
+ continue;
+ }
+
+#ifdef _DEBUG
+ if (it->alignment == OSD_ALIGNMENT_BOTTOM_RIGHT)
+ {
+ int aa = 0;
+ }
+#endif
+
+ textSize = ft2->getTextSize(it->text, fontSize, thickness, &baseline);
+ XYLOG(XYLOG_SEVERITY_DEBUG, "%s font Size=%d height: %d baseline=%d", it->text.c_str(), fontSize, textSize.height, baseline);
+
+ if (it->alignment == OSD_ALIGNMENT_TOP_LEFT)
+ {
+ pt.x = it->x * ratio;
+ pt.y = it->y * ratio;
+ }
+ else if (it->alignment == OSD_ALIGNMENT_TOP_RIGHT)
+ {
+ pt.x = width - textSize.width - it->x * ratio;
+ pt.y= it->y * ratio;
+ }
+ else if (it->alignment == OSD_ALIGNMENT_BOTTOM_RIGHT)
+ {
+ pt.x = width - textSize.width - it->x * ratio;
+ pt.y = height - it->y * ratio - textSize.height - baseline;
+ }
+ else if (it->alignment == OSD_ALIGNMENT_BOTTOM_LEFT)
+ {
+ pt.x = it->x * ratio;
+ pt.y = height - it->y * ratio - textSize.height - baseline;
+ }
+
+ // cv::Rect rc(pt.x, pt.y, textSize.width, textSize.height);
+ // cv::rectangle(mat, rc, cv::Scalar(0,255,255), 2);
+ DrawOutlineText(ft2, mat, it->text, pt, fontSize, scalarWhite, thickness);
+ }
+
+ std::vector<int> params;
+ params.push_back(cv::IMWRITE_JPEG_QUALITY);
+ params.push_back((int)((uint32_t)photoInfo.quality));
+
+ bool res = false;
+ std::string fullPath = endsWith(path, ".jpg") ? path : (path + CTerminal::BuildPhotoFileName(photoInfo));
+
+ if (!std::filesystem::exists(std::filesystem::path(fullPath)))
+ {
+#ifdef _DEBUG
+ char log[256] = { 0 };
+ strcpy(log, fullPath.c_str());
+#endif
+ res = cv::imwrite(fullPath.c_str(), mat, params);
+ if (!res)
+ {
+ XYLOG(XYLOG_SEVERITY_ERROR, "Failed to Write File: %s", fullPath.c_str() + m_appPath.size());
+ }
+ else
+ {
+ XYLOG(XYLOG_SEVERITY_INFO, "Succeeded to Write File: %s", fullPath.c_str() + m_appPath.size());
+ }
+ TakePhotoCb(res ? 2 : 0, photoInfo, fullPath, photoInfo.photoTime, objs);
+ }
+ else
+ {
+ XYLOG(XYLOG_SEVERITY_INFO, "Photo File Exists: %s", fullPath.c_str() + m_appPath.size());
+ }
+
+ return res;
+}
+
+bool CPhoneDevice::OnCaptureReady(bool photoOrVideo, bool result, cv::Mat& mat, unsigned int photoId)
+{
+ XYLOG(XYLOG_SEVERITY_INFO, "RAW Capture finished: %u RES=%d", photoId, (result ? 1 : 0));
+ if (photoOrVideo)
+ {
+ if (result)
+ {
+ OnImageReady(mat);
+ }
+ else
+ {
+ std::vector objs;
+ TakePhotoCb(0, mPhotoInfo, "", time(NULL), objs);
+
+ CPhoneCamera* pCamera = mCamera;
+ mCamera = NULL;
+
+ bool turnOffOtg = (mPhotoInfo.usbCamera != 0);
+ std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg);
+ m_threadClose.swap(closeThread);
+ if (closeThread.joinable())
+ {
+ closeThread.detach();
+ }
+ }
+ }
+
+ return true;
+}
+
+bool CPhoneDevice::OnVideoReady(bool photoOrVideo, bool result, const char* path, unsigned int photoId)
+{
+ if (photoOrVideo)
+ {
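+ // No-op: photo-mode results presumably arrive through the
+ // captureFinished path rather than here.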
+ }
+ else
+ {
+ mPhotoInfo.photoTime = time(NULL);
+ CPhoneCamera* pCamera = NULL;
+
+ std::vector objs;
+ std::string fullPath = mPath + CTerminal::BuildPhotoFileName(mPhotoInfo);
+ if (result)
+ {
+ std::rename(path, fullPath.c_str());
+ }
+ TakePhotoCb(result ? 3 : 0, mPhotoInfo, fullPath, time(NULL), objs);
+
+ bool turnOffOtg = (mPhotoInfo.usbCamera != 0);
+ std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg);
+ m_threadClose.swap(closeThread);
+ }
+
+ return result;
}
void CPhoneDevice::onError(const std::string& msg)
@@ -1727,7 +2440,7 @@ void CPhoneDevice::onError(const std::string& msg)
CPhoneCamera* pCamera = mCamera;
mCamera = NULL;
- TakePhotoCb(false, mPhotoInfo, mPath, 0);
+ TakePhotoCb(0, mPhotoInfo, mPath, 0);
bool turnOffOtg = (mPhotoInfo.usbCamera != 0);
std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg);
@@ -1746,7 +2459,7 @@ void CPhoneDevice::onDisconnected(ACameraDevice* device)
CPhoneCamera* pCamera = mCamera;
mCamera = NULL;
- TakePhotoCb(false, mPhotoInfo, mPath, 0);
+ TakePhotoCb(0, mPhotoInfo, mPath, 0);
bool turnOffOtg = (mPhotoInfo.usbCamera != 0);
std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg);
@@ -1775,24 +2488,28 @@ void CPhoneDevice::UpdatePosition(double lon, double lat, double radius, time_t
{
if (m_listener != NULL)
{
+ if (shouldConvertPosition(lat, lon))
+ {
+ transformPosition(lat, lon);
+ }
return m_listener->OnPositionDataArrived(lon, lat, radius, ts);
}
}
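+// Camera power and OTG are reference-counted process-wide: the GPIO is
+// toggled only on the 0->1 / 1->0 transitions, guarded by the static
+// m_powerLocker now that the counters are class-wide statics.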
void CPhoneDevice::TurnOnCameraPower(JNIEnv* env)
{
- m_devLocker.lock();
+ m_powerLocker.lock();
if (mCameraPowerCount == 0)
{
GpioControl::setCam3V3Enable(true);
}
mCameraPowerCount++;
- m_devLocker.unlock();
+ m_powerLocker.unlock();
}
void CPhoneDevice::TurnOffCameraPower(JNIEnv* env)
{
- m_devLocker.lock();
+ m_powerLocker.lock();
if (mCameraPowerCount > 0)
{
mCameraPowerCount--;
@@ -1801,24 +2518,24 @@ void CPhoneDevice::TurnOffCameraPower(JNIEnv* env)
GpioControl::setCam3V3Enable(false);
}
}
- m_devLocker.unlock();
+ m_powerLocker.unlock();
}
void CPhoneDevice::TurnOnOtg(JNIEnv* env)
{
- m_devLocker.lock();
+ m_powerLocker.lock();
if (mOtgCount == 0)
{
ALOGD("setOtgState 1");
GpioControl::setOtgState(true);
}
mOtgCount++;
- m_devLocker.unlock();
+ m_powerLocker.unlock();
}
void CPhoneDevice::TurnOffOtg(JNIEnv* env)
{
- m_devLocker.lock();
+ m_powerLocker.lock();
if (mOtgCount > 0)
{
mOtgCount--;
@@ -1828,7 +2545,7 @@ void CPhoneDevice::TurnOffOtg(JNIEnv* env)
GpioControl::setOtgState(false);
}
}
- m_devLocker.unlock();
+ m_powerLocker.unlock();
}
void CPhoneDevice::UpdateSignalLevel(int signalLevel)
@@ -1840,4 +2557,216 @@ void CPhoneDevice::UpdateSignalLevel(int signalLevel)
void CPhoneDevice::UpdateSimcard(const std::string& simcard)
{
m_simcard = simcard;
-}
\ No newline at end of file
+}
+
+bool CPhoneDevice::ProcessRawCapture(bool result, int numberOfCaptures, const std::string& pathsJoinedByTab, bool frontCamera, int rotation, long photoId)
+{
+ std::vector<std::string> paths = split(pathsJoinedByTab, "\t");
+
+ if (paths.empty())
+ {
+ cv::Mat mat;
+ OnCaptureReady(true, false, mat, (unsigned long)photoId);
+ return false;
+ }
+
+ XYLOG(XYLOG_SEVERITY_ERROR, "Start Processing Raw Capture CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId);
+
+ hdrplus::hdrplus_pipeline pipeline;
+ cv::Mat mat;
+ pipeline.run_pipeline(paths, 0, mat);
+ XYLOG(XYLOG_SEVERITY_ERROR, "Finish HDR CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId);
+
+ mat = convert16bit2_8bit_(mat.clone());
+
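+ // rotation arrives in degrees (90/180/270); transpose+flip implements
+ // each rotation, with an extra mirror for the front camera at 180.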
+ if (rotation >= 0)
+ {
+ if (rotation == 90)
+ {
+ cv::Mat tempPic;
+ cv::transpose(mat, tempPic);
+ cv::flip(tempPic, mat, 1);
+ }
+ else if (rotation == 180)
+ {
+ if (frontCamera)
+ {
+ flip(mat, mat, 0);
+
+ }
+ else
+ {
+ cv::flip(mat, mat, -1);
+ }
+ }
+ else if (rotation == 270)
+ {
+ cv::Mat tempPic;
+ cv::transpose(mat, tempPic);
+ cv::flip(tempPic, mat, 0);
+ }
+
+ XYLOG(XYLOG_SEVERITY_ERROR, "Finish rotation CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId);
+ }
+ cv::cvtColor(mat, mat, cv::COLOR_RGB2BGR);
+
+ XYLOG(XYLOG_SEVERITY_ERROR, "Finish Processing Raw Capture CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId);
+
+#ifdef _DEBUG
+ // cv::cvtColor(outputImg, outputImg, cv::COLOR_RGB2BGR);
+ cv::imwrite("/sdcard/com.xypower.mpapp/tmp/final.jpg", mat);
+#endif
+
+ OnCaptureReady(true, result, mat, (unsigned long)photoId);
+ return true;
+}
+
+int CPhoneDevice::GetIceData(IDevice::ICE_INFO *iceInfo, IDevice::ICE_TAIL *iceTail, SENSOR_PARAM *sensorParam)
+{
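+ // Sketch: sample every serial sensor (blocking), then walk the device
+ // table and dispatch on protocol - tension sensors (RALLY_PROTOCOL) fill
+ // original_tension, tilt sensors (SLANT_PROTOCOL) fill the deflection and
+ // windage-yaw angles; pullno and angleno index their result arrays
+ // independently.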
+ Collect_sensor_data(); // blocking, takes about 15 s
+ Data_DEF airt;
+ // Equivalent ice thickness, composite suspension load and unbalanced tension difference are set to 0 (not measured here)
+ iceInfo->equal_icethickness = 0;
+ iceInfo->tension = 0;
+ iceInfo->tension_difference = 0;
+
+ int pullno = 0;
+ int angleno = 0;
+ for(int num = 0; num < MAX_SERIAL_DEV_NUM; num++)
+ {
+ if(sensorParam[num].SensorsType == RALLY_PROTOCOL)
+ {
+ GetPullValue(num, &airt);
+ iceInfo->t_sensor_data[pullno].original_tension = airt.EuValue;
+ pullno++;
+ } else if(sensorParam[num].SensorsType == SLANT_PROTOCOL)
+ {
+ GetAngleValue(num, &airt, 0);
+ iceInfo->t_sensor_data[angleno].deflection_angle = airt.EuValue;
+ GetAngleValue(num, &airt, 1);
+ iceInfo->t_sensor_data[angleno].windage_yaw_angle = airt.EuValue;
+ angleno++;
+ }
+ }
+
+ GetWindSpeedData(&airt);
+ iceTail->instantaneous_windspeed = airt.EuValue;
+ GetWindDirectionData(&airt);
+ iceTail->instantaneous_winddirection = airt.EuValue; // spec wants an unsigned integer, a float is supplied
+ GetAirTempData(&airt);
+ iceTail->air_temperature = airt.EuValue;
+ GetHumidityData(&airt);
+ iceTail->humidity = airt.EuValue; // spec wants an unsigned integer, a float is supplied
+
+ return true;
+}
+
+
+
+int CPhoneDevice::GetWData(IDevice::WEATHER_INFO *weatherInfo)
+{
+ Collect_sensor_data(); // blocking, takes about 15 s
+
+ Data_DEF airt;
+ GetWeatherData(&airt, 0);
+ weatherInfo->air_temperature = airt.EuValue;
+
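+ // AiState == -1 is taken as "no valid sample"; only the temperature
+ // channel is checked, later channels are trusted once it succeeds.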
+ if (airt.AiState == -1) return false;
+
+ GetWeatherData(&airt, 1);
+ weatherInfo->humidity = airt.EuValue;
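+ // Only the 10-minute average wind speed is read; the extreme and standard
+ // wind-speed fields reuse the same value, presumably for lack of
+ // dedicated channels.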
+ GetWeatherData(&airt, 2);
+ weatherInfo->avg_windspeed_10min = airt.EuValue;
+ weatherInfo->extreme_windspeed = airt.EuValue;
+ weatherInfo->standard_windspeed = airt.EuValue;
+ GetWeatherData(&airt, 3);
+ weatherInfo->avg_winddirection_10min = airt.EuValue;
+ GetWeatherData(&airt, 4);
+ weatherInfo->precipitation = airt.EuValue;
+ GetWeatherData(&airt, 5);
+ weatherInfo->air_pressure = airt.EuValue;
+ GetWeatherData(&airt, 6);
+ weatherInfo->radiation_intensity = airt.EuValue;
+ return true;
+}
+
+#ifdef USING_N938
+bool CPhoneDevice::OpenSensors()
+{
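+ // Assumed N938 wiring: enable the 3V3 camera rail and the RS-485
+ // transceiver, then power each sensor group and the five 485 enable lines
+ // individually before sampling.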
+ GpioControl::setInt(CMD_SET_CAM_3V3_EN_STATE, 1);
+ GpioControl::setInt(CMD_SET_485_EN_STATE, 1);
+ int igpio;
+ GpioControl::setInt(CMD_SET_WTH_POWER, 1);
+ GpioControl::setInt(CMD_SET_PULL_POWER, 1);
+ GpioControl::setInt(CMD_SET_ANGLE_POWER, 1);
+ GpioControl::setInt(CMD_SET_OTHER_POWER, 1);
+ GpioControl::setInt(CMD_SET_PIC1_POWER, 1);
+
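+ // Read each state back; the results are deliberately ignored - the reads
+ // presumably act as a settle/verification step after switching.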
+ igpio = GpioControl::getInt(CMD_SET_WTH_POWER);
+ igpio = GpioControl::getInt(CMD_SET_PULL_POWER);
+ igpio = GpioControl::getInt(CMD_SET_ANGLE_POWER);
+ igpio = GpioControl::getInt(CMD_SET_OTHER_POWER);
+ igpio = GpioControl::getInt(CMD_SET_PIC1_POWER);
+
+ GpioControl::setInt(CMD_SET_SPI_POWER, 1);
+ GpioControl::setInt(CMD_SET_485_en0, 1);
+ GpioControl::setInt(CMD_SET_485_en1, 1);
+ GpioControl::setInt(CMD_SET_485_en2, 1);
+ GpioControl::setInt(CMD_SET_485_en3, 1);
+ GpioControl::setInt(CMD_SET_485_en4, 1);
+
+ igpio = GpioControl::getInt(CMD_SET_SPI_POWER);
+ igpio = GpioControl::getInt(CMD_SET_485_en0);
+ igpio = GpioControl::getInt(CMD_SET_485_en1);
+ igpio = GpioControl::getInt(CMD_SET_485_en2);
+ igpio = GpioControl::getInt(CMD_SET_485_en3);
+ igpio = GpioControl::getInt(CMD_SET_485_en4);
+ return true;
+}
+bool CPhoneDevice::CloseSensors()
+{
+ GpioControl::setInt(CMD_SET_12V_EN_STATE, 0);
+ GpioControl::setInt(CMD_SET_CAM_3V3_EN_STATE, 0);
+ GpioControl::setInt(CMD_SET_485_EN_STATE, 0);
+ int igpio;
+ GpioControl::setInt(CMD_SET_WTH_POWER, 0);
+ GpioControl::setInt(CMD_SET_PULL_POWER, 0);
+ GpioControl::setInt(CMD_SET_ANGLE_POWER, 0);
+ GpioControl::setInt(CMD_SET_OTHER_POWER, 0);
+ GpioControl::setInt(CMD_SET_PIC1_POWER, 0);
+
+ igpio = GpioControl::getInt(CMD_SET_WTH_POWER);
+ igpio = GpioControl::getInt(CMD_SET_PULL_POWER);
+ igpio = GpioControl::getInt(CMD_SET_ANGLE_POWER);
+ igpio = GpioControl::getInt(CMD_SET_OTHER_POWER);
+ igpio = GpioControl::getInt(CMD_SET_PIC1_POWER);
+
+ GpioControl::setInt(CMD_SET_SPI_POWER, 0);
+ GpioControl::setInt(CMD_SET_485_en0, 0);
+ GpioControl::setInt(CMD_SET_485_en1, 0);
+ GpioControl::setInt(CMD_SET_485_en2, 0);
+ GpioControl::setInt(CMD_SET_485_en3, 0);
+ GpioControl::setInt(CMD_SET_485_en4, 0);
+
+ //sleep(3);
+ igpio = GpioControl::getInt(CMD_SET_SPI_POWER);
+ igpio = GpioControl::getInt(CMD_SET_485_en0);
+ igpio = GpioControl::getInt(CMD_SET_485_en1);
+ igpio = GpioControl::getInt(CMD_SET_485_en2);
+ igpio = GpioControl::getInt(CMD_SET_485_en3);
+ igpio = GpioControl::getInt(CMD_SET_485_en4);
+ return true;
+}
+#else
+bool CPhoneDevice::OpenSensors()
+{
+ return false;
+}
+
+bool CPhoneDevice::CloseSensors()
+{
+ return false;
+}
+#endif
diff --git a/app/src/main/cpp/PhoneDevice.h b/app/src/main/cpp/PhoneDevice.h
index 1676f7a2..bfa6e16a 100644
--- a/app/src/main/cpp/PhoneDevice.h
+++ b/app/src/main/cpp/PhoneDevice.h
@@ -153,18 +153,33 @@ class CPhoneDevice : public IDevice
{
public:
- class CPhoneCamera : public NdkCamera {
+ class CPhoneCamera : public NdkCamera
+ {
public:
CPhoneCamera(CPhoneDevice* dev, int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params);
virtual ~CPhoneCamera();
virtual bool on_image(cv::Mat& rgb);
virtual void on_error(const std::string& msg);
virtual void onDisconnected(ACameraDevice* device);
+ virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames);
protected:
CPhoneDevice* m_dev;
};
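+ // CJpegCamera is assumed to write JPEG output straight to m_path from
+ // onImageAvailable, bypassing the YUV-to-cv::Mat path of CPhoneCamera;
+ // getOutputFormat() presumably returns AIMAGE_FORMAT_JPEG.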
+ class CJpegCamera : public CPhoneCamera
+ {
+ public:
+ CJpegCamera(CPhoneDevice* dev, int32_t width, int32_t height, const std::string& path, const NdkCamera::CAMERA_PARAMS& params);
+
+ virtual void onImageAvailable(AImageReader* reader);
+ virtual int32_t getOutputFormat() const;
+ virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames);
+
+ protected:
+ std::string m_path;
+ };
+
struct TIMER_CONTEXT
{
CPhoneDevice* device;
@@ -198,10 +213,18 @@ public:
virtual unsigned long RequestWakelock(unsigned long timeout);
virtual bool ReleaseWakelock(unsigned long wakelock);
+ virtual int GetWData(WEATHER_INFO *weatherInfo);
+ virtual int GetIceData(ICE_INFO *iceInfo, ICE_TAIL *icetail, SENSOR_PARAM *sensorParam);
+ virtual bool OpenSensors();
+ virtual bool CloseSensors();
+
bool GetNextScheduleItem(uint32_t tsBasedZero, uint32_t scheduleTime, vector