diff --git a/app/build.gradle b/app/build.gradle index 133b5ba6..ffb698ef 100644 --- a/app/build.gradle +++ b/app/build.gradle @@ -4,8 +4,8 @@ plugins { // 10,00,000 major-minor-build def AppMajorVersion = 1 -def AppMinorVersion = 0 -def AppBuildNumber = 190 +def AppMinorVersion = 1 +def AppBuildNumber = 1 def AppVersionName = AppMajorVersion + "." + AppMinorVersion + "." + AppBuildNumber def AppVersionCode = AppMajorVersion * 100000 + AppMinorVersion * 1000 + AppBuildNumber @@ -26,7 +26,7 @@ android { applicationId "com.xypower.mpapp" minSdk COMPILE_MIN_SDK_VERSION as int //noinspection ExpiredTargetSdkVersion - targetSdk 28 + targetSdk TARGET_SDK_VERSION as int versionCode AppVersionCode versionName AppVersionName @@ -39,8 +39,8 @@ android { cppFlags '-std=c++17 -fexceptions -Wno-error=format-security' // cppFlags '-std=c++17 -Wno-error=format-security' // arguments "-DANDROID_STL=c++_shared" - arguments "-DNCNN_DISABLE_EXCEPTION=OFF", "-DTERM_CORE_ROOT=" + coreroot, "-DOpenCV_DIR=" + opencvsdk + "/sdk/native/jni", "-DASIO_ROOT=" + asioroot, "-DEVPP_ROOT=" + evpproot, "-DNCNN_ROOT=" + ncnnroot - abiFilters 'arm64-v8a' + arguments "-DNCNN_DISABLE_EXCEPTION=OFF", "-DTERM_CORE_ROOT=" + coreroot, "-DOpenCV_DIR=" + opencvsdk + "/sdk/native/jni", "-DHDRPLUS_ROOT=" + hdrplusroot, "-DNCNN_ROOT=" + ncnnroot + abiFilters 'arm64-v8a', 'armeabi-v7a' // setAbiFilters(['arm64-v8a']) } } @@ -52,6 +52,7 @@ android { proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' } debug { + minifyEnabled false jniDebuggable true testCoverageEnabled false } @@ -74,6 +75,17 @@ android { } } + splits { + boolean isReleaseTask = gradle.startParameter.taskNames.any { it.contains("Release") } + // enabled on release build + abi { + enable isReleaseTask + reset() + include "armeabi-v7a", "arm64-v8a" + universalApk false + } + } + android.applicationVariants.all { variant -> variant.outputs.all { output -> if (outputFileName.endsWith('.apk')) { @@ -81,7 +93,9 
@@ android { if(variant.buildType.name.equals('release')) { buildTypeFlag = "rel" } - def fileName = "mpapp_v${defaultConfig.versionName}_${buildTypeFlag}_${new Date(System.currentTimeMillis()).format("yyyyMMdd")}.apk" + def abi = output.getFilter(com.android.build.OutputFile.ABI) + if (abi == null) abi = "all" + def fileName = "mpapp_v${defaultConfig.versionName}_${buildTypeFlag}_${new Date(System.currentTimeMillis()).format("yyyyMMdd")}_${abi}.apk" outputFileName = fileName } } @@ -97,6 +111,13 @@ android { exclude 'META-INF/INDEX.LIST' exclude 'META-INF/io.netty.versions.properties' exclude 'META-INF/DEPENDENCIES' + exclude 'META-INF/LICENSE-notice.md' + exclude 'META-INF/LICENSE.md' + + jniLibs { + useLegacyPackaging true + } + } } @@ -119,9 +140,13 @@ dependencies { // implementation 'com.tencent:mmkv-static:1.3.0' // implementation project(path: ':opencv') implementation files('libs/devapi.aar') - debugImplementation files('libs/rtmp-client-debug.aar') - releaseImplementation files('libs/rtmp-client.aar') - implementation project(':gpuv') + // debugImplementation files('libs/rtmp-client-debug.aar') + implementation files('libs/android-openGL-canvas-1.5.4.0.aar') + implementation files('libs/rtmp-client.aar') + api project(':gpuv') + implementation project(':stream') + + implementation 'dev.mobile:dadb:1.2.7' // implementation group: 'io.netty', name: 'netty-all', version: '4.1.96.Final' // implementation 'io.netty:netty-all:4.1.23.Final' diff --git a/app/libs/android-openGL-canvas-1.5.4.0.aar b/app/libs/android-openGL-canvas-1.5.4.0.aar new file mode 100644 index 00000000..ee4506b7 Binary files /dev/null and b/app/libs/android-openGL-canvas-1.5.4.0.aar differ diff --git a/app/src/main/AndroidManifest.xml b/app/src/main/AndroidManifest.xml index 5355f78d..c4df279f 100644 --- a/app/src/main/AndroidManifest.xml +++ b/app/src/main/AndroidManifest.xml @@ -72,6 +72,15 @@ + + + + - + + + + android:exported="true" + android:grantUriPermissions="true" /> + 
android:exported="true" + android:process=":bridge_proc" + android:screenOrientation="landscape" /> - - - - \ No newline at end of file diff --git a/app/src/main/cpp/CMakeLists.txt b/app/src/main/cpp/CMakeLists.txt index e6913cd3..d9aaec2a 100644 --- a/app/src/main/cpp/CMakeLists.txt +++ b/app/src/main/cpp/CMakeLists.txt @@ -28,15 +28,21 @@ add_definitions(-DASIO_STANDALONE) add_definitions(-DUSING_XY_EXTENSION) # add_definitions(-DUSING_BREAK_PAD) add_definitions(-DSQLITE_THREADSAFE=1) +add_definitions(-DLIBRAW_NO_MEMPOOL_CHECK=1) +# add_definitions(-DHDRPLUS_NO_DETAILED_OUTPUT=1) add_definitions(-DHAVE_STRING_H) # for memcpy in md5.c add_definitions(-DUSING_NRSEC) +add_definitions(-DUSING_NRSEC_VPN) +# add_definitions(-DUSING_CERT) # add_definitions(-DUSING_DOWSE) # OUTPUT_CAMERA_DBG_INFO: 照片上打印CARERA相关信息 -add_definitions(-DOUTPUT_CAMERA_DBG_INFO) +# add_definitions(-DOUTPUT_CAMERA_DBG_INFO) add_definitions(-DALIGN_HB_TIMER_TO_PHOTO) +add_definitions(-DENABLE_3V3_ALWAYS) -# set(OpenCV_DIR D:/Workspace/deps/OpenCV-android-sdk/sdk/native/jni/) -set(OPENCV_EXTRA_MODULES_PATH D:/Workspace/Github/opencv_contrib/modules) +add_definitions(-DUSING_HDRPLUS) + +add_definitions(-DUSING_N938) # include_directories(${OpenCV_DIR}/include) # add_library( lib_opencv SHARED IMPORTED ) @@ -46,10 +52,7 @@ set(OPENCV_EXTRA_MODULES_PATH D:/Workspace/Github/opencv_contrib/modules) project("microphoto") -# message(FATAL_ERROR "OpenCV ${OpenCV_DIR}") - - -find_package(OpenCV REQUIRED core imgproc highgui) +find_package(OpenCV REQUIRED core imgproc highgui photo) # find_package(OpenCV REQUIRED core imgproc) if(OpenCV_FOUND) include_directories(${OpenCV_INCLUDE_DIRS}) @@ -69,7 +72,67 @@ endif(OpenCV_FOUND) set(ncnn_DIR ${NCNN_ROOT}/${ANDROID_ABI}/lib/cmake/ncnn) find_package(ncnn REQUIRED) -# include(mars/src/CMakeUtils.txt) + +include_directories(${CMAKE_CURRENT_SOURCE_DIR}/libcutils/include) +include_directories(${CMAKE_CURRENT_SOURCE_DIR}/libutils/include) 
+include_directories(${CMAKE_CURRENT_SOURCE_DIR}/img_utils/include) + +SET( IMG_UTILS_SRCS + "img_utils/src/EndianUtils.cpp" + #"img_utils/src/FileInput.cpp" + #"img_utils/src/FileOutput.cpp" + #"img_utils/src/SortedEntryVector.cpp" + "img_utils/src/Input.cpp" + "img_utils/src/Output.cpp" + "img_utils/src/Orderable.cpp" + "img_utils/src/TiffIfd.cpp" + "img_utils/src/TiffWritable.cpp" + "img_utils/src/TiffWriter.cpp" + "img_utils/src/TiffEntry.cpp" + "img_utils/src/TiffEntryImpl.cpp" + "img_utils/src/ByteArrayOutput.cpp" + "img_utils/src/DngUtils.cpp" + "img_utils/src/StripSource.cpp" + + libutils/SharedBuffer.cpp + libutils/StrongPointer.cpp + + DngCreator.cpp + ) + +message(WARNING "include_directories ${HDRPLUS_ROOT}/${ANDROID_ABI}/include") + +include_directories(${HDRPLUS_ROOT}/${ANDROID_ABI}/include) +link_directories(${HDRPLUS_ROOT}/${ANDROID_ABI}/lib) + +# message(WARNING "exiv2_DIR=${HDRPLUS_ROOT}/${ANDROID_ABI}/lib/cmake/exiv2") +# SET(exiv2_DIR ${HDRPLUS_ROOT}/${ANDROID_ABI}/lib/cmake/exiv2) +# list(APPEND CMAKE_PREFIX_PATH ${HDRPLUS_ROOT}/${ANDROID_ABI}/lib/cmake/exiv2) + +# find_package(exiv2 REQUIRED CONFIG NAMES exiv2) +# message(STATUS "Found Exiv2 and linked") + +# OpenMP +find_package(OpenMP REQUIRED) + + +# library +include_directories( ${CMAKE_CURRENT_SOURCE_DIR}/hdrplus/include ) + + +SET(HDRPLUS_LIBS raw exiv2 exiv2-xmp expat lcms2 OpenMP::OpenMP_CXX) + +SET(HDRPLUS_SOURCES + + hdrplus/src/align.cpp + hdrplus/src/bayer_image.cpp + hdrplus/src/burst.cpp + hdrplus/src/finish.cpp + hdrplus/src/hdrplus_pipeline.cpp + hdrplus/src/merge.cpp + hdrplus/src/params.cpp + + ) SET(YAMC_INC_DIR ${CMAKE_SOURCE_DIR}) @@ -236,31 +299,6 @@ include_directories(${FREETYPE_ROOT}/include) include_directories(${TERM_CORE_ROOT}) # include_directories(${PROJECT_SOURCE_DIR}/../../../../../libs/inc/) -#[[ -add_library( # Sets the name of the library. - evpp_lite - - # Sets the library as a shared library. - STATIC - - # Provides a relative path to your source file(s). 
- ${EVPP_SOURCES} ) -]] - -IF (CMAKE_HOST_SYSTEM_NAME MATCHES "Windows") - # set(BOOST_ROOT C:/ProgramData/boost_1_82_0/) - # set(BOOST_INCLUDEDIR C:/ProgramData/boost_1_82_0/) - include_directories(C:/ProgramData/boost_1_82_0/) -ELSE() - # find_package(Boost 1.58.0 COMPONENTS) - find_package(Boost 1.58.0) - if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIRS}) - else() - message(FATAL_ERROR "Boost Not Found") - endif() -endif() - # Creates and names a library, sets it as either STATIC # or SHARED, and provides the relative paths to its source code. # You can define multiple libraries, and CMake builds them for you. @@ -304,7 +342,7 @@ add_library( # Sets the name of the library. SerialPort.cpp #WeatherComm.cpp SensorsProtocol.cpp - serialComm.cpp + SerialComm.cpp ncnn/yolov5ncnn.cpp @@ -312,8 +350,11 @@ add_library( # Sets the name of the library. # camera2/OpenCVFont.cpp + ${HDRPLUS_SOURCES} ${CAMERA2_SOURCES} + ${IMG_UTILS_SRCS} + ${TERM_CORE_ROOT}/Factory.cpp ${TERM_CORE_ROOT}/FilePoster.cpp ${TERM_CORE_ROOT}/LogThread.cpp @@ -350,6 +391,7 @@ add_library( # Sets the name of the library. ${TERM_CORE_ROOT}/Client/Terminal_NW.cpp ${TERM_CORE_ROOT}/Client/UpgradeReceiver.cpp ${TERM_CORE_ROOT}/Client/Database.cpp + ${TERM_CORE_ROOT}/Client/SimulatorDevice.cpp ) @@ -370,9 +412,8 @@ find_library( # Sets the name of the path variable. # can link multiple libraries, such as libraries you define in this # build script, prebuilt third-party libraries, or system libraries. - target_link_libraries( # Specifies the target library. - microphoto + ${PROJECT_NAME} jsoncpp @@ -385,7 +426,7 @@ target_link_libraries( # Specifies the target library. 
android camera2ndk mediandk z - ncnn ${OpenCV_LIBS} sqlite3 + ncnn ${OpenCV_LIBS} sqlite3 ${HDRPLUS_LIBS} ) diff --git a/app/src/main/cpp/CvText.cpp b/app/src/main/cpp/CvText.cpp index 01dbcdd5..39be43a9 100644 --- a/app/src/main/cpp/CvText.cpp +++ b/app/src/main/cpp/CvText.cpp @@ -354,7 +354,18 @@ namespace cv { mvFn(NULL, (void*)userData); // Update current position ( in FreeType coordinates ) +#if defined(USING_HB) currentPos.x += mFace->glyph->advance.x; +#else + if (wstr[i] == ' ') + { + currentPos.x += mFace->glyph->advance.x << 1; + } + else + { + currentPos.x += mFace->glyph->advance.x; + } +#endif currentPos.y += mFace->glyph->advance.y; } delete userData; @@ -1036,7 +1047,19 @@ namespace cv { (this->*putPixel)(dst, gPos.y + row, gPos.x + col, _colorUC8n, cl); } } - _org.x += (mFace->glyph->advance.x) >> 6; + +#if defined(USING_HB) + _org.x += (mFace->glyph->advance.x) >> 6; +#else + if (wstr[i] == ' ') + { + _org.x += ((mFace->glyph->advance.x) >> 6) << 1; + } + else + { + _org.x += (mFace->glyph->advance.x) >> 6; + } +#endif _org.y += (mFace->glyph->advance.y) >> 6; } @@ -1045,7 +1068,6 @@ namespace cv { #endif } - int FreeType2Impl::mvFn(const FT_Vector *to, void * user) { if (user == NULL) { return 1; } diff --git a/app/src/main/cpp/DngCreator.cpp b/app/src/main/cpp/DngCreator.cpp new file mode 100644 index 00000000..0c1727fb --- /dev/null +++ b/app/src/main/cpp/DngCreator.cpp @@ -0,0 +1,2639 @@ +//#define LOG_NDEBUG 0 +#define LOG_TAG "DngCreator_JNI" +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "DngCreator.h" + +// #include "core_jni_helpers.h" + +// #include "android_runtime/AndroidRuntime.h" +// #include "android_runtime/android_hardware_camera2_CameraMetadata.h" + +#include +// #include + +using namespace android; +using namespace img_utils; +// using android::base::GetProperty; + + 
+ByteVectorOutput::ByteVectorOutput(std::vector& buf) : m_buf(buf) +{ +} +ByteVectorOutput::~ByteVectorOutput() +{ +} +status_t ByteVectorOutput::open() +{ + return OK; +} +status_t ByteVectorOutput::close() +{ + return OK; +} + +status_t ByteVectorOutput::write(const uint8_t* buf, size_t offset, size_t count) +{ + m_buf.insert(m_buf.end(), buf + offset, buf + offset + count); + return OK; +} + +ByteVectorInput::ByteVectorInput(const std::vector& buf) : m_buf(buf), m_offset(0) +{ +} + +ByteVectorInput::~ByteVectorInput() +{ +} + +status_t ByteVectorInput::open() +{ + return OK; +} +ssize_t ByteVectorInput::read(uint8_t* buf, size_t offset, size_t count) +{ + if (m_buf.empty() || m_offset >= m_buf.size()) + { + return NOT_ENOUGH_DATA; + } + + size_t left = m_buf.size() - m_offset; + if (left >= count) + { + memcpy(buf + offset, &m_buf[m_offset], count); + m_offset += count; + return count; + } + else + { + memcpy(buf + offset, &m_buf[m_offset], left); + m_offset += left; + return left; + } +} +/** + * Skips bytes in the input. + * + * Returns the number of bytes skipped, or NOT_ENOUGH_DATA if at the end of the file. If an + * error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA. + */ +ssize_t ByteVectorInput::skip(size_t count) +{ + size_t left = m_buf.size() - m_offset; + if (left >= count) + { + m_offset += count; + return count; + } + else + { + m_offset += left; + return left; + } +} + +/** + * Close the Input. It is not valid to call open on a previously closed Input. + * + * Returns OK on success, or a negative error code. 
+ */ +status_t ByteVectorInput::close() +{ + return OK; +} + + +ByteBufferInput::ByteBufferInput(const uint8_t* buf, size_t len) : m_buf(buf), m_len(len), m_offset(0) +{ +} + +ByteBufferInput::~ByteBufferInput() +{ +} + +status_t ByteBufferInput::open() +{ + return OK; +} +ssize_t ByteBufferInput::read(uint8_t* buf, size_t offset, size_t count) +{ + if (m_buf == NULL || m_offset >= m_len) + { + return NOT_ENOUGH_DATA; + } + + size_t left = m_len - m_offset; + if (left >= count) + { + memcpy(buf + offset, m_buf + m_offset, count); + m_offset += count; + return count; + } + else + { + memcpy(buf + offset, m_buf + m_offset, left); + m_offset += left; + return left; + } +} +/** + * Skips bytes in the input. + * + * Returns the number of bytes skipped, or NOT_ENOUGH_DATA if at the end of the file. If an + * error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA. + */ +ssize_t ByteBufferInput::skip(size_t count) +{ + size_t left = m_len - m_offset; + if (left >= count) + { + m_offset += count; + return count; + } + else + { + m_offset += left; + return left; + } +} + +status_t ByteBufferInput::close() +{ + return OK; +} + + +/** + * Convert a single YUV pixel to RGB. + */ +static void yuvToRgb(const uint8_t yuvData[3], int outOffset, /*out*/uint8_t rgbOut[3]) { + const int COLOR_MAX = 255; + + float y = yuvData[0] & 0xFF; // Y channel + float cb = yuvData[1] & 0xFF; // U channel + float cr = yuvData[2] & 0xFF; // V channel + + // convert YUV -> RGB (from JFIF's "Conversion to and from RGB" section) + float r = y + 1.402f * (cr - 128); + float g = y - 0.34414f * (cb - 128) - 0.71414f * (cr - 128); + float b = y + 1.772f * (cb - 128); + + // clamp to [0,255] + rgbOut[outOffset] = (uint8_t) std::max(0, std::min(COLOR_MAX, (int)r)); + rgbOut[outOffset + 1] = (uint8_t) std::max(0, std::min(COLOR_MAX, (int)g)); + rgbOut[outOffset + 2] = (uint8_t) std::max(0, std::min(COLOR_MAX, (int)b)); +} + +/** + * Convert a single {@link Color} pixel to RGB. 
+ */ +static void colorToRgb(int color, int outOffset, /*out*/uint8_t rgbOut[3]) { + rgbOut[outOffset] = (uint8_t)(color >> 16) & 0xFF; + rgbOut[outOffset + 1] = (uint8_t)(color >> 8) & 0xFF; // color >> 8)&0xFF + rgbOut[outOffset + 2] = (uint8_t) color & 0xFF; + // Discards Alpha +} + +/** + * Generate a direct RGB {@link ByteBuffer} from a YUV420_888 {@link Image}. + */ +#if 0 +static ByteBuffer convertToRGB(Image yuvImage) { + // TODO: Optimize this with renderscript intrinsic. + int width = yuvImage.getWidth(); + int height = yuvImage.getHeight(); + ByteBuffer buf = ByteBuffer.allocateDirect(BYTES_PER_RGB_PIX * width * height); + + Image.Plane yPlane = yuvImage.getPlanes()[0]; + Image.Plane uPlane = yuvImage.getPlanes()[1]; + Image.Plane vPlane = yuvImage.getPlanes()[2]; + + ByteBuffer yBuf = yPlane.getBuffer(); + ByteBuffer uBuf = uPlane.getBuffer(); + ByteBuffer vBuf = vPlane.getBuffer(); + + yBuf.rewind(); + uBuf.rewind(); + vBuf.rewind(); + + int yRowStride = yPlane.getRowStride(); + int vRowStride = vPlane.getRowStride(); + int uRowStride = uPlane.getRowStride(); + + int yPixStride = yPlane.getPixelStride(); + int vPixStride = vPlane.getPixelStride(); + int uPixStride = uPlane.getPixelStride(); + + byte[] yuvPixel = { 0, 0, 0 }; + byte[] yFullRow = new byte[yPixStride * (width - 1) + 1]; + byte[] uFullRow = new byte[uPixStride * (width / 2 - 1) + 1]; + byte[] vFullRow = new byte[vPixStride * (width / 2 - 1) + 1]; + byte[] finalRow = new byte[BYTES_PER_RGB_PIX * width]; + for (int i = 0; i < height; i++) { + int halfH = i / 2; + yBuf.position(yRowStride * i); + yBuf.get(yFullRow); + uBuf.position(uRowStride * halfH); + uBuf.get(uFullRow); + vBuf.position(vRowStride * halfH); + vBuf.get(vFullRow); + for (int j = 0; j < width; j++) { + int halfW = j / 2; + yuvPixel[0] = yFullRow[yPixStride * j]; + yuvPixel[1] = uFullRow[uPixStride * halfW]; + yuvPixel[2] = vFullRow[vPixStride * halfW]; + yuvToRgb(yuvPixel, j * BYTES_PER_RGB_PIX, /*out*/finalRow); + } + 
buf.put(finalRow); + } + + yBuf.rewind(); + uBuf.rewind(); + vBuf.rewind(); + buf.rewind(); + return buf; + } +#endif + + + + DngCreator::DngCreator(ACameraMetadata* characteristics, ACameraMetadata* result) : NativeContext(characteristics, result) + { + // Find current time + time_t ts = time(NULL); + + // Find boot time + // long bootTimeMillis = currentTime - SystemClock.elapsedRealtime(); + + // Find capture time (nanos since boot) +#if 0 + Long timestamp = metadata.get(CaptureResult.SENSOR_TIMESTAMP); + long captureTime = currentTime; + if (timestamp != null) { + captureTime = timestamp / 1000000 + bootTimeMillis; + } + + // Format for metadata + String formattedCaptureTime = sDateTimeStampFormat.format(captureTime); +#endif + + std::string formattedCaptureTime; + init(characteristics, result, formattedCaptureTime); + } + + +#if 0 + void DngCreator::setLocation(Location location) + { + double latitude = location.getLatitude(); + double longitude = location.getLongitude(); + long time = location.getTime(); + + int[] latTag = toExifLatLong(latitude); + int[] longTag = toExifLatLong(longitude); + String latRef = latitude >= 0 ? GPS_LAT_REF_NORTH : GPS_LAT_REF_SOUTH; + String longRef = longitude >= 0 ? 
GPS_LONG_REF_EAST : GPS_LONG_REF_WEST; + + String dateTag = sExifGPSDateStamp.format(time); + mGPSTimeStampCalendar.setTimeInMillis(time); + int[] timeTag = new int[] { mGPSTimeStampCalendar.get(Calendar.HOUR_OF_DAY), 1, + mGPSTimeStampCalendar.get(Calendar.MINUTE), 1, + mGPSTimeStampCalendar.get(Calendar.SECOND), 1 }; + nativeSetGpsTags(latTag, latRef, longTag, longRef, dateTag, timeTag); + } +#endif + + void DngCreator::writeInputStream(std::vector& dngOutput, SIZE size, const std::vector& pixels, long offset) + { + int width = size.width; + int height = size.height; + if (width <= 0 || height <= 0) { +#if 0 + throw new IllegalArgumentException("Size with invalid width, height: (" + width + "," + + height + ") passed to writeInputStream"); +#endif + } + writeInputStream(dngOutput, pixels, width, height, offset); + } + + void DngCreator::writeByteBuffer(std::vector& dngOutput, SIZE size, const std::vector& pixels, long offset) + { + int width = size.width; + int height = size.height; + + writeByteBuffer(width, height, pixels, dngOutput, DEFAULT_PIXEL_STRIDE, + width * DEFAULT_PIXEL_STRIDE, offset); + } + +#if 0 + void DngCreator::writeImage(OutputStream& dngOutput, AImage& pixels) + { + int format = pixels.getFormat(); + if (format != ImageFormat.RAW_SENSOR) { + + } + + Image.Plane[] planes = pixels.getPlanes(); + if (planes == null || planes.length <= 0) { + + } + + ByteBuffer buf = planes[0].getBuffer(); + writeByteBuffer(pixels.getWidth(), pixels.getHeight(), buf, dngOutput, + planes[0].getPixelStride(), planes[0].getRowStride(), 0); + } +#endif + + void DngCreator::close() { + + } + + // private static final DateFormat sExifGPSDateStamp = new SimpleDateFormat(GPS_DATE_FORMAT_STR); + // private static final DateFormat sDateTimeStampFormat = new SimpleDateFormat(TIFF_DATETIME_FORMAT); +#if 0 + static { + sDateTimeStampFormat.setTimeZone(TimeZone.getDefault()); + sExifGPSDateStamp.setTimeZone(TimeZone.getTimeZone("UTC")); + } +#endif + + /** + * Offset, 
rowStride, and pixelStride are given in bytes. Height and width are given in pixels. + */ + void DngCreator::writeByteBuffer(int width, int height, const std::vector& pixels, std::vector& dngOutput, int pixelStride, int rowStride, long offset) + { + if (width <= 0 || height <= 0) { + } + long capacity = pixels.capacity(); + long totalSize = ((long) rowStride) * height + offset; + if (capacity < totalSize) { +#if 0 + throw new IllegalArgumentException("Image size " + capacity + + " is too small (must be larger than " + totalSize + ")"); +#endif + } + int minRowStride = pixelStride * width; + if (minRowStride > rowStride) { +#if 0 + throw new IllegalArgumentException("Invalid image pixel stride, row byte width " + + minRowStride + " is too large, expecting " + rowStride); +#endif + } + // pixels.clear(); // Reset mark and limit + writeImage(dngOutput, width, height, pixels, rowStride, pixelStride, offset, true); + // pixels.clear(); + } + + + /** + * Generate a direct RGB {@link ByteBuffer} from a {@link Bitmap}. + */ +#if 0 + static ByteBuffer DngCreator::convertToRGB(Bitmap argbBitmap) { + // TODO: Optimize this. + int width = argbBitmap.getWidth(); + int height = argbBitmap.getHeight(); + ByteBuffer buf = ByteBuffer.allocateDirect(BYTES_PER_RGB_PIX * width * height); + + int[] pixelRow = new int[width]; + byte[] finalRow = new byte[BYTES_PER_RGB_PIX * width]; + for (int i = 0; i < height; i++) { + argbBitmap.getPixels(pixelRow, /*offset*/0, /*stride*/width, /*x*/0, /*y*/i, + /*width*/width, /*height*/1); + for (int j = 0; j < width; j++) { + colorToRgb(pixelRow[j], j * BYTES_PER_RGB_PIX, /*out*/finalRow); + } + buf.put(finalRow); + } + + buf.rewind(); + return buf; + } +#endif + + /** + * Convert coordinate to EXIF GPS tag format. 
+ */ + void DngCreator::toExifLatLong(double value, int data[6]) + { + // convert to the format dd/1 mm/1 ssss/100 + value = std::abs(value); + data[0] = (int) value; + data[1] = 1; + value = (value - data[0]) * 60; + data[2] = (int) value; + data[3] = 1; + value = (value - data[2]) * 6000; + data[4] = (int) value; + data[5] = 100; + } + + + +NativeContext::NativeContext(ACameraMetadata* characteristics, ACameraMetadata* result) : + mCharacteristics(characteristics), mResult(result), mThumbnailWidth(0), + mThumbnailHeight(0), mOrientation(TAG_ORIENTATION_UNKNOWN), mThumbnailSet(false), + mGpsSet(false), mDescriptionSet(false), mCaptureTimeSet(false) {} + +NativeContext::~NativeContext() {} + +TiffWriter* NativeContext::getWriter() { + return &mWriter; +} + +ACameraMetadata* NativeContext::getCharacteristics() const { + return mCharacteristics; +} + +ACameraMetadata* NativeContext::getResult() const { + return mResult; +} + +uint32_t NativeContext::getThumbnailWidth() const { + return mThumbnailWidth; +} + +uint32_t NativeContext::getThumbnailHeight() const { + return mThumbnailHeight; +} + +const uint8_t* NativeContext::getThumbnail() const { + return &mCurrentThumbnail[0]; +} + +bool NativeContext::hasThumbnail() const { + return mThumbnailSet; +} + +bool NativeContext::setThumbnail(const std::vector& buffer, uint32_t width, uint32_t height) { + mThumbnailWidth = width; + mThumbnailHeight = height; + + size_t size = BYTES_PER_RGB_PIXEL * width * height; + mCurrentThumbnail.resize(size); + //if (mCurrentThumbnail.resize(size) < 0) { + // ALOGE("%s: Could not resize thumbnail buffer.", __FUNCTION__); + // return false; + //} + + // uint8_t* thumb = mCurrentThumbnail.editArray(); + memcpy(&mCurrentThumbnail[0], &buffer[0], size); + mThumbnailSet = true; + return true; +} + +void NativeContext::setOrientation(uint16_t orientation) { + mOrientation = orientation; +} + +uint16_t NativeContext::getOrientation() const { + return mOrientation; +} + +void 
NativeContext::setDescription(const std::string& desc) { + mDescription = desc; + mDescriptionSet = true; +} + +std::string NativeContext::getDescription() const { + return mDescription; +} + +bool NativeContext::hasDescription() const { + return mDescriptionSet; +} + +void NativeContext::setGpsData(const GpsData& data) { + mGpsData = data; + mGpsSet = true; +} + +GpsData NativeContext::getGpsData() const { + return mGpsData; +} + +bool NativeContext::hasGpsData() const { + return mGpsSet; +} + +void NativeContext::setCaptureTime(const std::string& formattedCaptureTime) { + mFormattedCaptureTime = formattedCaptureTime; + mCaptureTimeSet = true; +} + +std::string NativeContext::getCaptureTime() const { + return mFormattedCaptureTime; +} + +bool NativeContext::hasCaptureTime() const { + return mCaptureTimeSet; +} + +// End of NativeContext +// ---------------------------------------------------------------------------- + + +/** + * StripSource subclass for Input types. + * + * This class is not intended to be used across JNI calls. 
+ */ + +class InputStripSource : public StripSource, public LightRefBase { +public: + InputStripSource(Input& input, uint32_t ifd, uint32_t width, uint32_t height, + uint32_t pixStride, uint32_t rowStride, uint64_t offset, uint32_t bytesPerSample, + uint32_t samplesPerPixel); + + virtual ~InputStripSource(); + + virtual status_t writeToStream(Output& stream, uint32_t count); + + virtual uint32_t getIfd() const; +protected: + uint32_t mIfd; + Input* mInput; + uint32_t mWidth; + uint32_t mHeight; + uint32_t mPixStride; + uint32_t mRowStride; + uint64_t mOffset; + uint32_t mBytesPerSample; + uint32_t mSamplesPerPixel; +}; + +InputStripSource::InputStripSource(Input& input, uint32_t ifd, uint32_t width, + uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset, + uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd), mInput(&input), + mWidth(width), mHeight(height), mPixStride(pixStride), mRowStride(rowStride), + mOffset(offset), mBytesPerSample(bytesPerSample), + mSamplesPerPixel(samplesPerPixel) {} + +InputStripSource::~InputStripSource() {} + +status_t InputStripSource::writeToStream(Output& stream, uint32_t count) { + uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel; + jlong offset = mOffset; + + if (fullSize != count) { + ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count, + fullSize); + // jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write"); + return BAD_VALUE; + } + + // Skip offset + while (offset > 0) { + ssize_t skipped = mInput->skip(offset); + if (skipped <= 0) { + if (skipped == NOT_ENOUGH_DATA || skipped == 0) { +#if 0 + jniThrowExceptionFmt(mEnv, "java/io/IOException", + "Early EOF encountered in skip, not enough pixel data for image of size %u", + fullSize); +#endif + skipped = NOT_ENOUGH_DATA; + } else { +#if 0 + if (!mEnv->ExceptionCheck()) { + + jniThrowException(mEnv, "java/io/IOException", + "Error encountered while skip bytes in input 
stream."); + } +#endif + } + + return skipped; + } + offset -= skipped; + } + + std::vector row; + row.resize(mRowStride); +#if 0 + if (row.resize(mRowStride) < 0) { + jniThrowException(mEnv, "java/lang/OutOfMemoryError", "Could not allocate row vector."); + return BAD_VALUE; + } +#endif + + uint8_t* rowBytes = &row[0]; + + for (uint32_t i = 0; i < mHeight; ++i) { + size_t rowFillAmt = 0; + size_t rowSize = mRowStride; + + while (rowFillAmt < mRowStride) { + ssize_t bytesRead = mInput->read(rowBytes, rowFillAmt, rowSize); + if (bytesRead <= 0) { + if (bytesRead == NOT_ENOUGH_DATA || bytesRead == 0) { + ALOGE("%s: Early EOF on row %" PRIu32 ", received bytesRead %zd", + __FUNCTION__, i, bytesRead); +#if 0 + jniThrowExceptionFmt(mEnv, "java/io/IOException", + "Early EOF encountered, not enough pixel data for image of size %" + PRIu32, fullSize); +#endif + bytesRead = NOT_ENOUGH_DATA; + } else { +#if 0 + if (!mEnv->ExceptionCheck()) { + jniThrowException(mEnv, "java/io/IOException", + "Error encountered while reading"); + } +#endif + } + return bytesRead; + } + rowFillAmt += bytesRead; + rowSize -= bytesRead; + } + + if (mPixStride == mBytesPerSample * mSamplesPerPixel) { + ALOGV("%s: Using stream per-row write for strip.", __FUNCTION__); + + if (stream.write(rowBytes, 0, mBytesPerSample * mSamplesPerPixel * mWidth) != OK) { +#if 0 + if (!mEnv->ExceptionCheck()) { + jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data"); + } +#endif + return BAD_VALUE; + } + } else { + ALOGV("%s: Using stream per-pixel write for strip.", __FUNCTION__); +#if 0 + jniThrowException(mEnv, "java/lang/IllegalStateException", + "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous"); +#endif + return BAD_VALUE; + + // TODO: Add support for non-contiguous pixels if needed. 
+ } + } + return OK; +} + +uint32_t InputStripSource::getIfd() const { + return mIfd; +} + +// End of InputStripSource +// ---------------------------------------------------------------------------- + +/** + * StripSource subclass for direct buffer types. + * + * This class is not intended to be used across JNI calls. + */ + +class DirectStripSource : public StripSource, public LightRefBase { +public: + DirectStripSource(const uint8_t* pixelBytes, uint32_t ifd, uint32_t width, + uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset, + uint32_t bytesPerSample, uint32_t samplesPerPixel); + + virtual ~DirectStripSource(); + + virtual status_t writeToStream(Output& stream, uint32_t count); + + virtual uint32_t getIfd() const; +protected: + uint32_t mIfd; + const uint8_t* mPixelBytes; + uint32_t mWidth; + uint32_t mHeight; + uint32_t mPixStride; + uint32_t mRowStride; + uint16_t mOffset; + uint32_t mBytesPerSample; + uint32_t mSamplesPerPixel; +}; + +DirectStripSource::DirectStripSource(const uint8_t* pixelBytes, uint32_t ifd, + uint32_t width, uint32_t height, uint32_t pixStride, uint32_t rowStride, + uint64_t offset, uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd), + mPixelBytes(pixelBytes), mWidth(width), mHeight(height), mPixStride(pixStride), + mRowStride(rowStride), mOffset(offset), mBytesPerSample(bytesPerSample), + mSamplesPerPixel(samplesPerPixel) {} + +DirectStripSource::~DirectStripSource() {} + +status_t DirectStripSource::writeToStream(Output& stream, uint32_t count) { + uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel; + + if (fullSize != count) { + ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count, + fullSize); +#if 0 + jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write"); +#endif + return BAD_VALUE; + } + + + if (mPixStride == mBytesPerSample * mSamplesPerPixel + && mRowStride == mWidth * mBytesPerSample * mSamplesPerPixel) { + 
ALOGV("%s: Using direct single-pass write for strip.", __FUNCTION__); + + if (stream.write(mPixelBytes, mOffset, fullSize) != OK) { +#if 0 + if (!mEnv->ExceptionCheck()) { + jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data"); + } +#endif + return BAD_VALUE; + } + } else if (mPixStride == mBytesPerSample * mSamplesPerPixel) { + ALOGV("%s: Using direct per-row write for strip.", __FUNCTION__); + + for (size_t i = 0; i < mHeight; ++i) { + if (stream.write(mPixelBytes, mOffset + i * mRowStride, mPixStride * mWidth) != OK/* || + mEnv->ExceptionCheck()*/) { +#if 0 + if (!mEnv->ExceptionCheck()) { + jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data"); + } +#endif + return BAD_VALUE; + } + } + } else { + ALOGV("%s: Using direct per-pixel write for strip.", __FUNCTION__); +#if 0 + jniThrowException(mEnv, "java/lang/IllegalStateException", + "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous"); +#endif + return BAD_VALUE; + + // TODO: Add support for non-contiguous pixels if needed. + } + return OK; + +} + +uint32_t DirectStripSource::getIfd() const { + return mIfd; +} + +// End of DirectStripSource +// ---------------------------------------------------------------------------- + +/** + * Calculate the default crop relative to the "active area" of the image sensor (this active area + * will always be the pre-correction active area rectangle), and set this. 
+ */ +static status_t calculateAndSetCrop(ACameraMetadata* characteristics, + sp writer) { + + ACameraMetadata_const_entry entry = { 0 }; + // ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE + // ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE + camera_status_t status = ACameraMetadata_getConstEntry(characteristics, + ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, &entry); + uint32_t width = static_cast(entry.data.i32[2]); + uint32_t height = static_cast(entry.data.i32[3]); + + const uint32_t margin = 8; // Default margin recommended by Adobe for interpolation. + + if (width < margin * 2 || height < margin * 2) { + ALOGE("%s: Cannot calculate default crop for image, pre-correction active area is too" + "small: h=%" PRIu32 ", w=%" PRIu32, __FUNCTION__, height, width); +#if 0 + jniThrowException(env, "java/lang/IllegalStateException", + "Pre-correction active area is too small."); +#endif + return BAD_VALUE; + } + + uint32_t defaultCropOrigin[] = {margin, margin}; + uint32_t defaultCropSize[] = {width - defaultCropOrigin[0] - margin, + height - defaultCropOrigin[1] - margin}; + + BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPORIGIN, 2, defaultCropOrigin, + TIFF_IFD_0), env, TAG_DEFAULTCROPORIGIN, writer); + BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPSIZE, 2, defaultCropSize, + TIFF_IFD_0), env, TAG_DEFAULTCROPSIZE, writer); + + return OK; +} + +static bool validateDngHeader(sp writer, ACameraMetadata* characteristics, uint32_t width, uint32_t height) +{ + if (width <= 0 || height <= 0) { +#if 0 + jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \ + "Image width %d is invalid", width); +#endif + return false; + } + + ACameraMetadata_const_entry preCorrectionEntry = { 0 }; + camera_status_t status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, &preCorrectionEntry); + ACameraMetadata_const_entry pixelArrayEntry = { 0 }; + status = 
ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE, &pixelArrayEntry); + + int pWidth = static_cast(pixelArrayEntry.data.i32[0]); + int pHeight = static_cast(pixelArrayEntry.data.i32[1]); + int cWidth = static_cast(preCorrectionEntry.data.i32[2]); + int cHeight = static_cast(preCorrectionEntry.data.i32[3]); + + bool matchesPixelArray = (pWidth == width && pHeight == height); + bool matchesPreCorrectionArray = (cWidth == width && cHeight == height); + + if (!(matchesPixelArray || matchesPreCorrectionArray)) { +#if 0 + jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \ + "Image dimensions (w=%d,h=%d) are invalid, must match either the pixel " + "array size (w=%d, h=%d) or the pre-correction array size (w=%d, h=%d)", + width, height, pWidth, pHeight, cWidth, cHeight); +#endif + return false; + } + + return true; +} + +static status_t moveEntries(sp writer, uint32_t ifdFrom, uint32_t ifdTo, + const std::vector& entries) { + for (size_t i = 0; i < entries.size(); ++i) { + uint16_t tagId = entries[i]; + sp entry = writer->getEntry(tagId, ifdFrom); + if (entry.get() == nullptr) { + ALOGE("%s: moveEntries failed, entry %u not found in IFD %u", __FUNCTION__, tagId, + ifdFrom); + return BAD_VALUE; + } + if (writer->addEntry(entry, ifdTo) != OK) { + ALOGE("%s: moveEntries failed, could not add entry %u to IFD %u", __FUNCTION__, tagId, + ifdFrom); + return BAD_VALUE; + } + writer->removeEntry(tagId, ifdFrom); + } + return OK; +} + +/** + * Write CFA pattern for given CFA enum into cfaOut. cfaOut must have length >= 4. + * Returns OK on success, or a negative error code if the CFA enum was invalid. 
+ */ +static status_t convertCFA(uint8_t cfaEnum, /*out*/uint8_t* cfaOut) { + acamera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa = + static_cast( + cfaEnum); + switch(cfa) { + case ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: { + cfaOut[0] = 0; + cfaOut[1] = 1; + cfaOut[2] = 1; + cfaOut[3] = 2; + break; + } + case ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: { + cfaOut[0] = 1; + cfaOut[1] = 0; + cfaOut[2] = 2; + cfaOut[3] = 1; + break; + } + case ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: { + cfaOut[0] = 1; + cfaOut[1] = 2; + cfaOut[2] = 0; + cfaOut[3] = 1; + break; + } + case ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: { + cfaOut[0] = 2; + cfaOut[1] = 1; + cfaOut[2] = 1; + cfaOut[3] = 0; + break; + } + // MONO and NIR are degenerate case of RGGB pattern: only Red channel + // will be used. + case ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO: + case ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR: { + cfaOut[0] = 0; + break; + } + default: { + return BAD_VALUE; + } + } + return OK; +} + +/** + * Convert the CFA layout enum to an OpcodeListBuilder::CfaLayout enum, defaults to + * RGGB for an unknown enum. + */ +static OpcodeListBuilder::CfaLayout convertCFAEnumToOpcodeLayout(uint8_t cfaEnum) { + acamera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa = + static_cast( + cfaEnum); + switch(cfa) { + case ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: { + return OpcodeListBuilder::CFA_RGGB; + } + case ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: { + return OpcodeListBuilder::CFA_GRBG; + } + case ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: { + return OpcodeListBuilder::CFA_GBRG; + } + case ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: { + return OpcodeListBuilder::CFA_BGGR; + } + default: { + return OpcodeListBuilder::CFA_RGGB; + } + } +} + +/** + * For each color plane, find the corresponding noise profile coefficients given in the + * per-channel noise profile. 
If multiple channels in the CFA correspond to a color in the color + * plane, this method takes the pair of noise profile coefficients with the higher S coefficient. + * + * perChannelNoiseProfile - numChannels * 2 noise profile coefficients. + * cfa - numChannels color channels corresponding to each of the per-channel noise profile + * coefficients. + * numChannels - the number of noise profile coefficient pairs and color channels given in + * the perChannelNoiseProfile and cfa arguments, respectively. + * planeColors - the color planes in the noise profile output. + * numPlanes - the number of planes in planeColors and pairs of coefficients in noiseProfile. + * noiseProfile - 2 * numPlanes doubles containing numPlanes pairs of noise profile coefficients. + * + * returns OK, or a negative error code on failure. + */ +static status_t generateNoiseProfile(const double* perChannelNoiseProfile, uint8_t* cfa, + size_t numChannels, const uint8_t* planeColors, size_t numPlanes, + /*out*/double* noiseProfile) { + + for (size_t p = 0; p < numPlanes; ++p) { + size_t S = p * 2; + size_t O = p * 2 + 1; + + noiseProfile[S] = 0; + noiseProfile[O] = 0; + bool uninitialized = true; + for (size_t c = 0; c < numChannels; ++c) { + if (cfa[c] == planeColors[p] && perChannelNoiseProfile[c * 2] > noiseProfile[S]) { + noiseProfile[S] = perChannelNoiseProfile[c * 2]; + noiseProfile[O] = perChannelNoiseProfile[c * 2 + 1]; + uninitialized = false; + } + } + if (uninitialized) { + ALOGE("%s: No valid NoiseProfile coefficients for color plane %zu", + __FUNCTION__, p); + return BAD_VALUE; + } + } + return OK; +} + +static void undistort(/*inout*/double& x, /*inout*/double& y, + const std::array& distortion, + const float cx, const float cy, const float f) { + double xp = (x - cx) / f; + double yp = (y - cy) / f; + + double x2 = xp * xp; + double y2 = yp * yp; + double r2 = x2 + y2; + double xy2 = 2.0 * xp * yp; + + const float k0 = distortion[0]; + const float k1 = distortion[1]; + const 
float k2 = distortion[2]; + const float k3 = distortion[3]; + const float p1 = distortion[4]; + const float p2 = distortion[5]; + + double kr = k0 + ((k3 * r2 + k2) * r2 + k1) * r2; + double xpp = xp * kr + p1 * xy2 + p2 * (r2 + 2.0 * x2); + double ypp = yp * kr + p1 * (r2 + 2.0 * y2) + p2 * xy2; + + x = xpp * f + cx; + y = ypp * f + cy; + return; +} + +static inline bool unDistortWithinPreCorrArray( + double x, double y, + const std::array& distortion, + const float cx, const float cy, const float f, + const int preCorrW, const int preCorrH, const int xMin, const int yMin) { + undistort(x, y, distortion, cx, cy, f); + // xMin and yMin are inclusive, and xMax and yMax are exclusive. + int xMax = xMin + preCorrW; + int yMax = yMin + preCorrH; + if (x < xMin || y < yMin || x >= xMax || y >= yMax) { + return false; + } + return true; +} + +static inline bool boxWithinPrecorrectionArray( + int left, int top, int right, int bottom, + const std::array& distortion, + const float cx, const float cy, const float f, + const int preCorrW, const int preCorrH, const int xMin, const int yMin){ + // Top row + if (!unDistortWithinPreCorrArray(left, top, + distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) { + return false; + } + + if (!unDistortWithinPreCorrArray(cx, top, + distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) { + return false; + } + + if (!unDistortWithinPreCorrArray(right, top, + distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) { + return false; + } + + // Middle row + if (!unDistortWithinPreCorrArray(left, cy, + distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) { + return false; + } + + if (!unDistortWithinPreCorrArray(right, cy, + distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) { + return false; + } + + // Bottom row + if (!unDistortWithinPreCorrArray(left, bottom, + distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) { + return false; + } + + if (!unDistortWithinPreCorrArray(cx, bottom, + distortion, cx, cy, f, preCorrW, 
preCorrH, xMin, yMin)) { + return false; + } + + if (!unDistortWithinPreCorrArray(right, bottom, + distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) { + return false; + } + return true; +} + +static inline bool scaledBoxWithinPrecorrectionArray( + double scale/*must be <= 1.0*/, + const std::array& distortion, + const float cx, const float cy, const float f, + const int preCorrW, const int preCorrH, + const int xMin, const int yMin){ + + double left = cx * (1.0 - scale); + double right = (preCorrW - 1) * scale + cx * (1.0 - scale); + double top = cy * (1.0 - scale); + double bottom = (preCorrH - 1) * scale + cy * (1.0 - scale); + + return boxWithinPrecorrectionArray(left, top, right, bottom, + distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin); +} + +static status_t findPostCorrectionScale( + double stepSize, double minScale, + const std::array& distortion, + const float cx, const float cy, const float f, + const int preCorrW, const int preCorrH, const int xMin, const int yMin, + /*out*/ double* outScale) { + if (outScale == nullptr) { + ALOGE("%s: outScale must not be null", __FUNCTION__); + return BAD_VALUE; + } + + for (double scale = 1.0; scale > minScale; scale -= stepSize) { + if (scaledBoxWithinPrecorrectionArray( + scale, distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) { + *outScale = scale; + return OK; + } + } + ALOGE("%s: cannot find cropping scale for lens distortion: stepSize %f, minScale %f", + __FUNCTION__, stepSize, minScale); + return BAD_VALUE; +} + +// Apply a scale factor to distortion coefficients so that the image is zoomed out and all pixels +// are sampled within the precorrection array +static void normalizeLensDistortion( + /*inout*/std::array& distortion, + float cx, float cy, float f, int preCorrW, int preCorrH, int xMin = 0, int yMin = 0) { + ALOGV("%s: distortion [%f, %f, %f, %f, %f, %f], (cx,cy) (%f, %f), f %f, (W,H) (%d, %d)" + ", (xmin, ymin, xmax, ymax) (%d, %d, %d, %d)", + __FUNCTION__, distortion[0], 
distortion[1], distortion[2], + distortion[3], distortion[4], distortion[5], + cx, cy, f, preCorrW, preCorrH, + xMin, yMin, xMin + preCorrW - 1, yMin + preCorrH - 1); + + // Only update distortion coeffients if we can find a good bounding box + double scale = 1.0; + if (OK == findPostCorrectionScale(0.002, 0.5, + distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin, + /*out*/&scale)) { + ALOGV("%s: scaling distortion coefficients by %f", __FUNCTION__, scale); + // The formula: + // xc = xi * (k0 + k1*r^2 + k2*r^4 + k3*r^6) + k4 * (2*xi*yi) + k5 * (r^2 + 2*xi^2) + // To create effective zoom we want to replace xi by xi *m, yi by yi*m and r^2 by r^2*m^2 + // Factor the extra m power terms into k0~k6 + std::array scalePowers = {1, 3, 5, 7, 2, 2}; + for (size_t i = 0; i < 6; i++) { + distortion[i] *= pow(scale, scalePowers[i]); + } + } + return; +} + +// ---------------------------------------------------------------------------- +#if 0 +static NativeContext* DngCreator_getNativeContext(JNIEnv* env, jobject thiz) { + ALOGV("%s:", __FUNCTION__); + return reinterpret_cast(env->GetLongField(thiz, + gDngCreatorClassInfo.mNativeContext)); +} + +static void DngCreator_setNativeContext(JNIEnv* env, jobject thiz, sp context) { + ALOGV("%s:", __FUNCTION__); + NativeContext* current = DngCreator_getNativeContext(env, thiz); + + if (context != nullptr) { + context->incStrong((void*) DngCreator_setNativeContext); + } + + if (current) { + current->decStrong((void*) DngCreator_setNativeContext); + } + + env->SetLongField(thiz, gDngCreatorClassInfo.mNativeContext, + reinterpret_cast(context.get())); +} + +static void DngCreator_nativeClassInit(JNIEnv* env, jclass clazz) { + ALOGV("%s:", __FUNCTION__); + + gDngCreatorClassInfo.mNativeContext = GetFieldIDOrDie(env, + clazz, ANDROID_DNGCREATOR_CTX_JNI_ID, "J"); + + jclass outputStreamClazz = FindClassOrDie(env, "java/io/OutputStream"); + gOutputStreamClassInfo.mWriteMethod = GetMethodIDOrDie(env, + outputStreamClazz, "write", 
"([BII)V"); + + jclass inputStreamClazz = FindClassOrDie(env, "java/io/InputStream"); + gInputStreamClassInfo.mReadMethod = GetMethodIDOrDie(env, inputStreamClazz, "read", "([BII)I"); + gInputStreamClassInfo.mSkipMethod = GetMethodIDOrDie(env, inputStreamClazz, "skip", "(J)J"); + + jclass inputBufferClazz = FindClassOrDie(env, "java/nio/ByteBuffer"); + gInputByteBufferClassInfo.mGetMethod = GetMethodIDOrDie(env, + inputBufferClazz, "get", "([BII)Ljava/nio/ByteBuffer;"); +} +#endif + +void DngCreator::init(ACameraMetadata* characteristics, + ACameraMetadata* results, const std::string& captureTime) { + ALOGV("%s:", __FUNCTION__); + + sp nativeContext = new NativeContext(characteristics, results); + + size_t len = captureTime.size() + 1; + if (len != NativeContext::DATETIME_COUNT) { +#if 0 + jniThrowException(env, "java/lang/IllegalArgumentException", + "Formatted capture time string length is not required 20 characters"); +#endif + return; + } + + nativeContext->setCaptureTime(captureTime); + + // DngCreator_setNativeContext(env, thiz, nativeContext); +} + +sp DngCreator::setup(uint32_t imageWidth, uint32_t imageHeight) +{ + ACameraMetadata* characteristics = getCharacteristics(); + ACameraMetadata* results = getResult(); + + sp writer = new TiffWriter(); + + uint32_t preXMin = 0; + uint32_t preYMin = 0; + uint32_t preWidth = 0; + uint32_t preHeight = 0; + uint8_t colorFilter = 0; + camera_status_t status; + bool isBayer = true; + { + // Check dimensions + ACameraMetadata_const_entry entry = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, &entry); + BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer); + preXMin = static_cast(entry.data.i32[0]); + preYMin = static_cast(entry.data.i32[1]); + preWidth = static_cast(entry.data.i32[2]); + preHeight = static_cast(entry.data.i32[3]); + + ACameraMetadata_const_entry pixelArrayEntry = { 0 }; + status = 
ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE, &pixelArrayEntry); + uint32_t pixWidth = static_cast(pixelArrayEntry.data.i32[0]); + uint32_t pixHeight = static_cast(pixelArrayEntry.data.i32[1]); + + if (!((imageWidth == preWidth && imageHeight == preHeight) || + (imageWidth == pixWidth && imageHeight == pixHeight))) { +#if 0 + jniThrowException(env, "java/lang/AssertionError", + "Height and width of image buffer did not match height and width of" + "either the preCorrectionActiveArraySize or the pixelArraySize."); +#endif + return nullptr; + } + + ACameraMetadata_const_entry colorFilterEntry = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, &colorFilterEntry); + colorFilter = colorFilterEntry.data.u8[0]; + ACameraMetadata_const_entry capabilitiesEntry = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_REQUEST_AVAILABLE_CAPABILITIES, & capabilitiesEntry); + size_t capsCount = capabilitiesEntry.count; + const uint8_t* caps = capabilitiesEntry.data.u8; + + if (std::find(caps, caps+capsCount, ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME) + != caps+capsCount) { + isBayer = false; + } else if (colorFilter == ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO || + colorFilter == ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR) { +#if 0 + jniThrowException(env, "java/lang/AssertionError", + "A camera device with MONO/NIR color filter must have MONOCHROME capability."); +#endif + return nullptr; + } + } + + writer->addIfd(TIFF_IFD_0); + + status_t err = OK; + + const uint32_t samplesPerPixel = 1; + const uint32_t bitsPerSample = BITS_PER_SAMPLE; + + OpcodeListBuilder::CfaLayout opcodeCfaLayout = OpcodeListBuilder::CFA_NONE; + uint8_t cfaPlaneColor[3] = {0, 1, 2}; + ACameraMetadata_const_entry cfaEntry = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, &cfaEntry); + 
BAIL_IF_EMPTY_RET_NULL_SP(cfaEntry, env, TAG_CFAPATTERN, writer); + uint8_t cfaEnum = cfaEntry.data.u8[0]; + + // TODO: Greensplit. + // TODO: Add remaining non-essential tags + + // Setup main image tags + + { + // Set orientation + uint16_t orientation = TAG_ORIENTATION_NORMAL; + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0), + env, TAG_ORIENTATION, writer); + } + + { + // Set subfiletype + uint32_t subfileType = 0; // Main image + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType, + TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer); + } + + { + // Set bits per sample + uint16_t bits = static_cast(bitsPerSample); + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BITSPERSAMPLE, 1, &bits, TIFF_IFD_0), env, + TAG_BITSPERSAMPLE, writer); + } + + { + // Set compression + uint16_t compression = 1; // None + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression, + TIFF_IFD_0), env, TAG_COMPRESSION, writer); + } + + { + // Set dimensions + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &imageWidth, TIFF_IFD_0), + env, TAG_IMAGEWIDTH, writer); + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &imageHeight, TIFF_IFD_0), + env, TAG_IMAGELENGTH, writer); + } + + { + // Set photometric interpretation + uint16_t interpretation = isBayer ? 
32803 /* CFA */ : + 34892; /* Linear Raw */; + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1, + &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer); + } + + { + uint16_t repeatDim[2] = {2, 2}; + if (!isBayer) { + repeatDim[0] = repeatDim[1] = 1; + } + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVELREPEATDIM, 2, repeatDim, + TIFF_IFD_0), env, TAG_BLACKLEVELREPEATDIM, writer); + + // Set blacklevel tags, using dynamic black level if available + ACameraMetadata_const_entry entry = { 0 }; + camera_status_t status = ACameraMetadata_getConstEntry(results, ACAMERA_SENSOR_DYNAMIC_BLACK_LEVEL, &entry); + uint32_t blackLevelRational[8] = {0}; + if (entry.count != 0) { + BAIL_IF_EXPR_RET_NULL_SP(entry.count != 4, env, TAG_BLACKLEVEL, writer); + for (size_t i = 0; i < entry.count; i++) { + blackLevelRational[i * 2] = static_cast(entry.data.f[i] * 100); + blackLevelRational[i * 2 + 1] = 100; + } + } else { + // Fall back to static black level which is guaranteed + status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_BLACK_LEVEL_PATTERN, &entry); + BAIL_IF_EXPR_RET_NULL_SP(entry.count != 4, env, TAG_BLACKLEVEL, writer); + for (size_t i = 0; i < entry.count; i++) { + blackLevelRational[i * 2] = static_cast(entry.data.i32[i]); + blackLevelRational[i * 2 + 1] = 1; + } + } + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVEL, repeatDim[0]*repeatDim[1], + blackLevelRational, TIFF_IFD_0), env, TAG_BLACKLEVEL, writer); + } + + { + // Set samples per pixel + uint16_t samples = static_cast(samplesPerPixel); + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples, TIFF_IFD_0), + env, TAG_SAMPLESPERPIXEL, writer); + } + + { + // Set planar configuration + uint16_t config = 1; // Chunky + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config, + TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer); + } + + // All CFA pattern tags are not necessary for 
monochrome cameras. + if (isBayer) { + // Set CFA pattern dimensions + uint16_t repeatDim[2] = {2, 2}; + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAREPEATPATTERNDIM, 2, repeatDim, + TIFF_IFD_0), env, TAG_CFAREPEATPATTERNDIM, writer); + + // Set CFA pattern + const int cfaLength = 4; + uint8_t cfa[cfaLength]; + if ((err = convertCFA(cfaEnum, /*out*/cfa)) != OK) { +#if 0 + jniThrowExceptionFmt(env, "java/lang/IllegalStateException", + "Invalid metadata for tag %d", TAG_CFAPATTERN); +#endif + } + + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPATTERN, cfaLength, cfa, TIFF_IFD_0), + env, TAG_CFAPATTERN, writer); + + opcodeCfaLayout = convertCFAEnumToOpcodeLayout(cfaEnum); + + // Set CFA plane color + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPLANECOLOR, 3, cfaPlaneColor, + TIFF_IFD_0), env, TAG_CFAPLANECOLOR, writer); + + // Set CFA layout + uint16_t cfaLayout = 1; + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFALAYOUT, 1, &cfaLayout, TIFF_IFD_0), + env, TAG_CFALAYOUT, writer); + } + + { + // image description + uint8_t imageDescription = '\0'; // empty + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEDESCRIPTION, 1, &imageDescription, + TIFF_IFD_0), env, TAG_IMAGEDESCRIPTION, writer); + } + + { + // make + // Use "" to represent unknown make as suggested in TIFF/EP spec. + char manufacturer[PROP_VALUE_MAX] = { 0 }; + __system_property_get("ro.product.manufacturer", manufacturer); + uint32_t count = static_cast(strlen(manufacturer)) + 1; + + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MAKE, count, + reinterpret_cast(manufacturer), TIFF_IFD_0), env, TAG_MAKE, + writer); + } + + { + // model + // Use "" to represent unknown model as suggested in TIFF/EP spec. 
+ char model[PROP_VALUE_MAX] = { 0 }; + __system_property_get("ro.product.model", model); + uint32_t count = static_cast(strlen(model)) + 1; + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MODEL, count, + reinterpret_cast(model), TIFF_IFD_0), env, TAG_MODEL, + writer); + } + + { + // x resolution + uint32_t xres[] = { 72, 1 }; // default 72 ppi + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0), + env, TAG_XRESOLUTION, writer); + + // y resolution + uint32_t yres[] = { 72, 1 }; // default 72 ppi + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0), + env, TAG_YRESOLUTION, writer); + + uint16_t unit = 2; // inches + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0), + env, TAG_RESOLUTIONUNIT, writer); + } + + { + // software + char software[PROP_VALUE_MAX] = { 0 }; + __system_property_get("ro.build.fingerprint", software); + uint32_t count = static_cast(strlen(software)) + 1; + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SOFTWARE, count, + reinterpret_cast(software), TIFF_IFD_0), env, TAG_SOFTWARE, + writer); + } + + if (hasCaptureTime()) { + // datetime + std::string captureTime = getCaptureTime(); + + if (writer->addEntry(TAG_DATETIME, NativeContext::DATETIME_COUNT, + reinterpret_cast(captureTime.c_str()), TIFF_IFD_0) != OK) { +#if 0 + jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", + "Invalid metadata for tag %x", TAG_DATETIME); +#endif + return nullptr; + } + + // datetime original + if (writer->addEntry(TAG_DATETIMEORIGINAL, NativeContext::DATETIME_COUNT, + reinterpret_cast(captureTime.c_str()), TIFF_IFD_0) != OK) { +#if 0 + jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", + "Invalid metadata for tag %x", TAG_DATETIMEORIGINAL); +#endif + return nullptr; + } + } + + { + // TIFF/EP standard id + uint8_t standardId[] = { 1, 0, 0, 0 }; + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_TIFFEPSTANDARDID, 4, standardId, + 
TIFF_IFD_0), env, TAG_TIFFEPSTANDARDID, writer); + } + + { + // copyright + uint8_t copyright = '\0'; // empty + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COPYRIGHT, 1, ©right, + TIFF_IFD_0), env, TAG_COPYRIGHT, writer); + } + + { + // exposure time + ACameraMetadata_const_entry entry = { 0 }; + status = ACameraMetadata_getConstEntry(results, ACAMERA_SENSOR_EXPOSURE_TIME, &entry); + BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_EXPOSURETIME, writer); + + int64_t exposureTime = *(entry.data.i64); + + if (exposureTime < 0) { + // Should be unreachable +#if 0 + jniThrowException(env, "java/lang/IllegalArgumentException", + "Negative exposure time in metadata"); +#endif + return nullptr; + } + + // Ensure exposure time doesn't overflow (for exposures > 4s) + uint32_t denominator = 1000000000; + while (exposureTime > UINT32_MAX) { + exposureTime >>= 1; + denominator >>= 1; + if (denominator == 0) { + // Should be unreachable +#if 0 + jniThrowException(env, "java/lang/IllegalArgumentException", + "Exposure time too long"); +#endif + return nullptr; + } + } + + uint32_t exposure[] = { static_cast(exposureTime), denominator }; + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_EXPOSURETIME, 1, exposure, + TIFF_IFD_0), env, TAG_EXPOSURETIME, writer); + + } + + { + // ISO speed ratings + ACameraMetadata_const_entry entry = { 0 }; + status = ACameraMetadata_getConstEntry(results, ACAMERA_SENSOR_SENSITIVITY, &entry); + BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ISOSPEEDRATINGS, writer); + + int32_t tempIso = *(entry.data.i32); + if (tempIso < 0) { +#if 0 + jniThrowException(env, "java/lang/IllegalArgumentException", + "Negative ISO value"); +#endif + return nullptr; + } + + if (tempIso > UINT16_MAX) { + ALOGW("%s: ISO value overflows UINT16_MAX, clamping to max", __FUNCTION__); + tempIso = UINT16_MAX; + } + + uint16_t iso = static_cast(tempIso); + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ISOSPEEDRATINGS, 1, &iso, + TIFF_IFD_0), env, TAG_ISOSPEEDRATINGS, writer); + } 
+ + { + // Baseline exposure + ACameraMetadata_const_entry entry = { 0 }; + status = ACameraMetadata_getConstEntry(results, ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST, &entry); + BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_BASELINEEXPOSURE, writer); + + // post RAW gain should be boostValue / 100 + double postRAWGain = static_cast (entry.data.i32[0]) / 100.f; + // Baseline exposure should be in EV units so log2(gain) = + // log10(gain)/log10(2) + double baselineExposure = std::log(postRAWGain) / std::log(2.0f); + int32_t baseExposureSRat[] = { static_cast (baselineExposure * 100), + 100 }; + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BASELINEEXPOSURE, 1, + baseExposureSRat, TIFF_IFD_0), env, TAG_BASELINEEXPOSURE, writer); + } + + { + // focal length + ACameraMetadata_const_entry entry = { 0 }; + status = ACameraMetadata_getConstEntry(results, ACAMERA_LENS_FOCAL_LENGTH, &entry); + BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FOCALLENGTH, writer); + + uint32_t focalLength[] = { static_cast(*(entry.data.f) * 100), 100 }; + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FOCALLENGTH, 1, focalLength, + TIFF_IFD_0), env, TAG_FOCALLENGTH, writer); + } + + { + // f number + ACameraMetadata_const_entry entry = { 0 }; + status = ACameraMetadata_getConstEntry(results, ACAMERA_LENS_APERTURE, &entry); + BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FNUMBER, writer); + + uint32_t fnum[] = { static_cast(*(entry.data.f) * 100), 100 }; + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FNUMBER, 1, fnum, + TIFF_IFD_0), env, TAG_FNUMBER, writer); + } + + { + // Set DNG version information + uint8_t version[4] = {1, 4, 0, 0}; + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGVERSION, 4, version, TIFF_IFD_0), + env, TAG_DNGVERSION, writer); + + uint8_t backwardVersion[4] = {1, 1, 0, 0}; + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGBACKWARDVERSION, 4, backwardVersion, + TIFF_IFD_0), env, TAG_DNGBACKWARDVERSION, writer); + } + + { + // Set whitelevel + 
ACameraMetadata_const_entry entry = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_INFO_WHITE_LEVEL, &entry); + BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_WHITELEVEL, writer); + uint32_t whiteLevel = static_cast(entry.data.i32[0]); + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_WHITELEVEL, 1, &whiteLevel, TIFF_IFD_0), + env, TAG_WHITELEVEL, writer); + } + + { + // Set default scale + uint32_t defaultScale[4] = {1, 1, 1, 1}; + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DEFAULTSCALE, 2, defaultScale, + TIFF_IFD_0), env, TAG_DEFAULTSCALE, writer); + } + + bool singleIlluminant = false; + if (isBayer) { + // Set calibration illuminants + ACameraMetadata_const_entry entry1 = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_REFERENCE_ILLUMINANT1, &entry1); + BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CALIBRATIONILLUMINANT1, writer); + ACameraMetadata_const_entry entry2 = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_REFERENCE_ILLUMINANT2, &entry2); + if (entry2.count == 0) { + singleIlluminant = true; + } + uint16_t ref1 = entry1.data.u8[0]; + + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT1, 1, &ref1, + TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT1, writer); + + if (!singleIlluminant) { + uint16_t ref2 = entry2.data.u8[0]; + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT2, 1, &ref2, + TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT2, writer); + } + } + + if (isBayer) { + // Set color transforms + ACameraMetadata_const_entry entry1 = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_COLOR_TRANSFORM1, &entry1); + BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_COLORMATRIX1, writer); + + int32_t colorTransform1[entry1.count * 2]; + + size_t ctr = 0; + for(size_t i = 0; i < entry1.count; ++i) { + colorTransform1[ctr++] = entry1.data.r[i].numerator; + colorTransform1[ctr++] = 
entry1.data.r[i].denominator; + } + + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX1, entry1.count, + colorTransform1, TIFF_IFD_0), env, TAG_COLORMATRIX1, writer); + + if (!singleIlluminant) { + ACameraMetadata_const_entry entry2 = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_COLOR_TRANSFORM2, &entry2); + BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_COLORMATRIX2, writer); + int32_t colorTransform2[entry2.count * 2]; + + ctr = 0; + for(size_t i = 0; i < entry2.count; ++i) { + colorTransform2[ctr++] = entry2.data.r[i].numerator; + colorTransform2[ctr++] = entry2.data.r[i].denominator; + } + + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX2, entry2.count, + colorTransform2, TIFF_IFD_0), env, TAG_COLORMATRIX2, writer); + } + } + + if (isBayer) { + // Set calibration transforms + ACameraMetadata_const_entry entry1 = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_CALIBRATION_TRANSFORM1, &entry1); + BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CAMERACALIBRATION1, writer); + + int32_t calibrationTransform1[entry1.count * 2]; + + size_t ctr = 0; + for(size_t i = 0; i < entry1.count; ++i) { + calibrationTransform1[ctr++] = entry1.data.r[i].numerator; + calibrationTransform1[ctr++] = entry1.data.r[i].denominator; + } + + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION1, entry1.count, + calibrationTransform1, TIFF_IFD_0), env, TAG_CAMERACALIBRATION1, writer); + + if (!singleIlluminant) { + ACameraMetadata_const_entry entry2 = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_CALIBRATION_TRANSFORM2, &entry2); + BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_CAMERACALIBRATION2, writer); + int32_t calibrationTransform2[entry2.count * 2]; + + ctr = 0; + for(size_t i = 0; i < entry2.count; ++i) { + calibrationTransform2[ctr++] = entry2.data.r[i].numerator; + calibrationTransform2[ctr++] = entry2.data.r[i].denominator; + } + + 
BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION2, entry2.count, + calibrationTransform2, TIFF_IFD_0), env, TAG_CAMERACALIBRATION2, writer); + } + } + + if (isBayer) { + // Set forward transforms + ACameraMetadata_const_entry entry1 = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_FORWARD_MATRIX1, &entry1); + BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_FORWARDMATRIX1, writer); + + int32_t forwardTransform1[entry1.count * 2]; + + size_t ctr = 0; + for(size_t i = 0; i < entry1.count; ++i) { + forwardTransform1[ctr++] = entry1.data.r[i].numerator; + forwardTransform1[ctr++] = entry1.data.r[i].denominator; + } + + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX1, entry1.count, + forwardTransform1, TIFF_IFD_0), env, TAG_FORWARDMATRIX1, writer); + + if (!singleIlluminant) { + ACameraMetadata_const_entry entry2 = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_FORWARD_MATRIX2, &entry2); + BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_FORWARDMATRIX2, writer); + int32_t forwardTransform2[entry2.count * 2]; + + ctr = 0; + for(size_t i = 0; i < entry2.count; ++i) { + forwardTransform2[ctr++] = entry2.data.r[i].numerator; + forwardTransform2[ctr++] = entry2.data.r[i].denominator; + } + + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX2, entry2.count, + forwardTransform2, TIFF_IFD_0), env, TAG_FORWARDMATRIX2, writer); + } + } + + if (isBayer) { + // Set camera neutral + ACameraMetadata_const_entry entry = { 0 }; + status = ACameraMetadata_getConstEntry(results, ACAMERA_SENSOR_NEUTRAL_COLOR_POINT, &entry); + BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ASSHOTNEUTRAL, writer); + uint32_t cameraNeutral[entry.count * 2]; + + size_t ctr = 0; + for(size_t i = 0; i < entry.count; ++i) { + cameraNeutral[ctr++] = + static_cast(entry.data.r[i].numerator); + cameraNeutral[ctr++] = + static_cast(entry.data.r[i].denominator); + } + + 
BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ASSHOTNEUTRAL, entry.count, cameraNeutral, + TIFF_IFD_0), env, TAG_ASSHOTNEUTRAL, writer); + } + + { + // Set dimensions + if (calculateAndSetCrop(characteristics, writer) != OK) { + return nullptr; + } + ACameraMetadata_const_entry entry = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, &entry); + BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ACTIVEAREA, writer); + uint32_t xmin = static_cast(entry.data.i32[0]); + uint32_t ymin = static_cast(entry.data.i32[1]); + uint32_t width = static_cast(entry.data.i32[2]); + uint32_t height = static_cast(entry.data.i32[3]); + + // If we only have a buffer containing the pre-correction rectangle, ignore the offset + // relative to the pixel array. + if (imageWidth == width && imageHeight == height) { + xmin = 0; + ymin = 0; + } + + uint32_t activeArea[] = {ymin, xmin, ymin + height, xmin + width}; + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ACTIVEAREA, 4, activeArea, TIFF_IFD_0), + env, TAG_ACTIVEAREA, writer); + } + + { + // Setup unique camera model tag + char model[PROP_VALUE_MAX] = { 0 }; + __system_property_get("ro.product.model", model); + char manufacturer[PROP_VALUE_MAX] = { 0 }; + __system_property_get("ro.product.manufacturer", manufacturer); + char brand[PROP_VALUE_MAX] = { 0 }; + __system_property_get("ro.product.brand", brand); + + std::string cameraModel = model; + cameraModel += "-"; + cameraModel += manufacturer; + cameraModel += "-"; + cameraModel += brand; + + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_UNIQUECAMERAMODEL, cameraModel.size() + 1, + reinterpret_cast(cameraModel.c_str()), TIFF_IFD_0), env, + TAG_UNIQUECAMERAMODEL, writer); + } + + { + // Setup sensor noise model + ACameraMetadata_const_entry entry = { 0 }; + status = ACameraMetadata_getConstEntry(results, ACAMERA_SENSOR_NOISE_PROFILE, &entry); + + const status_t numPlaneColors = isBayer ? 
3 : 1; + const status_t numCfaChannels = isBayer ? 4 : 1; + + uint8_t cfaOut[numCfaChannels]; + if ((err = convertCFA(cfaEnum, /*out*/cfaOut)) != OK) { +#if 0 + jniThrowException(env, "java/lang/IllegalArgumentException", + "Invalid CFA from camera characteristics"); +#endif + return nullptr; + } + + double noiseProfile[numPlaneColors * 2]; + + if (entry.count > 0) { + if (entry.count != numCfaChannels * 2) { + ALOGW("%s: Invalid entry count %zu for noise profile returned " + "in characteristics, no noise profile tag written...", + __FUNCTION__, entry.count); + } else { + if ((err = generateNoiseProfile(entry.data.d, cfaOut, numCfaChannels, + cfaPlaneColor, numPlaneColors, /*out*/ noiseProfile)) == OK) { + + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NOISEPROFILE, + numPlaneColors * 2, noiseProfile, TIFF_IFD_0), env, TAG_NOISEPROFILE, + writer); + } else { + ALOGW("%s: Error converting coefficients for noise profile, no noise profile" + " tag written...", __FUNCTION__); + } + } + } else { + ALOGW("%s: No noise profile found in result metadata. 
Image quality may be reduced.", + __FUNCTION__); + } + } + + { + // Set up opcode List 2 + OpcodeListBuilder builder; + status_t err = OK; + + // Set up lens shading map + ACameraMetadata_const_entry entry1 = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_LENS_INFO_SHADING_MAP_SIZE, &entry1); + + uint32_t lsmWidth = 0; + uint32_t lsmHeight = 0; + + if (entry1.count != 0) { + lsmWidth = static_cast(entry1.data.i32[0]); + lsmHeight = static_cast(entry1.data.i32[1]); + } + + ACameraMetadata_const_entry entry2 = { 0 }; + status = ACameraMetadata_getConstEntry(results, ACAMERA_STATISTICS_LENS_SHADING_MAP, &entry2); + + ACameraMetadata_const_entry entry = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, &entry); + BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer); + uint32_t xmin = static_cast(entry.data.i32[0]); + uint32_t ymin = static_cast(entry.data.i32[1]); + uint32_t width = static_cast(entry.data.i32[2]); + uint32_t height = static_cast(entry.data.i32[3]); + if (entry2.count > 0 && entry2.count == lsmWidth * lsmHeight * 4) { + // GainMap rectangle is relative to the active area origin. + err = builder.addGainMapsForMetadata(lsmWidth, + lsmHeight, + 0, + 0, + height, + width, + opcodeCfaLayout, + entry2.data.f); + if (err != OK) { + ALOGE("%s: Could not add Lens shading map.", __FUNCTION__); +#if 0 + jniThrowRuntimeException(env, "failed to add lens shading map."); +#endif + return nullptr; + } + } + + // Hot pixel map is specific to bayer camera per DNG spec. 
+ if (isBayer) { + // Set up bad pixel correction list + ACameraMetadata_const_entry entry3 = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics, ACAMERA_STATISTICS_HOT_PIXEL_MAP, &entry3); + + if ((entry3.count % 2) != 0) { + ALOGE("%s: Hot pixel map contains odd number of values, cannot map to pairs!", + __FUNCTION__); +#if 0 + jniThrowRuntimeException(env, "failed to add hotpixel map."); +#endif + return nullptr; + } + + // Adjust the bad pixel coordinates to be relative to the origin of the active area DNG tag + std::vector v; + for (size_t i = 0; i < entry3.count; i += 2) { + int32_t x = entry3.data.i32[i]; + int32_t y = entry3.data.i32[i + 1]; + x -= static_cast(xmin); + y -= static_cast(ymin); + if (x < 0 || y < 0 || static_cast(x) >= width || + static_cast(y) >= height) { + continue; + } + v.push_back(x); + v.push_back(y); + } + const uint32_t* badPixels = &v[0]; + uint32_t badPixelCount = v.size(); + + if (badPixelCount > 0) { + err = builder.addBadPixelListForMetadata(badPixels, badPixelCount, opcodeCfaLayout); + + if (err != OK) { + ALOGE("%s: Could not add hotpixel map.", __FUNCTION__); +#if 0 + jniThrowRuntimeException(env, "failed to add hotpixel map."); +#endif + return nullptr; + } + } + } + + if (builder.getCount() > 0) { + size_t listSize = builder.getSize(); + uint8_t opcodeListBuf[listSize]; + err = builder.buildOpList(opcodeListBuf); + if (err == OK) { + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST2, listSize, + opcodeListBuf, TIFF_IFD_0), env, TAG_OPCODELIST2, writer); + } else { + ALOGE("%s: Could not build list of opcodes for lens shading map and bad pixel " + "correction.", __FUNCTION__); +#if 0 + jniThrowRuntimeException(env, "failed to construct opcode list for lens shading " + "map and bad pixel correction"); +#endif + return nullptr; + } + } + } + + { + // Set up opcode List 3 + OpcodeListBuilder builder; + status_t err = OK; + + // Set up rectilinear distortion correction + std::array distortion = {1.f, 0.f, 
0.f, 0.f, 0.f, 0.f}; + bool gotDistortion = false; + + ACameraMetadata_const_entry entry4 = { 0 }; + status = ACameraMetadata_getConstEntry(results, ACAMERA_LENS_INTRINSIC_CALIBRATION, &entry4); + + if (entry4.count == 5) { + float cx = entry4.data.f[/*c_x*/2]; + float cy = entry4.data.f[/*c_y*/3]; + // Assuming f_x = f_y, or at least close enough. + // Also assuming s = 0, or at least close enough. + float f = entry4.data.f[/*f_x*/0]; + + ACameraMetadata_const_entry entry3 = { 0 }; + status = ACameraMetadata_getConstEntry(results, ACAMERA_LENS_DISTORTION, &entry3); + if (entry3.count == 5) { + gotDistortion = true; + + // Scale the distortion coefficients to create a zoom in warpped image so that all + // pixels are drawn within input image. + for (size_t i = 0; i < entry3.count; i++) { + distortion[i+1] = entry3.data.f[i]; + } + + if (preWidth == imageWidth && preHeight == imageHeight) { + normalizeLensDistortion(distortion, cx, cy, f, preWidth, preHeight); + } else { + // image size == pixel array size (contains optical black pixels) + // cx/cy is defined in preCorrArray so adding the offset + // Also changes default xmin/ymin so that pixels are only + // sampled within preCorrection array + normalizeLensDistortion( + distortion, cx + preXMin, cy + preYMin, f, preWidth, preHeight, + preXMin, preYMin); + } + + float m_x = std::fmaxf(preWidth - cx, cx); + float m_y = std::fmaxf(preHeight - cy, cy); + float m_sq = m_x*m_x + m_y*m_y; + float m = sqrtf(m_sq); // distance to farthest corner from optical center + float f_sq = f * f; + // Conversion factors from Camera2 K factors for new LENS_DISTORTION field + // to DNG spec. + // + // Camera2 / OpenCV assume distortion is applied in a space where focal length + // is factored out, while DNG assumes a normalized space where the distance + // from optical center to the farthest corner is 1. + // Scale from camera2 to DNG spec accordingly. + // distortion[0] is always 1 with the new LENS_DISTORTION field. 
+ const double convCoeff[5] = { + m_sq / f_sq, + pow(m_sq, 2) / pow(f_sq, 2), + pow(m_sq, 3) / pow(f_sq, 3), + m / f, + m / f + }; + for (size_t i = 0; i < entry3.count; i++) { + distortion[i+1] *= convCoeff[i]; + } + } else { + status = ACameraMetadata_getConstEntry(results, ACAMERA_LENS_RADIAL_DISTORTION, &entry3); + if (entry3.count == 6) { + gotDistortion = true; + // Conversion factors from Camera2 K factors to DNG spec. K factors: + // + // Note: these are necessary because our unit system assumes a + // normalized max radius of sqrt(2), whereas the DNG spec's + // WarpRectilinear opcode assumes a normalized max radius of 1. + // Thus, each K coefficient must include the domain scaling + // factor (the DNG domain is scaled by sqrt(2) to emulate the + // domain used by the Camera2 specification). + const double convCoeff[6] = { + sqrt(2), + 2 * sqrt(2), + 4 * sqrt(2), + 8 * sqrt(2), + 2, + 2 + }; + for (size_t i = 0; i < entry3.count; i++) { + distortion[i] = entry3.data.f[i] * convCoeff[i]; + } + } + } + if (gotDistortion) { + err = builder.addWarpRectilinearForMetadata( + distortion.data(), preWidth, preHeight, cx, cy); + if (err != OK) { + ALOGE("%s: Could not add distortion correction.", __FUNCTION__); +#if 0 + jniThrowRuntimeException(env, "failed to add distortion correction."); +#endif + return nullptr; + } + } + } + + if (builder.getCount() > 0) { + size_t listSize = builder.getSize(); + uint8_t opcodeListBuf[listSize]; + err = builder.buildOpList(opcodeListBuf); + if (err == OK) { + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST3, listSize, + opcodeListBuf, TIFF_IFD_0), env, TAG_OPCODELIST3, writer); + } else { + ALOGE("%s: Could not build list of opcodes for distortion correction.", + __FUNCTION__); +#if 0 + jniThrowRuntimeException(env, "failed to construct opcode list for distortion" + " correction"); +#endif + return nullptr; + } + } + } + + { + // Set up orientation tags. 
+ // Note: There's only one orientation field for the whole file, in IFD0 + // The main image and any thumbnails therefore have the same orientation. + uint16_t orientation = getOrientation(); + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0), + env, TAG_ORIENTATION, writer); + + } + + if (hasDescription()){ + // Set Description + std::string description = getDescription(); + size_t len = description.size() + 1; + if (writer->addEntry(TAG_IMAGEDESCRIPTION, len, + reinterpret_cast(description.c_str()), TIFF_IFD_0) != OK) { +#if 0 + jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", + "Invalid metadata for tag %x", TAG_IMAGEDESCRIPTION); +#endif + } + } + + if (hasGpsData()) { + // Set GPS tags + GpsData gpsData = getGpsData(); + if (!writer->hasIfd(TIFF_IFD_GPSINFO)) { + if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_GPSINFO, TiffWriter::GPSINFO) != OK) { + ALOGE("%s: Failed to add GpsInfo IFD %u to IFD %u", __FUNCTION__, TIFF_IFD_GPSINFO, + TIFF_IFD_0); +#if 0 + jniThrowException(env, "java/lang/IllegalStateException", "Failed to add GPSINFO"); +#endif + return nullptr; + } + } + + { + uint8_t version[] = {2, 3, 0, 0}; + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSVERSIONID, 4, version, + TIFF_IFD_GPSINFO), env, TAG_GPSVERSIONID, writer); + } + + { + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDEREF, + GpsData::GPS_REF_LENGTH, gpsData.mLatitudeRef, TIFF_IFD_GPSINFO), env, + TAG_GPSLATITUDEREF, writer); + } + + { + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDEREF, + GpsData::GPS_REF_LENGTH, gpsData.mLongitudeRef, TIFF_IFD_GPSINFO), env, + TAG_GPSLONGITUDEREF, writer); + } + + { + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDE, 3, gpsData.mLatitude, + TIFF_IFD_GPSINFO), env, TAG_GPSLATITUDE, writer); + } + + { + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDE, 3, gpsData.mLongitude, + TIFF_IFD_GPSINFO), env, TAG_GPSLONGITUDE, writer); + } + + { + 
BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSTIMESTAMP, 3, gpsData.mTimestamp, + TIFF_IFD_GPSINFO), env, TAG_GPSTIMESTAMP, writer); + } + + { + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSDATESTAMP, + GpsData::GPS_DATE_LENGTH, gpsData.mDate, TIFF_IFD_GPSINFO), env, + TAG_GPSDATESTAMP, writer); + } + } + + + if (hasThumbnail()) { + if (!writer->hasIfd(TIFF_IFD_SUB1)) { + if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_SUB1) != OK) { + ALOGE("%s: Failed to add SubIFD %u to IFD %u", __FUNCTION__, TIFF_IFD_SUB1, + TIFF_IFD_0); +#if 0 + jniThrowException(env, "java/lang/IllegalStateException", "Failed to add SubIFD"); +#endif + return nullptr; + } + } + + std::vector tagsToMove; + tagsToMove.push_back(TAG_NEWSUBFILETYPE); + tagsToMove.push_back(TAG_ACTIVEAREA); + tagsToMove.push_back(TAG_BITSPERSAMPLE); + tagsToMove.push_back(TAG_COMPRESSION); + tagsToMove.push_back(TAG_IMAGEWIDTH); + tagsToMove.push_back(TAG_IMAGELENGTH); + tagsToMove.push_back(TAG_PHOTOMETRICINTERPRETATION); + tagsToMove.push_back(TAG_BLACKLEVEL); + tagsToMove.push_back(TAG_BLACKLEVELREPEATDIM); + tagsToMove.push_back(TAG_SAMPLESPERPIXEL); + tagsToMove.push_back(TAG_PLANARCONFIGURATION); + if (isBayer) { + tagsToMove.push_back(TAG_CFAREPEATPATTERNDIM); + tagsToMove.push_back(TAG_CFAPATTERN); + tagsToMove.push_back(TAG_CFAPLANECOLOR); + tagsToMove.push_back(TAG_CFALAYOUT); + } + tagsToMove.push_back(TAG_XRESOLUTION); + tagsToMove.push_back(TAG_YRESOLUTION); + tagsToMove.push_back(TAG_RESOLUTIONUNIT); + tagsToMove.push_back(TAG_WHITELEVEL); + tagsToMove.push_back(TAG_DEFAULTSCALE); + tagsToMove.push_back(TAG_DEFAULTCROPORIGIN); + tagsToMove.push_back(TAG_DEFAULTCROPSIZE); + + if (nullptr != writer->getEntry(TAG_OPCODELIST2, TIFF_IFD_0).get()) { + tagsToMove.push_back(TAG_OPCODELIST2); + } + + if (nullptr != writer->getEntry(TAG_OPCODELIST3, TIFF_IFD_0).get()) { + tagsToMove.push_back(TAG_OPCODELIST3); + } + + if (moveEntries(writer, TIFF_IFD_0, TIFF_IFD_SUB1, tagsToMove) != OK) { +#if 0 + 
jniThrowException(env, "java/lang/IllegalStateException", "Failed to move entries"); +#endif + return nullptr; + } + + // Setup thumbnail tags + + { + // Set photometric interpretation + uint16_t interpretation = 2; // RGB + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1, + &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer); + } + + { + // Set planar configuration + uint16_t config = 1; // Chunky + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config, + TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer); + } + + { + // Set samples per pixel + uint16_t samples = SAMPLES_PER_RGB_PIXEL; + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples, + TIFF_IFD_0), env, TAG_SAMPLESPERPIXEL, writer); + } + + { + // Set bits per sample + uint16_t bits[SAMPLES_PER_RGB_PIXEL]; + for (int i = 0; i < SAMPLES_PER_RGB_PIXEL; i++) bits[i] = BITS_PER_RGB_SAMPLE; + BAIL_IF_INVALID_RET_NULL_SP( + writer->addEntry(TAG_BITSPERSAMPLE, SAMPLES_PER_RGB_PIXEL, bits, TIFF_IFD_0), + env, TAG_BITSPERSAMPLE, writer); + } + + { + // Set subfiletype + uint32_t subfileType = 1; // Thumbnail image + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType, + TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer); + } + + { + // Set compression + uint16_t compression = 1; // None + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression, + TIFF_IFD_0), env, TAG_COMPRESSION, writer); + } + + { + // Set dimensions + uint32_t uWidth = getThumbnailWidth(); + uint32_t uHeight = getThumbnailHeight(); + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &uWidth, TIFF_IFD_0), + env, TAG_IMAGEWIDTH, writer); + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &uHeight, TIFF_IFD_0), + env, TAG_IMAGELENGTH, writer); + } + + { + // x resolution + uint32_t xres[] = { 72, 1 }; // default 72 ppi + 
BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0), + env, TAG_XRESOLUTION, writer); + + // y resolution + uint32_t yres[] = { 72, 1 }; // default 72 ppi + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0), + env, TAG_YRESOLUTION, writer); + + uint16_t unit = 2; // inches + BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0), + env, TAG_RESOLUTIONUNIT, writer); + } + } + + if (writer->addStrip(TIFF_IFD_0) != OK) { + ALOGE("%s: Could not setup thumbnail strip tags.", __FUNCTION__); +#if 0 + jniThrowException(env, "java/lang/IllegalStateException", + "Failed to setup thumbnail strip tags."); +#endif + return nullptr; + } + + if (writer->hasIfd(TIFF_IFD_SUB1)) { + if (writer->addStrip(TIFF_IFD_SUB1) != OK) { + ALOGE("%s: Could not main image strip tags.", __FUNCTION__); +#if 0 + jniThrowException(env, "java/lang/IllegalStateException", + "Failed to setup main image strip tags."); +#endif + return nullptr; + } + } + return writer; +} + +void DngCreator::setGpsTags(const std::vector& latTag, + const std::string& latRef, const std::vector& longTag, const std::string& longRef, const std::string& dateTag, const std::vector& timeTag) { + ALOGV("%s:", __FUNCTION__); + + GpsData data; + + size_t latLen = latTag.size(); + size_t longLen = longTag.size(); + size_t timeLen = timeTag.size(); + if (latLen != GpsData::GPS_VALUE_LENGTH) { +#if 0 + jniThrowException(env, "java/lang/IllegalArgumentException", + "invalid latitude tag length"); +#endif + return; + } else if (longLen != GpsData::GPS_VALUE_LENGTH) { +#if 0 + jniThrowException(env, "java/lang/IllegalArgumentException", + "invalid longitude tag length"); +#endif + return; + } else if (timeLen != GpsData::GPS_VALUE_LENGTH) { +#if 0 + jniThrowException(env, "java/lang/IllegalArgumentException", + "invalid time tag length"); +#endif + return; + } + + memcpy(&data.mLatitude, &latTag[0], sizeof(int) * GpsData::GPS_VALUE_LENGTH); + 
memcpy(&data.mLongitude, &longTag[0], sizeof(int) * GpsData::GPS_VALUE_LENGTH); + memcpy(&data.mTimestamp, &timeTag[0], sizeof(int) * GpsData::GPS_VALUE_LENGTH); + + memcpy(&data.mLatitudeRef, latRef.c_str(), 1); + data.mLatitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0'; + memcpy(&data.mLongitudeRef, longRef.c_str(), 1); + data.mLongitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0'; + memcpy(&data.mDate, dateTag.c_str(), GpsData::GPS_DATE_LENGTH - 1); + data.mDate[GpsData::GPS_DATE_LENGTH - 1] = '\0'; + + setGpsData(data); +} + +// TODO: Refactor out common preamble for the two nativeWrite methods. +void DngCreator::writeImage(std::vector& outStream, uint32_t uWidth, + uint32_t uHeight, const std::vector& inBuffer, int rowStride, int pixStride, uint64_t uOffset, bool isDirect) { + ALOGV("%s:", __FUNCTION__); + ALOGV("%s: nativeWriteImage called with: width=%d, height=%d, " + "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, uWidth, + uHeight, rowStride, pixStride, uOffset); + uint32_t rStride = static_cast(rowStride); + uint32_t pStride = static_cast(pixStride); + + std::vector& out = outStream; + + // sp out = new JniOutputStream(env, outStream); + // if(env->ExceptionCheck()) { + // ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__); + // return; + // } + + sp writer = setup(uWidth, uHeight); + + if (writer.get() == nullptr) { + return; + } + + // Validate DNG size + if (!validateDngHeader(writer, getCharacteristics(), uWidth, uHeight)) { + return; + } + + // sp inBuf; + std::vector sources; + sp thumbnailSource; + uint32_t targetIfd = TIFF_IFD_0; + + bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1); + + if (hasThumbnail) { +#if 0 + ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__); + uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE; + uint32_t thumbWidth = getThumbnailWidth(); + thumbnailSource = new DirectStripSource(env, getThumbnail(), TIFF_IFD_0, + thumbWidth, context->getThumbnailHeight(), 
bytesPerPixel, + bytesPerPixel * thumbWidth, /*offset*/0, BYTES_PER_RGB_SAMPLE, + SAMPLES_PER_RGB_PIXEL); + sources.push_back(thumbnailSource.get()); + targetIfd = TIFF_IFD_SUB1; +#endif + } + + if (isDirect) { + size_t fullSize = rStride * uHeight; + jlong capacity = inBuffer.size(); + if (capacity < 0 || fullSize + uOffset > static_cast(capacity)) { +#if 0 + jniThrowExceptionFmt(env, "java/lang/IllegalStateException", + "Invalid size %d for Image, size given in metadata is %d at current stride", + capacity, fullSize); +#endif + return; + } + + uint8_t* pixelBytes = (uint8_t*)&inBuffer[0]; + + ALOGV("%s: Using direct-type strip source.", __FUNCTION__); + DirectStripSource stripSource(pixelBytes, targetIfd, uWidth, uHeight, pStride, + rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL); + sources.push_back(&stripSource); + + status_t ret = OK; + ByteVectorOutput byteVectorOutput(outStream); + + if ((ret = writer->write(&byteVectorOutput, &sources[0], sources.size())) != OK) { + ALOGE("%s: write failed with error %d.", __FUNCTION__, ret); +#if 0 + if (!env->ExceptionCheck()) { + jniThrowExceptionFmt(env, "java/io/IOException", + "Encountered error %d while writing file.", ret); + } +#endif + return; + } + } else { + int aa = 0; + // inBuf = new JniInputByteBuffer(env, inBuffer); +#if 0 + ALOGV("%s: Using input-type strip source.", __FUNCTION__); + InputStripSource stripSource(*inBuf, targetIfd, uWidth, uHeight, pStride, + rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL); + sources.push_back(&stripSource); + + status_t ret = OK; + if ((ret = writer->write(out.get(), &sources[0], sources.size())) != OK) { + ALOGE("%s: write failed with error %d.", __FUNCTION__, ret); +#if 0 + if (!env->ExceptionCheck()) { + jniThrowExceptionFmt(env, "java/io/IOException", + "Encountered error %d while writing file.", ret); + } +#endif + return; + } +#endif + } +} + +void DngCreator::writeInputStream(std::vector& outStream, + const std::vector& inStream, uint32_t 
uWidth, uint32_t uHeight, long offset) { + ALOGV("%s:", __FUNCTION__); + + uint32_t rowStride = uWidth * BYTES_PER_SAMPLE; + uint32_t pixStride = BYTES_PER_SAMPLE; + + uint64_t uOffset = static_cast(offset); + + ALOGV("%s: nativeWriteInputStream called with: width=%u, height=%u, " + "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, uWidth, + uHeight, rowStride, pixStride, offset); + + ByteVectorOutput out(outStream); + // std::vector& out = outStream; + + sp writer = setup(uWidth, uHeight); + + if (writer.get() == nullptr) { + return; + } + + // Validate DNG size + if (!validateDngHeader(writer, getCharacteristics(), uWidth, uHeight)) { + return; + } + + sp thumbnailSource; + uint32_t targetIfd = TIFF_IFD_0; + bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1); + std::vector sources; + + if (hasThumbnail) + { +#if 0 + ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__); + uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE; + uint32_t width = getThumbnailWidth(); + thumbnailSource = new DirectStripSource(getThumbnail(), TIFF_IFD_0, + width, getThumbnailHeight(), bytesPerPixel, + bytesPerPixel * width, /*offset*/0, BYTES_PER_RGB_SAMPLE, + SAMPLES_PER_RGB_PIXEL); + sources.pus_back(thumbnailSource.get()); + targetIfd = TIFF_IFD_SUB1; +#endif + } + + // sp in = new JniInputStream(env, inStream); + + ByteVectorInput in(inStream); + + ALOGV("%s: Using input-type strip source.", __FUNCTION__); + InputStripSource stripSource(in, targetIfd, uWidth, uHeight, pixStride, + rowStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL); + sources.push_back(&stripSource); + + status_t ret = OK; + if ((ret = writer->write(&out, &sources[0], sources.size())) != OK) { + ALOGE("%s: write failed with error %d.", __FUNCTION__, ret); +#if 0 + if (!env->ExceptionCheck()) { + jniThrowExceptionFmt(env, "java/io/IOException", + "Encountered error %d while writing file.", ret); + } +#endif + return; + } +} + +void DngCreator::writeInputBuffer(std::vector& 
outStream, + const uint8_t* inBuffer, size_t bufferLength, uint32_t uWidth, uint32_t uHeight, long offset) { + ALOGV("%s:", __FUNCTION__); + + uint32_t rowStride = uWidth * BYTES_PER_SAMPLE; + uint32_t pixStride = BYTES_PER_SAMPLE; + + uint64_t uOffset = static_cast(offset); + + ALOGV("%s: nativeWriteInputStream called with: width=%u, height=%u, " + "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, uWidth, + uHeight, rowStride, pixStride, offset); + + ByteVectorOutput out(outStream); + // std::vector& out = outStream; + + sp writer = setup(uWidth, uHeight); + + if (writer.get() == nullptr) { + return; + } + + + // Validate DNG size + if (!validateDngHeader(writer, getCharacteristics(), uWidth, uHeight)) { + return; + } + + sp thumbnailSource; + uint32_t targetIfd = TIFF_IFD_0; + bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1); + std::vector sources; + + if (hasThumbnail) + { +#if 0 + ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__); + uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE; + uint32_t width = getThumbnailWidth(); + thumbnailSource = new DirectStripSource(getThumbnail(), TIFF_IFD_0, + width, getThumbnailHeight(), bytesPerPixel, + bytesPerPixel * width, /*offset*/0, BYTES_PER_RGB_SAMPLE, + SAMPLES_PER_RGB_PIXEL); + sources.push_back(thumbnailSource.get()); + targetIfd = TIFF_IFD_SUB1; +#endif + } + + // sp in = new JniInputStream(env, inStream); + + ByteBufferInput in(inBuffer, bufferLength); + + ALOGV("%s: Using input-type strip source.", __FUNCTION__); + InputStripSource stripSource(in, targetIfd, uWidth, uHeight, pixStride, + rowStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL); + sources.push_back(&stripSource); + + status_t ret = OK; + if ((ret = writer->write(&out, &sources[0], sources.size())) != OK) { + ALOGE("%s: write failed with error %d.", __FUNCTION__, ret); +#if 0 + if (!env->ExceptionCheck()) { + jniThrowExceptionFmt(env, "java/io/IOException", + "Encountered error %d while writing file.", 
ret); + } +#endif + return; + } +} diff --git a/app/src/main/cpp/DngCreator.h b/app/src/main/cpp/DngCreator.h new file mode 100644 index 00000000..a19c6374 --- /dev/null +++ b/app/src/main/cpp/DngCreator.h @@ -0,0 +1,332 @@ +//#define LOG_NDEBUG 0 +#define LOG_TAG "DngCreator_JNI" +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +// #include "core_jni_helpers.h" + +// #include "android_runtime/AndroidRuntime.h" +// #include "android_runtime/android_hardware_camera2_CameraMetadata.h" + +#include +// #include + +using namespace android; +using namespace img_utils; +// using android::base::GetProperty; + + +/** + * Max width or height dimension for thumbnails. + */ +// max pixel dimension for TIFF/EP +#define MAX_THUMBNAIL_DIMENSION 256 + + +// bytes per sample +#define DEFAULT_PIXEL_STRIDE 2 +// byts per pixel +#define BYTES_PER_RGB_PIX 3 + + +#define GPS_LAT_REF_NORTH "N" +#define GPS_LAT_REF_SOUTH "S" +#define GPS_LONG_REF_EAST "E" +#define GPS_LONG_REF_WEST "W" + +#define GPS_DATE_FORMAT_STR "yyyy:MM:dd" +#define TIFF_DATETIME_FORMAT "yyyy:MM:dd kk:mm:ss" + +class ByteVectorOutput : public Output { +public: + ByteVectorOutput(std::vector& buf); + virtual ~ByteVectorOutput(); + + virtual status_t open(); + + virtual status_t write(const uint8_t* buf, size_t offset, size_t count); + + virtual status_t close(); + +protected: + std::vector& m_buf; +}; + +class ByteVectorInput : public Input { +public: + ByteVectorInput(const std::vector& buf); + virtual ~ByteVectorInput(); + + /** + * Open this Input. + * + * Returns OK on success, or a negative error code. + */ + status_t open(); + + /** + * Read bytes into the given buffer. At most, the number of bytes given in the + * count argument will be read. Bytes will be written into the given buffer starting + * at the index given in the offset argument. 
+ * + * Returns the number of bytes read, or NOT_ENOUGH_DATA if at the end of the file. If an + * error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA. + */ + ssize_t read(uint8_t* buf, size_t offset, size_t count); + + /** + * Skips bytes in the input. + * + * Returns the number of bytes skipped, or NOT_ENOUGH_DATA if at the end of the file. If an + * error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA. + */ + ssize_t skip(size_t count); + + /** + * Close the Input. It is not valid to call open on a previously closed Input. + * + * Returns OK on success, or a negative error code. + */ + status_t close(); + +protected: + const std::vector& m_buf; + size_t m_offset; +}; + +class ByteBufferInput : public Input { +public: + ByteBufferInput(const uint8_t* buf, size_t len); + virtual ~ByteBufferInput(); + + /** + * Open this Input. + * + * Returns OK on success, or a negative error code. + */ + status_t open(); + + /** + * Read bytes into the given buffer. At most, the number of bytes given in the + * count argument will be read. Bytes will be written into the given buffer starting + * at the index given in the offset argument. + * + * Returns the number of bytes read, or NOT_ENOUGH_DATA if at the end of the file. If an + * error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA. + */ + ssize_t read(uint8_t* buf, size_t offset, size_t count); + + /** + * Skips bytes in the input. + * + * Returns the number of bytes skipped, or NOT_ENOUGH_DATA if at the end of the file. If an + * error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA. + */ + ssize_t skip(size_t count); + + /** + * Close the Input. It is not valid to call open on a previously closed Input. + * + * Returns OK on success, or a negative error code. 
+ */ + status_t close(); + +protected: + const uint8_t* m_buf; + size_t m_len; + size_t m_offset; +}; + +struct SIZE +{ + int width; + int height; +}; + +#define BAIL_IF_INVALID_RET_BOOL(expr, jnienv, tagId, writer) \ + if ((expr) != OK) { \ + return false; \ + } + +#define BAIL_IF_INVALID_RET_NULL_SP(expr, jnienv, tagId, writer) \ + if ((expr) != OK) { \ + return nullptr; \ + } + +#define BAIL_IF_INVALID_R(expr, jnienv, tagId, writer) \ + if ((expr) != OK) { \ + return -1; \ + } + +#define BAIL_IF_EMPTY_RET_NULL_SP(entry, jnienv, tagId, writer) \ + if ((entry).count == 0) { \ + return nullptr; \ + } + +#define BAIL_IF_EXPR_RET_NULL_SP(expr, jnienv, tagId, writer) \ + if (expr) { \ + return nullptr; \ + } + +#define ANDROID_DNGCREATOR_CTX_JNI_ID "mNativeContext" + +enum { + BITS_PER_SAMPLE = 16, + BYTES_PER_SAMPLE = 2, + BYTES_PER_RGB_PIXEL = 3, + BITS_PER_RGB_SAMPLE = 8, + BYTES_PER_RGB_SAMPLE = 1, + SAMPLES_PER_RGB_PIXEL = 3, + SAMPLES_PER_RAW_PIXEL = 1, + TIFF_IFD_0 = 0, + TIFF_IFD_SUB1 = 1, + TIFF_IFD_GPSINFO = 2, +}; + +/** + * POD container class for GPS tag data. + */ +class GpsData { +public: + enum { + GPS_VALUE_LENGTH = 6, + GPS_REF_LENGTH = 2, + GPS_DATE_LENGTH = 11, + }; + + uint32_t mLatitude[GPS_VALUE_LENGTH]; + uint32_t mLongitude[GPS_VALUE_LENGTH]; + uint32_t mTimestamp[GPS_VALUE_LENGTH]; + uint8_t mLatitudeRef[GPS_REF_LENGTH]; + uint8_t mLongitudeRef[GPS_REF_LENGTH]; + uint8_t mDate[GPS_DATE_LENGTH]; +}; + +// ---------------------------------------------------------------------------- + +/** + * Container class for the persistent native context. 
+ */ + +class NativeContext : public LightRefBase { +public: + enum { + DATETIME_COUNT = 20, + }; + + NativeContext(ACameraMetadata* characteristics, ACameraMetadata* result); + virtual ~NativeContext(); + + TiffWriter* getWriter(); + + ACameraMetadata* getCharacteristics() const; + ACameraMetadata* getResult() const; + + uint32_t getThumbnailWidth() const; + uint32_t getThumbnailHeight() const; + const uint8_t* getThumbnail() const; + bool hasThumbnail() const; + + bool setThumbnail(const std::vector& buffer, uint32_t width, uint32_t height); + + void setOrientation(uint16_t orientation); + uint16_t getOrientation() const; + + void setDescription(const std::string& desc); + std::string getDescription() const; + bool hasDescription() const; + + void setGpsData(const GpsData& data); + GpsData getGpsData() const; + bool hasGpsData() const; + + void setCaptureTime(const std::string& formattedCaptureTime); + std::string getCaptureTime() const; + bool hasCaptureTime() const; + +protected: + std::vector mCurrentThumbnail; + TiffWriter mWriter; + ACameraMetadata* mCharacteristics; + ACameraMetadata* mResult; + uint32_t mThumbnailWidth; + uint32_t mThumbnailHeight; + uint16_t mOrientation; + bool mThumbnailSet; + bool mGpsSet; + bool mDescriptionSet; + bool mCaptureTimeSet; + std::string mDescription; + GpsData mGpsData; + std::string mFormattedCaptureTime; +}; + +class DngCreator : public NativeContext +{ + +public: + DngCreator(ACameraMetadata* characteristics, ACameraMetadata* result); + +#if 0 + void setLocation(Location location); +#endif + + void writeInputStream(std::vector& dngOutput, SIZE size, const std::vector& pixels, long offset); + void writeByteBuffer(std::vector& dngOutput, SIZE size, const std::vector& pixels, long offset); + +#if 0 + void writeImage(OutputStream& dngOutput, AImage& pixels); +#endif + + void close(); + + // private static final DateFormat sExifGPSDateStamp = new SimpleDateFormat(GPS_DATE_FORMAT_STR); + // private static final DateFormat 
sDateTimeStampFormat = new SimpleDateFormat(TIFF_DATETIME_FORMAT); +#if 0 + static { + sDateTimeStampFormat.setTimeZone(TimeZone.getDefault()); + sExifGPSDateStamp.setTimeZone(TimeZone.getTimeZone("UTC")); + } +#endif + + /** + * Offset, rowStride, and pixelStride are given in bytes. Height and width are given in pixels. + */ + void writeByteBuffer(int width, int height, const std::vector& pixels, std::vector& dngOutput, int pixelStride, int rowStride, long offset); + + /** + * Generate a direct RGB {@link ByteBuffer} from a {@link Bitmap}. + */ + + /** + * Convert coordinate to EXIF GPS tag format. + */ + void toExifLatLong(double value, int data[6]); + + void init(ACameraMetadata* characteristics, ACameraMetadata* result, const std::string& captureTime); + sp setup(uint32_t imageWidth, uint32_t imageHeight); + void destroy(); + void setGpsTags(const std::vector& latTag, const std::string& latRef, const std::vector& longTag, const std::string& longRef, const std::string& dateTag, const std::vector& timeTag); + void writeImage(std::vector& out, uint32_t width, uint32_t height, const std::vector& rawBuffer, int rowStride, int pixStride, uint64_t offset, bool isDirect); + + void writeInputStream(std::vector& out, const std::vector& rawStream, uint32_t width, uint32_t height, long offset); + + void writeInputBuffer(std::vector& out, const uint8_t* rawBuffer, size_t bufferLen, uint32_t width, uint32_t height, long offset); + +}; diff --git a/app/src/main/cpp/GPIOControl.cpp b/app/src/main/cpp/GPIOControl.cpp index 783025af..38768843 100644 --- a/app/src/main/cpp/GPIOControl.cpp +++ b/app/src/main/cpp/GPIOControl.cpp @@ -31,19 +31,9 @@ typedef struct char str[MAX_STRING_LEN]; }IOT_PARAM; -typedef struct{ - float airtemp; /* 空气温度*/ - float RH; /* 相对湿度*/ - float atmos; /* 大气压*/ - float windspeed; /* 风速*/ - float winddirection; /* 风向*/ - float rainfall; /* 雨量*/ - float sunshine; /* 日照*/ -}Weather; - void GpioControl::setInt(int cmd, int value) { - int fd = 
open("/dev/mtkgpioctrl", O_RDONLY); + int fd = open(GPIO_NODE_MP, O_RDONLY); IOT_PARAM param; param.cmd = cmd; param.value = value; @@ -59,7 +49,7 @@ void GpioControl::setInt(int cmd, int value) int GpioControl::getInt(int cmd) { - int fd = open("/dev/mtkgpioctrl", O_RDONLY); + int fd = open(GPIO_NODE_MP, O_RDONLY); // LOGE("get_int fd=%d,cmd=%d\r\n",fd, cmd); if( fd > 0 ) { @@ -77,7 +67,7 @@ int GpioControl::getInt(int cmd) void GpioControl::setLong(int cmd, long value) { - int fd = open("/dev/mtkgpioctrl", O_RDONLY); + int fd = open(GPIO_NODE_MP, O_RDONLY); IOT_PARAM param; param.cmd = cmd; param.value2 = value; @@ -93,7 +83,7 @@ void GpioControl::setLong(int cmd, long value) long GpioControl::getLong(int cmd) { - int fd = open("/dev/mtkgpioctrl", O_RDONLY); + int fd = open(GPIO_NODE_MP, O_RDONLY); // LOGE("get_long fd=%d,cmd=%d\r\n",fd, cmd); if( fd > 0 ) { @@ -110,8 +100,7 @@ long GpioControl::getLong(int cmd) void GpioControl::setString(int cmd, const std::string& value) { IOT_PARAM param; - // char *pval = jstringToChars(env, value); - int fd = open("/dev/mtkgpioctrl", O_RDONLY); + int fd = open(GPIO_NODE_MP, O_RDONLY); int len = MAX_STRING_LEN < value.size() ? MAX_STRING_LEN : value.size(); param.cmd = cmd; @@ -129,7 +118,7 @@ void GpioControl::setString(int cmd, const std::string& value) std::string GpioControl::getString(int cmd) { - int fd = open("/dev/mtkgpioctrl", O_RDONLY); + int fd = open(GPIO_NODE_MP, O_RDONLY); // LOGE("get_string fd=%d,cmd=%d\r\n",fd, cmd); if( fd > 0 ) { @@ -142,3 +131,65 @@ std::string GpioControl::getString(int cmd) } return ""; } + +#ifdef USING_N938 + +#if 0 +bool GpioControl::SetN938Cmd(int cmd, int val) +{ + char buf[32] = { 0 }; + snprintf(buf, "out %d %d", cmd, val); + + IOT_PARAM param; + int len = MAX_STRING_LEN < strlen(buf) ? 
MAX_STRING_LEN : strlen(buf); + + param.cmd = cmd; + memset(param.str, 0, MAX_STRING_LEN); + memcpy(param.str, value.c_str(), len); + + int fd = open(GPIO_NODE_MP, O_RDONLY); + if( fd > 0 ) + { + ioctl(fd, IOT_PARAM_WRITE, ¶m); + close(fd); + } + return; +} +#endif + +bool GpioControl::OpenSensors() +{ + + GpioControl::setCam3V3Enable(true); + GpioControl::setInt(CMD_SET_485_EN_STATE, true ? 1 : 0); + int igpio; + GpioControl::setInt(CMD_SET_WTH_POWER, 1); + GpioControl::setInt(CMD_SET_PULL_POWER, 1); + GpioControl::setInt(CMD_SET_ANGLE_POWER, 1); + GpioControl::setInt(CMD_SET_OTHER_POWER, 1); + GpioControl::setInt(CMD_SET_PIC1_POWER, 1); + + igpio = GpioControl::getInt(CMD_SET_WTH_POWER); + igpio = GpioControl::getInt(CMD_SET_PULL_POWER); + igpio = GpioControl::getInt(CMD_SET_ANGLE_POWER); + igpio = GpioControl::getInt(CMD_SET_OTHER_POWER); + igpio = GpioControl::getInt(CMD_SET_PIC1_POWER); + + GpioControl::setInt(CMD_SET_SPI_POWER, 1); + GpioControl::setInt(CMD_SET_485_en0, 1); + GpioControl::setInt(CMD_SET_485_en1, 1); + GpioControl::setInt(CMD_SET_485_en2, 1); + GpioControl::setInt(CMD_SET_485_en3, 1); + GpioControl::setInt(CMD_SET_485_en4, 1); + + igpio = GpioControl::getInt(CMD_SET_SPI_POWER); + igpio = GpioControl::getInt(CMD_SET_485_en0); + igpio = GpioControl::getInt(CMD_SET_485_en1); + igpio = GpioControl::getInt(CMD_SET_485_en2); + igpio = GpioControl::getInt(CMD_SET_485_en3); + igpio = GpioControl::getInt(CMD_SET_485_en4); + return 0; + +} + +#endif diff --git a/app/src/main/cpp/GPIOControl.h b/app/src/main/cpp/GPIOControl.h index 70339636..908a8947 100644 --- a/app/src/main/cpp/GPIOControl.h +++ b/app/src/main/cpp/GPIOControl.h @@ -6,6 +6,8 @@ #define MICROPHOTO_GPIOCONTROL_H #include +#include +#include #define CMD_GET_LIGHT_ADC 101 #define CMD_SET_LIGHT_ADC 102 @@ -36,16 +38,86 @@ #define CMD_SET_CAM_3V3_EN_STATE 132 #define CMD_SET_12V_EN_STATE 133 #define CMD_SET_SYSTEM_RESET 202 -#define CMD_SET_WTH_POWER 490 -#define CMD_SET_PULL_POWER 491 
-#define CMD_SET_ANGLE_POWER 492 -#define CMD_SET_OTHER_POWER 493 -#define CMD_SET_PIC1_POWER 494 -#define CMD_SET_485_en0 301 -#define CMD_SET_485_en1 302 -#define CMD_SET_485_en2 303 -#define CMD_SET_485_en3 304 -#define CMD_SET_485_en4 305 + + +#ifdef USING_N938 + + +#define CMD_SET_485_EN_STATE 131 +#define CMD_SET_CAM_3V3_EN_STATE 132 +#define CMD_SET_12V_EN_STATE 133 +#define CMD_SET_485_STATE 121 +#define CMD_SET_SPI_MODE 123 +#define CMD_SET_SPI_BITS_PER_WORD 124 +#define CMD_SET_SPI_MAXSPEEDHZ 125 +#define CMD_SET_SPI_POWER 129 +#define CMD_SET_WTH_POWER 490 +#define CMD_SET_PULL_POWER 491 +#define CMD_SET_ANGLE_POWER 492 +#define CMD_SET_OTHER_POWER 493 +#define CMD_SET_PIC1_POWER 494 +#define CMD_SET_GPIO157_POWER 510 +#define CMD_SET_GPIO5_POWER 511 +#define CMD_SET_PWM_BEE_STATE 126 +#define CMD_SET_ALM_MODE 128 +#define CMD_SET_485_en0 301 +#define CMD_SET_485_en1 302 +#define CMD_SET_485_en2 303 +#define CMD_SET_485_en3 304 +#define CMD_SET_485_en4 305 +#define CMD_SET_OTG_STATE 107 +#define CMD_GET_OTG_STATE 108 + +#if 0 + +#define CMD_485_0_DE 156 // 485_0 DE信号 +#define CMD_485_0_PWR_EN 157 // 485_0 电源使能 +#define CMD_485_0_1_DE_EN 171 // 485_0&1DE电平转换芯片使能信号 +#define CMD_485_1_DE 172 // + +#define CMD_SET_CAM_3V3_EN_STATE 72 // 整板3V3上电使能 +#define CMD_3V3_SWITCH_EN 45 // 整板485_3V3信号电平转换电源使能 + +#define CMD_UART0_EN 73 // 预留UART0电平转换芯片使能 +#define CMD_485_1_PWR_EN 5 // 485_1 电源使能 + +#define CMD_485_3_DE 6 // 485_3 DE信号 +#define CMD_485_2_DE 7 // 485_2 DE信号 +#define CMD_485_4_DE 13 // 485_4 DE信号 +#define CMD_NETWORK_PWR_EN 94 // 100M网络电源使能 + +#define CMD_485_2_PWR_EN 92 // 485_2 电源使能 +#define CMD_485_3_PWR_EN 91 // 485_3 电源使能 +#define CMD_485_4_PWR_EN 90 // 485_4 电源使能 + +#define CMD_SEC_EN 27 // 加密芯片上电使能 + +#define CMD_485_2_3_DE_EN 26 // 485_2&3 DE电平转换芯片使能信号 + +#define CMD_5V_PWR_EN 14 // 整板5V0上电使能 +#define CMD_SD_CARD_DECT 15 // SD CARD DECT +#define CMD_PIC1_EN 16 + +#define CMD_OTHER_EN 21 +#define CMD_ANGLE_EN 22 +#define CMD_PULL_EN 23 +#define 
CMD_WEATHER_EN 24 + +#define CMD_LED_CTRL 46 +#define CMD_BD_EN 47 +#define CMD_ADC_EN 44 + +#define CMD_SPI_PWR_EN 43 // SPI转串口电源使能 + +#endif + +#endif // USING_N938 + +#ifndef USING_N938 +#define GPIO_NODE_N938 "/sys/devices/platform/1000b000.pinctrl/mt_gpio" +#else +#define GPIO_NODE_MP "/dev/mtkgpioctrl" +#endif // USING_N938 class GpioControl @@ -71,7 +143,11 @@ public: static void setCam3V3Enable(bool enabled) { +#ifdef ENABLE_3V3_ALWAYS + setInt(CMD_SET_CAM_3V3_EN_STATE, 1); +#else setInt(CMD_SET_CAM_3V3_EN_STATE, enabled ? 1 : 0); +#endif } static void reboot() @@ -161,6 +237,10 @@ public: static void setSpiPower(bool on) { setInt(CMD_SET_SPI_POWER, on ? 1 : 0); + if (on) + { + std::this_thread::sleep_for(std::chrono::milliseconds(40)); + } } static void setRS485Enable(bool z) { @@ -172,6 +252,12 @@ public: setInt(CMD_SET_12V_EN_STATE, z ? 1 : 0); } +#ifdef USING_N938 + static bool SetN938Cmd(int cmd, int val); + static bool OpenSensors(); + static bool CloseSensors(); +#endif + }; diff --git a/app/src/main/cpp/MicroPhoto.cpp b/app/src/main/cpp/MicroPhoto.cpp index 0b31ffef..3d5a67b6 100644 --- a/app/src/main/cpp/MicroPhoto.cpp +++ b/app/src/main/cpp/MicroPhoto.cpp @@ -10,9 +10,12 @@ #include #include #include +#include "ncnn/yolov5ncnn.h" #include + + #define NRSEC_PATH "/dev/spidev0.0" #ifdef USING_BREAK_PAD @@ -326,7 +329,7 @@ Java_com_xypower_mpapp_MicroPhotoService_init( pTerminal->InitServerInfo(MakeString(appPathStr), MakeString(cmdidStr), MakeString(ipStr), port, udpOrTcp, encryptData); // pTerminal->SetPacketSize(1 * 1024); // 1K -#ifdef USING_NRSEC +#if defined(USING_NRSEC) && !defined(USING_NRSEC_VPN) pTerminal->InitEncryptionInfo(simcardStr, "/dev/spidev0.0", ""); #endif bool res = pTerminal->Startup(device); @@ -394,9 +397,9 @@ Java_com_xypower_mpapp_MicroPhotoService_takePhoto( if (photoInfo.usbCamera) { - device->TurnOnOtg(NULL); + CPhoneDevice::TurnOnOtg(NULL); } - device->TurnOnCameraPower(NULL); + CPhoneDevice::TurnOnCameraPower(NULL); 
std::vector osds; osds.resize(4); @@ -415,11 +418,11 @@ Java_com_xypower_mpapp_MicroPhotoService_takePhoto( env->ReleaseStringUTFChars(path, pathStr); - device->TurnOffCameraPower(NULL); - if (photoInfo.usbCamera) - { - device->TurnOffOtg(NULL); - } + // device->TurnOffCameraPower(NULL); + // if (photoInfo.usbCamera) + //{ + // device->TurnOffOtg(NULL); + //} return reinterpret_cast(device); } @@ -497,6 +500,10 @@ Java_com_xypower_mpapp_MicroPhotoService_uninit( } pTerminal->SignalExit(); pTerminal->Shutdown(); + if (dev != NULL) + { + delete dev; + } delete pTerminal; @@ -636,6 +643,90 @@ Java_com_xypower_mpapp_MicroPhotoService_getPhotoTimeData( return data; } +extern "C" JNIEXPORT jintArray JNICALL +Java_com_xypower_mpapp_MicroPhotoService_recoganizePicture( + JNIEnv* env, + jclass cls, jstring paramPath, jstring binPath, jstring blobName8, jstring blobName16, jstring blobName32, jstring picPath) { + + const char* pParamPathStr = env->GetStringUTFChars(paramPath, 0); + std::string paramPathStr = MakeString(pParamPathStr); + env->ReleaseStringUTFChars(paramPath, pParamPathStr); + + const char* pBinPathStr = env->GetStringUTFChars(binPath, 0); + std::string binPathStr = MakeString(pBinPathStr); + env->ReleaseStringUTFChars(binPath, pBinPathStr); + + const char* pBlobName8Str = env->GetStringUTFChars(blobName8, 0); + std::string blobName8Str = MakeString(pBlobName8Str); + env->ReleaseStringUTFChars(blobName8, pBlobName8Str); + + const char* pBlobName16Str = env->GetStringUTFChars(blobName16, 0); + std::string blobName16Str = MakeString(pBlobName16Str); + env->ReleaseStringUTFChars(blobName16, pBlobName16Str); + + const char* pBlobName32Str = env->GetStringUTFChars(blobName32, 0); + std::string blobName32Str = MakeString(pBlobName32Str); + env->ReleaseStringUTFChars(blobName32, pBlobName32Str); + + const char* pPicPathStr = env->GetStringUTFChars(picPath, 0); + std::string picPathStr = MakeString(pPicPathStr); + env->ReleaseStringUTFChars(picPath, pPicPathStr); + + 
cv::Mat mat = cv::imread(picPathStr); + + if (mat.empty()) + { + return NULL; + } + + std::vector dataArray; + ncnn_init(); + + ncnn::Net net; + + bool res = YoloV5Ncnn_Init(net, paramPathStr, binPathStr); + if (res) + { + std::vector objs; + res = YoloV5NcnnDetect(net, mat, true, blobName8Str, blobName16Str, blobName32Str, objs); + if (res && !objs.empty()) + { + for (std::vector::const_iterator it = objs.cbegin(); it != objs.cend(); ++it) + { + // float x; + // float y; + // float w; + // float h; + // int label; + // float prob; + dataArray.push_back(it->x); + dataArray.push_back(it->y); + dataArray.push_back(it->w); + dataArray.push_back(it->h); + dataArray.push_back(it->label); + dataArray.push_back((int)(it->prob * 100.0f)); + } + } + } + + // ncnn_uninit(); + + if (dataArray.empty()) + { + return NULL; + } + + jintArray data = env->NewIntArray(dataArray.size()); + if (data == NULL) { + return NULL; + } + + env->SetIntArrayRegion(data, 0, dataArray.size(), &dataArray[0]); + + return data; +} + + /* extern "C" JNIEXPORT jlongArray JNICALL Java_com_xypower_mpapp_MicroPhotoService_getNextScheduleItem( @@ -704,10 +795,106 @@ Java_com_xypower_mpapp_MicroPhotoService_getNextScheduleItem( */ +extern "C" JNIEXPORT void JNICALL +Java_com_xypower_mpapp_MicroPhotoService_captureFinished( + JNIEnv* env, + jobject pThis, jlong handler, jboolean photoOrVideo, jboolean result, jobject bitmap, jlong photoId) { + + CTerminal* pTerminal = reinterpret_cast(handler); + if (pTerminal == NULL) + { + return; + } + + IDevice* dev = pTerminal->GetDevice(); + if (dev != NULL) + { + if (result == JNI_FALSE || bitmap == NULL) + { + cv::Mat mat; + ((CPhoneDevice *)dev)->OnCaptureReady(photoOrVideo != JNI_FALSE, result != JNI_FALSE, mat, (unsigned long)photoId); + return; + } + AndroidBitmapInfo info = { 0 }; + int res = AndroidBitmap_getInfo(env, bitmap, &info); + if (res < 0 || info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) + { + } + + bool hardwareBitmap = (info.flags & 
ANDROID_BITMAP_FLAGS_IS_HARDWARE) != 0; + void* pixels = NULL; + AHardwareBuffer* hardwareBuffer = NULL; + if (hardwareBitmap) + { +#if 0 + res = AndroidBitmap_getHardwareBuffer(env, bitmap, &hardwareBuffer); + int32_t fence = -1; + res = AHardwareBuffer_lock(hardwareBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, fence, NULL, &pixels); + + cv::Mat mat(info.height, info.width, CV_8UC4, pixels); + AHardwareBuffer_unlock(hardwareBuffer, &fence); + + cv::cvtColor(mat, mat, cv::COLOR_RGB2BGR); + + ((CPhoneDevice *)dev)->OnCaptureReady(photoOrVideo != JNI_FALSE, result != JNI_FALSE, mat, (unsigned long)photoId); +#endif // 0 + } else + { + res = AndroidBitmap_lockPixels(env, bitmap, &pixels); + cv::Mat tmp(info.height, info.width, CV_8UC4, pixels); + + cv::Mat raw(info.height, info.width, CV_8UC4); + + // tmp.copyTo(mat); + cv::cvtColor(tmp, raw, cv::COLOR_RGBA2BGR); + + cv::Mat mat; + cv::fastNlMeansDenoisingColored(raw, mat, 13, 13, 7, 21); + + AndroidBitmap_unlockPixels(env, bitmap); + + ((CPhoneDevice *)dev)->OnCaptureReady(photoOrVideo != JNI_FALSE, result != JNI_FALSE, mat, (unsigned long)photoId); + } + } +} + +extern "C" JNIEXPORT void JNICALL +Java_com_xypower_mpapp_MicroPhotoService_burstCaptureFinished( + JNIEnv* env, + jobject pThis, jlong handler, jboolean result, jint numberOfCaptures, + jstring pathsJoinedByTab, jboolean frontCamera, jint rotation, jlong photoId) { + + CTerminal* pTerminal = reinterpret_cast(handler); + if (pTerminal == NULL) + { + return; + } + + /// HDRPlus +#ifdef USING_HDRPLUS + + +#endif + IDevice* dev = pTerminal->GetDevice(); + if (dev != NULL) + { + if (result == JNI_FALSE) + { + cv::Mat mat; + ((CPhoneDevice *)dev)->OnCaptureReady(true, false, mat, (unsigned long)photoId); + return; + } + + const char* pathsStr = env->GetStringUTFChars(pathsJoinedByTab, 0); + ((CPhoneDevice *)dev)->ProcessRawCapture(result != JNI_FALSE, numberOfCaptures, MakeString(pathsStr), frontCamera != JNI_FALSE, rotation, photoId); + 
env->ReleaseStringUTFChars(pathsJoinedByTab, pathsStr); + } +} + extern "C" JNIEXPORT void JNICALL Java_com_xypower_mpapp_MicroPhotoService_recordingFinished( JNIEnv* env, - jobject pThis, jlong handler, jboolean result, jstring path, jlong videoId) { + jobject pThis, jlong handler, jboolean photoOrVideo, jboolean result, jstring path, jlong videoId) { CTerminal* pTerminal = reinterpret_cast(handler); if (pTerminal == NULL) @@ -726,7 +913,7 @@ Java_com_xypower_mpapp_MicroPhotoService_recordingFinished( // camera->Open(pathStr, fileNameStr); unsigned long photoId = videoId; - ((CPhoneDevice *)dev)->OnVideoReady(result != JNI_FALSE, pathStr, photoId); + ((CPhoneDevice *)dev)->OnVideoReady(photoOrVideo != JNI_FALSE, result != JNI_FALSE, pathStr, photoId); if (path != NULL) { env->ReleaseStringUTFChars(path, pathStr); @@ -735,7 +922,7 @@ Java_com_xypower_mpapp_MicroPhotoService_recordingFinished( } -extern "C" JNIEXPORT void JNICALL +extern "C" JNIEXPORT jboolean JNICALL Java_com_xypower_mpapp_MicroPhotoService_reloadConfigs( JNIEnv* env, jobject pThis, jlong handler) { @@ -743,13 +930,36 @@ Java_com_xypower_mpapp_MicroPhotoService_reloadConfigs( CTerminal* pTerminal = reinterpret_cast(handler); if (pTerminal == NULL) { - return; + return JNI_FALSE; } - pTerminal->LoadAppConfigs(); + bool res = pTerminal->LoadAppConfigs(); + return res ? JNI_TRUE : JNI_FALSE; } +extern "C" JNIEXPORT jboolean JNICALL +Java_com_xypower_mpapp_MicroPhotoService_sendExternalPhoto( + JNIEnv* env, jclass cls, jlong handler, jstring path) { + + CTerminal* pTerminal = reinterpret_cast(handler); + if (pTerminal == NULL) + { + return JNI_FALSE; + } + + if (env->GetStringUTFLength(path) <=0) + { + return JNI_FALSE; + } + + const char *pathStr = env->GetStringUTFChars(path, 0); + bool res = pTerminal->SendExternalPhoto(pathStr); + env->ReleaseStringUTFChars(path, pathStr); + + return res ? 
JNI_TRUE : JNI_FALSE; +} + extern "C" JNIEXPORT void JNICALL Java_com_xypower_mpapp_MicroPhotoService_infoLog( JNIEnv* env, jclass cls, jstring msg) { @@ -814,7 +1024,8 @@ Java_com_xypower_mpapp_MicroPhotoService_importPublicKeyFile( const char *md5Str = env->GetStringUTFChars(md5, 0); - GpioControl::setCam3V3Enable(true); + GpioControl::setSpiPower(false); + CPhoneDevice::TurnOnCameraPower(NULL); GpioControl::setSpiPower(true); NrsecPort nrsec; @@ -827,7 +1038,7 @@ Java_com_xypower_mpapp_MicroPhotoService_importPublicKeyFile( } GpioControl::setSpiPower(false); - GpioControl::setCam3V3Enable(false); + CPhoneDevice::TurnOffCameraPower(NULL); env->ReleaseStringUTFChars(md5, md5Str); @@ -849,7 +1060,8 @@ Java_com_xypower_mpapp_MicroPhotoService_importPublicKey( return JNI_FALSE; } - GpioControl::setCam3V3Enable(true); + GpioControl::setSpiPower(false); + CPhoneDevice::TurnOnCameraPower(NULL); GpioControl::setSpiPower(true); NrsecPort nrsec; @@ -864,7 +1076,44 @@ Java_com_xypower_mpapp_MicroPhotoService_importPublicKey( } GpioControl::setSpiPower(false); - GpioControl::setCam3V3Enable(false); + CPhoneDevice::TurnOffCameraPower(NULL); + + return res ? 
JNI_TRUE : JNI_FALSE; +#else + return JNI_FALSE; +#endif +} + + +extern "C" JNIEXPORT jboolean JNICALL +Java_com_xypower_mpapp_MicroPhotoService_importPrivateKey( + JNIEnv* env, jclass cls, jint index, jbyteArray cert) { + +#ifdef USING_NRSEC + + int byteCertLen = env->GetArrayLength(cert); + if (byteCertLen <= 0) + { + return JNI_FALSE; + } + + GpioControl::setSpiPower(false); + CPhoneDevice::TurnOnCameraPower(NULL); + GpioControl::setSpiPower(true); + + NrsecPort nrsec; + const char *path = NRSEC_PATH; + bool res = nrsec.Open(path); + if (res) + { + jbyte* byteCert = env->GetByteArrayElements(cert, 0); + res = nrsec.SM2ImportPrivateKey(index, (const uint8_t*)byteCert) == 0; + nrsec.Close(); + env->ReleaseByteArrayElements(cert, byteCert, JNI_ABORT); + } + + GpioControl::setSpiPower(false); + CPhoneDevice::TurnOffCameraPower(NULL); return res ? JNI_TRUE : JNI_FALSE; #else @@ -886,7 +1135,9 @@ Java_com_xypower_mpapp_MicroPhotoService_genKeys( jclass cls, jint index) { #ifdef USING_NRSEC - GpioControl::setCam3V3Enable(true); + + GpioControl::setSpiPower(false); + CPhoneDevice::TurnOnCameraPower(NULL); GpioControl::setSpiPower(true); const char *path = NRSEC_PATH; @@ -900,7 +1151,7 @@ Java_com_xypower_mpapp_MicroPhotoService_genKeys( } GpioControl::setSpiPower(false); - GpioControl::setCam3V3Enable(false); + CPhoneDevice::TurnOffCameraPower(NULL); return res ? 
JNI_TRUE : JNI_FALSE; #else @@ -915,7 +1166,8 @@ Java_com_xypower_mpapp_MicroPhotoService_querySecVersion( std::string version; #ifdef USING_NRSEC - GpioControl::setCam3V3Enable(true); + GpioControl::setSpiPower(false); + CPhoneDevice::TurnOnCameraPower(NULL); GpioControl::setSpiPower(true); const char *path = NRSEC_PATH; @@ -929,7 +1181,7 @@ Java_com_xypower_mpapp_MicroPhotoService_querySecVersion( } GpioControl::setSpiPower(false); - GpioControl::setCam3V3Enable(false); + CPhoneDevice::TurnOffCameraPower(NULL); #endif return env->NewStringUTF(version.c_str()); } @@ -945,7 +1197,8 @@ Java_com_xypower_mpapp_MicroPhotoService_genCertRequest( } const char *path = NRSEC_PATH; - GpioControl::setCam3V3Enable(true); + GpioControl::setSpiPower(false); + CPhoneDevice::TurnOnCameraPower(NULL); GpioControl::setSpiPower(true); uint8_t output[1024] = { 0 }; @@ -962,6 +1215,9 @@ Java_com_xypower_mpapp_MicroPhotoService_genCertRequest( env->ReleaseStringUTFChars(subject, subjectStr); } + GpioControl::setSpiPower(false); + CPhoneDevice::TurnOffCameraPower(NULL); + if (res) { const char* outputPathStr = env->GetStringUTFChars(outputPath, 0); @@ -999,7 +1255,8 @@ Java_com_xypower_mpapp_MicroPhotoService_importPrivateKeyFile( const char *path = NRSEC_PATH; - GpioControl::setCam3V3Enable(true); + GpioControl::setSpiPower(false); + CPhoneDevice::TurnOnCameraPower(NULL); GpioControl::setSpiPower(true); NrsecPort nrsec; @@ -1011,7 +1268,7 @@ Java_com_xypower_mpapp_MicroPhotoService_importPrivateKeyFile( } GpioControl::setSpiPower(false); - GpioControl::setCam3V3Enable(false); + CPhoneDevice::TurnOffCameraPower(NULL); // const char *md5Str = env->GetStringUTFChars(md5, 0); // env->ReleaseStringUTFChars(md5, md5Str); @@ -1038,7 +1295,8 @@ Java_com_xypower_mpapp_MicroPhotoService_exportPublicKeyFile( uint8_t len = 0; std::vector data(64, 0); - GpioControl::setCam3V3Enable(true); + GpioControl::setSpiPower(false); + CPhoneDevice::TurnOnCameraPower(NULL); GpioControl::setSpiPower(true); 
NrsecPort nrsec; @@ -1050,7 +1308,7 @@ Java_com_xypower_mpapp_MicroPhotoService_exportPublicKeyFile( } GpioControl::setSpiPower(false); - GpioControl::setCam3V3Enable(false); + CPhoneDevice::TurnOffCameraPower(NULL); if (res) { @@ -1077,7 +1335,8 @@ Java_com_xypower_mpapp_MicroPhotoService_exportPrivateFile( const char *path = NRSEC_PATH; - GpioControl::setCam3V3Enable(true); + GpioControl::setSpiPower(false); + CPhoneDevice::TurnOnCameraPower(NULL); GpioControl::setSpiPower(true); NrsecPort nrsec; @@ -1093,7 +1352,7 @@ Java_com_xypower_mpapp_MicroPhotoService_exportPrivateFile( nrsec.Close(); GpioControl::setSpiPower(false); - GpioControl::setCam3V3Enable(false); + CPhoneDevice::TurnOffCameraPower(NULL); if (res) { diff --git a/app/src/main/cpp/PhoneDevice.cpp b/app/src/main/cpp/PhoneDevice.cpp index b160d412..27de01f1 100644 --- a/app/src/main/cpp/PhoneDevice.cpp +++ b/app/src/main/cpp/PhoneDevice.cpp @@ -7,6 +7,8 @@ #include "ncnn/yolov5ncnn.h" #include "GPIOControl.h" #include "CvText.h" +#include "PositionHelper.h" +#include "DngCreator.h" #include #include @@ -18,10 +20,18 @@ #include #include +#include #include -#include +#include +#include +#ifdef USING_HDRPLUS +#include +#endif + +#include #include +#include namespace fs = std::filesystem; #define CMD_SET_485_EN_STATE 131 @@ -36,6 +46,33 @@ extern bool GetJniEnv(JavaVM *vm, JNIEnv **env, bool& didAttachThread); // are normalized to eight bits. 
static const int kMaxChannelValue = 262143; + +cv::Mat convert16bit2_8bit_(cv::Mat ans){ + if(ans.type()==CV_16UC3){ + cv::MatIterator_ it, end; + for( it = ans.begin(), end = ans.end(); it != end; ++it) + { + // std::cout< characteristics, std::vector >& results, uint32_t ldr, std::vector >& frames) +{ + if (m_dev != NULL) + { + return m_dev->onBurstCapture(characteristics, results, ldr, frames); + } + return false; +} + void CPhoneDevice::CPhoneCamera::on_error(const std::string& msg) { if (m_dev != NULL) @@ -160,7 +206,123 @@ void CPhoneDevice::CPhoneCamera::onDisconnected(ACameraDevice* device) } } -CPhoneDevice::CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPath, unsigned int netId, unsigned int versionCode) : mCameraPowerCount(0), mOtgCount(0), mVersionCode(versionCode) + +CPhoneDevice::CJpegCamera::CJpegCamera(CPhoneDevice* dev, int32_t width, int32_t height, const std::string& path, const NdkCamera::CAMERA_PARAMS& params) : CPhoneDevice::CPhoneCamera(dev, width, height, params), m_path(path) +{ +} + +bool CPhoneDevice::CJpegCamera::onBurstCapture(std::shared_ptr characteristics, std::vector >& results, uint32_t ldr, std::vector >& frames) +{ + if (m_dev != NULL) + { + m_dev->onBurstCapture(characteristics, results, ldr, frames); + } + return true; +} + +void CPhoneDevice::CJpegCamera::onImageAvailable(AImageReader* reader) +{ + ALOGD("onImageAvailable %p", reader); + + AImage* image = 0; + media_status_t mstatus = AImageReader_acquireLatestImage(reader, &image); + + if (mstatus != AMEDIA_OK) + { + // error + // https://stackoverflow.com/questions/67063562 + if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE) + { + XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireLatestImage error: %d", mstatus); + } + return; + } + + uint8_t* y_data = 0; + int y_len = 0; +#if 0 + if (!lightDetected) + { + AImage_getPlaneData(image, 0, &y_data, &y_len); + + lightDetected = true; + +#if __cplusplus >= 201703L + uint64_t avgY = std::reduce(y_data, y_data + 
y_len, 0); +#else + uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0); +#endif + avgY = avgY / (uint64_t)y_len; + mLdr = avgY; +#if 1 + if (avgY < 50) + { + if (m_params.autoExposure) + { + uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF; + camera_status_t status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode); + + int32_t sensitivity = (avgY < 5) ? 2000 : (mResult.sensitivity * 60.0 / avgY); + status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity); + + int64_t exposureTime = (avgY < 5) ? 200 * 1000000 : (mResult.exposureTime * 120.0 / avgY); + status = ACaptureRequest_setEntry_i64(capture_request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime); + + XYLOG(XYLOG_SEVERITY_WARNING, "YUV Light: %u EXPO:%lld => %lld ISO: %u => %u", (uint32_t)avgY, + mResult.exposureTime, exposureTime, mResult.sensitivity, sensitivity); + } + AImage_delete(image); + return; + } +#endif + } +#endif + + int32_t format; + AImage_getFormat(image, &format); + + if (format == AIMAGE_FORMAT_JPEG) + { + int planeCount; + media_status_t status = AImage_getNumberOfPlanes(image, &planeCount); + + // LOGI("Info: getNumberOfPlanes() planeCount = %d", planeCount); + if (!(status == AMEDIA_OK && planeCount == 1)) + { + // LOGE("Error: getNumberOfPlanes() planeCount = %d", planeCount); + return; + } + + uint8_t *data = nullptr; + int len = 0; + AImage_getPlaneData(image, 0, &data, &len); + + FILE *file = fopen(m_path.c_str(), "wb"); + if (file && data && len) + { + fwrite(data, 1, len, file); + fclose(file); + } + else + { + if (file) + fclose(file); + } + } + + AImage_delete(image); +} + +int32_t CPhoneDevice::CJpegCamera::getOutputFormat() const +{ + return AIMAGE_FORMAT_JPEG; +} + +std::mutex CPhoneDevice::m_powerLocker; +long CPhoneDevice::mCameraPowerCount = 0; +long CPhoneDevice::mOtgCount = 0; + +CPhoneDevice::CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPath, unsigned int netId, unsigned 
int versionCode) : mVersionCode(versionCode) { mCamera = NULL; m_listener = NULL; @@ -195,7 +357,7 @@ CPhoneDevice::CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPa mRegisterHeartbeatMid = env->GetMethodID(classService, "registerHeartbeatTimer", "(IJ)V"); mUpdateTimeMid = env->GetMethodID(classService, "updateTime", "(J)Z"); mUpdateCaptureScheduleMid = env->GetMethodID(classService, "updateCaptureSchedule", "(J)Z"); - mStartRecordingMid = env->GetMethodID(classService, "startRecording", "(IJIIIIILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V"); + mStartRecordingMid = env->GetMethodID(classService, "startRecording", "(ZIJIIIIILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V"); mRequestWakelockMid = env->GetMethodID(classService, "requestWakelock", "(Ljava/lang/String;J)V"); mReleaseWakelockMid = env->GetMethodID(classService, "releaseWakelock", "(Ljava/lang/String;)V"); @@ -308,16 +470,18 @@ bool CPhoneDevice::SelfTest(std::string& result) { result.clear(); + const char* ITEM_SEP = "\t"; // unsigned int numberOfChannels = 0; - result += "设备自检 版本:" + GetVersion() + NEW_LINE_TAG; + result += "设备自检 版本:" + GetVersion() + ITEM_SEP; Json::Value appConfig = Json::objectValue; std::vector content; std::string filePath = m_appPath + (APP_DATA_DIR DIR_SEP_STR APP_FILE_NAME_APP_CONF); if (!readFile(filePath, content)) { - result += ("读取系统配置文件App.json失败" NEW_LINE_TAG); + result += "读取系统配置文件App.json失败"; + result += ITEM_SEP; } else { @@ -331,16 +495,16 @@ bool CPhoneDevice::SelfTest(std::string& result) if (GetJSONUInt32Value(appConfig, "channels", val) && (val > 0 && val <= 255)) { numberOfChannels = val; - result += "通道数:" + std::to_string(numberOfChannels) + NEW_LINE_TAG; + result += "通道数:" + std::to_string(numberOfChannels) + ITEM_SEP; } else { - result += "通道数未定义或者无效" NEW_LINE_TAG; + result += "通道数未定义或者无效" + std::string(ITEM_SEP); } } else { - result += "解析系统配置文件App.json失败" NEW_LINE_TAG; + result += 
"解析系统配置文件App.json失败" + std::string(ITEM_SEP); } } @@ -355,7 +519,7 @@ bool CPhoneDevice::SelfTest(std::string& result) filePath = m_appPath + (APP_DATA_DIR DIR_SEP_STR APP_FILE_NAME_APP_CONF); if (!readFile(filePath, content)) { - result += "读取通道" + std::to_string(channel) + "配置文件失败" NEW_LINE_TAG; + result += "读取通道" + std::to_string(channel) + "配置文件失败" + std::string(ITEM_SEP); } else { @@ -368,17 +532,17 @@ bool CPhoneDevice::SelfTest(std::string& result) GetJSONUInt8Value(channelCfg, "usbCamera", usbCamera); if (GetJSONUInt8Value(channelCfg, "cameraId", cameraId)) { - result += "通道" + std::to_string(channel) + " Camera ID为 " + std::to_string(cameraId) + NEW_LINE_TAG; + result += "通道" + std::to_string(channel) + " Camera ID为 " + std::to_string(cameraId) + ITEM_SEP; } else { cameraId = channel - 1; - result += "通道" + std::to_string(channel) + "未定义Camera ID, 使用默认值 " + std::to_string(cameraId) + NEW_LINE_TAG; + result += "通道" + std::to_string(channel) + "未定义Camera ID, 使用默认值 " + std::to_string(cameraId) + ITEM_SEP; } } else { - result += "解析通道" + std::to_string(channel) + "配置文件App.json失败" NEW_LINE_TAG; + result += "解析通道" + std::to_string(channel) + "配置文件App.json失败" + std::string(ITEM_SEP); } } @@ -400,11 +564,11 @@ bool CPhoneDevice::SelfTest(std::string& result) } if (res == 0) { - result += "通道" + std::to_string(channel) + "正常:最大分辨率:" + std::to_string(width) + "x" + std::to_string(height) + NEW_LINE_TAG; + result += "通道" + std::to_string(channel) + "正常:最大分辨率:" + std::to_string(width) + "x" + std::to_string(height) + ITEM_SEP; } else { - result += "通道" + std::to_string(channel) + " 异常 err=" + std::to_string(res) + NEW_LINE_TAG; + result += "通道" + std::to_string(channel) + " 异常 err=" + std::to_string(res) + ITEM_SEP; } } @@ -412,19 +576,19 @@ bool CPhoneDevice::SelfTest(std::string& result) if (bv > 0) { bv -= bv % 100; - result += std::string("电池电压:") + std::to_string(bv / 1000) + std::string(".") + std::to_string((bv % 1000) / 100) + NEW_LINE_TAG; + result += 
std::string("电池电压:") + std::to_string(bv / 1000) + std::string(".") + std::to_string((bv % 1000) / 100) + ITEM_SEP; } fs::space_info si = fs::space("/data"); double fr = ((double)si.available * 100.0f) / ((double)si.capacity); result += "可用存储:"; result += std::to_string((int)fr); - result += "%%" NEW_LINE_TAG; + result += "%%" + std::string(ITEM_SEP); long fm = android_os_Process_getFreeMemory(); long tm = android_os_Process_getTotalMemory(); double fmp = ((double)fm * 100.0f) / ((double)tm); - result += std::string("可用内存:") + std::to_string((int)fmp) + std::string("%%" NEW_LINE_TAG); + result += std::string("可用内存:") + std::to_string((int)fmp) + std::string("%%") + ITEM_SEP; if (!m_tfCardPath.empty()) { @@ -432,12 +596,12 @@ bool CPhoneDevice::SelfTest(std::string& result) double fr2 = ((double)si2.available * 100.0f) / ((double)si2.capacity); result += "TF卡可用空间:"; result += std::to_string((int)fr2); - result += "%%" NEW_LINE_TAG; + result += "%%" + std::string(ITEM_SEP); } result += "4G信号强度:"; result += std::to_string(m_signalLevel); - result += NEW_LINE_TAG; + result += ITEM_SEP; result += "网络接口:"; std::vector devices; @@ -447,7 +611,7 @@ bool CPhoneDevice::SelfTest(std::string& result) result += (*it); result += " "; } - // result += NEW_LINE_TAG; + // result += ITEM_SEP; return true; } @@ -530,19 +694,19 @@ bool CPhoneDevice::QuerySystemProperties(std::map& pro else if (it->first == PROP_MODEL) { __system_property_get("ro.product.model", value); - it->second = value; + it->second = std::string(value); } else if (it->first == PROP_BS_MANU) { __system_property_get("ro.product.manufacturer", value); - it->second = value; + it->second = std::string(value); } else if (it->first == PROP_VERSION) { // FOR Protocol snprintf(value, sizeof(value), "%u.%03u", (mVersionCode / 1000), (mVersionCode % 1000)); // __system_property_get("ro.build.version.release", value); - it->second = value; + it->second = std::string(value); } else if (it->first == (PROP_VERSION_ABBR)) { @@ 
-560,19 +724,19 @@ bool CPhoneDevice::QuerySystemProperties(std::map& pro else if (it->first == PROP_PROD_DATE) { __system_property_get("ro.build.date.utc", value); - it->second = value; + it->second = std::string(value); } else if (it->first == PROP_SN || it->first == PROP_BS_ID) { __system_property_get("ro.serialno", value); - it->second = value; + it->second = std::string(value); } else if (it->first == PROP_IMEI) { if (m_simcard.empty()) { __system_property_get("phone.imei", value); - it->second = value; + it->second = std::string(value); } else { @@ -592,9 +756,8 @@ bool CPhoneDevice::QuerySystemProperties(std::map& pro { fs::space_info si = fs::space("/data"); double fr = ((double)si.available * 100.0f) / ((double)si.capacity); - char buf[12] = { 0 }; - snprintf(buf, sizeof(buf), "%d%%", (int)fr); - it->second = buf; + snprintf(value, sizeof(value), "%d%%", (int)fr); + it->second = std::string(value); } else if (it->first == PROP_TOTAL_ROM) { @@ -610,9 +773,8 @@ bool CPhoneDevice::QuerySystemProperties(std::map& pro long fm = android_os_Process_getFreeMemory(); long tm = android_os_Process_getTotalMemory(); double fmp = ((double)fm * 100.0f) / ((double)tm); - char buf[12] = { 0 }; - snprintf(buf, sizeof(buf), "%d%%", (int)fmp); - it->second = buf; // Unit: M + snprintf(value, sizeof(value), "%d%%", (int)fmp); + it->second = std::string(value); // Unit: M } else if (it->first == PROP_TOTAL_MEMORY) { @@ -643,7 +805,7 @@ bool CPhoneDevice::QuerySystemProperties(std::map& pro continue; } snprintf(str, sizeof(str), "%.1f", (val / 1000.0)); - it->second = str; + it->second = std::string(str); break; } } @@ -657,9 +819,8 @@ bool CPhoneDevice::QuerySystemProperties(std::map& pro if (val > 0) { bv = val; - char str[32] = { 0 }; - snprintf(str, sizeof(str), "%.1f", val / 1000.0); - it->second = str; + snprintf(value, sizeof(value), "%.1f", val / 1000.0); + it->second = std::string(value); } else { @@ -701,7 +862,7 @@ bool CPhoneDevice::QuerySystemProperties(std::map& 
pro char str[32] = { 0 }; float batteryCurrent = STANDARD_CURRENT_64V / ((float)bv / 1000.0f / STANDARD_VOLTAGE_64V); snprintf(str, sizeof(str), "%d", (int)batteryCurrent); - it->second = str; + it->second = std::string(str); } } // __system_property_get("ro.telephony.default_network", value); @@ -1179,6 +1340,8 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector< params.requestTemplate = mPhotoInfo.requestTemplate; params.awbMode = mPhotoInfo.awbMode; params.wait3ALocked = mPhotoInfo.wait3ALocked; + params.burstRawCapture = mPhotoInfo.usingRawFormat; + params.burstCaptures = mPhotoInfo.burstCaptures; if (params.requestTemplate <= 0 || params.requestTemplate > 5) { params.requestTemplate = 2; @@ -1196,7 +1359,6 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector< } #endif - // GpioControl::EnableGpio(CMD_SET_CAM_3V3_EN_STATE, true); bool res = false; if (photoInfo.usbCamera) @@ -1206,9 +1368,10 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector< TurnOnCameraPower(NULL); res = true; - if (mPhotoInfo.mediaType == 0) + if (mPhotoInfo.mediaType == 0/* && mPhotoInfo.usingRawFormat == 0*/) { mCamera = new CPhoneCamera(this, photoInfo.width, photoInfo.height, params); + // mCamera = new CJpegCamera(this, photoInfo.width, photoInfo.height, mPath, params); if (mCamera->open(to_string(mPhotoInfo.cameraId)) == 0) { XYLOG(XYLOG_SEVERITY_DEBUG, "TP: Succeeded to OpenCamera CH=%u PR=%X PHOTOID=%u", (unsigned int)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId); @@ -1267,8 +1430,10 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector< } int orientation = mPhotoInfo.orientation == 0 ? 
-1 : (mPhotoInfo.orientation - 1) * 90; - env->CallVoidMethod(m_javaService, mStartRecordingMid, mPhotoInfo.cameraId, (unsigned long)mPhotoInfo.photoId, mPhotoInfo.duration, mPhotoInfo.width, mPhotoInfo.height, - mPhotoInfo.duration, orientation, leftTopOSD, rightTopOSD, rightBottomOSD, leftBottomOSD); + jboolean photoOrVideo = mPhotoInfo.mediaType == 0 ? JNI_TRUE : JNI_FALSE; + env->CallVoidMethod(m_javaService, mStartRecordingMid, photoOrVideo, mPhotoInfo.cameraId, (unsigned long)mPhotoInfo.photoId, + mPhotoInfo.duration, mPhotoInfo.width, mPhotoInfo.height, mPhotoInfo.duration, orientation, + leftTopOSD, rightTopOSD, rightBottomOSD, leftBottomOSD); if (leftTopOSD) env->DeleteLocalRef(leftTopOSD); if (rightTopOSD) env->DeleteLocalRef(rightTopOSD); @@ -1307,13 +1472,13 @@ void CPhoneDevice::CloseCamera2(CPhoneDevice::CPhoneCamera* camera, unsigned int delete camera; } - XYLOG(XYLOG_SEVERITY_DEBUG, "TP: Will Turn Off Power=%u", photoId); + XYLOG(XYLOG_SEVERITY_DEBUG, "TP: Will Turn Off Power PHOTOID=%u", photoId); if (turnOffOtg) { TurnOffOtg(NULL); } TurnOffCameraPower(NULL); - XYLOG(XYLOG_SEVERITY_DEBUG, "TP: End Turn Off Power=%u", photoId); + XYLOG(XYLOG_SEVERITY_DEBUG, "TP: End Turn Off Power PHOTOID=%u", photoId); XYLOG(XYLOG_SEVERITY_DEBUG, "TP: CloseCamera PHOTOID=%u", photoId); @@ -1347,14 +1512,315 @@ void DrawOutlineText(cv::Ptr ft2, cv::Mat& mat, const std::st } } -bool CPhoneDevice::OnImageReady(cv::Mat& mat) +bool CPhoneDevice::onBurstCapture(std::shared_ptr characteristics, + std::vector >& results, + uint32_t ldr, std::vector >& frames) { - if (mCamera == NULL) + time_t takingTime = time(NULL); + if (mPhotoInfo.remedy != 0) { - // int aa = 0; - return false; + if ((takingTime - mPhotoInfo.scheduleTime) > 30) + { + takingTime = mPhotoInfo.scheduleTime + mPhotoInfo.channel * 2; + } + } + mPhotoInfo.photoTime = takingTime; + + vector osds; + osds.swap(mOsds); + PHOTO_INFO photoInfo = mPhotoInfo; + std::string path; + path.swap(mPath); + + std::string 
tmpPath = m_appPath + std::string(APP_DIR_TMP DIR_SEP_STR) + std::to_string(photoInfo.photoId); + + acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT; + ACameraMetadata_const_entry e = { 0 }; + camera_status_t status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_LENS_FACING, &e); + if (status == ACAMERA_OK) + { + facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0]; + } + + int sensorOrientation = 0; + { + ACameraMetadata_const_entry e = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_SENSOR_ORIENTATION, &e); + if (status == ACAMERA_OK) + { + sensorOrientation = (int)e.data.i32[0]; + } } + bool turnOffOtg = (photoInfo.usbCamera != 0); + CPhoneCamera* pCamera = mCamera; + mCamera = NULL; + + std::thread th([=]()mutable + { + cv::Mat rgb; + std::vector > rawFiles; + + media_status_t mstatus; + std::string cameraInfo; + if (photoInfo.usingRawFormat != 0) + { + // + for (int idx = 0; idx < frames.size(); idx++) + { + std::shared_ptr spImage = frames[idx]; + std::shared_ptr result = results[idx]; + + auto it = rawFiles.insert(rawFiles.end(), std::vector()); + + int32_t width; + int32_t height; + AImage_getWidth(spImage.get(), &width); + AImage_getHeight(spImage.get(), &height); + + int planeCount; + media_status_t status = AImage_getNumberOfPlanes(spImage.get(), &planeCount); + AASSERT(status == AMEDIA_OK && planeCount == 1, "Error: getNumberOfPlanes() planeCount = %d", planeCount); + + uint8_t *planeData = NULL; + int planeDataLen = 0; + mstatus = AImage_getPlaneData(spImage.get(), 0, &planeData, &planeDataLen); + DngCreator dngCreator(characteristics.get(), result.get()); + dngCreator.writeInputBuffer(*it, planeData, planeDataLen, width, height, 0); + } + } + else + { + if (results.size() == 1 && frames.size() == 1) + { + std::shared_ptr result = results[0]; + std::shared_ptr frame = frames[0]; + + if (photoInfo.outputDbgInfo != 0) + { + NdkCamera::CAPTURE_RESULT captureResult = 
{ 0 }; + NdkCamera::EnumCameraResult(result.get(), captureResult); + + char extimeunit[4] = { 0 }; + unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000)); + strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs"); + char str[128] = { 0 }; + snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld", + captureResult.autoExposure, captureResult.autoFocus, + extime, extimeunit, captureResult.compensation, captureResult.sensitivity, + // isnan(captureResult.FocusDistance) ? 0 : captureResult.FocusDistance, + (unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState, + captureResult.sceneMode, GpioControl::getLightAdc(), ldr, captureResult.zoomRatio, + (uint32_t)captureResult.duration, captureResult.frameDuration); + cameraInfo = str; + } + + int32_t format; + mstatus = AImage_getFormat(frame.get(), &format); + + if (format == AIMAGE_FORMAT_YUV_420_888) + { + int32_t width; + int32_t height; + mstatus = AImage_getWidth(frame.get(), &width); + mstatus = AImage_getHeight(frame.get(), &height); + + int32_t y_pixelStride = 0; + int32_t u_pixelStride = 0; + int32_t v_pixelStride = 0; + AImage_getPlanePixelStride(frame.get(), 0, &y_pixelStride); + AImage_getPlanePixelStride(frame.get(), 1, &u_pixelStride); + AImage_getPlanePixelStride(frame.get(), 2, &v_pixelStride); + + int32_t y_rowStride = 0; + int32_t u_rowStride = 0; + int32_t v_rowStride = 0; + AImage_getPlaneRowStride(frame.get(), 0, &y_rowStride); + AImage_getPlaneRowStride(frame.get(), 1, &u_rowStride); + AImage_getPlaneRowStride(frame.get(), 2, &v_rowStride); + + uint8_t* y_data = 0; + uint8_t* u_data = 0; + uint8_t* v_data = 0; + int y_len = 0; + int u_len = 0; + int v_len = 0; + AImage_getPlaneData(frame.get(), 0, &y_data, &y_len); + AImage_getPlaneData(frame.get(), 1, 
&u_data, &u_len); + AImage_getPlaneData(frame.get(), 2, &v_data, &v_len); + + if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width) + { + // already nv21 + ConvertYUV21ToMat(y_data, width, height, photoInfo.width, photoInfo.height, sensorOrientation, facing == ACAMERA_LENS_FACING_FRONT, photoInfo.orientation, rgb); + } + else + { + // construct nv21 + uint8_t* nv21 = new uint8_t[width * height + width * height / 2]; + { + // Y + uint8_t* yptr = nv21; + for (int y = 0; y < height; y++) + { + const uint8_t* y_data_ptr = y_data + y_rowStride * y; + for (int x = 0; x < width; x++) + { + yptr[0] = y_data_ptr[0]; + yptr++; + y_data_ptr += y_pixelStride; + } + } + + // UV + uint8_t* uvptr = nv21 + width * height; + for (int y = 0; y < height / 2; y++) + { + const uint8_t* v_data_ptr = v_data + v_rowStride * y; + const uint8_t* u_data_ptr = u_data + u_rowStride * y; + for (int x = 0; x < width / 2; x++) + { + uvptr[0] = v_data_ptr[0]; + uvptr[1] = u_data_ptr[0]; + uvptr += 2; + v_data_ptr += v_pixelStride; + u_data_ptr += u_pixelStride; + } + } + } + + ConvertYUV21ToMat(nv21, width, height, photoInfo.width, photoInfo.height, sensorOrientation, facing == ACAMERA_LENS_FACING_FRONT, photoInfo.orientation, rgb); + + delete[] nv21; + } + + if (photoInfo.outputDbgInfo != 0) + { + + } + } + } + } + + frames.clear(); + std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, photoInfo.photoId, turnOffOtg); + m_threadClose.swap(closeThread); + if (closeThread.joinable()) + { + closeThread.detach(); + } + +#ifdef OUTPUT_CAMERA_DBG_INFO +#if 0 + bool shouldRetry = false; + if (ldr != ~0) + { + if (ldr < MIN_LIGHT_Y) + { + if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1)) + { + shouldRetry = true; + char presetBuf[16] = {0}; + snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries); + // replaceAll(fullPath, 
".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg"); + replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_")); + XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it", + (uint32_t) captureResult.avgY); + + // photoInfo.usingRawFormat = 1; + } + } + else if (ldr > MAX_LIGHT_Y) + { + if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1)) + { + shouldRetry = true; + char presetBuf[16] = {0}; + snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries); + // replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg"); + replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_")); + XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it", + (uint32_t) captureResult.avgY); + } + + photoInfo.compensation = -2 * ((int16_t) ((uint16_t) captureResult.avgY)); + } + } +#endif // 0 +#endif // OUTPUT_CAMERA_DBG_INFO + + // Notify to take next photo + TakePhotoCb(1, photoInfo, "", takingTime); + + if (photoInfo.usingRawFormat != 0) + { + XYLOG(XYLOG_SEVERITY_ERROR, "Start HDR CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId); + hdrplus::hdrplus_pipeline pipeline; + pipeline.run_pipeline(rawFiles, 0, rgb); + XYLOG(XYLOG_SEVERITY_ERROR, "Finish HDR CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId); + +#ifdef NDEBUG + for (auto it = rawFilePaths.cbegin(); it != rawFilePaths.cend(); ++it) + { + std::remove((*it).c_str()); + } +#endif + { + cv::Mat tempPic = convert16bit2_8bit_(rgb); + rgb = tempPic; + } + + if (photoInfo.orientation > 0) + { + if (photoInfo.orientation == 1) + { + if (facing == ACAMERA_LENS_FACING_FRONT) + { + cv::flip(rgb, rgb, 1); + } + } else if (photoInfo.orientation == 2) + { + cv::Mat tempPic; + cv::transpose(rgb, tempPic); + cv::flip(tempPic, rgb, 1); + } + else if (photoInfo.orientation == 3) + { + if (facing == ACAMERA_LENS_FACING_FRONT) + { + flip(rgb, 
rgb, 0); + } + else + { + cv::flip(rgb, rgb, -1); + } + } + else if (photoInfo.orientation == 4) + { + cv::Mat tempPic; + cv::transpose(rgb, tempPic); + cv::flip(tempPic, rgb, 0); + } + + XYLOG(XYLOG_SEVERITY_ERROR, "Finish rotation CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId); + } + cv::cvtColor(rgb, rgb, cv::COLOR_RGB2BGR); + } + + bool res = PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb); + if (res) + { + // TakePhotoCb(2, photoInfo, path, takingTime); + } + }); + + th.detach(); + + return true; +} + +bool CPhoneDevice::OnImageReady(cv::Mat& mat) +{ time_t takingTime = time(NULL); if (mPhotoInfo.remedy != 0) { @@ -1429,8 +1895,12 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat) "hair drier", "toothbrush" }; #endif + cv::Scalar borderColor(m_pRecognizationCfg->borderColor & 0xFF, (m_pRecognizationCfg->borderColor & 0xFF00) >> 8, (m_pRecognizationCfg->borderColor & 0xFF0000) >> 16); cv::Scalar textColor(m_pRecognizationCfg->textColor & 0xFF, (m_pRecognizationCfg->textColor & 0xFF00) >> 8, (m_pRecognizationCfg->textColor & 0xFF0000) >> 16); + float minSizeW = m_pRecognizationCfg->minSize > 0 ? (mPhotoInfo.width * m_pRecognizationCfg->minSize / 100) : 0; + float minSizeH = m_pRecognizationCfg->minSize > 0 ? 
(mPhotoInfo.height * m_pRecognizationCfg->minSize / 100) : 0; + for (std::vector::const_iterator it = objs.cbegin(); it != objs.cend();) { if (it->label >= m_pRecognizationCfg->items.size()) @@ -1446,6 +1916,15 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat) continue; } + if (m_pRecognizationCfg->minSize > 0) + { + if (it->w < minSizeW || it->h < minSizeH) + { + it = objs.erase(it); + continue; + } + } + if ((mPhotoInfo.recognization & 0x2) != 0) { cv::Rect rc(it->x, it->y, it->w, it->h); @@ -1491,102 +1970,43 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat) XYLOG(XYLOG_SEVERITY_WARNING, "Channel AI Disabled"); } -#ifdef OUTPUT_CAMERA_DBG_INFO - - cv::Scalar scalarRed(0, 0, 255); // red - - NdkCamera::CAPTURE_RESULT captureResult = mCamera->getCaptureResult(); +// #ifdef OUTPUT_CAMERA_DBG_INFO -#if 0 - if (captureResult.avgY < 25 && mPhotoInfo.autoExposure != 0) + if (mCamera != NULL) { - // Take another photo - CPhoneDevice* pThis = this; - std::string path = mPath; - IDevice::PHOTO_INFO photoInfo = mPhotoInfo; - std::vector osds = mOsds; - photoInfo.photoId += 1; - photoInfo.autoExposure = 0; - if (captureResult.avgY == 0) - { - photoInfo.exposureTime = 600000000; - photoInfo.sensitivity = 2500; - } - else if (captureResult.avgY <= 6) - { - photoInfo.exposureTime = captureResult.exposureTime * 150 / captureResult.avgY; - photoInfo.sensitivity = photoInfo.sensitivity * 80 / captureResult.avgY; - if (photoInfo.sensitivity < captureResult.sensitivity) - { - photoInfo.sensitivity = captureResult.sensitivity; - } - else if (photoInfo.sensitivity > 3000) - { - photoInfo.sensitivity = 3000; - } - } - else + if (mPhotoInfo.outputDbgInfo != 0) { - photoInfo.exposureTime = captureResult.exposureTime * 120 / captureResult.avgY; - photoInfo.sensitivity = photoInfo.sensitivity * 60 / captureResult.avgY; - if (photoInfo.sensitivity < captureResult.sensitivity) + cv::Scalar scalarRed(0, 0, 255); // red + + char extimeunit[4] = { 0 }; + char str[128] = { 0 }; + + int fs = 
fontSize * 2 / 3; + textSize = ft2->getTextSize(str, fs, -1, &baseline); + cv::Point lt(0, mat.rows - fs - 20 * ratio); + cv::Point lt2(0, lt.y - 2 * ratio); + cv::Point rb(0 + textSize.width + 2 * ratio, lt2.y + textSize.height + 8 * ratio); + + if (rb.x > (int)width - 1) { - photoInfo.sensitivity = captureResult.sensitivity; + rb.x = (int)width - 1; } - else if (photoInfo.sensitivity > 3000) + if (rb.y > (int)height - 1) { - photoInfo.sensitivity = 3000; + rb.y = (int)height - 1; } - } - - std::thread t([=] - { - std::this_thread::sleep_for(std::chrono::milliseconds(5000)); - pThis->TakePhoto(photoInfo, osds, path); - }); - - t.detach(); - } -#endif // 0 - - char extimeunit[4] = { 0 }; - unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000)); - strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs"); - char str[128] = { 0 }; - snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld", - captureResult.autoExposure, captureResult.autoFocus, - extime, extimeunit, captureResult.compensation, captureResult.sensitivity, - // isnan(captureResult.FocusDistance) ? 
0 : captureResult.FocusDistance, - (unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState, - captureResult.sceneMode, GpioControl::getLightAdc(), (unsigned int)captureResult.avgY, captureResult.zoomRatio, - (uint32_t)captureResult.duration, captureResult.frameDuration); - // cv::putText(mat, str, cv::Point(0, mat.rows - 20), cv::FONT_HERSHEY_COMPLEX, fontScale, scalarWhite, thickness1, cv::LINE_AA); + cv::Mat roi = mat(cv::Rect(lt2, rb)); + cv::Mat clrMat(roi.size(), CV_8UC3, scalarWhite); + double alpha = 0.5; + cv::addWeighted(clrMat, alpha, roi, 1.0 - alpha, 0.0, roi); - int fs = fontSize * 2 / 3; - textSize = ft2->getTextSize(str, fs, -1, &baseline); - cv::Point lt(0, mat.rows - fs - 20 * ratio); - cv::Point lt2(0, lt.y - 2 * ratio); - cv::Point rb(0 + textSize.width, lt2.y + textSize.height + 8 * ratio); - - if (rb.x > (int)width - 1) - { - rb.x = (int)width - 1; - } - if (rb.y > (int)height - 1) - { - rb.y = (int)height - 1; + // cv::rectangle(mat, lt2, rb,cv::Scalar(255, 255, 255), -1); + ft2->putText(mat, str, lt, fs, scalarRed, -1, cv::LINE_AA, false); + // DrawOutlineText(ft2, mat, str, cv::Point(0, mat.rows - fs - 20 * ratio), fs, scalarWhite, 1); + } } - cv::Mat roi = mat(cv::Rect(lt2, rb)); - cv::Mat clrMat(roi.size(), CV_8UC3, scalarWhite); - double alpha = 0.2; - cv::addWeighted(clrMat, alpha, roi, 1.0 - alpha, 0.0, roi); - - // cv::rectangle(mat, lt2, rb,cv::Scalar(255, 255, 255), -1); - ft2->putText(mat, str, lt, fs, scalarRed, -1, cv::LINE_AA, false); - // DrawOutlineText(ft2, mat, str, cv::Point(0, mat.rows - fs - 20 * ratio), fs, scalarWhite, 1); - -#endif // OUTPUT_CAMERA_DBG_INFO +// #endif // OUTPUT_CAMERA_DBG_INFO for (vector::const_iterator it = mOsds.cbegin(); it != mOsds.cend(); ++it) { @@ -1631,26 +2051,53 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat) DrawOutlineText(ft2, mat, it->text, pt, fontSize, scalarWhite, thickness); } - vector params; + std::vector params; 
params.push_back(cv::IMWRITE_JPEG_QUALITY); - params.push_back(mPhotoInfo.quality); + params.push_back((int)((uint32_t)mPhotoInfo.quality)); bool res = false; std::string fullPath = endsWith(mPath, ".jpg") ? mPath : (mPath + CTerminal::BuildPhotoFileName(mPhotoInfo)); #ifdef OUTPUT_CAMERA_DBG_INFO + bool shouldRetry = false; - if (captureResult.avgY > 245 || captureResult.avgY < 10) - { - if (mPhotoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1)) +#if 0 + if (mCamera != NULL) { + NdkCamera::CAPTURE_RESULT captureResult = mCamera->getCaptureResult(); + + if (captureResult.avgY < MIN_LIGHT_Y) + { + if (mPhotoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1)) + { + shouldRetry = true; + char presetBuf[16] = {0}; + snprintf(presetBuf, sizeof(presetBuf), "%02X", mPhotoInfo.retries); + // replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(mPhotoInfo.retries) + ".jpg"); + replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_")); + XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it", + (uint32_t) captureResult.avgY); + + // mPhotoInfo.usingRawFormat = 1; + } + } + else if (captureResult.avgY > MAX_LIGHT_Y) { - shouldRetry = true; - replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(mPhotoInfo.retries) + ".jpg"); - replaceAll(fullPath, "/photos/", "/sentPhotos/"); + if (mPhotoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1)) + { + shouldRetry = true; + char presetBuf[16] = {0}; + snprintf(presetBuf, sizeof(presetBuf), "%02X", mPhotoInfo.retries); + // replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(mPhotoInfo.retries) + ".jpg"); + replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_")); + XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it", + (uint32_t) captureResult.avgY); + } - XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it", (uint32_t)captureResult.avgY); + mPhotoInfo.compensation = -2 * ((int16_t) 
((uint16_t) captureResult.avgY)); } } +#endif + #endif // OUTPUT_CAMERA_DBG_INFO if (!std::filesystem::exists(std::filesystem::path(fullPath))) @@ -1667,52 +2114,318 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat) #ifdef OUTPUT_CAMERA_DBG_INFO if (shouldRetry) { - TakePhotoCb(false, mPhotoInfo, fullPath, takingTime, objs); + TakePhotoCb(0, mPhotoInfo, fullPath, takingTime, objs); } else { - TakePhotoCb(res, mPhotoInfo, fullPath, takingTime, objs); + TakePhotoCb(res ? 3 : 0, mPhotoInfo, fullPath, takingTime, objs); } #else - TakePhotoCb(res, mPhotoInfo, fullPath, takingTime, objs); + TakePhotoCb(res ? 3 : 0, mPhotoInfo, fullPath, takingTime, objs); #endif } else { ALOGI("Photo file exists: %s", mPath.c_str()); } - CPhoneCamera* pCamera = mCamera; - mCamera = NULL; - - bool turnOffOtg = (mPhotoInfo.usbCamera != 0); - std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg); - m_threadClose.swap(closeThread); - if (closeThread.joinable()) - { - closeThread.detach(); - } return res; } -bool CPhoneDevice::OnVideoReady(bool result, const char* path, unsigned int photoId) +bool CPhoneDevice::PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector& osds, const std::string& path, const std::string& cameraInfo, cv::Mat& mat) { - mPhotoInfo.photoTime = time(NULL); + int baseline = 0; + cv::Size textSize; + double height = mat.rows; + double width = mat.cols; + // double ratio = std::min(height / 1024, width / 1920); + double ratio = height / 1024.0; + int thickness = round(1.4 * ratio); + if (thickness < 1) thickness = 1; + else if (thickness > 5) thickness = 5; + cv::Scalar scalarWhite(255, 255, 255); // white + int fontSize = (int)(28.0 * ratio); + cv::Point pt; - CPhoneCamera* pCamera = NULL; - std::vector objs; - std::string fullPath = mPath + CTerminal::BuildPhotoFileName(mPhotoInfo); - if (result) + std::string fontPath; + if (existsFile("/system/fonts/NotoSansCJK-Regular.ttc")) { - std::rename(path, fullPath.c_str()); + 
fontPath = "/system/fonts/NotoSansCJK-Regular.ttc"; } - TakePhotoCb(result, mPhotoInfo, fullPath, time(NULL), objs); - - bool turnOffOtg = (mPhotoInfo.usbCamera != 0); - std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg); - m_threadClose.swap(closeThread); - - return result; + else if (existsFile("/system/fonts/NotoSerifCJK-Regular.ttc")) + { + fontPath = "/system/fonts/NotoSerifCJK-Regular.ttc"; + } + else + { + fontPath = m_appPath+ "fonts/Noto.otf"; + } + cv::Ptr ft2; + ft2 = cv::ft::createFreeType2(); + ft2->loadFontData(fontPath.c_str(), 0); + // cv::Rect rc(0, 0, mat.cols, mat.rows); + // cv::rectangle (mat, rc, cv::Scalar(255, 255, 255), cv::FILLED); + std::vector objs; + + if ((m_pRecognizationCfg != NULL) && (m_pRecognizationCfg->enabled != 0) && (photoInfo.recognization != 0)) + { + XYLOG(XYLOG_SEVERITY_INFO, "Channel AI Enabled"); + + // visualize(ncnnPath.c_str(), in); +#ifdef _DEBUG + double startTime = ncnn::get_current_time(); +#endif // _DEBUG + + bool detected = YoloV5NcnnDetect(mat, true, m_pRecognizationCfg->blobName8, m_pRecognizationCfg->blobName16, m_pRecognizationCfg->blobName32, objs); +#ifdef _DEBUG + double elasped = ncnn::get_current_time() - startTime; + // __android_log_print(ANDROID_LOG_DEBUG, "YoloV5Ncnn", "%.2fms detect", elasped); +#endif // _DEBUG +#ifdef _DEBUG + ALOGI( "NCNN recognization: %.2fms res=%d", elasped, ((detected && !objs.empty()) ? 1 : 0)); +#endif + if (detected && !objs.empty()) + { + cv::Scalar borderColor(m_pRecognizationCfg->borderColor & 0xFF, (m_pRecognizationCfg->borderColor & 0xFF00) >> 8, (m_pRecognizationCfg->borderColor & 0xFF0000) >> 16); + cv::Scalar textColor(m_pRecognizationCfg->textColor & 0xFF, (m_pRecognizationCfg->textColor & 0xFF00) >> 8, (m_pRecognizationCfg->textColor & 0xFF0000) >> 16); + float minSizeW = m_pRecognizationCfg->minSize > 0 ? 
(photoInfo.width * m_pRecognizationCfg->minSize / 100) : 0; + float minSizeH = m_pRecognizationCfg->minSize > 0 ? (photoInfo.height * m_pRecognizationCfg->minSize / 100) : 0; + + for (std::vector::const_iterator it = objs.cbegin(); it != objs.cend();) + { + if (it->label >= m_pRecognizationCfg->items.size()) + { + it = objs.erase(it); + continue; + } + + const IDevice::CFG_RECOGNIZATION::ITEM& item = m_pRecognizationCfg->items[it->label]; + if (item.enabled == 0 || it->prob < item.prob) + { + it = objs.erase(it); + continue; + } + + if (m_pRecognizationCfg->minSize > 0) + { + if (it->w < minSizeW || it->h < minSizeH) + { + it = objs.erase(it); + continue; + } + } + + if ((photoInfo.recognization & 0x2) != 0) + { + cv::Rect rc(it->x, it->y, it->w, it->h); + cv::rectangle(mat, rc, borderColor, m_pRecognizationCfg->thickness); + textSize = ft2->getTextSize(item.name, fontSize, thickness, &baseline); + textSize.height += baseline; + if (it->y > textSize.height) + { + pt.y = it->y - textSize.height - 4 - m_pRecognizationCfg->thickness; + } + else if (mat.rows - it->y - it->h > textSize.height) + { + pt.y = it->y + it->h + 4 + m_pRecognizationCfg->thickness; + } + else + { + // Inner + pt.y = it->y + 4 + m_pRecognizationCfg->thickness; + } + if (mat.cols - it->x > textSize.width) + { + pt.x = it->x; + } + else + { + pt.x = it->x + it->w - textSize.width; + } + +#ifdef OUTPUT_CAMERA_DBG_INFO + char buf[128]; + snprintf(buf, sizeof(buf), "AI: %d=%s (%f,%f)-(%f,%f) Text:(%d,%d)-(%d,%d)", + it->label, item.name.c_str(), it->x, it->y, it->w, it->h, pt.x, pt.y, textSize.width, textSize.height); + XYLOG(XYLOG_SEVERITY_DEBUG, buf); +#endif + ft2->putText(mat, item.name + std::to_string((int)(it->prob * 100.0)) + "%", pt, fontSize, textColor, thickness, cv::LINE_AA, false, true); + } + ++it; + } + } + } + else + { + XYLOG(XYLOG_SEVERITY_WARNING, "Channel AI Disabled"); + } + +// #ifdef OUTPUT_CAMERA_DBG_INFO + + if (!cameraInfo.empty()) + { + // NdkCamera::CAPTURE_RESULT 
captureResult = mCamera->getCaptureResult(); + + if (photoInfo.outputDbgInfo != 0) + { + cv::Scalar scalarRed(0, 0, 255); // red + + int fs = fontSize * 2 / 3; + textSize = ft2->getTextSize(cameraInfo, fs, -1, &baseline); + cv::Point lt(0, mat.rows - fs - 20 * ratio); + cv::Point lt2(0, lt.y - 2 * ratio); + cv::Point rb(0 + textSize.width + 2 * ratio, lt2.y + textSize.height + 8 * ratio); + + if (rb.x > (int)width - 1) + { + rb.x = (int)width - 1; + } + if (rb.y > (int)height - 1) + { + rb.y = (int)height - 1; + } + cv::Mat roi = mat(cv::Rect(lt2, rb)); + cv::Mat clrMat(roi.size(), CV_8UC3, scalarWhite); + double alpha = 0.5; + cv::addWeighted(clrMat, alpha, roi, 1.0 - alpha, 0.0, roi); + + // cv::rectangle(mat, lt2, rb,cv::Scalar(255, 255, 255), -1); + ft2->putText(mat, cameraInfo, lt, fs, scalarRed, -1, cv::LINE_AA, false); + + // DrawOutlineText(ft2, mat, str, cv::Point(0, mat.rows - fs - 20 * ratio), fs, scalarWhite, 1); + } + } +// #endif // OUTPUT_CAMERA_DBG_INFO + + for (vector::const_iterator it = osds.cbegin(); it != osds.cend(); ++it) + { + if (it->text.empty()) + { + continue; + } + +#ifdef _DEBUG + if (it->alignment == OSD_ALIGNMENT_BOTTOM_RIGHT) + { + int aa = 0; + } +#endif + + textSize = ft2->getTextSize(it->text, fontSize, thickness, &baseline); + XYLOG(XYLOG_SEVERITY_DEBUG, "%s font Size=%d height: %d baseline=%d", it->text.c_str(), fontSize, textSize.height, baseline); + + if (it->alignment == OSD_ALIGNMENT_TOP_LEFT) + { + pt.x = it->x * ratio; + pt.y = it->y * ratio; + } + else if (it->alignment == OSD_ALIGNMENT_TOP_RIGHT) + { + pt.x = width - textSize.width - it->x * ratio; + pt.y= it->y * ratio; + } + else if (it->alignment == OSD_ALIGNMENT_BOTTOM_RIGHT) + { + pt.x = width - textSize.width - it->x * ratio; + pt.y = height - it->y * ratio - textSize.height - baseline; + } + else if (it->alignment == OSD_ALIGNMENT_BOTTOM_LEFT) + { + pt.x = it->x * ratio; + pt.y = height - it->y * ratio - textSize.height - baseline; + } + + // cv::Rect rc(pt.x, 
pt.y, textSize.width, textSize.height); + // cv::rectangle(mat, rc, cv::Scalar(0,255,255), 2); + DrawOutlineText(ft2, mat, it->text, pt, fontSize, scalarWhite, thickness); + } + + std::vector params; + params.push_back(cv::IMWRITE_JPEG_QUALITY); + params.push_back((int)((uint32_t)photoInfo.quality)); + + bool res = false; + std::string fullPath = endsWith(path, ".jpg") ? path : (path + CTerminal::BuildPhotoFileName(photoInfo)); + + if (!std::filesystem::exists(std::filesystem::path(fullPath))) + { +#ifdef _DEBUG + char log[256] = { 0 }; + strcpy(log, fullPath.c_str()); +#endif + bool res = cv::imwrite(fullPath.c_str(), mat, params); + if (!res) + { + XYLOG(XYLOG_SEVERITY_ERROR, "Failed to Write File: %s", fullPath.c_str() + m_appPath.size()); + } + else + { + XYLOG(XYLOG_SEVERITY_INFO, "Succeeded to Write File: %s", fullPath.c_str() + m_appPath.size()); + } + TakePhotoCb(res ? 2 : 0, photoInfo, fullPath, photoInfo.photoTime, objs); + } + else + { + XYLOG(XYLOG_SEVERITY_INFO, "Photo File Exists: %s", fullPath.c_str() + m_appPath.size()); + } + + return res; +} + +bool CPhoneDevice::OnCaptureReady(bool photoOrVideo, bool result, cv::Mat& mat, unsigned int photoId) +{ + XYLOG(XYLOG_SEVERITY_INFO, "RAW Capture finished: %u RES=%d", photoId, (result ? 
1 : 0)); + if (photoOrVideo) + { + if (result) + { + OnImageReady(mat); + } + else + { + std::vector objs; + TakePhotoCb(0, mPhotoInfo, "", time(NULL), objs); + + CPhoneCamera* pCamera = mCamera; + mCamera = NULL; + + bool turnOffOtg = (mPhotoInfo.usbCamera != 0); + std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg); + m_threadClose.swap(closeThread); + if (closeThread.joinable()) + { + closeThread.detach(); + } + } + } + + return true; +} + +bool CPhoneDevice::OnVideoReady(bool photoOrVideo, bool result, const char* path, unsigned int photoId) +{ + if (photoOrVideo) + { + } + else + { + mPhotoInfo.photoTime = time(NULL); + CPhoneCamera* pCamera = NULL; + + std::vector objs; + std::string fullPath = mPath + CTerminal::BuildPhotoFileName(mPhotoInfo); + if (result) + { + std::rename(path, fullPath.c_str()); + } + TakePhotoCb(result ? 3 : 0, mPhotoInfo, fullPath, time(NULL), objs); + + bool turnOffOtg = (mPhotoInfo.usbCamera != 0); + std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg); + m_threadClose.swap(closeThread); + } + + return result; } void CPhoneDevice::onError(const std::string& msg) @@ -1727,7 +2440,7 @@ void CPhoneDevice::onError(const std::string& msg) CPhoneCamera* pCamera = mCamera; mCamera = NULL; - TakePhotoCb(false, mPhotoInfo, mPath, 0); + TakePhotoCb(0, mPhotoInfo, mPath, 0); bool turnOffOtg = (mPhotoInfo.usbCamera != 0); std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg); @@ -1746,7 +2459,7 @@ void CPhoneDevice::onDisconnected(ACameraDevice* device) CPhoneCamera* pCamera = mCamera; mCamera = NULL; - TakePhotoCb(false, mPhotoInfo, mPath, 0); + TakePhotoCb(0, mPhotoInfo, mPath, 0); bool turnOffOtg = (mPhotoInfo.usbCamera != 0); std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg); @@ -1775,24 +2488,28 @@ void CPhoneDevice::UpdatePosition(double lon, double 
lat, double radius, time_t { if (m_listener != NULL) { + if (shouldConvertPosition(lat, lon)) + { + transformPosition(lat, lon); + } return m_listener->OnPositionDataArrived(lon, lat, radius, ts); } } void CPhoneDevice::TurnOnCameraPower(JNIEnv* env) { - m_devLocker.lock(); + m_powerLocker.lock(); if (mCameraPowerCount == 0) { GpioControl::setCam3V3Enable(true); } mCameraPowerCount++; - m_devLocker.unlock(); + m_powerLocker.unlock(); } void CPhoneDevice::TurnOffCameraPower(JNIEnv* env) { - m_devLocker.lock(); + m_powerLocker.lock(); if (mCameraPowerCount > 0) { mCameraPowerCount--; @@ -1801,24 +2518,24 @@ void CPhoneDevice::TurnOffCameraPower(JNIEnv* env) GpioControl::setCam3V3Enable(false); } } - m_devLocker.unlock(); + m_powerLocker.unlock(); } void CPhoneDevice::TurnOnOtg(JNIEnv* env) { - m_devLocker.lock(); + m_powerLocker.lock(); if (mOtgCount == 0) { ALOGD("setOtgState 1"); GpioControl::setOtgState(true); } mOtgCount++; - m_devLocker.unlock(); + m_powerLocker.unlock(); } void CPhoneDevice::TurnOffOtg(JNIEnv* env) { - m_devLocker.lock(); + m_powerLocker.lock(); if (mOtgCount > 0) { mOtgCount--; @@ -1828,7 +2545,7 @@ void CPhoneDevice::TurnOffOtg(JNIEnv* env) GpioControl::setOtgState(false); } } - m_devLocker.unlock(); + m_powerLocker.unlock(); } void CPhoneDevice::UpdateSignalLevel(int signalLevel) @@ -1840,4 +2557,216 @@ void CPhoneDevice::UpdateSignalLevel(int signalLevel) void CPhoneDevice::UpdateSimcard(const std::string& simcard) { m_simcard = simcard; -} \ No newline at end of file +} + +bool CPhoneDevice::ProcessRawCapture(bool result, int numberOfCaptures, const std::string& pathsJoinedByTab, bool frontCamera, int rotation, long photoId) +{ + std::vector paths = split(pathsJoinedByTab, "\t"); + + if (paths.empty()) + { + cv::Mat mat; + OnCaptureReady(true, false, mat, (unsigned long)photoId); + return false; + } + + XYLOG(XYLOG_SEVERITY_ERROR, "Start Processing Raw Capture CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId); + 
+ hdrplus::hdrplus_pipeline pipeline; + cv::Mat mat; + pipeline.run_pipeline(paths, 0, mat); + XYLOG(XYLOG_SEVERITY_ERROR, "Finish HDR CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId); + + mat = convert16bit2_8bit_(mat.clone()); + + if (rotation >= 0) + { + if (rotation == 90) + { + cv::Mat tempPic; + cv::transpose(mat, tempPic); + cv::flip(tempPic, mat, 1); + } + else if (rotation == 180) + { + if (frontCamera) + { + flip(mat, mat, 0); + + } + else + { + cv::flip(mat, mat, -1); + } + } + else if (rotation == 270) + { + cv::Mat tempPic; + cv::transpose(mat, tempPic); + cv::flip(tempPic, mat, 0); + } + + XYLOG(XYLOG_SEVERITY_ERROR, "Finish rotation CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId); + } + cv::cvtColor(mat, mat, cv::COLOR_RGB2BGR); + + XYLOG(XYLOG_SEVERITY_ERROR, "Finish Processing Raw Capture CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId); + +#ifdef _DEBUG + // cv::cvtColor(outputImg, outputImg, cv::COLOR_RGB2BGR); + cv::imwrite("/sdcard/com.xypower.mpapp/tmp/final.jpg", mat); +#endif + + OnCaptureReady(true, result != JNI_FALSE, mat, (unsigned long)photoId); + return true; +} + +int CPhoneDevice::GetIceData(IDevice::ICE_INFO *iceInfo, IDevice::ICE_TAIL *iceTail, SENSOR_PARAM *sensorParam) +{ + Collect_sensor_data(); //15s + Data_DEF airt; + //++等值覆冰厚度, 综合悬挂载荷, 不均衡张力差 置0 + iceInfo->equal_icethickness = 0; + iceInfo->tension = 0; + iceInfo->tension_difference = 0; + + int pullno = 0; + int angleno = 0; + for(int num = 0; num < MAX_SERIAL_DEV_NUM; num++) + { + if(sensorParam[num].SensorsType == RALLY_PROTOCOL) + { + GetPullValue(num, &airt); + iceInfo->t_sensor_data[pullno].original_tension = airt.EuValue; + pullno++; + } else if(sensorParam[num].SensorsType == SLANT_PROTOCOL) + { + GetAngleValue(num, &airt, 0); + iceInfo->t_sensor_data[angleno].deflection_angle = airt.EuValue; + GetAngleValue(num, &airt, 1); + iceInfo->t_sensor_data[angleno].windage_yaw_angle = 
airt.EuValue; + angleno++; + } + } + + GetWindSpeedData(&airt); + iceTail->instantaneous_windspeed = airt.EuValue; + GetWindDirectionData(&airt); + iceTail->instantaneous_winddirection = airt.EuValue;//需求无符号整数给出浮点数 + GetAirTempData(&airt); + iceTail->air_temperature = airt.EuValue; + GetHumidityData(&airt); + iceTail->humidity = airt.EuValue;//需求无符号整数给出浮点数 + + return true; +} + + + +int CPhoneDevice::GetWData(IDevice::WEATHER_INFO *weatherInfo) +{ + Collect_sensor_data(); //15s + + Data_DEF airt; + GetWeatherData(&airt, 0); + weatherInfo->air_temperature = airt.EuValue; + + if (airt.AiState == -1) return false; + + GetWeatherData(&airt, 1); + weatherInfo->humidity = airt.EuValue; + GetWeatherData(&airt, 2); + weatherInfo->avg_windspeed_10min = airt.EuValue; + weatherInfo->extreme_windspeed = airt.EuValue; + weatherInfo->standard_windspeed = airt.EuValue; + GetWeatherData(&airt, 3); + weatherInfo->avg_winddirection_10min = airt.EuValue; + GetWeatherData(&airt, 4); + weatherInfo->precipitation = airt.EuValue; + GetWeatherData(&airt, 5); + weatherInfo->air_pressure = airt.EuValue; + GetWeatherData(&airt, 6); + weatherInfo->radiation_intensity = airt.EuValue; + return true; + +} + +#ifdef USING_N938 +bool CPhoneDevice::OpenSensors() +{ + GpioControl::setInt(CMD_SET_CAM_3V3_EN_STATE, true ? 1 : 0); + GpioControl::setInt(CMD_SET_485_EN_STATE, true ? 
1 : 0); + int igpio; + GpioControl::setInt(CMD_SET_WTH_POWER, 1); + GpioControl::setInt(CMD_SET_PULL_POWER, 1); + GpioControl::setInt(CMD_SET_ANGLE_POWER, 1); + GpioControl::setInt(CMD_SET_OTHER_POWER, 1); + GpioControl::setInt(CMD_SET_PIC1_POWER, 1); + + igpio = GpioControl::getInt(CMD_SET_WTH_POWER); + igpio = GpioControl::getInt(CMD_SET_PULL_POWER); + igpio = GpioControl::getInt(CMD_SET_ANGLE_POWER); + igpio = GpioControl::getInt(CMD_SET_OTHER_POWER); + igpio = GpioControl::getInt(CMD_SET_PIC1_POWER); + + GpioControl::setInt(CMD_SET_SPI_POWER, 1); + GpioControl::setInt(CMD_SET_485_en0, 1); + GpioControl::setInt(CMD_SET_485_en1, 1); + GpioControl::setInt(CMD_SET_485_en2, 1); + GpioControl::setInt(CMD_SET_485_en3, 1); + GpioControl::setInt(CMD_SET_485_en4, 1); + + igpio = GpioControl::getInt(CMD_SET_SPI_POWER); + igpio = GpioControl::getInt(CMD_SET_485_en0); + igpio = GpioControl::getInt(CMD_SET_485_en1); + igpio = GpioControl::getInt(CMD_SET_485_en2); + igpio = GpioControl::getInt(CMD_SET_485_en3); + igpio = GpioControl::getInt(CMD_SET_485_en4); + return 0; + +} +bool CPhoneDevice::CloseSensors() +{ + GpioControl::setInt(CMD_SET_12V_EN_STATE, false ? 1 : 0); + GpioControl::setInt(CMD_SET_CAM_3V3_EN_STATE, false ? 1 : 0); + GpioControl::setInt(CMD_SET_485_EN_STATE, false ? 
1 : 0); + int igpio; + GpioControl::setInt(CMD_SET_WTH_POWER, 0); + GpioControl::setInt(CMD_SET_PULL_POWER, 0); + GpioControl::setInt(CMD_SET_ANGLE_POWER, 0); + GpioControl::setInt(CMD_SET_OTHER_POWER, 0); + GpioControl::setInt(CMD_SET_PIC1_POWER, 0); + + igpio = GpioControl::getInt(CMD_SET_WTH_POWER); + igpio = GpioControl::getInt(CMD_SET_PULL_POWER); + igpio = GpioControl::getInt(CMD_SET_ANGLE_POWER); + igpio = GpioControl::getInt(CMD_SET_OTHER_POWER); + igpio = GpioControl::getInt(CMD_SET_PIC1_POWER); + + GpioControl::setInt(CMD_SET_SPI_POWER, 0); + GpioControl::setInt(CMD_SET_485_en0, 0); + GpioControl::setInt(CMD_SET_485_en1, 0); + GpioControl::setInt(CMD_SET_485_en2, 0); + GpioControl::setInt(CMD_SET_485_en3, 0); + GpioControl::setInt(CMD_SET_485_en4, 0); + + //sleep(3); + igpio = GpioControl::getInt(CMD_SET_SPI_POWER); + igpio = GpioControl::getInt(CMD_SET_485_en0); + igpio = GpioControl::getInt(CMD_SET_485_en1); + igpio = GpioControl::getInt(CMD_SET_485_en2); + igpio = GpioControl::getInt(CMD_SET_485_en3); + igpio = GpioControl::getInt(CMD_SET_485_en4); + return 0; +} +#else +bool CPhoneDevice::OpenSensors() +{ + return false; +} + +bool CPhoneDevice::CloseSensors() +{ + return false; +} +#endif diff --git a/app/src/main/cpp/PhoneDevice.h b/app/src/main/cpp/PhoneDevice.h index 1676f7a2..bfa6e16a 100644 --- a/app/src/main/cpp/PhoneDevice.h +++ b/app/src/main/cpp/PhoneDevice.h @@ -153,18 +153,33 @@ class CPhoneDevice : public IDevice { public: - class CPhoneCamera : public NdkCamera { + class CPhoneCamera : public NdkCamera + { public: CPhoneCamera(CPhoneDevice* dev, int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params); virtual ~CPhoneCamera(); virtual bool on_image(cv::Mat& rgb); virtual void on_error(const std::string& msg); virtual void onDisconnected(ACameraDevice* device); + virtual bool onBurstCapture(std::shared_ptr characteristics, std::vector >& results, uint32_t ldr, std::vector >& frames); protected: CPhoneDevice* m_dev; }; + 
class CJpegCamera : public CPhoneCamera + { + public: + CJpegCamera(CPhoneDevice* dev, int32_t width, int32_t height, const std::string& path, const NdkCamera::CAMERA_PARAMS& params); + + virtual void onImageAvailable(AImageReader* reader); + virtual int32_t getOutputFormat() const; + virtual bool onBurstCapture(std::shared_ptr characteristics, std::vector >& results, uint32_t ldr, std::vector >& frames); + + protected: + std::string m_path; + }; + struct TIMER_CONTEXT { CPhoneDevice* device; @@ -198,10 +213,18 @@ public: virtual unsigned long RequestWakelock(unsigned long timeout); virtual bool ReleaseWakelock(unsigned long wakelock); + virtual int GetWData(WEATHER_INFO *weatherInfo); + virtual int GetIceData(ICE_INFO *iceInfo, ICE_TAIL *icetail, SENSOR_PARAM *sensorParam); + virtual bool OpenSensors(); + virtual bool CloseSensors(); + bool GetNextScheduleItem(uint32_t tsBasedZero, uint32_t scheduleTime, vector& items); void UpdatePosition(double lon, double lat, double radius, time_t ts); - bool OnVideoReady(bool result, const char* path, unsigned int photoId); + bool OnVideoReady(bool photoOrVideo, bool result, const char* path, unsigned int photoId); + bool OnCaptureReady(bool photoOrVideo, bool result, cv::Mat& mat, unsigned int photoId); + bool ProcessRawCapture(bool result, int numberOfCaptures, const std::string& pathsJoinedByTab, bool frontCamera, int rotation, long photoId); + void UpdateSignalLevel(int signalLevel); void UpdateTfCardPath(const std::string& tfCardPath) { @@ -213,11 +236,11 @@ public: } void UpdateSimcard(const std::string& simcard); - void TurnOnCameraPower(JNIEnv* env); - void TurnOffCameraPower(JNIEnv* env); + static void TurnOnCameraPower(JNIEnv* env); + static void TurnOffCameraPower(JNIEnv* env); - void TurnOnOtg(JNIEnv* env); - void TurnOffOtg(JNIEnv* env); + static void TurnOnOtg(JNIEnv* env); + static void TurnOffOtg(JNIEnv* env); protected: @@ -227,7 +250,8 @@ protected: bool SendBroadcastMessage(std::string action, int value); 
// bool MatchCaptureSizeRequest(ACameraManager *cameraManager, const char *selectedCameraId, unsigned int width, unsigned int height, uint32_t cameraOrientation_, - inline bool TakePhotoCb(bool res, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime, const std::vector& objects) const + bool PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector& osds, const std::string& path, const std::string& cameraInfo, cv::Mat& mat); + inline bool TakePhotoCb(int res, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime, const std::vector& objects) const { if (m_listener != NULL) { @@ -236,13 +260,12 @@ protected: return false; } - - inline bool TakePhotoCb(bool res, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime) const + inline bool TakePhotoCb(int result, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime) const { if (m_listener != NULL) { std::vector objects; - return m_listener->OnPhotoTaken(res, photoInfo, path, photoTime, objects); + return m_listener->OnPhotoTaken(result, photoInfo, path, photoTime, objects); } return false; @@ -252,6 +275,7 @@ protected: std::string QueryCpuTemperature(); bool OnImageReady(cv::Mat& mat); + bool onBurstCapture(std::shared_ptr characteristics, std::vector >& results, uint32_t ldr, std::vector >& frames); void onError(const std::string& msg); void onDisconnected(ACameraDevice* device); @@ -310,8 +334,9 @@ protected: time_t mHeartbeatStartTime; unsigned int mHeartbeatDuration; - long mCameraPowerCount; - long mOtgCount; + static std::mutex m_powerLocker; + static long mCameraPowerCount; + static long mOtgCount; std::thread m_threadClose; int m_signalLevel; diff --git a/app/src/main/cpp/PositionHelper.h b/app/src/main/cpp/PositionHelper.h new file mode 100644 index 00000000..b1d0e865 --- /dev/null +++ b/app/src/main/cpp/PositionHelper.h @@ -0,0 +1,57 @@ +#ifndef __POSITION_HELPER_H__ +#define __POSITION_HELPER_H__ + +#include +#define 
_USE_MATH_DEFINES + +inline double transformLat(double x, double y) +{ + double ret = -100.0 + 2.0 * x + 3.0 * y + 0.2 * y * y + 0.1 * x * y + 0.2 * std::sqrt(std::abs(x)); + ret += (20.0 * std::sin(6.0 * x * M_PI) + 20.0 * std::sin(2.0 * x * M_PI)) * 2.0 / 3.0; + ret += (20.0 * std::sin(y * M_PI) + 40.0 * std::sin(y / 3.0 * M_PI)) * 2.0 / 3.0; + ret += (160.0 * std::sin(y / 12.0 * M_PI) + 320 * std::sin(y * M_PI / 30.0)) * 2.0 / 3.0; + return ret; +} + +inline double transformLng(double x, double y) +{ + double ret = 300.0 + x + 2.0 * y + 0.1 * x * x + 0.1 * x * y + 0.1 * std::sqrt(std::abs(x)); + ret += (20.0 * std::sin(6.0 * x * M_PI) + 20.0 * std::sin(2.0 * x * M_PI)) * 2.0 / 3.0; + ret += (20.0 * std::sin(x * M_PI) + 40.0 * std::sin(x / 3.0 * M_PI)) * 2.0 / 3.0; + ret += (150.0 * std::sin(x / 12.0 * M_PI) + 300.0 * std::sin(x / 30.0 * M_PI)) * 2.0 / 3.0; + return ret; +} + +inline void transformPosition(double& lat, double& lng) +{ + // 卫星椭球坐标投影到平面地图坐标系的投影因子 +#define AXIS 6378245.0 + // 椭球的偏心率(a^2 - b^2) / a^2 +#define OFFSET 0.00669342162296594323 + double dLat = transformLat(lng - 105.0, lat - 35.0); + double dLon = transformLng(lng - 105.0, lat - 35.0); + double radLat = lat / 180.0 * M_PI; + double magic = std::sin(radLat); + magic = 1 - OFFSET * magic * magic; + double sqrtMagic = std::sqrt(magic); + dLat = (dLat * 180.0) / ((AXIS * (1 - OFFSET)) / (magic * sqrtMagic) * M_PI); + dLon = (dLon * 180.0) / (AXIS / sqrtMagic * std::cos(radLat) * M_PI); + lat += dLat; + lng += dLon; +} + +inline bool shouldConvertPosition(double lat, double lon) +{ + if (lon < 72.004 || lon > 137.8347) + { + return false; + } + if (lat < 0.8293 || lat > 55.8271) + { + return false; + } + return true; +} + + +#endif // __POSITION_HELPER_H__ \ No newline at end of file diff --git a/app/src/main/cpp/SensorsProtocol.cpp b/app/src/main/cpp/SensorsProtocol.cpp index 980e1c85..b6b8e094 100644 --- a/app/src/main/cpp/SensorsProtocol.cpp +++ b/app/src/main/cpp/SensorsProtocol.cpp @@ 
-19,12 +19,15 @@ #include #include #include "GPIOControl.h" -#include "serialComm.h" -#include "SensorsProtocol.h" +#include "SerialComm.h" #include -#include "SensorsProtocol.h" + +#include +#include //#include "Eint.h" +#include + SIO_PARAM_SERIAL_DEF serialport[MAX_SERIAL_PORT_NUM]; SERIAL_PARAM devparam[MAX_SERIAL_DEV_NUM]; SRDT_DEF srdt; @@ -34,167 +37,171 @@ AI_DEF slantpntmsg[6][SLANTANGLE_DATA_NUM]; static void setInt(int cmd, int value) { - int fd = open("/dev/mtkgpioctrl", O_RDONLY); - IOT_PARAM param; - param.cmd = cmd; - param.value = value; - // LOGE("set_int fd=%d,cmd=%d,value=%d\r\n",fd, cmd, value); - if( fd > 0 ) - { - ioctl(fd, IOT_PARAM_WRITE, ¶m); - // LOGE("set_int22 cmd=%d,value=%d,result=%d\r\n",param.cmd, param.value, param.result); - close(fd); - } - //return; + int fd = open("/dev/mtkgpioctrl", O_RDONLY); + IOT_PARAM param; + param.cmd = cmd; + param.value = value; + // LOGE("set_int fd=%d,cmd=%d,value=%d\r\n",fd, cmd, value); + if (fd > 0) + { + ioctl(fd, IOT_PARAM_WRITE, ¶m); + // LOGE("set_int22 cmd=%d,value=%d,result=%d\r\n",param.cmd, param.value, param.result); + close(fd); + } + //return; } int getInt(int cmd) { - int fd = open("/dev/mtkgpioctrl", O_RDONLY); - // LOGE("get_int fd=%d,cmd=%d\r\n",fd, cmd); - if( fd > 0 ) - { - IOT_PARAM param; - param.cmd = cmd; - ioctl(fd, IOT_PARAM_READ, ¶m); + int fd = open("/dev/mtkgpioctrl", O_RDONLY); + // LOGE("get_int fd=%d,cmd=%d\r\n",fd, cmd); + if (fd > 0) + { + IOT_PARAM param; + param.cmd = cmd; + ioctl(fd, IOT_PARAM_READ, ¶m); #ifdef _DEBUG - //ALOGI("getInt cmd=%d,value=%d,result=%d\r\n",param.cmd, param.value, param.result); + //ALOGI("getInt cmd=%d,value=%d,result=%d\r\n",param.cmd, param.value, param.result); #endif - close(fd); - return param.value; - } - return -1; + close(fd); + return param.value; + } + return -1; } static void setRS485Enable(bool z) { - setInt(CMD_SET_485_EN_STATE, z ? 1 : 0); + setInt(CMD_SET_485_EN_STATE, z ? 
1 : 0); } static void set485WriteMode() { - setInt(CMD_SET_485_STATE, 1); + setInt(CMD_SET_485_STATE, 1); } static void set485ReadMode() { - setInt(CMD_SET_485_STATE, 0); + setInt(CMD_SET_485_STATE, 0); } static void set12VEnable(bool z) { - setInt(CMD_SET_12V_EN_STATE, z ? 1 : 0); + setInt(CMD_SET_12V_EN_STATE, z ? 1 : 0); } static void setCam3V3Enable(bool enabled) { - setInt(CMD_SET_CAM_3V3_EN_STATE, enabled ? 1 : 0); +#ifdef ENABLE_3V3_ALWAYS + setInt(CMD_SET_CAM_3V3_EN_STATE, 1); +#else + setInt(CMD_SET_CAM_3V3_EN_STATE, enabled ? 1 : 0); +#endif } #if 0 /********************************************************************************* * 气象数据处理 * **********************************************************************************/ -static void PortDataProcess( void ) -{ - float fvalue, fcorvalue, *fvalua, frnb/*, fwind*/; - //WORD uDevAddr; - unsigned char cmdidx; - int i, j, aipnt, datanum; - SIO_PARAM_SERIAL_DEF *pPortParam; - char szbuf[64]; - - pPortParam = &serialport; - //取出装置地址,开始处理地址+++ - if(0x02 == pPortParam->m_au8RecvBuf[5]) - { - //pPortParam->devaddr = pPortParam->m_au8RecvBuf[4]; - return; - } - cmdidx = pPortParam->m_au8RecvBuf[5]; +static void PortDataProcess(void) +{ + float fvalue, fcorvalue, *fvalua, frnb/*, fwind*/; + //uint16_t uDevAddr; + unsigned char cmdidx; + int i, j, aipnt, datanum; + SIO_PARAM_SERIAL_DEF *pPortParam; + char szbuf[64]; + + pPortParam = &serialport; + //取出装置地址,开始处理地址+++ + if (0x02 == pPortParam->m_au8RecvBuf[5]) + { + //pPortParam->devaddr = pPortParam->m_au8RecvBuf[4]; + return; + } + cmdidx = pPortParam->m_au8RecvBuf[5]; #if 0 - aipnt = pPortParam->SameTypeDevIdx; - uDevAddr = serialport->m_au8RecvBuf[4]; - if(0 == srdt.IsReadWireTem) - { - if(uDevAddr != pPortParam->devaddr) - return; - } + aipnt = pPortParam->SameTypeDevIdx; + uDevAddr = serialport->m_au8RecvBuf[4]; + if (0 == srdt.IsReadWireTem) + { + if (uDevAddr != pPortParam->devaddr) + return; + } #endif - fvalua = &fvalue; - datanum = 
pPortParam->m_au8RecvBuf[6]; - if((0x08 != cmdidx) && (0x09 != cmdidx)) - return; - - for(i = 0, j=7; (im_au8RecvBuf[1]); i++, j+=5 ) - { - if(0x08 == cmdidx) - fvalue = (pPortParam->m_au8RecvBuf[j+1]<<24)+(pPortParam->m_au8RecvBuf[j+2]<<16) - +(pPortParam->m_au8RecvBuf[j+3]<<8)+pPortParam->m_au8RecvBuf[j+4]; - else - { - *(u_char *)fvalua = pPortParam->m_au8RecvBuf[j+4]; - *((u_char *)fvalua+1) = pPortParam->m_au8RecvBuf[j+3]; - *((u_char *)fvalua+2) = pPortParam->m_au8RecvBuf[j+2]; - *((u_char *)fvalua+3) = pPortParam->m_au8RecvBuf[j+1]; - } - switch(pPortParam->m_au8RecvBuf[j]) - { - case 1: /*温度*/ - weatherpntmsg[0] = fvalue; - LOGE("温度:%0.3f ", fvalue); - break; - case 2: /*气压*/ - weatherpntmsg[5] = fvalue; - LOGE("气压:%0.3f ", fvalue); - break; - case 3: /*湿度*/ - weatherpntmsg[1] = fvalue; - LOGE("湿度:%0.3f ", fvalue); - break; - case 4: /*雨量*/ - break; - case 5: /*日照*/ - break; - case 6: /*风速*/ - weatherpntmsg[2] = fvalue; - LOGE("风速:%0.3f ", fvalue); - break; - case 7: /*风向*/ - weatherpntmsg[3] = fvalue; - LOGE("风向:%0.3f ", fvalue); - break; - case 8: /*拉力*/ - case 9: /*倾角传感器X轴倾角*/ - case 10: /*倾角传感器Y轴倾角*/ - case 11: /*测温球导线温度*/ - case 12: /*测温球内部温度*/ - break; - case 13: /*测温球导线X轴倾角*/ - break; - case 14: /*测温球导线Y轴倾角*/ - break; - case 15: /*测温球导线电流*/ - break; - case 16: /*测温球电池电压*/ - break; - case 17: /*A相泄漏电流平均值;*/ - break; - case 18: /*A相泄漏电流最大值;*/ - break; - case 19: /*A相超过3mA的脉冲频次*/ - break; - case 20: /*A相超过10mA的脉冲频次*/ - break; - case 21: /*B相泄漏电流平均值;*/ - break; - case 22: /*B相泄漏电流最大值;*/ - break; - case 23: /*B相超过3mA的脉冲频次*/ - break; - case 24: /*B相超过10mA的脉冲频次*/ - case 25: /*C相泄漏电流平均值;*/ - case 26: /*C相泄漏电流最大值;*/ - case 27: /*C相超过3mA的脉冲频次*/ - case 28: /*C相超过10mA的脉冲频次*/ - break; - } - } + fvalua = &fvalue; + datanum = pPortParam->m_au8RecvBuf[6]; + if ((0x08 != cmdidx) && (0x09 != cmdidx)) + return; + + for (i = 0, j = 7; (i < datanum) && (j < 6 + pPortParam->m_au8RecvBuf[1]); i++, j += 5) + { + if (0x08 == cmdidx) + fvalue = (pPortParam->m_au8RecvBuf[j + 
1] << 24) + (pPortParam->m_au8RecvBuf[j + 2] << 16) + + (pPortParam->m_au8RecvBuf[j + 3] << 8) + pPortParam->m_au8RecvBuf[j + 4]; + else + { + *(u_char *)fvalua = pPortParam->m_au8RecvBuf[j + 4]; + *((u_char *)fvalua + 1) = pPortParam->m_au8RecvBuf[j + 3]; + *((u_char *)fvalua + 2) = pPortParam->m_au8RecvBuf[j + 2]; + *((u_char *)fvalua + 3) = pPortParam->m_au8RecvBuf[j + 1]; + } + switch (pPortParam->m_au8RecvBuf[j]) + { + case 1: /*温度*/ + weatherpntmsg[0] = fvalue; + LOGE("温度:%0.3f ", fvalue); + break; + case 2: /*气压*/ + weatherpntmsg[5] = fvalue; + LOGE("气压:%0.3f ", fvalue); + break; + case 3: /*湿度*/ + weatherpntmsg[1] = fvalue; + LOGE("湿度:%0.3f ", fvalue); + break; + case 4: /*雨量*/ + break; + case 5: /*日照*/ + break; + case 6: /*风速*/ + weatherpntmsg[2] = fvalue; + LOGE("风速:%0.3f ", fvalue); + break; + case 7: /*风向*/ + weatherpntmsg[3] = fvalue; + LOGE("风向:%0.3f ", fvalue); + break; + case 8: /*拉力*/ + case 9: /*倾角传感器X轴倾角*/ + case 10: /*倾角传感器Y轴倾角*/ + case 11: /*测温球导线温度*/ + case 12: /*测温球内部温度*/ + break; + case 13: /*测温球导线X轴倾角*/ + break; + case 14: /*测温球导线Y轴倾角*/ + break; + case 15: /*测温球导线电流*/ + break; + case 16: /*测温球电池电压*/ + break; + case 17: /*A相泄漏电流平均值;*/ + break; + case 18: /*A相泄漏电流最大值;*/ + break; + case 19: /*A相超过3mA的脉冲频次*/ + break; + case 20: /*A相超过10mA的脉冲频次*/ + break; + case 21: /*B相泄漏电流平均值;*/ + break; + case 22: /*B相泄漏电流最大值;*/ + break; + case 23: /*B相超过3mA的脉冲频次*/ + break; + case 24: /*B相超过10mA的脉冲频次*/ + case 25: /*C相泄漏电流平均值;*/ + case 26: /*C相泄漏电流最大值;*/ + case 27: /*C相超过3mA的脉冲频次*/ + case 28: /*C相超过10mA的脉冲频次*/ + break; + } + } } /*************************************************************** @@ -202,344 +209,345 @@ static void PortDataProcess( void ) ***************************************************************/ static void RecvData(u_char *buf, int len)// 规约读数据处理 { - int i, ictime; - //WORD crc, check; - SIO_PARAM_SERIAL_DEF *pPortParam; - - pPortParam = &serialport; - ictime = (int)time(NULL); - - if(pPortParam->m_iRecvLen == 0) - { - 
pPortParam->iRecvTime = ictime; - } - else - { - if((ictime-pPortParam->iRecvTime > 6) || (ictime - pPortParam->iRecvTime < 0)) - pPortParam->iRecvTime = ictime; - else if(ictime - pPortParam->iRecvTime > 2) - { - pPortParam->m_iRecvLen = 0; - pPortParam->m_iRevStatus = 0; - } - } - - for(i=0; im_iRevStatus) - { - case 0: // 0x68 - pPortParam->m_iRecvLen = 0; - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - if(0x68 == buf[i]) - pPortParam->m_iRevStatus++; - else - pPortParam->m_iRevStatus = 18; - break; - case 1: // len1 - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - pPortParam->m_iRevStatus++; - break; - case 2: // len2 - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - if(buf[i] == pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen-2]) - { - pPortParam->m_iRevStatus++; - pPortParam->m_iNeedRevLength = buf[i]+5; - } - else - pPortParam->m_iRevStatus = 18; - break; - case 3: // 0x68 - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - pPortParam->m_iNeedRevLength--; - if(0x68 == buf[i]) - pPortParam->m_iRevStatus++; - else - pPortParam->m_iRevStatus = 18; - break; - case 4: // 正确接收数据 - pPortParam->m_iNeedRevLength--; - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - if(pPortParam->m_iNeedRevLength > 0) - break; - if(buf[i] != 0x16) - { - pPortParam->m_iRevStatus=18; - break; - } - - //if(CheckLpcError(serialport->m_au8RecvBuf, pPortParam->m_iRecvLen) == TRUE) - { - PortDataProcess(); - pPortParam->m_iRevStatus = 0; - pPortParam->RevCmdFlag = 1; - } - pPortParam->m_iRecvLen = 0; - break; - case 255:// 错误接收数据 - default: - if(buf[i] == 0x68) - { - pPortParam->m_iRevStatus = 1; - pPortParam->m_iRecvLen = 1; - pPortParam->m_au8RecvBuf[0] = buf[i]; - } - else if(buf[i] == 0x16) - { - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - pPortParam->m_iRevStatus = 0; - pPortParam->m_iRecvLen = 0; - } - else - { - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - 
if(pPortParam->m_iRecvLen > 200) - { - pPortParam->m_iRecvLen = 0; - } - } - break; - } - } + int i, ictime; + //uint16_t crc, check; + SIO_PARAM_SERIAL_DEF *pPortParam; + + pPortParam = &serialport; + ictime = (int)time(NULL); + + if (pPortParam->m_iRecvLen == 0) + { + pPortParam->iRecvTime = ictime; + } + else + { + if ((ictime - pPortParam->iRecvTime > 6) || (ictime - pPortParam->iRecvTime < 0)) + pPortParam->iRecvTime = ictime; + else if (ictime - pPortParam->iRecvTime > 2) + { + pPortParam->m_iRecvLen = 0; + pPortParam->m_iRevStatus = 0; + } + } + + for (i = 0; i < len; i++) + { + switch (pPortParam->m_iRevStatus) + { + case 0: // 0x68 + pPortParam->m_iRecvLen = 0; + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + if (0x68 == buf[i]) + pPortParam->m_iRevStatus++; + else + pPortParam->m_iRevStatus = 18; + break; + case 1: // len1 + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + pPortParam->m_iRevStatus++; + break; + case 2: // len2 + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + if (buf[i] == pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen - 2]) + { + pPortParam->m_iRevStatus++; + pPortParam->m_iNeedRevLength = buf[i] + 5; + } + else + pPortParam->m_iRevStatus = 18; + break; + case 3: // 0x68 + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + pPortParam->m_iNeedRevLength--; + if (0x68 == buf[i]) + pPortParam->m_iRevStatus++; + else + pPortParam->m_iRevStatus = 18; + break; + case 4: // 正确接收数据 + pPortParam->m_iNeedRevLength--; + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + if (pPortParam->m_iNeedRevLength > 0) + break; + if (buf[i] != 0x16) + { + pPortParam->m_iRevStatus = 18; + break; + } + + //if(CheckLpcError(serialport->m_au8RecvBuf, pPortParam->m_iRecvLen) == TRUE) + { + PortDataProcess(); + pPortParam->m_iRevStatus = 0; + pPortParam->RevCmdFlag = 1; + } + pPortParam->m_iRecvLen = 0; + break; + case 255:// 错误接收数据 + default: + if (buf[i] == 0x68) + { + pPortParam->m_iRevStatus = 
1; + pPortParam->m_iRecvLen = 1; + pPortParam->m_au8RecvBuf[0] = buf[i]; + } + else if (buf[i] == 0x16) + { + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + pPortParam->m_iRevStatus = 0; + pPortParam->m_iRecvLen = 0; + } + else + { + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + if (pPortParam->m_iRecvLen > 200) + { + pPortParam->m_iRecvLen = 0; + } + } + break; + } + } } -static LONG get_msec(void ) +static int64_t get_msec(void) { - struct timeval tv; + struct timeval tv; - gettimeofday(&tv, NULL); - LONG time_in_msec = tv.tv_sec * 1000 + tv.tv_usec/1000; + gettimeofday(&tv, NULL); + int64_t time_in_msec = tv.tv_sec * 1000 + tv.tv_usec / 1000; - return time_in_msec; + return time_in_msec; } //int inum =0; //int itimecnt=0; static int weather_comm(SERIAL_PARAM weatherport) { - int fd = -1; - int len, i,ret, icnt=0; - LONG ictime, iruntime, isendtime, irecvtime; - unsigned char sendbuf[] = {0x68,0x00,0x00,0x68,0x01,0x09,0x0a,0x16}; - char recvbuf[256], szbuf[512]; - //char serial_description[] = "/dev/ttyS0"; + int fd = -1; + int len, i, ret, icnt = 0; + int64_t ictime, iruntime, isendtime, irecvtime; + unsigned char sendbuf[] = { 0x68,0x00,0x00,0x68,0x01,0x09,0x0a,0x16 }; + char recvbuf[256], szbuf[512]; + //char serial_description[] = "/dev/ttyS0"; #if 0 - DIR *dir = opendir("/dev"); - if (dir == NULL) { - LOGE("_test_ opendir"); - return -1; - } - - // 读取目录项 - struct dirent *entry; - while ((entry = readdir(dir)) != NULL) { - // 过滤出串口设备,通常以"ttyS"或"ttyUSB"开头 - if ((strncmp(entry->d_name, "ttyS2", 5) == 0) || - (strncmp(entry->d_name, "ttyS0", 5) == 0)) { - LOGE("_test_ Found serial port: %s\n", entry->d_name); - } - } - - // 关闭目录 - closedir(dir); + DIR *dir = opendir("/dev"); + if (dir == NULL) { + LOGE("_test_ opendir"); + return -1; + } + + // 读取目录项 + struct dirent *entry; + while ((entry = readdir(dir)) != NULL) { + // 过滤出串口设备,通常以"ttyS"或"ttyUSB"开头 + if ((strncmp(entry->d_name, "ttyS2", 5) == 0) || + (strncmp(entry->d_name, 
"ttyS0", 5) == 0)) { + LOGE("_test_ Found serial port: %s\n", entry->d_name); + } + } + + // 关闭目录 + closedir(dir); #endif - serialport.RevCmdFlag = 1; - serialport.m_iRecvLen = 0; - serialport.m_iRevStatus = 0; - - set12VEnable(true); - setCam3V3Enable(true); - setRS485Enable(true); - sleep(2); - //ictime = (int)time(NULL); - ictime = get_msec(); - for(;;) - { - if(fd < 0) - { - fd = open(weatherport.pathname, O_RDWR | O_NDELAY); - //fd = open(weatherport.pathname, O_RDWR | O_NOCTTY); - if(fd < 0) - { - LOGE("_test_ open serial error \n"); - perror(weatherport.pathname); - return -1; - } - - ret= set_port_attr (fd, weatherport.baudrate,weatherport.databit,weatherport.stopbit,weatherport.parity,0,0 );/*9600 8n1 */ - if(ret < 0) - { - LOGE("_test_ set uart arrt faile \n"); - return -1; - } - } - - usleep(10000); - //iruntime = (int)time(NULL); - iruntime = get_msec(); - if((iruntime - ictime > 120000) || (iruntime - ictime < 0)) - ictime = iruntime; - if(iruntime - ictime > 20000) - { - memset(szbuf, 0, sizeof(szbuf)); - sprintf(szbuf, "气象采样时间=%0.3f秒,停止采样!", (iruntime-ictime)/1000.0); - LOGE("%s", szbuf); - break; - } - - if(1 == serialport.RevCmdFlag) - { - set485WriteMode(); - - len = write(fd, sendbuf, sizeof(sendbuf));/* 向串囗发送字符串 */ - serialport.RevCmdFlag = 0; - LOGE("发送命令时间差%ld毫秒", get_msec()-isendtime); - //isendtime = time(NULL); - isendtime = get_msec(); - if (len < 0) { - LOGE("write data error \n"); - return -1; - } else { - memset(szbuf, 0, sizeof(szbuf)); - sprintf(szbuf, "Send:"); - for (i = 0; i < len; i++) { - sprintf(szbuf, "%s %02X", szbuf, sendbuf[i]); - } - LOGE("%s", szbuf); - //icnt = 0; - //inum++; - } - tcdrain(fd); - //usleep(50000); - } - else - { - //irecvtime = time(NULL); - irecvtime = get_msec(); - if((irecvtime-isendtime > 6000) ||(irecvtime - isendtime < 0)) - isendtime = irecvtime; - if (irecvtime-isendtime > 300) - { - LOGE("传感器超过%ld毫秒未应答", irecvtime-isendtime); - serialport.RevCmdFlag = 1; - serialport.m_iRecvLen = 0; - 
serialport.m_iRevStatus = 0; - close(fd); - fd = -1; - continue; - } - } - set485ReadMode(); - memset(recvbuf, 0, sizeof(recvbuf)); - len = read(fd, recvbuf, sizeof(recvbuf));/* 在串口读取字符串 */ - if (len < 0) { - LOGE("serial read error \n"); - continue; - } - if(0 == len) - { - //icnt++; - continue; - } - memset(szbuf, 0, sizeof(szbuf)); - sprintf(szbuf, "Recv:"); - for (i = 0; i < len; i++) { - sprintf(szbuf, "%s %02X", szbuf, recvbuf[i]); - } - __android_log_print(ANDROID_LOG_INFO, "serial", "%s", szbuf); - RecvData((u_char*)recvbuf, len); - //LOGE("一周期空循环次数%d, 读取次数%d, 时间:%d %d", icnt, inum, (int)time(NULL), itimecnt); - icnt = 0; - //serialport.RevCmdFlag =1; - } - - close(fd); - set12VEnable(false); - setCam3V3Enable(false); - setRS485Enable(false); - - //exit(-1); - return(0); + serialport.RevCmdFlag = 1; + serialport.m_iRecvLen = 0; + serialport.m_iRevStatus = 0; + + set12VEnable(true); + setCam3V3Enable(true); + setRS485Enable(true); + sleep(2); + //ictime = (int)time(NULL); + ictime = get_msec(); + for (;;) + { + if (fd < 0) + { + fd = open(weatherport.pathname, O_RDWR | O_NDELAY); + //fd = open(weatherport.pathname, O_RDWR | O_NOCTTY); + if (fd < 0) + { + LOGE("_test_ open serial error \n"); + perror(weatherport.pathname); + return -1; + } + + ret = set_port_attr(fd, weatherport.baudrate, weatherport.databit, weatherport.stopbit, weatherport.parity, 0, 0);/*9600 8n1 */ + if (ret < 0) + { + LOGE("_test_ set uart arrt faile \n"); + return -1; + } + } + + usleep(10000); + //iruntime = (int)time(NULL); + iruntime = get_msec(); + if ((iruntime - ictime > 120000) || (iruntime - ictime < 0)) + ictime = iruntime; + if (iruntime - ictime > 20000) + { + memset(szbuf, 0, sizeof(szbuf)); + sprintf(szbuf, "气象采样时间=%0.3f秒,停止采样!", (iruntime - ictime) / 1000.0); + LOGE("%s", szbuf); + break; + } + + if (1 == serialport.RevCmdFlag) + { + set485WriteMode(); + + len = write(fd, sendbuf, sizeof(sendbuf));/* 向串囗发送字符串 */ + serialport.RevCmdFlag = 0; + LOGE("发送命令时间差%ld毫秒", 
get_msec() - isendtime); + //isendtime = time(NULL); + isendtime = get_msec(); + if (len < 0) { + LOGE("write data error \n"); + return -1; + } + else { + memset(szbuf, 0, sizeof(szbuf)); + sprintf(szbuf, "Send:"); + for (i = 0; i < len; i++) { + sprintf(szbuf, "%s %02X", szbuf, sendbuf[i]); + } + LOGE("%s", szbuf); + //icnt = 0; + //inum++; + } + tcdrain(fd); + //usleep(50000); + } + else + { + //irecvtime = time(NULL); + irecvtime = get_msec(); + if ((irecvtime - isendtime > 6000) || (irecvtime - isendtime < 0)) + isendtime = irecvtime; + if (irecvtime - isendtime > 300) + { + LOGE("传感器超过%ld毫秒未应答", irecvtime - isendtime); + serialport.RevCmdFlag = 1; + serialport.m_iRecvLen = 0; + serialport.m_iRevStatus = 0; + close(fd); + fd = -1; + continue; + } + } + set485ReadMode(); + memset(recvbuf, 0, sizeof(recvbuf)); + len = read(fd, recvbuf, sizeof(recvbuf));/* 在串口读取字符串 */ + if (len < 0) { + LOGE("serial read error \n"); + continue; + } + if (0 == len) + { + //icnt++; + continue; + } + memset(szbuf, 0, sizeof(szbuf)); + sprintf(szbuf, "Recv:"); + for (i = 0; i < len; i++) { + sprintf(szbuf, "%s %02X", szbuf, recvbuf[i]); + } + __android_log_print(ANDROID_LOG_INFO, "serial", "%s", szbuf); + RecvData((u_char*)recvbuf, len); + //LOGE("一周期空循环次数%d, 读取次数%d, 时间:%d %d", icnt, inum, (int)time(NULL), itimecnt); + icnt = 0; + //serialport.RevCmdFlag =1; + } + + close(fd); + set12VEnable(false); + setCam3V3Enable(false); + setRS485Enable(false); + + //exit(-1); + return(0); } int serial_port_comm() { - SERIAL_PARAM portparm; + SERIAL_PARAM portparm; - //struct timeval tv; + //struct timeval tv; - //gettimeofday(&tv, NULL); - //LONG time_in_microseconds = tv.tv_sec * 1000000 + tv.tv_usec; + //gettimeofday(&tv, NULL); + //int64_t time_in_microseconds = tv.tv_sec * 1000000 + tv.tv_usec; - //LOGE("Current time in microseconds: %ld\n", time_in_microseconds); + //LOGE("Current time in microseconds: %ld\n", time_in_microseconds); #if 1 - memset(portparm.pathname, 0, 
sizeof(portparm.pathname)); - sprintf(portparm.pathname, "/dev/ttyS0"); - portparm.parity = 'N'; - portparm.databit = 8; - portparm.baudrate = B9600; - memset(portparm.stopbit, 0, sizeof(portparm.stopbit)); - sprintf(portparm.stopbit, "1"); + memset(portparm.pathname, 0, sizeof(portparm.pathname)); + sprintf(portparm.pathname, "/dev/ttyS0"); + portparm.parity = 'N'; + portparm.databit = 8; + portparm.baudrate = B9600; + memset(portparm.stopbit, 0, sizeof(portparm.stopbit)); + sprintf(portparm.stopbit, "1"); #endif - //itimecnt = (int)time(NULL); + //itimecnt = (int)time(NULL); - //for(;;) - weather_comm(portparm); - return 0; + //for(;;) + weather_comm(portparm); + return 0; } #endif static speed_t getBaudrate(unsigned int baudrate) { - switch(baudrate) { - case 0: return B0; - case 50: return B50; - case 75: return B75; - case 110: return B110; - case 134: return B134; - case 150: return B150; - case 200: return B200; - case 300: return B300; - case 600: return B600; - case 1200: return B1200; - case 1800: return B1800; - case 2400: return B2400; - case 4800: return B4800; - case 9600: return B9600; - case 19200: return B19200; - case 38400: return B38400; - case 57600: return B57600; - case 115200: return B115200; - case 230400: return B230400; - case 460800: return B460800; - case 500000: return B500000; - case 576000: return B576000; - case 921600: return B921600; - case 1000000: return B1000000; - case 1152000: return B1152000; - case 1500000: return B1500000; - case 2000000: return B2000000; - case 2500000: return B2500000; - case 3000000: return B3000000; - case 3500000: return B3500000; - case 4000000: return B4000000; - default: return B9600; - } -} - -static LONG get_msec() -{ - struct timeval tv; - LONG time_in_msec=0; - - gettimeofday(&tv, NULL); - time_in_msec = tv.tv_sec; - time_in_msec *= 1000; - time_in_msec += tv.tv_usec/1000; - - return time_in_msec; + switch (baudrate) { + case 0: return B0; + case 50: return B50; + case 75: return B75; + case 
110: return B110; + case 134: return B134; + case 150: return B150; + case 200: return B200; + case 300: return B300; + case 600: return B600; + case 1200: return B1200; + case 1800: return B1800; + case 2400: return B2400; + case 4800: return B4800; + case 9600: return B9600; + case 19200: return B19200; + case 38400: return B38400; + case 57600: return B57600; + case 115200: return B115200; + case 230400: return B230400; + case 460800: return B460800; + case 500000: return B500000; + case 576000: return B576000; + case 921600: return B921600; + case 1000000: return B1000000; + case 1152000: return B1152000; + case 1500000: return B1500000; + case 2000000: return B2000000; + case 2500000: return B2500000; + case 3000000: return B3000000; + case 3500000: return B3500000; + case 4000000: return B4000000; + default: return B9600; + } +} + +static int64_t get_msec() +{ + struct timeval tv; + int64_t time_in_msec = 0; + + gettimeofday(&tv, NULL); + time_in_msec = tv.tv_sec; + time_in_msec *= 1000; + time_in_msec += tv.tv_usec / 1000; + + return time_in_msec; } /* @@ -547,7 +555,7 @@ static LONG get_msec() */ void Gm_OpenSerialPower() { - /*由传送的主站的地方来控制串口电源,这里不实现*/; + /*由传送的主站的地方来控制串口电源,这里不实现*/; } // 关闭串口电源 @@ -558,54 +566,54 @@ void Gm_CloseSerialPower() // 关闭传感器电源 void Gm_CloseSensorsPower() { - //char iIoNo; - //char szbuf[128]; - int igpio; + //char iIoNo; + //char szbuf[128]; + int igpio; - //sprintf(szbuf, "Close Sensors port %d Power!", port); + //sprintf(szbuf, "Close Sensors port %d Power!", port); - /* 关闭电源*/ - //switch(port) - /* 根据硬件具体布置最后调整,目前是微拍板子的来控制*/ + /* 关闭电源*/ + //switch(port) + /* 根据硬件具体布置最后调整,目前是微拍板子的来控制*/ /* set12VEnable(false); - setCam3V3Enable(false); - setRS485Enable(false); + setCam3V3Enable(false); + setRS485Enable(false); #if 0 - setInt(CMD_SET_WTH_POWER, 0); - setInt(CMD_SET_PULL_POWER, 0); - setInt(CMD_SET_ANGLE_POWER, 0); - setInt(CMD_SET_OTHER_POWER, 0); - setInt(CMD_SET_PIC1_POWER, 0); - - sleep(3); - igpio = getInt(CMD_SET_WTH_POWER); 
- igpio = getInt(CMD_SET_PULL_POWER); - igpio = getInt(CMD_SET_ANGLE_POWER); - igpio = getInt(CMD_SET_OTHER_POWER); - igpio = getInt(CMD_SET_PIC1_POWER); + setInt(CMD_SET_WTH_POWER, 0); + setInt(CMD_SET_PULL_POWER, 0); + setInt(CMD_SET_ANGLE_POWER, 0); + setInt(CMD_SET_OTHER_POWER, 0); + setInt(CMD_SET_PIC1_POWER, 0); + + sleep(3); + igpio = getInt(CMD_SET_WTH_POWER); + igpio = getInt(CMD_SET_PULL_POWER); + igpio = getInt(CMD_SET_ANGLE_POWER); + igpio = getInt(CMD_SET_OTHER_POWER); + igpio = getInt(CMD_SET_PIC1_POWER); #endif #if 1 - setInt(CMD_SET_SPI_POWER, 1); - setInt(CMD_SET_485_en0, 1); - setInt(CMD_SET_485_en1, 1); - setInt(CMD_SET_485_en2, 1); - setInt(CMD_SET_485_en3, 1); - setInt(CMD_SET_485_en4, 1); + setInt(CMD_SET_SPI_POWER, 1); + setInt(CMD_SET_485_en0, 1); + setInt(CMD_SET_485_en1, 1); + setInt(CMD_SET_485_en2, 1); + setInt(CMD_SET_485_en3, 1); + setInt(CMD_SET_485_en4, 1); #else - setInt(CMD_SET_SPI_POWER, 0); - setInt(CMD_SET_485_en0, 0); - setInt(CMD_SET_485_en1, 0); - setInt(CMD_SET_485_en2, 0); - setInt(CMD_SET_485_en3, 0); - setInt(CMD_SET_485_en4, 0); - sleep(3); - igpio = getInt(CMD_SET_SPI_POWER); - igpio = getInt(CMD_SET_485_en0); - igpio = getInt(CMD_SET_485_en1); - igpio = getInt(CMD_SET_485_en2); - igpio = getInt(CMD_SET_485_en3); - igpio = getInt(CMD_SET_485_en4); + setInt(CMD_SET_SPI_POWER, 0); + setInt(CMD_SET_485_en0, 0); + setInt(CMD_SET_485_en1, 0); + setInt(CMD_SET_485_en2, 0); + setInt(CMD_SET_485_en3, 0); + setInt(CMD_SET_485_en4, 0); + sleep(3); + igpio = getInt(CMD_SET_SPI_POWER); + igpio = getInt(CMD_SET_485_en0); + igpio = getInt(CMD_SET_485_en1); + igpio = getInt(CMD_SET_485_en2); + igpio = getInt(CMD_SET_485_en3); + igpio = getInt(CMD_SET_485_en4); #endif */ } @@ -613,596 +621,602 @@ void Gm_CloseSensorsPower() // 打开传感器电源 void Gm_OpenSensorsPower() { - //char iIoNo; + //char iIoNo; /* int igpio; - char szbuf[128]; + char szbuf[128]; - //if(0 == port) - // return; - //sprintf(szbuf, "Open Sensors port %d Power!", port); + 
//if(0 == port) + // return; + //sprintf(szbuf, "Open Sensors port %d Power!", port); - //set12VEnable(true); - setCam3V3Enable(true); - setRS485Enable(true); + //set12VEnable(true); + setCam3V3Enable(true); + setRS485Enable(true); #if 0 - setInt(CMD_SET_WTH_POWER, 0); - setInt(CMD_SET_PULL_POWER, 0); - setInt(CMD_SET_ANGLE_POWER, 0); - setInt(CMD_SET_OTHER_POWER, 0); - setInt(CMD_SET_PIC1_POWER, 0); + setInt(CMD_SET_WTH_POWER, 0); + setInt(CMD_SET_PULL_POWER, 0); + setInt(CMD_SET_ANGLE_POWER, 0); + setInt(CMD_SET_OTHER_POWER, 0); + setInt(CMD_SET_PIC1_POWER, 0); #else - setInt(CMD_SET_WTH_POWER, 1); - setInt(CMD_SET_PULL_POWER, 1); - setInt(CMD_SET_ANGLE_POWER, 1); - setInt(CMD_SET_OTHER_POWER, 1); - setInt(CMD_SET_PIC1_POWER, 1); - //sleep(3); - igpio = getInt(CMD_SET_WTH_POWER); - igpio = getInt(CMD_SET_PULL_POWER); - igpio = getInt(CMD_SET_ANGLE_POWER); - igpio = getInt(CMD_SET_OTHER_POWER); - igpio = getInt(CMD_SET_PIC1_POWER); + setInt(CMD_SET_WTH_POWER, 1); + setInt(CMD_SET_PULL_POWER, 1); + setInt(CMD_SET_ANGLE_POWER, 1); + setInt(CMD_SET_OTHER_POWER, 1); + setInt(CMD_SET_PIC1_POWER, 1); + //sleep(3); + igpio = getInt(CMD_SET_WTH_POWER); + igpio = getInt(CMD_SET_PULL_POWER); + igpio = getInt(CMD_SET_ANGLE_POWER); + igpio = getInt(CMD_SET_OTHER_POWER); + igpio = getInt(CMD_SET_PIC1_POWER); #endif #if 1 - setInt(CMD_SET_SPI_POWER, 1); - setInt(CMD_SET_485_en0, 1); - setInt(CMD_SET_485_en1, 1); - setInt(CMD_SET_485_en2, 1); - setInt(CMD_SET_485_en3, 1); - setInt(CMD_SET_485_en4, 1); - - //sleep(3); - igpio = getInt(CMD_SET_SPI_POWER); - igpio = getInt(CMD_SET_485_en0); - igpio = getInt(CMD_SET_485_en1); - igpio = getInt(CMD_SET_485_en2); - igpio = getInt(CMD_SET_485_en3); - igpio = getInt(CMD_SET_485_en4); + setInt(CMD_SET_SPI_POWER, 1); + setInt(CMD_SET_485_en0, 1); + setInt(CMD_SET_485_en1, 1); + setInt(CMD_SET_485_en2, 1); + setInt(CMD_SET_485_en3, 1); + setInt(CMD_SET_485_en4, 1); + + //sleep(3); + igpio = getInt(CMD_SET_SPI_POWER); + igpio = 
getInt(CMD_SET_485_en0); + igpio = getInt(CMD_SET_485_en1); + igpio = getInt(CMD_SET_485_en2); + igpio = getInt(CMD_SET_485_en3); + igpio = getInt(CMD_SET_485_en4); #else - setInt(CMD_SET_485_en0, 0); - setInt(CMD_SET_485_en1, 0); - setInt(CMD_SET_485_en2, 0); - setInt(CMD_SET_485_en3, 0); - setInt(CMD_SET_485_en4, 0); + setInt(CMD_SET_485_en0, 0); + setInt(CMD_SET_485_en1, 0); + setInt(CMD_SET_485_en2, 0); + setInt(CMD_SET_485_en3, 0); + setInt(CMD_SET_485_en4, 0); #endif - // 打开电源 - //switch(port) + // 打开电源 + //switch(port) */ } // 查询传感器电源状态 char Gm_GetSensorsPowerState(int port) { - char iIoNo, cstate=0; - //char szbuf[128]; + char iIoNo, cstate = 0; + //char szbuf[128]; - /* 查询电源状态*/ - //switch(port) + /* 查询电源状态*/ + //switch(port) - return cstate; + return cstate; } void BytestreamLOG(int commid, char* describe, u_char* buf, int len, char flag) { - int i; - char szbuf[4096]; - - memset(szbuf, 0, sizeof(szbuf)); - if(NULL != describe) - strncpy(szbuf, describe, strlen(describe)); - for (i = 0; i < len; i++) - { - sprintf(szbuf, "%s %02X", szbuf, buf[i]); - } - SaveLogTofile(commid, szbuf); - switch (flag) - { - case 'E': - LOGE("%s", szbuf); - break; - case 'I': - LOGI("%s", szbuf); - break; - case 'D': - LOGD("%s", szbuf); - break; - case 'V': - LOGV("%s", szbuf); - break; - case 'W': - LOGW("%s", szbuf); - break; - default: - LOGI("%s", szbuf); - break; - } + int i; + char szbuf[4096]; + + memset(szbuf, 0, sizeof(szbuf)); + if (NULL != describe) + strncpy(szbuf, describe, strlen(describe)); + for (i = 0; i < len; i++) + { + sprintf(szbuf, "%s %02X", szbuf, buf[i]); + } + SaveLogTofile(commid, szbuf); + switch (flag) + { + case 'E': + ALOGE("%s", szbuf); + break; + case 'I': + ALOGI("%s", szbuf); + break; + case 'D': + ALOGD("%s", szbuf); + break; + case 'V': + ALOGI("%s", szbuf); + break; + case 'W': + ALOGW("%s", szbuf); + break; + default: + ALOGI("%s", szbuf); + break; + } } // 打开串口通讯 void Gm_OpenSerialPort(int devidx) { - int fd = -1; - char szbuf[512]; - 
- if((devidx < 0) || (devidx >= MAX_SERIAL_DEV_NUM)) - return; - memset(szbuf, 0, sizeof(szbuf)); - if(serialport[devparam[devidx].commid].fd <= 0) - { - fd = open(devparam[devidx].pathname, O_RDWR | O_NDELAY); - if(fd < 0) - { - sprintf(szbuf, "装置%d 打开串口%d失败!fd=%d", devidx+1, devparam[devidx].commid+1, fd); - DebugLog(devparam[devidx].commid, szbuf, 'E'); - return; - } - sprintf(szbuf, "装置%d 打开串口%d %s成功!fd=%d", devidx+1, devparam[devidx].commid+1, devparam[devidx].pathname, fd ); - DebugLog(devparam[devidx].commid, szbuf, 'I'); - serialport[devparam[devidx].commid].fd = fd; - return; - } - sprintf(szbuf, "装置%d 串口%d %s已经打开!fd=%d", devidx+1, devparam[devidx].commid+1, devparam[devidx].pathname,serialport[devparam[devidx].commid].fd); - DebugLog(devparam[devidx].commid, szbuf, 'I'); + int fd = -1; + char szbuf[512]; + + if ((devidx < 0) || (devidx >= MAX_SERIAL_DEV_NUM)) + return; + memset(szbuf, 0, sizeof(szbuf)); + if (serialport[devparam[devidx].commid].fd <= 0) + { + fd = open(devparam[devidx].pathname, O_RDWR | O_NDELAY); + if (fd < 0) + { + sprintf(szbuf, "装置%d 打开串口%d失败!fd=%d", devidx + 1, devparam[devidx].commid + 1, fd); + DebugLog(devparam[devidx].commid, szbuf, 'E'); + return; + } + sprintf(szbuf, "装置%d 打开串口%d %s成功!fd=%d", devidx + 1, devparam[devidx].commid + 1, devparam[devidx].pathname, fd); + DebugLog(devparam[devidx].commid, szbuf, 'I'); + serialport[devparam[devidx].commid].fd = fd; + return; + } + sprintf(szbuf, "装置%d 串口%d %s已经打开!fd=%d", devidx + 1, devparam[devidx].commid + 1, devparam[devidx].pathname, serialport[devparam[devidx].commid].fd); + DebugLog(devparam[devidx].commid, szbuf, 'I'); } // 关闭串口通讯 void Gm_CloseSerialPort() { - int i; + int i; - for(i=0; i 0) - { - close(serialport[i].fd); - serialport[i].fd = -1; - } - } + for (i = 0; i < MAX_SERIAL_PORT_NUM; i++) + { + if (serialport[i].fd > 0) + { + close(serialport[i].fd); + serialport[i].fd = -1; + } + } } /******************************************************************************* 
-函数名称: int GM_SerialComSend(const BYTE * cSendBuf, DWORD nSendLen, int commid) +函数名称: int GM_SerialComSend(const uint8_t * cSendBuf, uint32_t nSendLen, int commid) 功能说明:串口发送数据 返回实际发送的字节数 输入参数: 输出参数: 其它说明: *********************************************************************************/ -int GM_SerialComSend(unsigned char * cSendBuf, LONG nSendLen, int commid) -{ - int i, len; - char szbuf[512]; - - memset(szbuf, 0, sizeof(szbuf)); - len = write(serialport[commid].fd, cSendBuf, (size_t)nSendLen);/* 向串囗发送字符串 */ - //serialport[commid].RevCmdFlag = 0; - //LOGE("发送命令时间差%ld毫秒", get_msec() - isendtime); - //isendtime = time(NULL); - //isendtime = get_msec(); - if (len < 0) - { - sprintf(szbuf, "write data error "); - DebugLog(commid, szbuf, 'E'); - return -1; - } - else if(len > 0) - { - ; - } - return len; +int GM_SerialComSend(unsigned char * cSendBuf, size_t nSendLen, int commid) +{ + int i, len; + char szbuf[512]; + + memset(szbuf, 0, sizeof(szbuf)); + len = write(serialport[commid].fd, cSendBuf, (size_t)nSendLen);/* 向串囗发送字符串 */ + //serialport[commid].RevCmdFlag = 0; + //LOGE("发送命令时间差%ld毫秒", get_msec() - isendtime); + //isendtime = time(NULL); + //isendtime = get_msec(); + if (len < 0) + { + sprintf(szbuf, "write data error "); + DebugLog(commid, szbuf, 'E'); + return -1; + } + else if (len > 0) + { + ; + } + return len; } int Gm_SetSerialPortParam(int commid) { - int ret; - char szbuf[128]; - SERIAL_PARAM *pPortParam=NULL; - - pPortParam = &devparam[srdt.curdevidx[commid]]; - ret= set_port_attr (serialport[commid].fd, pPortParam->baudrate,pPortParam->databit,pPortParam->stopbit,pPortParam->parity,0,0 );/*9600 8n1 */ - if(ret < 0) - { - memset(szbuf, 0, sizeof(szbuf)); - sprintf(szbuf,"串口%d 波特率等参数设置错误!", commid+1); - DebugLog(commid, szbuf, 'E'); - return -1; - } - return ret; + int ret; + char szbuf[128]; + SERIAL_PARAM *pPortParam = NULL; + + pPortParam = &devparam[srdt.curdevidx[commid]]; + ret = set_port_attr(serialport[commid].fd, pPortParam->baudrate, 
pPortParam->databit, pPortParam->stopbit, pPortParam->parity, 0, 0);/*9600 8n1 */ + if (ret < 0) + { + memset(szbuf, 0, sizeof(szbuf)); + sprintf(szbuf, "串口%d 波特率等参数设置错误!", commid + 1); + DebugLog(commid, szbuf, 'E'); + return -1; + } + return ret; } void Gm_InitSerialComm_Test() { - int i, j; - SENSOR_PARAM sensorParam[MAX_SERIAL_DEV_NUM]; + int i, j; + SENSOR_PARAM sensorParam[MAX_SERIAL_DEV_NUM]; #if 0 - srdt.PtzCmdType = Cmd_Cancel; // 云台指令类型 - for(i=0; i>>>>>> cf0f3f52d373254ad85da3aa96b1e8c8f7c34070 #else - sleep(15); - CameraPhotoCmd(time(NULL), 1, 0, 6, 1); - sleep(5); - - CameraPhotoCmd(time(NULL), 1, 10017, 0, 2); - sleep(5); - - CameraPhotoCmd(0, 1, MOVE_LEFT, 0, 0); - sleep(5); - CameraPhotoCmd(0, 1, MOVE_DOWN, 0, 0); - sleep(5); - CameraPhotoCmd(0, 1, MOVE_RIGHT, 0, 0); - sleep(5); - CameraPhotoCmd(0, 1, MOVE_UP, 0, 0); - sleep(5); - CameraPhotoCmd(0, 1, 10017, 0, 1); - sleep(5); - CameraPhotoCmd(0, 1, ZOOM_TELE, 0, 0); - sleep(5); - CameraPhotoCmd(0, 1, ZOOM_WIDE, 0, 0); - sleep(5); - CameraPhotoCmd(time(NULL), 1, 0, 6, 2); - sleep(5); + sleep(15); + CameraPhotoCmd(time(NULL), 1, 0, 6, 1); + sleep(5); + + CameraPhotoCmd(time(NULL), 1, 10017, 0, 2); + sleep(5); + + CameraPhotoCmd(0, 1, MOVE_LEFT, 0, 0); + sleep(5); + CameraPhotoCmd(0, 1, MOVE_DOWN, 0, 0); + sleep(5); + CameraPhotoCmd(0, 1, MOVE_RIGHT, 0, 0); + sleep(5); + CameraPhotoCmd(0, 1, MOVE_UP, 0, 0); + sleep(5); + CameraPhotoCmd(0, 1, 10017, 0, 1); + sleep(5); + CameraPhotoCmd(0, 1, ZOOM_TELE, 0, 0); + sleep(5); + CameraPhotoCmd(0, 1, ZOOM_WIDE, 0, 0); + sleep(5); + CameraPhotoCmd(time(NULL), 1, 0, 6, 2); + sleep(5); #endif - } + } #if 1 - //******************** 端口基本信息 ************************ - /*for(i=0; i*/ - //sprintf(devparam[i].pathname, "/dev/swk3"); - devparam[i].commid = 3; - break; - case SLANT_PROTOCOL: - //memset(devparam[i].pathname, 0, sizeof(devparam[i].pathname)); - /* 目前还不确定具体串口分配,暂时默认使用串口1*/ - //sprintf(devparam[i].pathname, "/dev/swk2"); - devparam[i].commid = 2; - break; - 
case RALLY_PROTOCOL: - //memset(devparam[i].pathname, 0, sizeof(devparam[i].pathname)); - /* 目前还不确定具体串口分配,暂时默认使用串口1*/ - //sprintf(devparam[i].pathname, "/dev/swk1"); - devparam[i].commid = 1; - break; - case PELCO_D_PROTOCOL: /* 摄像机协议*/ - case PELCO_P_PROTOCOL: /* 摄像机协议*/ - case SERIALCAMERA_PROTOCOL: /* 串口摄像机协议*/ - //memset(devparam[i].pathname, 0, sizeof(devparam[i].pathname)); - /* 目前还不确定//具体串口分配,暂时默认使用串口1*/ - //sprintf(devparam[i].pathname, "/dev/ttyS1"); - devparam[i].commid = 0; - srdt.camerauseserial = 0; - break; - default: - devparam[i].IsNoInsta = 0; - break; - } - } + int i; + + for (i = 0; i < MAX_SERIAL_DEV_NUM; i++) + { + if (0 == devparam[i].IsNoInsta) + continue; + switch (devparam[i].ProtocolIdx) + { + case WEATHER_PROTOCOL: + case WIND_PROTOCOL: + //memset(devparam[i].pathname, 0, sizeof(devparam[i].pathname)); + /* 目前还不确定具体串口分配,暂时默认使用串口1*/ + //sprintf(devparam[i].pathname, "/dev/swk3"); + devparam[i].commid = 3; + break; + case SLANT_PROTOCOL: + //memset(devparam[i].pathname, 0, sizeof(devparam[i].pathname)); + /* 目前还不确定具体串口分配,暂时默认使用串口1*/ + //sprintf(devparam[i].pathname, "/dev/swk2"); + devparam[i].commid = 2; + break; + case RALLY_PROTOCOL: + //memset(devparam[i].pathname, 0, sizeof(devparam[i].pathname)); + /* 目前还不确定具体串口分配,暂时默认使用串口1*/ + //sprintf(devparam[i].pathname, "/dev/swk1"); + devparam[i].commid = 1; + break; + case PELCO_D_PROTOCOL: /* 摄像机协议*/ + case PELCO_P_PROTOCOL: /* 摄像机协议*/ + case SERIALCAMERA_PROTOCOL: /* 串口摄像机协议*/ + //memset(devparam[i].pathname, 0, sizeof(devparam[i].pathname)); + /* 目前还不确定//具体串口分配,暂时默认使用串口1*/ + //sprintf(devparam[i].pathname, "/dev/ttyS1"); + devparam[i].commid = 0; + srdt.camerauseserial = 0; + break; + default: + devparam[i].IsNoInsta = 0; + break; + } + } } void GM_StartSerialComm() { - int i, j, commid; - char szbuf[64], logbuf[128]; - //LONG polltime=0; - - // 此处不能对轮询设备标识清零,否则如果先起摄像机,就会导致poll乱掉 - //memset((void*)srdt.curdevidx, 0, sizeof(srdt.curdevidx)); - //Gm_OpenSerialPower(); /* 不在这使用*/ - 
//FindDevUseSerialCommNo(); - // 初始化串口使用状态(需要考虑不同时间启用了摄像机使用) - for (i = 0; i < MAX_SERIAL_PORT_NUM; i++) - { - if(i == srdt.camerauseserial) - continue; - serialport[i].Retry = 0; - serialport[i].RetryTime = 800; - serialport[i].WaitTime = 20; - serialport[i].m_iRevStatus = 0; - serialport[i].m_iRecvLen = 0; - serialport[i].m_iNeedRevLength = 0; - serialport[i].fd = -1; - memset(serialport[i].m_au8RecvBuf, 0, RECVDATA_MAXLENTH); // 接收数据缓存区 - ClearCmdFormPollCmdBuf(i); - } + int i, j, commid; + char szbuf[64], logbuf[128]; + //int64_t polltime=0; + + // 此处不能对轮询设备标识清零,否则如果先起摄像机,就会导致poll乱掉 + //memset((void*)srdt.curdevidx, 0, sizeof(srdt.curdevidx)); + //Gm_OpenSerialPower(); /* 不在这使用*/ + //FindDevUseSerialCommNo(); + // 初始化串口使用状态(需要考虑不同时间启用了摄像机使用) + for (i = 0; i < MAX_SERIAL_PORT_NUM; i++) + { + if (i == srdt.camerauseserial) + continue; + serialport[i].Retry = 0; + serialport[i].RetryTime = 800; + serialport[i].WaitTime = 20; + serialport[i].m_iRevStatus = 0; + serialport[i].m_iRecvLen = 0; + serialport[i].m_iNeedRevLength = 0; + serialport[i].fd = -1; + memset(serialport[i].m_au8RecvBuf, 0, RECVDATA_MAXLENTH); // 接收数据缓存区 + ClearCmdFormPollCmdBuf(i); + } #if 0/* 简化插入使用摄像机过程,摄像机使用单独的串口*/ - else - { - for (i = 0; i < MAX_SERIAL_PORT_NUM; i++) - { - for(j=0; j= MAX_SERIAL_PORT_NUM)) - return; - serialport[i].Retry = 0; - serialport[i].RetryTime = 1000; - serialport[i].WaitTime = 0; - serialport[i].m_iRevStatus = 0; - serialport[i].m_iRecvLen = 0; - serialport[i].m_iNeedRevLength = 0; - serialport[i].fd = -1; - memset(serialport[i].m_au8RecvBuf, 0, RECVDATA_MAXLENTH); // 接收数据缓存区 - ClearCmdFormPollCmdBuf(i); - - // 初始化串口使用状态 - for(i=0; idevparam[i].CameraChannel) || (devparam[i].CameraChannel >MAX_CHANNEL_NUM)) - { - srdt.ms_dev[i].IsNeedSerial = 0; - break; - } - if(channel == devparam[i].CameraChannel) - { - ; - } - else - break; - if(0 == srdt.ms_dev[i].IsNeedSerial) - srdt.iLastGetPhotoNo = -1; - else - srdt.iLastGetPhotoNo = srdt.ms_dev[i].SerialCmdidx; - 
srdt.ms_dev[i].IsNeedSerial = 1; - if(0 == cmdidx) - srdt.ms_dev[i].image.state = SER_STARTSAMPLE; - if((0 == cmdidx) && (srdt.presetno > 0 )) - { - srdt.ms_dev[i].SerialCmdidx = 10017; - srdt.iLastGetPhotoNo = cmdidx; - } - else - srdt.ms_dev[i].SerialCmdidx = cmdidx; - srdt.ms_dev[i].FirstCmdTimeCnt = get_msec(); - srdt.sendphotocmdcnt = 0; - sprintf(szbuf, "摄像机"); - flag = 1; - break; - default: - if(1 == srdt.ms_dev[i].IsNeedSerial) - break; - srdt.ms_dev[i].IsNeedSerial = 0; - break; - } - if(1 == srdt.ms_dev[i].IsNeedSerial) - { - sprintf(logbuf, "装置%d, IsNoInsta=%d, 类型:%s", i+1, devparam[i].IsNoInsta, szbuf); - DebugLog(8, logbuf, 'I'); - Gm_OpenSensorsPower(); - Gm_OpenSerialPort(i); - } - } - if(0x01 == flag) - { - sprintf(szbuf, "摄像机启动串口定时器!"); - DebugLog(8, szbuf, 'I'); - for(;;) - { - usleep(10); - //LOGW("polltime=%ldms", get_msec()-polltime); - //polltime = get_msec(); - if(GM_SerialTimer() < 0) - { - //LOGE("12V state=%d", getInt(CMD_SET_12V_EN_STATE)); - DebugLog(8, "退出拍照流程!", 'V'); - sleep(3); - break; - } - } - } - //return; + i = srdt.camerauseserial; + if ((i < 0) || (i >= MAX_SERIAL_PORT_NUM)) + return; + serialport[i].Retry = 0; + serialport[i].RetryTime = 1000; + serialport[i].WaitTime = 0; + serialport[i].m_iRevStatus = 0; + serialport[i].m_iRecvLen = 0; + serialport[i].m_iNeedRevLength = 0; + serialport[i].fd = -1; + memset(serialport[i].m_au8RecvBuf, 0, RECVDATA_MAXLENTH); // 接收数据缓存区 + ClearCmdFormPollCmdBuf(i); + + // 初始化串口使用状态 + for (i = 0; i < MAX_SERIAL_DEV_NUM; i++) + { + if (0 == devparam[i].IsNoInsta) + { + srdt.ms_dev[i].IsNeedSerial = 0; + continue; + } + memset(szbuf, 0, sizeof(szbuf)); + switch (devparam[i].ProtocolIdx) + { + case SERIALCAMERA_PROTOCOL: /* 串口摄像机协议*/ + case PELCO_P_PROTOCOL: /* 摄像机协议*/ + case PELCO_D_PROTOCOL: /* 摄像机协议*/ + if ((1 > devparam[i].CameraChannel) || (devparam[i].CameraChannel > MAX_CHANNEL_NUM)) + { + srdt.ms_dev[i].IsNeedSerial = 0; + break; + } + if (channel == devparam[i].CameraChannel) + { + ; + } 
+ else + break; + if (0 == srdt.ms_dev[i].IsNeedSerial) + srdt.iLastGetPhotoNo = -1; + else + srdt.iLastGetPhotoNo = srdt.ms_dev[i].SerialCmdidx; + srdt.ms_dev[i].IsNeedSerial = 1; + if (0 == cmdidx) + srdt.ms_dev[i].image.state = SER_STARTSAMPLE; + if ((0 == cmdidx) && (srdt.presetno > 0)) + { + srdt.ms_dev[i].SerialCmdidx = 10017; + srdt.iLastGetPhotoNo = cmdidx; + } + else + srdt.ms_dev[i].SerialCmdidx = cmdidx; + srdt.ms_dev[i].FirstCmdTimeCnt = get_msec(); + srdt.sendphotocmdcnt = 0; + sprintf(szbuf, "摄像机"); + flag = 1; + break; + default: + if (1 == srdt.ms_dev[i].IsNeedSerial) + break; + srdt.ms_dev[i].IsNeedSerial = 0; + break; + } + if (1 == srdt.ms_dev[i].IsNeedSerial) + { + sprintf(logbuf, "装置%d, IsNoInsta=%d, 类型:%s", i + 1, devparam[i].IsNoInsta, szbuf); + DebugLog(8, logbuf, 'I'); + Gm_OpenSensorsPower(); + Gm_OpenSerialPort(i); + } + } + if (0x01 == flag) + { + sprintf(szbuf, "摄像机启动串口定时器!"); + DebugLog(8, szbuf, 'I'); + for (;;) + { + usleep(10); + //LOGW("polltime=%ldms", get_msec()-polltime); + //polltime = get_msec(); + if (GM_SerialTimer() < 0) + { + //LOGE("12V state=%d", getInt(CMD_SET_12V_EN_STATE)); + DebugLog(8, "退出拍照流程!", 'V'); + sleep(3); + break; + } + } + } + //return; } #endif int GM_SerialTimer(void) { - int flag = -1; - - GM_AllSerialComRecv(); - GM_IsCloseSensors(); - Gm_FindAllSensorsCommand(); - GM_AllSerialComRecv(); - flag = GM_CloseTimer(); - return flag; + int flag = -1; + + GM_AllSerialComRecv(); + GM_IsCloseSensors(); + Gm_FindAllSensorsCommand(); + GM_AllSerialComRecv(); + flag = GM_CloseTimer(); + return flag; } /******************************************************************************** @@ -1546,59 +1560,59 @@ int GM_SerialTimer(void) *********************************************************************************/ void Gm_FindAllSensorsCommand() { - int i, j,curidx,flag; - - //Gm_CheckSensorsPower(); /* 暂时不考虑电源控制*/ - for(j=0; j 0) - break; - //LOGE("12V state=%d", getInt(CMD_SET_12V_EN_STATE)); - //LOGE("3.3V state= 
%d", getInt(CMD_SET_CAM_3V3_EN_STATE)); - //LOGE("485 state=%d", getInt(CMD_SET_485_EN_STATE)); - - - flag = -1; - switch(devparam[curidx].ProtocolIdx) - { - case WEATHER_PROTOCOL: /* 温湿度气压*/ - case RALLY_PROTOCOL: /* 拉力*/ - case WIND_PROTOCOL: /* 风速风向*/ - case SLANT_PROTOCOL: /* 倾角*/ - flag = FindNextShxyProtocolCommand(curidx); - break; - case RESERVE2_PROTOCOL: - break; - case RESERVE5_PROTOCOL: - break; - case PELCO_D_PROTOCOL: /* 摄像机协议*/ - case PELCO_P_PROTOCOL: /* 摄像机协议*/ - case SERIALCAMERA_PROTOCOL: /* 串口摄像机协议*/ - flag = FindNextCameraPhotoCommand(curidx); - break; - break; - } - if(flag == -1) - continue; - srdt.curdevidx[j] = curidx; - break; - } - // 发送缓冲区中命令 生成了命令之后紧接着进行命令发送 - SendCmdFormPollCmdBuf(j); - } + int i, j, curidx, flag; + + //Gm_CheckSensorsPower(); /* 暂时不考虑电源控制*/ + for (j = 0; j < MAX_SERIAL_PORT_NUM; j++) + { + // 发送缓冲区中命令 接收到了应答报文,紧接着进行缓冲区清理 + SendCmdFormPollCmdBuf(j); + // 串口已经被占用则直接跳过 + curidx = srdt.curdevidx[j]; + for (i = 0; i < MAX_SERIAL_DEV_NUM; i++) + { + curidx = (curidx + 1) % MAX_SERIAL_DEV_NUM; + if (j != devparam[curidx].commid) + continue; + if (srdt.ms_dev[curidx].IsNeedSerial == 0) + continue; + if (serialport[devparam[curidx].commid].fd < 0) + Gm_OpenSerialPort(curidx); + if (serialport[devparam[curidx].commid].cmdlen > 0) + break; + //LOGE("12V state=%d", getInt(CMD_SET_12V_EN_STATE)); + //LOGE("3.3V state= %d", getInt(CMD_SET_CAM_3V3_EN_STATE)); + //LOGE("485 state=%d", getInt(CMD_SET_485_EN_STATE)); + + + flag = -1; + switch (devparam[curidx].ProtocolIdx) + { + case WEATHER_PROTOCOL: /* 温湿度气压*/ + case RALLY_PROTOCOL: /* 拉力*/ + case WIND_PROTOCOL: /* 风速风向*/ + case SLANT_PROTOCOL: /* 倾角*/ + flag = FindNextShxyProtocolCommand(curidx); + break; + case RESERVE2_PROTOCOL: + break; + case RESERVE5_PROTOCOL: + break; + case PELCO_D_PROTOCOL: /* 摄像机协议*/ + case PELCO_P_PROTOCOL: /* 摄像机协议*/ + case SERIALCAMERA_PROTOCOL: /* 串口摄像机协议*/ + flag = FindNextCameraPhotoCommand(curidx); + break; + break; + } + if (flag == -1) + continue; 
+ srdt.curdevidx[j] = curidx; + break; + } + // 发送缓冲区中命令 生成了命令之后紧接着进行命令发送 + SendCmdFormPollCmdBuf(j); + } } /******************************************************************************** @@ -1610,65 +1624,65 @@ void Gm_FindAllSensorsCommand() *********************************************************************************/ void GM_IsCloseSensors() { - int i, j; - char buf[256]; - LONG lctime; - - lctime = get_msec(); - for(i=0; i 50*1000) ||(lctime-srdt.ms_dev[i].FirstCmdTimeCnt < 0)) - { - srdt.ms_dev[i].FirstCmdTimeCnt = lctime; - break; - } - if(lctime - srdt.ms_dev[i].FirstCmdTimeCnt > 15*1000) - { - srdt.ms_dev[i].IsNeedSerial = 0; - // 关闭传感器电源 - sprintf(buf, "读取装置%d数据%0.3f秒,关闭装置%d电源!", i+1, (get_msec()-srdt.ms_dev[i].FirstCmdTimeCnt)/1000.0,i+1); - DebugLog(devparam[i].commid, buf, 'I'); - for(j=0;j 50 * 1000) || (lctime - srdt.ms_dev[i].FirstCmdTimeCnt < 0)) + { + srdt.ms_dev[i].FirstCmdTimeCnt = lctime; + break; + } + if (lctime - srdt.ms_dev[i].FirstCmdTimeCnt > 15 * 1000) + { + srdt.ms_dev[i].IsNeedSerial = 0; + // 关闭传感器电源 + sprintf(buf, "读取装置%d数据%0.3f秒,关闭装置%d电源!", i + 1, (get_msec() - srdt.ms_dev[i].FirstCmdTimeCnt) / 1000.0, i + 1); + DebugLog(devparam[i].commid, buf, 'I'); + for (j = 0; j < MAX_DEV_VALUE_NUM; j++) + { + if (SER_STARTSAMPLE == srdt.ms_dev[i].aiValue[j].AiState) + srdt.ms_dev[i].aiValue[j].AiState = SER_SAMPLEFAIL; + else if (SER_SAMPLE == srdt.ms_dev[i].aiValue[j].AiState) + srdt.ms_dev[i].aiValue[j].AiState = SAMPLINGSUCCESS; + } + for (j = 0; j < WEATHER_DATA_NUM; j++) + { + if (SER_STARTSAMPLE == weatherpntmsg[j].AiState) + weatherpntmsg[j].AiState = SER_SAMPLEFAIL; + else if (SER_SAMPLE == weatherpntmsg[j].AiState) + weatherpntmsg[j].AiState = SAMPLINGSUCCESS; + } + } + break; + case PELCO_D_PROTOCOL: /* 摄像机类型*/ + case PELCO_P_PROTOCOL: /* 摄像机类型*/ + case SERIALCAMERA_PROTOCOL: /* 串口摄像机协议*/ + if (-1 == srdt.ms_dev[i].SerialCmdidx) + { + if ((SER_STARTSAMPLE == srdt.ms_dev[i].image.state) || (SER_SAMPLE == 
srdt.ms_dev[i].image.state)) + srdt.ms_dev[i].image.state = SER_SAMPLEFAIL; + else if (PHOTO_SAVE_SUCC == srdt.ms_dev[i].image.state) + srdt.ms_dev[i].image.state = SAMPLINGSUCCESS; + srdt.ms_dev[i].IsNeedSerial = 0; + sprintf(buf, "通道%d摄像机使用完毕!可以关闭摄像机电源!", devparam[i].CameraChannel); + DebugLog(devparam[i].commid, buf, 'I'); + } + break; + } + } } /******************************************************************************** @@ -1680,27 +1694,27 @@ void GM_IsCloseSensors() *********************************************************************************/ void GM_AllSerialComRecv() { - int i, j, recvlen; - u_char recvbuf[RECVDATA_MAXLENTH]; - char buf[256]; - - for(j=0; jdevparam[i].ProtocolIdx))) - break; - } - if(i < MAX_SERIAL_DEV_NUM) - { - return 1; // 寻找 - } - else // 关闭所有串口及电源 - { - Gm_CloseSerialPort(); - for(j=0; j< MAX_SERIAL_DEV_NUM; j++) - Gm_CloseSensorsPower(); - for(j=0; j devparam[i].ProtocolIdx))) + break; + } + if (i < MAX_SERIAL_DEV_NUM) + { + return 1; // 寻找 + } + else // 关闭所有串口及电源 + { + Gm_CloseSerialPort(); + for (j = 0; j < MAX_SERIAL_DEV_NUM; j++) + Gm_CloseSensorsPower(); + for (j = 0; j < MAX_SERIAL_PORT_NUM; j++) + ClearCmdFormPollCmdBuf(j); + sprintf(buf, "关闭串口定时器!"); + DebugLog(8, buf, 'I'); + return -1; + } } void SerialDataProcess(int devidx, u_char *buf, int len) { - switch(devparam[devidx].ProtocolIdx) - { - - case WEATHER_PROTOCOL: /* 气象*/ - case RALLY_PROTOCOL: /* 拉力*/ - case WIND_PROTOCOL: /* 风速风向*/ - case SLANT_PROTOCOL: /* 倾角*/ - ShxyProtocolRecvData(devidx, buf, len); - break; - case RESERVE2_PROTOCOL: /* 意科电池电量读取协议*/ - break; - case PELCO_D_PROTOCOL: /* 摄像机协议*/ - case PELCO_P_PROTOCOL: /* 摄像机协议*/ - case SERIALCAMERA_PROTOCOL: /* 串口摄像机协议*/ - CameraRecvData(devidx, buf, len); - break; - } + switch (devparam[devidx].ProtocolIdx) + { + + case WEATHER_PROTOCOL: /* 气象*/ + case RALLY_PROTOCOL: /* 拉力*/ + case WIND_PROTOCOL: /* 风速风向*/ + case SLANT_PROTOCOL: /* 倾角*/ + ShxyProtocolRecvData(devidx, buf, len); + break; + case 
RESERVE2_PROTOCOL: /* 意科电池电量读取协议*/ + break; + case PELCO_D_PROTOCOL: /* 摄像机协议*/ + case PELCO_P_PROTOCOL: /* 摄像机协议*/ + case SERIALCAMERA_PROTOCOL: /* 串口摄像机协议*/ + CameraRecvData(devidx, buf, len); + break; + } } void DebugLog(int commid, char *szbuf, char flag) { - if(NULL == szbuf) - return; - SaveLogTofile(commid, szbuf); - switch (flag) - { - case 'E': - LOGE("%s", szbuf); - break; - case 'I': - LOGI("%s", szbuf); - break; - case 'D': - LOGD("%s", szbuf); - break; - case 'V': - LOGV("%s", szbuf); - break; - case 'W': - LOGW("%s", szbuf); - break; - default: - LOGI("%s", szbuf); - break; - } + if (NULL == szbuf) + return; + SaveLogTofile(commid, szbuf); + switch (flag) + { + case 'E': + ALOGE("%s", szbuf); + break; + case 'I': + ALOGI("%s", szbuf); + break; + case 'D': + ALOGD("%s", szbuf); + break; + case 'V': + ALOGI("%s", szbuf); + break; + case 'W': + ALOGW("%s", szbuf); + break; + default: + ALOGI("%s", szbuf); + break; + } } int SaveLogTofile(int commid, char *szbuf) { - int status; - time_t now; - char filename[512], filedir[512], buf[128]; - FILE *fp=NULL; - struct tm t0; - struct timeval tv; - - if(NULL == szbuf) - return -1; - - now = time(NULL); - localtime_r(&now, &t0); - gettimeofday(&tv, NULL); - - memset(filedir, 0, sizeof(filedir)); - sprintf(filedir, "/sdcard/log/"); - - if(access(filedir, 0)==0) - ;//LOGI("文件路径已经存在!"); - else - { - status = mkdir(filedir, S_IRWXU | S_IRWXG | S_IRWXO); - if(status < 0) - return -1; - } - // 写入文件到sdcard - memset(filename, 0, sizeof(filename)); - sprintf(filename, "%s%d-%d-%d-COM%d.log", filedir,t0.tm_year + 1900, t0.tm_mon + 1, t0.tm_mday, commid+1); - fp = fopen(filename, "a+"); - if(NULL == fp) - return -1; - memset(buf, 0, sizeof(buf)); - sprintf(buf, "%d-%d-%d %d:%d:%d-%d ", t0.tm_year + 1900, t0.tm_mon + 1, t0.tm_mday, t0.tm_hour, t0.tm_min, t0.tm_sec, tv.tv_usec/1000); - fwrite(buf, 1, strlen(buf), fp); - fwrite(szbuf, 1, strlen(szbuf), fp); - memset(buf, 0, sizeof(buf)); - sprintf(buf, "\n"); - fwrite(buf, 1, 
strlen(buf), fp); - - fclose(fp); - return 1; + int status; + time_t now; + char filename[512], filedir[512], buf[128]; + FILE *fp = NULL; + struct tm t0; + struct timeval tv; + + if (NULL == szbuf) + return -1; + + now = time(NULL); + localtime_r(&now, &t0); + gettimeofday(&tv, NULL); + + memset(filedir, 0, sizeof(filedir)); + sprintf(filedir, "/sdcard/log/"); + + if (access(filedir, 0) == 0) + ;//LOGI("文件路径已经存在!"); + else + { + status = mkdir(filedir, S_IRWXU | S_IRWXG | S_IRWXO); + if (status < 0) + return -1; + } + // 写入文件到sdcard + memset(filename, 0, sizeof(filename)); + sprintf(filename, "%s%d-%d-%d-COM%d.log", filedir, t0.tm_year + 1900, t0.tm_mon + 1, t0.tm_mday, commid + 1); + fp = fopen(filename, "a+"); + if (NULL == fp) + return -1; + memset(buf, 0, sizeof(buf)); + sprintf(buf, "%d-%d-%d %d:%d:%d-%d ", t0.tm_year + 1900, t0.tm_mon + 1, t0.tm_mday, t0.tm_hour, t0.tm_min, t0.tm_sec, tv.tv_usec / 1000); + fwrite(buf, 1, strlen(buf), fp); + fwrite(szbuf, 1, strlen(szbuf), fp); + memset(buf, 0, sizeof(buf)); + sprintf(buf, "\n"); + fwrite(buf, 1, strlen(buf), fp); + + fclose(fp); + return 1; } int SaveImageDataTofile(int devno) { - u_char *image=NULL, *tempphoto=NULL; - int i, status; - size_t len; - char filename[512]/*, filedir[512]*/, szbuf[128]; - FILE *fp=NULL; - SERIAL_DEV_DEF *pPortParam; - - pPortParam = &srdt.ms_dev[devno]; - image = (u_char*)malloc(pPortParam->image.imagelen); - if(NULL == image) - return -1; - tempphoto = image; - for (i = 0; i < pPortParam->image.imagenum; ++i) { - memmove(tempphoto, &pPortParam->image.buf[i], (size_t)pPortParam->image.ilen[i]); - tempphoto += (size_t)pPortParam->image.ilen[i]; - } - - memset(szbuf, 0, sizeof(szbuf)); - //memset(filedir, 0, sizeof(filedir)); - //sprintf(filedir, "/sdcard/photo/"); - - if(access(srdt.filedir, 0)==0) - { - sprintf(szbuf,"文件路径%s已经存在!", srdt.filedir); - DebugLog(devparam[devno].commid, szbuf, 'I'); - } - else - { - status = mkdir(srdt.filedir, S_IRWXU | S_IRWXG | S_IRWXO); - if(status 
< 0) - return -1; - } - // 写入文件到sdcard - memset(pPortParam->image.photoname, 0, sizeof(pPortParam->image.photoname)); - sprintf(pPortParam->image.photoname, "%s%d-%d-%d.jpg", srdt.filedir,devparam[devno].CameraChannel, pPortParam->image.presetno, pPortParam->image.phototime); - fp = fopen(pPortParam->image.photoname, "wb+"); - if(NULL == fp) - return -1; - len = fwrite(image,1, pPortParam->image.imagelen, fp); - fclose(fp); - free(image); - image = NULL; - if(len < pPortParam->image.imagelen) - return -1; - else - { - memset(szbuf, 0, sizeof(szbuf)); - sprintf(szbuf,"写入图片文件%s成功!", pPortParam->image.photoname); - DebugLog(devparam[devno].commid, szbuf, 'I'); - return 1; - } + u_char *image = NULL, *tempphoto = NULL; + int i, status; + size_t len; + char filename[512]/*, filedir[512]*/, szbuf[128]; + FILE *fp = NULL; + SERIAL_DEV_DEF *pPortParam; + + pPortParam = &srdt.ms_dev[devno]; + image = (u_char*)malloc(pPortParam->image.imagelen); + if (NULL == image) + return -1; + tempphoto = image; + for (i = 0; i < pPortParam->image.imagenum; ++i) { + memmove(tempphoto, &pPortParam->image.buf[i], (size_t)pPortParam->image.ilen[i]); + tempphoto += (size_t)pPortParam->image.ilen[i]; + } + + memset(szbuf, 0, sizeof(szbuf)); + //memset(filedir, 0, sizeof(filedir)); + //sprintf(filedir, "/sdcard/photo/"); + + if (access(srdt.filedir, 0) == 0) + { + sprintf(szbuf, "文件路径%s已经存在!", srdt.filedir); + DebugLog(devparam[devno].commid, szbuf, 'I'); + } + else + { + status = mkdir(srdt.filedir, S_IRWXU | S_IRWXG | S_IRWXO); + if (status < 0) + return -1; + } + // 写入文件到sdcard + memset(pPortParam->image.photoname, 0, sizeof(pPortParam->image.photoname)); + sprintf(pPortParam->image.photoname, "%s%d-%d-%d.jpg", srdt.filedir, devparam[devno].CameraChannel, pPortParam->image.presetno, pPortParam->image.phototime); + fp = fopen(pPortParam->image.photoname, "wb+"); + if (NULL == fp) + return -1; + len = fwrite(image, 1, pPortParam->image.imagelen, fp); + fclose(fp); + free(image); + image = 
NULL; + if (len < pPortParam->image.imagelen) + return -1; + else + { + memset(szbuf, 0, sizeof(szbuf)); + sprintf(szbuf, "写入图片文件%s成功!", pPortParam->image.photoname); + DebugLog(devparam[devno].commid, szbuf, 'I'); + return 1; + } } /******************************************************************* @@ -1892,1864 +1906,1864 @@ int SaveImageDataTofile(int devno) *******************************************************************/ void CameraRecvData(int devno, u_char *buf, int len) { - int i; - SIO_PARAM_SERIAL_DEF *pPortParam; - - if((devno<0) || (devno > MAX_SERIAL_DEV_NUM)) - { - return; - } - pPortParam = &serialport[devparam[devno].commid]; - - for(i=0; im_iRevStatus) - { - case 0: // 0x68 - pPortParam->m_iRecvLen = 0; - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - if(0x68 == buf[i]) - pPortParam->m_iRevStatus++; - else - pPortParam->m_iRevStatus = 18; - break; - case 1: // len1 - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - pPortParam->m_iRevStatus++; - break; - case 2: // len2 - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - pPortParam->m_iRevStatus++; - pPortParam->m_iNeedRevLength = pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen-2]*256+buf[i]+5; - break; - case 3: // 0x68 - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - pPortParam->m_iNeedRevLength--; - if(0x68 == buf[i]) - pPortParam->m_iRevStatus++; - else - pPortParam->m_iRevStatus = 18; - break; - case 4: // 正确接收数据 - pPortParam->m_iNeedRevLength--; - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - if(pPortParam->m_iNeedRevLength > 0) - break; - if(buf[i] != 0x16) - { - pPortParam->m_iRevStatus=18; - break; - } - - if(pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen-2] == - CalLpc( &pPortParam->m_au8RecvBuf[4], pPortParam->m_iRecvLen-6 )) - { - CameraPhotoPortDataProcess(devno); - pPortParam->m_iRevStatus = 0; - pPortParam->RevCmdFlag = 1; - } - else - { - pPortParam->m_iRevStatus = 0; - } - pPortParam->m_iRecvLen = 0; - 
break; - case 255:// 错误接收数据 - default: - if(buf[i] == 0x68) - { - pPortParam->m_iRevStatus = 1; - pPortParam->m_iRecvLen = 1; - pPortParam->m_au8RecvBuf[0] = buf[i]; - } - else if(buf[i] == 0x16) - { - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - pPortParam->m_iRevStatus = 0; - pPortParam->m_iRecvLen = 0; - } - else - { - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - if(pPortParam->m_iRecvLen > 200) - { - pPortParam->m_iRecvLen = 0; - } - } - break; - } - } + int i; + SIO_PARAM_SERIAL_DEF *pPortParam; + + if ((devno < 0) || (devno > MAX_SERIAL_DEV_NUM)) + { + return; + } + pPortParam = &serialport[devparam[devno].commid]; + + for (i = 0; i < len; i++) + { + switch (pPortParam->m_iRevStatus) + { + case 0: // 0x68 + pPortParam->m_iRecvLen = 0; + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + if (0x68 == buf[i]) + pPortParam->m_iRevStatus++; + else + pPortParam->m_iRevStatus = 18; + break; + case 1: // len1 + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + pPortParam->m_iRevStatus++; + break; + case 2: // len2 + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + pPortParam->m_iRevStatus++; + pPortParam->m_iNeedRevLength = pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen - 2] * 256 + buf[i] + 5; + break; + case 3: // 0x68 + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + pPortParam->m_iNeedRevLength--; + if (0x68 == buf[i]) + pPortParam->m_iRevStatus++; + else + pPortParam->m_iRevStatus = 18; + break; + case 4: // 正确接收数据 + pPortParam->m_iNeedRevLength--; + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + if (pPortParam->m_iNeedRevLength > 0) + break; + if (buf[i] != 0x16) + { + pPortParam->m_iRevStatus = 18; + break; + } + + if (pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen - 2] == + CalLpc(&pPortParam->m_au8RecvBuf[4], pPortParam->m_iRecvLen - 6)) + { + CameraPhotoPortDataProcess(devno); + pPortParam->m_iRevStatus = 0; + pPortParam->RevCmdFlag = 1; + } + else + 
{ + pPortParam->m_iRevStatus = 0; + } + pPortParam->m_iRecvLen = 0; + break; + case 255:// 错误接收数据 + default: + if (buf[i] == 0x68) + { + pPortParam->m_iRevStatus = 1; + pPortParam->m_iRecvLen = 1; + pPortParam->m_au8RecvBuf[0] = buf[i]; + } + else if (buf[i] == 0x16) + { + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + pPortParam->m_iRevStatus = 0; + pPortParam->m_iRecvLen = 0; + } + else + { + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + if (pPortParam->m_iRecvLen > 200) + { + pPortParam->m_iRecvLen = 0; + } + } + break; + } + } } /********************************************************************************* CameraPhoto 端口数据处理 **********************************************************************************/ -void CameraPhotoPortDataProcess( int devno) -{ - RTUMSG rtumsg; - SERIAL_DEV_DEF *pPortParam; - SIO_PARAM_SERIAL_DEF *curserial; - int img_file_size, packetnum, iNo, packsize, i=0, presetno, iphototime, pidx; - char szbuf[128]; - WORD uDevAddr; - BYTE cmdidx, recvend; - - pPortParam = &srdt.ms_dev[devno]; - curserial = &serialport[devparam[devno].commid]; - - memset((void*)rtumsg.MsgData, 0, sizeof(rtumsg.MsgData)); - memcpy((void*)rtumsg.MsgData, (void*)curserial->m_au8RecvBuf, curserial->m_iRecvLen); - rtumsg.MsgLen = curserial->m_iRecvLen; - rtumsg.PortIdx = devparam[devno].commid; - cmdidx = curserial->m_au8RecvBuf[5]; - uDevAddr = curserial->m_au8RecvBuf[4]; - - //sprintf(szbuf, "摄像机地址%d,命令%02X!", uDevAddr, cmdidx); - //DebugStringPrintf(szbuf, strlen(szbuf), 1); - if(uDevAddr != devparam[devno].devaddr) - return; - memset(szbuf, 0, sizeof(szbuf)); - switch(cmdidx) - { - case 0x10: /* 拍照应答*/ - if(0xFF == rtumsg.MsgData[6]) - { - srdt.RephotographCnt++; - if(srdt.RephotographCnt > 2) - { - pPortParam->SerialCmdidx = -1; - sprintf(szbuf, "因摄像机重拍%d次均未成功!结束拍照!",srdt.RephotographCnt); - DebugLog(devparam[devno].commid, szbuf, 'E'); - } - break; - } - i = 6; - memset(&pPortParam->image, 0, sizeof(pPortParam->image)); - 
iphototime = rtumsg.MsgData[i+3]+ (rtumsg.MsgData[i+2]<<8)+(rtumsg.MsgData[i+1]<<16)+(rtumsg.MsgData[i]<<24); - srdt.photographtime = iphototime; - pPortParam->image.phototime = iphototime; - i=10; - img_file_size = rtumsg.MsgData[i+3]+ (rtumsg.MsgData[i+2]<<8)+(rtumsg.MsgData[i+1]<<16)+(rtumsg.MsgData[i]<<24); - packetnum = rtumsg.MsgData[i+5]+(rtumsg.MsgData[i+4]<<8); - pPortParam->image.imagelen = img_file_size; - pPortParam->image.imagenum = packetnum; - srdt.imagepacketnum = packetnum; - srdt.historyimagenum[devparam[devno].CameraChannel-1] = rtumsg.MsgData[i+7]+(rtumsg.MsgData[i+6]<<8); - sprintf(szbuf, "有%d张历史图片!",srdt.historyimagenum[devparam[devno].CameraChannel-1]); - DebugLog(devparam[devno].commid, szbuf, 'V'); - presetno = (int)rtumsg.MsgData[i+8]; - pPortParam->image.presetno = presetno; - pPortParam->image.state = SER_SAMPLE; - curserial->RevCmdFlag = 1; - pPortParam->SerialCmdidx = 1; - srdt.sendphotocmdcnt = 0; - break; - case 0x11: /* 图片数据包*/ - i = 6; - iNo = rtumsg.MsgData[i+1]+rtumsg.MsgData[i]*256; - packsize = rtumsg.MsgData[i+3]+rtumsg.MsgData[i+2]*256; - memmove(&pPortParam->image.buf[iNo-1], &rtumsg.MsgData[i+4], packsize); - pPortParam->image.ilen[iNo-1] = packsize; - sprintf(szbuf, "收到第%d(总%d包)包长=%d", iNo, srdt.imagepacketnum, packsize); - DebugLog(devparam[devno].commid, szbuf, 'V'); - curserial->RevCmdFlag = 1; - pPortParam->FirstCmdTimeCnt = get_msec(); - if((iNo == pPortParam->SerialCmdidx) && (0xFF > rtumsg.MsgData[i+2])) - { - if(iNo == srdt.imagepacketnum) - { /* 检查是否有漏包*/ - for(pidx = 0; pidx < srdt.imagepacketnum; pidx++) - { - if(pPortParam->image.ilen[pidx] < 1) - break; - } - if(pidx < srdt.imagepacketnum) - { - iNo = pidx; - recvend = 0; - } - else - { - if((1 == SaveImageDataTofile(devno)) && (SER_SAMPLE == pPortParam->image.state)) - pPortParam->image.state = PHOTO_SAVE_SUCC; - recvend = 1; - } - } - else - recvend = 0; - - if(packsize > MAX_PHOTO_FRAME_LEN) - recvend = 0xFF; - - if(1 == recvend) - { - 
pPortParam->SerialCmdidx = 10002;/* 图片读取完成*/ - //pPortParam->image.lastlen = packsize; - } - else if(0xFF == recvend) - { - pPortParam->SerialCmdidx = -1; - } - else - { - if((iNo > srdt.imagepacketnum) || (0 >= srdt.imagepacketnum)) - { - pPortParam->SerialCmdidx = -1; - } - else - pPortParam->SerialCmdidx = iNo+1; - } - srdt.errorPhotoNoCnt = 0; - break; - } - srdt.errorPhotoNoCnt++; - sprintf(szbuf, "问询第%d包图片摄像机应答第%d包,连续错误%d次!", - pPortParam->SerialCmdidx, iNo, srdt.errorPhotoNoCnt); - DebugLog(devparam[devno].commid, szbuf, 'E'); - if(srdt.errorPhotoNoCnt > 5) - { - pPortParam->SerialCmdidx = 0; - srdt.RephotographCnt++; - if(srdt.RephotographCnt > 2) - { - pPortParam->SerialCmdidx = -1; - sprintf(szbuf, "因摄像机重拍%d次均未成功!结束拍照!",srdt.RephotographCnt); - DebugLog(devparam[devno].commid, szbuf, 'E'); - } - } - break; - case 0x03: - sprintf(szbuf, "设置波特率%d成功", devparam[devno].baudrate); - DebugLog(devparam[devno].commid, szbuf, 'D'); - pPortParam->SerialCmdidx = srdt.iLastGetPhotoNo; - srdt.iLastGetPhotoNo = -1; - curserial->RevCmdFlag = 1; - pPortParam->FirstCmdTimeCnt = get_msec(); - break; - case 0x15: - if(0xFF == rtumsg.MsgData[6]) - { - pPortParam->SerialCmdidx = -1; - sprintf(szbuf, "没有历史图片!结束读取图片!"); - DebugLog(devparam[devno].commid, szbuf, 'I'); - break; - } - i = 6; - iphototime = rtumsg.MsgData[i+3]+ (rtumsg.MsgData[i+2]<<8)+(rtumsg.MsgData[i+1]<<16)+(rtumsg.MsgData[i]<<24); - srdt.photographtime = iphototime; - pPortParam->image.phototime = iphototime; - i=10; - img_file_size = rtumsg.MsgData[i+3]+ (rtumsg.MsgData[i+2]<<8)+(rtumsg.MsgData[i+1]<<16)+(rtumsg.MsgData[i]<<24); - packetnum = rtumsg.MsgData[i+5]+(rtumsg.MsgData[i+4]<<8); - pPortParam->image.imagelen = img_file_size; - pPortParam->image.imagenum = packetnum; - srdt.imagepacketnum = packetnum; - srdt.historyimagenum[devparam[devno].CameraChannel-1] = rtumsg.MsgData[i+7]+(rtumsg.MsgData[i+6]<<8); - presetno = rtumsg.MsgData[i+8]; - pPortParam->image.presetno = presetno; - sprintf(szbuf, 
"读取历史图片,还有%d张历史图片!",srdt.historyimagenum[devparam[devno].CameraChannel-1]); - DebugLog(devparam[devno].commid, szbuf, 'I'); - curserial->RevCmdFlag = 1; - pPortParam->SerialCmdidx = 1; - srdt.sendphotocmdcnt = 0; - break; - case 0x16: - if(0xFF == rtumsg.MsgData[10]) - { - pPortParam->SerialCmdidx = -1; - sprintf(szbuf, "摄像机图片保存失败!"); - DebugLog(devparam[devno].commid, szbuf, 'E'); - } - pPortParam->SerialCmdidx = -1; - if(0 == rtumsg.MsgData[10]) - { - if(0 == srdt.historyimagenum[devparam[devno].CameraChannel-1]) - { - ; - } - else - { - pPortParam->SerialCmdidx = 10003;/* 暂时不实现*/ - } - } - pPortParam->FirstCmdTimeCnt = get_msec(); - curserial->RevCmdFlag = 1; - break; - default: - curserial->RevCmdFlag = 1; - break; - } +void CameraPhotoPortDataProcess(int devno) +{ + RTUMSG rtumsg; + SERIAL_DEV_DEF *pPortParam; + SIO_PARAM_SERIAL_DEF *curserial; + int img_file_size, packetnum, iNo, packsize, i = 0, presetno, iphototime, pidx; + char szbuf[128]; + uint16_t uDevAddr; + uint8_t cmdidx, recvend; + + pPortParam = &srdt.ms_dev[devno]; + curserial = &serialport[devparam[devno].commid]; + + memset((void*)rtumsg.MsgData, 0, sizeof(rtumsg.MsgData)); + memcpy((void*)rtumsg.MsgData, (void*)curserial->m_au8RecvBuf, curserial->m_iRecvLen); + rtumsg.MsgLen = curserial->m_iRecvLen; + rtumsg.PortIdx = devparam[devno].commid; + cmdidx = curserial->m_au8RecvBuf[5]; + uDevAddr = curserial->m_au8RecvBuf[4]; + + //sprintf(szbuf, "摄像机地址%d,命令%02X!", uDevAddr, cmdidx); + //DebugStringPrintf(szbuf, strlen(szbuf), 1); + if (uDevAddr != devparam[devno].devaddr) + return; + memset(szbuf, 0, sizeof(szbuf)); + switch (cmdidx) + { + case 0x10: /* 拍照应答*/ + if (0xFF == rtumsg.MsgData[6]) + { + srdt.RephotographCnt++; + if (srdt.RephotographCnt > 2) + { + pPortParam->SerialCmdidx = -1; + sprintf(szbuf, "因摄像机重拍%d次均未成功!结束拍照!", srdt.RephotographCnt); + DebugLog(devparam[devno].commid, szbuf, 'E'); + } + break; + } + i = 6; + memset(&pPortParam->image, 0, sizeof(pPortParam->image)); + iphototime = 
rtumsg.MsgData[i + 3] + (rtumsg.MsgData[i + 2] << 8) + (rtumsg.MsgData[i + 1] << 16) + (rtumsg.MsgData[i] << 24); + srdt.photographtime = iphototime; + pPortParam->image.phototime = iphototime; + i = 10; + img_file_size = rtumsg.MsgData[i + 3] + (rtumsg.MsgData[i + 2] << 8) + (rtumsg.MsgData[i + 1] << 16) + (rtumsg.MsgData[i] << 24); + packetnum = rtumsg.MsgData[i + 5] + (rtumsg.MsgData[i + 4] << 8); + pPortParam->image.imagelen = img_file_size; + pPortParam->image.imagenum = packetnum; + srdt.imagepacketnum = packetnum; + srdt.historyimagenum[devparam[devno].CameraChannel - 1] = rtumsg.MsgData[i + 7] + (rtumsg.MsgData[i + 6] << 8); + sprintf(szbuf, "有%d张历史图片!", srdt.historyimagenum[devparam[devno].CameraChannel - 1]); + DebugLog(devparam[devno].commid, szbuf, 'V'); + presetno = (int)rtumsg.MsgData[i + 8]; + pPortParam->image.presetno = presetno; + pPortParam->image.state = SER_SAMPLE; + curserial->RevCmdFlag = 1; + pPortParam->SerialCmdidx = 1; + srdt.sendphotocmdcnt = 0; + break; + case 0x11: /* 图片数据包*/ + i = 6; + iNo = rtumsg.MsgData[i + 1] + rtumsg.MsgData[i] * 256; + packsize = rtumsg.MsgData[i + 3] + rtumsg.MsgData[i + 2] * 256; + memmove(&pPortParam->image.buf[iNo - 1], &rtumsg.MsgData[i + 4], packsize); + pPortParam->image.ilen[iNo - 1] = packsize; + sprintf(szbuf, "收到第%d(总%d包)包长=%d", iNo, srdt.imagepacketnum, packsize); + DebugLog(devparam[devno].commid, szbuf, 'V'); + curserial->RevCmdFlag = 1; + pPortParam->FirstCmdTimeCnt = get_msec(); + if ((iNo == pPortParam->SerialCmdidx) && (0xFF > rtumsg.MsgData[i + 2])) + { + if (iNo == srdt.imagepacketnum) + { /* 检查是否有漏包*/ + for (pidx = 0; pidx < srdt.imagepacketnum; pidx++) + { + if (pPortParam->image.ilen[pidx] < 1) + break; + } + if (pidx < srdt.imagepacketnum) + { + iNo = pidx; + recvend = 0; + } + else + { + if ((1 == SaveImageDataTofile(devno)) && (SER_SAMPLE == pPortParam->image.state)) + pPortParam->image.state = PHOTO_SAVE_SUCC; + recvend = 1; + } + } + else + recvend = 0; + + if (packsize > 
MAX_PHOTO_FRAME_LEN) + recvend = 0xFF; + + if (1 == recvend) + { + pPortParam->SerialCmdidx = 10002;/* 图片读取完成*/ + //pPortParam->image.lastlen = packsize; + } + else if (0xFF == recvend) + { + pPortParam->SerialCmdidx = -1; + } + else + { + if ((iNo > srdt.imagepacketnum) || (0 >= srdt.imagepacketnum)) + { + pPortParam->SerialCmdidx = -1; + } + else + pPortParam->SerialCmdidx = iNo + 1; + } + srdt.errorPhotoNoCnt = 0; + break; + } + srdt.errorPhotoNoCnt++; + sprintf(szbuf, "问询第%d包图片摄像机应答第%d包,连续错误%d次!", + pPortParam->SerialCmdidx, iNo, srdt.errorPhotoNoCnt); + DebugLog(devparam[devno].commid, szbuf, 'E'); + if (srdt.errorPhotoNoCnt > 5) + { + pPortParam->SerialCmdidx = 0; + srdt.RephotographCnt++; + if (srdt.RephotographCnt > 2) + { + pPortParam->SerialCmdidx = -1; + sprintf(szbuf, "因摄像机重拍%d次均未成功!结束拍照!", srdt.RephotographCnt); + DebugLog(devparam[devno].commid, szbuf, 'E'); + } + } + break; + case 0x03: + sprintf(szbuf, "设置波特率%d成功", devparam[devno].baudrate); + DebugLog(devparam[devno].commid, szbuf, 'D'); + pPortParam->SerialCmdidx = srdt.iLastGetPhotoNo; + srdt.iLastGetPhotoNo = -1; + curserial->RevCmdFlag = 1; + pPortParam->FirstCmdTimeCnt = get_msec(); + break; + case 0x15: + if (0xFF == rtumsg.MsgData[6]) + { + pPortParam->SerialCmdidx = -1; + sprintf(szbuf, "没有历史图片!结束读取图片!"); + DebugLog(devparam[devno].commid, szbuf, 'I'); + break; + } + i = 6; + iphototime = rtumsg.MsgData[i + 3] + (rtumsg.MsgData[i + 2] << 8) + (rtumsg.MsgData[i + 1] << 16) + (rtumsg.MsgData[i] << 24); + srdt.photographtime = iphototime; + pPortParam->image.phototime = iphototime; + i = 10; + img_file_size = rtumsg.MsgData[i + 3] + (rtumsg.MsgData[i + 2] << 8) + (rtumsg.MsgData[i + 1] << 16) + (rtumsg.MsgData[i] << 24); + packetnum = rtumsg.MsgData[i + 5] + (rtumsg.MsgData[i + 4] << 8); + pPortParam->image.imagelen = img_file_size; + pPortParam->image.imagenum = packetnum; + srdt.imagepacketnum = packetnum; + srdt.historyimagenum[devparam[devno].CameraChannel - 1] = rtumsg.MsgData[i + 7] + 
(rtumsg.MsgData[i + 6] << 8); + presetno = rtumsg.MsgData[i + 8]; + pPortParam->image.presetno = presetno; + sprintf(szbuf, "读取历史图片,还有%d张历史图片!", srdt.historyimagenum[devparam[devno].CameraChannel - 1]); + DebugLog(devparam[devno].commid, szbuf, 'I'); + curserial->RevCmdFlag = 1; + pPortParam->SerialCmdidx = 1; + srdt.sendphotocmdcnt = 0; + break; + case 0x16: + if (0xFF == rtumsg.MsgData[10]) + { + pPortParam->SerialCmdidx = -1; + sprintf(szbuf, "摄像机图片保存失败!"); + DebugLog(devparam[devno].commid, szbuf, 'E'); + } + pPortParam->SerialCmdidx = -1; + if (0 == rtumsg.MsgData[10]) + { + if (0 == srdt.historyimagenum[devparam[devno].CameraChannel - 1]) + { + ; + } + else + { + pPortParam->SerialCmdidx = 10003;/* 暂时不实现*/ + } + } + pPortParam->FirstCmdTimeCnt = get_msec(); + curserial->RevCmdFlag = 1; + break; + default: + curserial->RevCmdFlag = 1; + break; + } } /********************************************************************************* 发送命令 **********************************************************************************/ -void SendCmdFormPollCmdBuf( int port ) -{ - char buf[128]; - int len, ret; - LONG lctime; - SIO_PARAM_SERIAL_DEF *pPortParam; - - pPortParam = &serialport[port]; - memset(buf, 0, sizeof(buf)); - lctime = get_msec(); - if(pPortParam->ForceWaitFlag && pPortParam->ForceWaitCnt) - { - pPortParam->ForceWaitCnt--; - return; - } - - if(pPortParam->SendCmdFlag && (pPortParam->RevCmdFlag == 0)) - { - //pPortParam->RetryTimeCnt++; - if((lctime - pPortParam->RetryTimeCnt > 3*pPortParam->RetryTime) || (lctime - pPortParam->RetryTimeCnt < 0)) - { - pPortParam->RetryTimeCnt = lctime; - return; - } - if(lctime - pPortParam->RetryTimeCnt < pPortParam->RetryTime) - //if(pPortParam->RetryTimeCnt < pPortParam->RetryTime) - { - return; - } - pPortParam->RetryTimeCnt = lctime; - pPortParam->RetryCnt++; - if(pPortParam->RetryCnt > pPortParam->Retry) - { - ClearCmdFormPollCmdBuf(port); - } - else - { - pPortParam->SendCmdFlag = 0; - pPortParam->RevCmdFlag = 0; - 
pPortParam->ReSendCmdFlag = 1; - } - } - - if ( pPortParam->SendCmdFlag && pPortParam->RevCmdFlag ) - { - // 清除当前命令 - ClearCmdFormPollCmdBuf(port); - } - - if(pPortParam->WaitTime > 0) - { - if((lctime - pPortParam->WaitTimeCnt > 30*pPortParam->WaitTime) || (lctime - pPortParam->WaitTimeCnt < 0)) - { - pPortParam->WaitTimeCnt = lctime; - return; - } - - if(lctime- pPortParam->WaitTimeCnt < pPortParam->WaitTime) - { - return; - } - } - pPortParam->WaitTimeCnt = lctime; - - if ( pPortParam->ReSendCmdFlag ) - len = pPortParam->cmdlen; - else - { - len = pPortParam->cmdlen; - pPortParam->RetryCnt = 0; - } - if ( len == 0 ) - return; - - serialport[devparam[srdt.curdevidx[port]].commid].m_iRecvLen = 0; // 当发送一条新指令时,清除接收状态 - serialport[devparam[srdt.curdevidx[port]].commid].m_iRevStatus = 0; - if(serialport[port].fd < 0) - return; - ret = Gm_SetSerialPortParam(port); - if(ret < 0) - return; - len = GM_SerialComSend(&pPortParam->PollCmd[2], pPortParam->cmdlen-2, port); - if(len < 1) - { - sprintf(buf, "串口%d, 发送命令失败!", port+1); - DebugLog(port, buf, 'E'); - } - else - { - - sprintf(buf, "发送串口%d 装置%d命令:", port+1, srdt.curdevidx[port]+1); - BytestreamLOG(port, buf, &pPortParam->PollCmd[2], len, 'D'); - sprintf(buf, "sendtimeconst= %lld", lctime-pPortParam->lsendtime); - DebugLog(port, buf, 'W'); - pPortParam->lsendtime = lctime; - } - pPortParam->SendCmdFlag = 1; - pPortParam->ReSendCmdFlag = 0; - pPortParam->RevCmdFlag = 0; - pPortParam->RetryTimeCnt = lctime; - - pPortParam->ForceWaitCnt = pPortParam->PollCmd[0]*256+pPortParam->PollCmd[1]+TIMER_CNT-1; - pPortParam->ForceWaitCnt /= TIMER_CNT; - if(pPortParam->ForceWaitCnt) - { - pPortParam->ForceWaitFlag = 1; - } +void SendCmdFormPollCmdBuf(int port) +{ + char buf[128]; + int len, ret; + int64_t lctime; + SIO_PARAM_SERIAL_DEF *pPortParam; + + pPortParam = &serialport[port]; + memset(buf, 0, sizeof(buf)); + lctime = get_msec(); + if (pPortParam->ForceWaitFlag && pPortParam->ForceWaitCnt) + { + pPortParam->ForceWaitCnt--; + 
return; + } + + if (pPortParam->SendCmdFlag && (pPortParam->RevCmdFlag == 0)) + { + //pPortParam->RetryTimeCnt++; + if ((lctime - pPortParam->RetryTimeCnt > 3 * pPortParam->RetryTime) || (lctime - pPortParam->RetryTimeCnt < 0)) + { + pPortParam->RetryTimeCnt = lctime; + return; + } + if (lctime - pPortParam->RetryTimeCnt < pPortParam->RetryTime) + //if(pPortParam->RetryTimeCnt < pPortParam->RetryTime) + { + return; + } + pPortParam->RetryTimeCnt = lctime; + pPortParam->RetryCnt++; + if (pPortParam->RetryCnt > pPortParam->Retry) + { + ClearCmdFormPollCmdBuf(port); + } + else + { + pPortParam->SendCmdFlag = 0; + pPortParam->RevCmdFlag = 0; + pPortParam->ReSendCmdFlag = 1; + } + } + + if (pPortParam->SendCmdFlag && pPortParam->RevCmdFlag) + { + // 清除当前命令 + ClearCmdFormPollCmdBuf(port); + } + + if (pPortParam->WaitTime > 0) + { + if ((lctime - pPortParam->WaitTimeCnt > 30 * pPortParam->WaitTime) || (lctime - pPortParam->WaitTimeCnt < 0)) + { + pPortParam->WaitTimeCnt = lctime; + return; + } + + if (lctime - pPortParam->WaitTimeCnt < pPortParam->WaitTime) + { + return; + } + } + pPortParam->WaitTimeCnt = lctime; + + if (pPortParam->ReSendCmdFlag) + len = pPortParam->cmdlen; + else + { + len = pPortParam->cmdlen; + pPortParam->RetryCnt = 0; + } + if (len == 0) + return; + + serialport[devparam[srdt.curdevidx[port]].commid].m_iRecvLen = 0; // 当发送一条新指令时,清除接收状态 + serialport[devparam[srdt.curdevidx[port]].commid].m_iRevStatus = 0; + if (serialport[port].fd < 0) + return; + ret = Gm_SetSerialPortParam(port); + if (ret < 0) + return; + len = GM_SerialComSend(&pPortParam->PollCmd[2], pPortParam->cmdlen - 2, port); + if (len < 1) + { + sprintf(buf, "串口%d, 发送命令失败!", port + 1); + DebugLog(port, buf, 'E'); + } + else + { + + sprintf(buf, "发送串口%d 装置%d命令:", port + 1, srdt.curdevidx[port] + 1); + BytestreamLOG(port, buf, &pPortParam->PollCmd[2], len, 'D'); + sprintf(buf, "sendtimeconst= %lld", lctime - pPortParam->lsendtime); + DebugLog(port, buf, 'W'); + pPortParam->lsendtime = 
lctime; + } + pPortParam->SendCmdFlag = 1; + pPortParam->ReSendCmdFlag = 0; + pPortParam->RevCmdFlag = 0; + pPortParam->RetryTimeCnt = lctime; + + pPortParam->ForceWaitCnt = pPortParam->PollCmd[0] * 256 + pPortParam->PollCmd[1] + TIMER_CNT - 1; + pPortParam->ForceWaitCnt /= TIMER_CNT; + if (pPortParam->ForceWaitCnt) + { + pPortParam->ForceWaitFlag = 1; + } } void ClearCmdAllFlag(int commid) { - if((commid < 0) || (commid >= MAX_SERIAL_PORT_NUM)) - return; - - serialport[commid].RetryCnt = 0; - serialport[commid].RetryTimeCnt = get_msec(); - serialport[commid].WaitTimeCnt = get_msec(); - serialport[commid].ForceWaitFlag = 0; - serialport[commid].ForceWaitCnt = 0; - serialport[commid].SendCmdFlag = 0; - serialport[commid].RevCmdFlag = 0; - serialport[commid].ReSendCmdFlag = 0; + if ((commid < 0) || (commid >= MAX_SERIAL_PORT_NUM)) + return; + + serialport[commid].RetryCnt = 0; + serialport[commid].RetryTimeCnt = get_msec(); + serialport[commid].WaitTimeCnt = get_msec(); + serialport[commid].ForceWaitFlag = 0; + serialport[commid].ForceWaitCnt = 0; + serialport[commid].SendCmdFlag = 0; + serialport[commid].RevCmdFlag = 0; + serialport[commid].ReSendCmdFlag = 0; } void ClearCmdFormPollCmdBuf(int port) { - //int len, idx; - SIO_PARAM_SERIAL_DEF *pPortParam; - - if((port < 0) || (port >= MAX_SERIAL_PORT_NUM)) - return; - pPortParam = &serialport[port]; - - pPortParam->cmdlen = 0; - memset(pPortParam->PollCmd, 0, sizeof(pPortParam->PollCmd)); - srdt.serialstatus[port] = 0; - // 清除指令下发标识 - ClearCmdAllFlag(port); + //int len, idx; + SIO_PARAM_SERIAL_DEF *pPortParam; + + if ((port < 0) || (port >= MAX_SERIAL_PORT_NUM)) + return; + pPortParam = &serialport[port]; + + pPortParam->cmdlen = 0; + memset(pPortParam->PollCmd, 0, sizeof(pPortParam->PollCmd)); + srdt.serialstatus[port] = 0; + // 清除指令下发标识 + ClearCmdAllFlag(port); } // 下发串口拍照指令控制 int FindNextCameraPhotoCommand(int devidx) { - int imagesize=3, cmdno; - BYTE channel, imagequality=90, presetno; - WORD packetsize; - LONG 
lcurtime; - BYTE cmdidx=0x10; - char szbuf[128]; - - //如果命令缓冲区仍有命令,则退出本函数 - if((devparam[devidx].commid+1 < 1) || (devparam[devidx].commid+1 > MAX_SERIAL_PORT_NUM)) - return -1; - // if(serialport[devparam[devidx].commid].cmdlen > 0) - // return -1; - if(-1 == srdt.ms_dev[devidx].SerialCmdidx)/* 下发拍照指令*/ - return -1; - channel = devparam[devidx].CameraChannel; - if((1>channel) || (channel >MAX_CHANNEL_NUM)) - { - /* 通道号错误退出拍照流程*/; - srdt.ms_dev[devidx].SerialCmdidx = -1; - sprintf(szbuf, "装置配置中,通道号配置错误!结束拍照!"); - DebugLog(devparam[devidx].commid, szbuf, 'I'); - } - cmdno = srdt.ms_dev[devidx].SerialCmdidx; - lcurtime = get_msec(); - if((1 > cmdno) || (10000 <= cmdno)) { - if (lcurtime - srdt.ms_dev[devidx].FirstCmdTimeCnt < 300) - return -1; - } - switch (cmdno) - { - case 0:/* 下发拍照指令*/ - if(lcurtime - srdt.ms_dev[devidx].FirstCmdTimeCnt < 3800) - return -1; - if((lcurtime-srdt.ms_dev[devidx].FirstCmdTimeCnt > 3*35*1000) ||(lcurtime-srdt.ms_dev[devidx].FirstCmdTimeCnt < 0)) - { - srdt.ms_dev[devidx].FirstCmdTimeCnt = lcurtime; - return -1; - } - - if(lcurtime - srdt.ms_dev[devidx].FirstCmdTimeCnt > 35*1000) - { - srdt.ms_dev[devidx].SerialCmdidx = -1; - sprintf(szbuf, "串口摄像机未接或故障!结束拍照!"); - DebugLog(devparam[devidx].commid, szbuf, 'I'); - return -1; - } - memset(szbuf, 0, sizeof(szbuf)); - sprintf(szbuf, "time=%lldms", lcurtime-srdt.ms_dev[devidx].FirstCmdTimeCnt); - DebugLog(devparam[devidx].commid, szbuf, 'I'); - packetsize = (WORD)MAX_PHOTO_FRAME_LEN; - srdt.sendphotocmdcnt++; - srdt.imagepacketnum = 0; - srdt.errorPhotoNoCnt = 0; - cmdidx = 0x10; - imagesize = srdt.bImageSize; - break; - - case 10000: /* 下发设置串口波特率命令*/ - switch(devparam[devidx].baudrate) - { - case B9600: - imagesize = 0x07; - break; - case B19200: - imagesize = 0x08; - break; - case B38400: - imagesize = 0x09; - break; - default: - sprintf(szbuf, "设置串口摄像机参数时,配置参数错误!退出设置!"); - DebugLog(devparam[devidx].commid, szbuf, 'I'); - srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; - 
srdt.iLastGetPhotoNo = -1; - return -1; - } - - if(lcurtime - srdt.ms_dev[devidx].FirstCmdTimeCnt > 15*1000) - { - srdt.ms_dev[devidx].SerialCmdidx = -1; - sprintf(szbuf, "设置串口摄像机参数时,15秒未收到摄像机应答!退出设置!"); - DebugLog(devparam[devidx].commid, szbuf, 'I'); - return -1; - } - cmdidx = 0x03; - packetsize = 0xFFFF; - break; - - case 10001: /* 通知摄像机图片读取完成或存储(16H)*/ - case 10002: - cmdidx = 0x16; - if(10001 == cmdno) - packetsize = 1; - else - packetsize = 0; - imagesize = srdt.photographtime; /* 需要保存或删除的图片拍摄时间*/ - break; - - case 10003: /* 读取历史图片(15H)*/ - cmdidx = 0x15; - packetsize = (WORD)MAX_PHOTO_FRAME_LEN; - break; - - case 10005: /* 关闭功能*/ - //Gm_CtrlPtzCmd(1, P_MOVE_LEFT); - //sleep(2); - Gm_CtrlPtzCmd(channel, Cmd_Cancel); - usleep(100000); - srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; - srdt.iLastGetPhotoNo = -1; - //sleep(20); - return 1; - case 10006: /* 自动扫描功能控制(1/0 打开/关闭该功能)*/ - Gm_CtrlPtzCmd(channel, P_Auto_Scan); - usleep(100000); - srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; - srdt.iLastGetPhotoNo = -1; - return 1; - case 10007: /* 光圈缩小(1 有效)*/ - Gm_CtrlPtzCmd(channel, P_IRIS_CLOSE); - usleep(100000); - srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; - srdt.iLastGetPhotoNo = -1; - return 1; - case 10008: /* 光圈放大(1 有效)*/ - Gm_CtrlPtzCmd(channel, P_IRIS_OPEN); - usleep(100000); - srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; - srdt.iLastGetPhotoNo = -1; - return 1; - case 10009: /* 近距离聚焦(1 有效)*/ - Gm_CtrlPtzCmd(channel, P_FOCUS_NEAR); - usleep(100000); - Gm_CtrlPtzCmd(channel, Cmd_Cancel); - usleep(100000); - srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; - srdt.iLastGetPhotoNo = -1; - return 1; - case 10010: /* 远距离聚焦(1 有效)*/ - Gm_CtrlPtzCmd(channel, P_FOCUS_FAR); - usleep(100000); - Gm_CtrlPtzCmd(channel, Cmd_Cancel); - usleep(100000); - srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; - srdt.iLastGetPhotoNo = -1; - return 1; - case 10011: /* 远离物体(1 有效)*/ - Gm_CtrlPtzCmd(channel, P_ZOOM_WIDE); - 
usleep(100000); - Gm_CtrlPtzCmd(channel, Cmd_Cancel); - usleep(100000); - srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; - srdt.iLastGetPhotoNo = -1; - return 1; - case 10012: /* 接近物体(1 有效)*/ - Gm_CtrlPtzCmd(channel, P_ZOOM_TELE); - usleep(100000); - Gm_CtrlPtzCmd(channel, Cmd_Cancel); - usleep(100000); - srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; - srdt.iLastGetPhotoNo = -1; - return 1; - case 10013: /* 向下移动镜头(1 有效)*/ - Gm_CtrlPtzCmd(channel, P_MOVE_DOWN); - sleep(1); - Gm_CtrlPtzCmd(channel, Cmd_Cancel); - usleep(100000); - srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; - srdt.iLastGetPhotoNo = -1; - return 1; - case 10014: /* 向上移动镜头(1 有效)*/ - Gm_CtrlPtzCmd(channel, P_MOVE_UP); - sleep(1); - Gm_CtrlPtzCmd(channel, Cmd_Cancel); - usleep(100000); - srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; - srdt.iLastGetPhotoNo = -1; - return 1; - case 10015: /* 向左移动镜头(1 有效)*/ - Gm_CtrlPtzCmd(channel, P_MOVE_LEFT); - sleep(1); - Gm_CtrlPtzCmd(channel, Cmd_Cancel); - usleep(100000); - srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; - srdt.iLastGetPhotoNo = -1; - return 1; - case 10016: /* 向右移动镜头(1 有效)*/ - Gm_CtrlPtzCmd(channel, P_MOVE_RIGHT); - sleep(1); - Gm_CtrlPtzCmd(channel, Cmd_Cancel); - usleep(100000); - srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; - srdt.iLastGetPhotoNo = -1; - return 1; - case 10017: /* 调用预置点*/ - //srdt.presetno = 2; - Gm_CtrlPtzCmd(channel, MOVE_TO_PRESETNO+srdt.presetno); - sleep(2); - if(0 == srdt.IsSleep) - { - srdt.ms_dev[devidx].SerialCmdidx = 10017; - srdt.IsSleep++; - return 1; - } - //if(srdt.presetno > 1) - // srdt.presetno = 1; - // else - // srdt.presetno++; - //srdt.ms_dev[devidx].SerialCmdidx = -1; - //Gm_CtrlPtzCmd(channel, MOVE_TO_PRESETNO+srdt.presetno); - //usleep(1000); - srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; - srdt.iLastGetPhotoNo = -1; - srdt.IsSleep = 0; - return 1; - case 10018: /* 设置预置点*/ - Gm_CtrlPtzCmd(channel, SET_PRESETNO+srdt.presetno); - 
usleep(100000); - srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; - srdt.iLastGetPhotoNo = -1; - return 1; - default: - imagesize = 0xFF; - packetsize = (WORD)srdt.ms_dev[devidx].SerialCmdidx; - cmdidx = 0x11; + int imagesize = 3, cmdno; + uint8_t channel, imagequality = 90, presetno; + uint16_t packetsize; + int64_t lcurtime; + uint8_t cmdidx = 0x10; + char szbuf[128]; + + //如果命令缓冲区仍有命令,则退出本函数 + if ((devparam[devidx].commid + 1 < 1) || (devparam[devidx].commid + 1 > MAX_SERIAL_PORT_NUM)) + return -1; + // if(serialport[devparam[devidx].commid].cmdlen > 0) + // return -1; + if (-1 == srdt.ms_dev[devidx].SerialCmdidx)/* 下发拍照指令*/ + return -1; + channel = devparam[devidx].CameraChannel; + if ((1 > channel) || (channel > MAX_CHANNEL_NUM)) + { + /* 通道号错误退出拍照流程*/; + srdt.ms_dev[devidx].SerialCmdidx = -1; + sprintf(szbuf, "装置配置中,通道号配置错误!结束拍照!"); + DebugLog(devparam[devidx].commid, szbuf, 'I'); + } + cmdno = srdt.ms_dev[devidx].SerialCmdidx; + lcurtime = get_msec(); + if ((1 > cmdno) || (10000 <= cmdno)) { + if (lcurtime - srdt.ms_dev[devidx].FirstCmdTimeCnt < 300) + return -1; + } + switch (cmdno) + { + case 0:/* 下发拍照指令*/ + if (lcurtime - srdt.ms_dev[devidx].FirstCmdTimeCnt < 3800) + return -1; + if ((lcurtime - srdt.ms_dev[devidx].FirstCmdTimeCnt > 3 * 35 * 1000) || (lcurtime - srdt.ms_dev[devidx].FirstCmdTimeCnt < 0)) + { + srdt.ms_dev[devidx].FirstCmdTimeCnt = lcurtime; + return -1; + } + + if (lcurtime - srdt.ms_dev[devidx].FirstCmdTimeCnt > 35 * 1000) + { + srdt.ms_dev[devidx].SerialCmdidx = -1; + sprintf(szbuf, "串口摄像机未接或故障!结束拍照!"); + DebugLog(devparam[devidx].commid, szbuf, 'I'); + return -1; + } + memset(szbuf, 0, sizeof(szbuf)); + sprintf(szbuf, "time=%lldms", lcurtime - srdt.ms_dev[devidx].FirstCmdTimeCnt); + DebugLog(devparam[devidx].commid, szbuf, 'I'); + packetsize = (uint16_t)MAX_PHOTO_FRAME_LEN; + srdt.sendphotocmdcnt++; + srdt.imagepacketnum = 0; + srdt.errorPhotoNoCnt = 0; + cmdidx = 0x10; + imagesize = srdt.bImageSize; + break; + + case 10000: /* 
下发设置串口波特率命令*/ + switch (devparam[devidx].baudrate) + { + case B9600: + imagesize = 0x07; + break; + case B19200: + imagesize = 0x08; + break; + case B38400: + imagesize = 0x09; + break; + default: + sprintf(szbuf, "设置串口摄像机参数时,配置参数错误!退出设置!"); + DebugLog(devparam[devidx].commid, szbuf, 'I'); + srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; + srdt.iLastGetPhotoNo = -1; + return -1; + } + + if (lcurtime - srdt.ms_dev[devidx].FirstCmdTimeCnt > 15 * 1000) + { + srdt.ms_dev[devidx].SerialCmdidx = -1; + sprintf(szbuf, "设置串口摄像机参数时,15秒未收到摄像机应答!退出设置!"); + DebugLog(devparam[devidx].commid, szbuf, 'I'); + return -1; + } + cmdidx = 0x03; + packetsize = 0xFFFF; + break; + + case 10001: /* 通知摄像机图片读取完成或存储(16H)*/ + case 10002: + cmdidx = 0x16; + if (10001 == cmdno) + packetsize = 1; + else + packetsize = 0; + imagesize = srdt.photographtime; /* 需要保存或删除的图片拍摄时间*/ + break; + + case 10003: /* 读取历史图片(15H)*/ + cmdidx = 0x15; + packetsize = (uint16_t)MAX_PHOTO_FRAME_LEN; + break; + + case 10005: /* 关闭功能*/ + //Gm_CtrlPtzCmd(1, P_MOVE_LEFT); + //sleep(2); + Gm_CtrlPtzCmd(channel, Cmd_Cancel); + usleep(100000); + srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; + srdt.iLastGetPhotoNo = -1; + //sleep(20); + return 1; + case 10006: /* 自动扫描功能控制(1/0 打开/关闭该功能)*/ + Gm_CtrlPtzCmd(channel, P_Auto_Scan); + usleep(100000); + srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; + srdt.iLastGetPhotoNo = -1; + return 1; + case 10007: /* 光圈缩小(1 有效)*/ + Gm_CtrlPtzCmd(channel, P_IRIS_CLOSE); + usleep(100000); + srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; + srdt.iLastGetPhotoNo = -1; + return 1; + case 10008: /* 光圈放大(1 有效)*/ + Gm_CtrlPtzCmd(channel, P_IRIS_OPEN); + usleep(100000); + srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; + srdt.iLastGetPhotoNo = -1; + return 1; + case 10009: /* 近距离聚焦(1 有效)*/ + Gm_CtrlPtzCmd(channel, P_FOCUS_NEAR); + usleep(100000); + Gm_CtrlPtzCmd(channel, Cmd_Cancel); + usleep(100000); + srdt.ms_dev[devidx].SerialCmdidx = 
srdt.iLastGetPhotoNo; + srdt.iLastGetPhotoNo = -1; + return 1; + case 10010: /* 远距离聚焦(1 有效)*/ + Gm_CtrlPtzCmd(channel, P_FOCUS_FAR); + usleep(100000); + Gm_CtrlPtzCmd(channel, Cmd_Cancel); + usleep(100000); + srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; + srdt.iLastGetPhotoNo = -1; + return 1; + case 10011: /* 远离物体(1 有效)*/ + Gm_CtrlPtzCmd(channel, P_ZOOM_WIDE); + usleep(100000); + Gm_CtrlPtzCmd(channel, Cmd_Cancel); + usleep(100000); + srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; + srdt.iLastGetPhotoNo = -1; + return 1; + case 10012: /* 接近物体(1 有效)*/ + Gm_CtrlPtzCmd(channel, P_ZOOM_TELE); + usleep(100000); + Gm_CtrlPtzCmd(channel, Cmd_Cancel); + usleep(100000); + srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; + srdt.iLastGetPhotoNo = -1; + return 1; + case 10013: /* 向下移动镜头(1 有效)*/ + Gm_CtrlPtzCmd(channel, P_MOVE_DOWN); + sleep(1); + Gm_CtrlPtzCmd(channel, Cmd_Cancel); + usleep(100000); + srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; + srdt.iLastGetPhotoNo = -1; + return 1; + case 10014: /* 向上移动镜头(1 有效)*/ + Gm_CtrlPtzCmd(channel, P_MOVE_UP); + sleep(1); + Gm_CtrlPtzCmd(channel, Cmd_Cancel); + usleep(100000); + srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; + srdt.iLastGetPhotoNo = -1; + return 1; + case 10015: /* 向左移动镜头(1 有效)*/ + Gm_CtrlPtzCmd(channel, P_MOVE_LEFT); + sleep(1); + Gm_CtrlPtzCmd(channel, Cmd_Cancel); + usleep(100000); + srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; + srdt.iLastGetPhotoNo = -1; + return 1; + case 10016: /* 向右移动镜头(1 有效)*/ + Gm_CtrlPtzCmd(channel, P_MOVE_RIGHT); + sleep(1); + Gm_CtrlPtzCmd(channel, Cmd_Cancel); + usleep(100000); + srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; + srdt.iLastGetPhotoNo = -1; + return 1; + case 10017: /* 调用预置点*/ + //srdt.presetno = 2; + Gm_CtrlPtzCmd(channel, MOVE_TO_PRESETNO + srdt.presetno); + sleep(2); + if (0 == srdt.IsSleep) + { + srdt.ms_dev[devidx].SerialCmdidx = 10017; + srdt.IsSleep++; + return 1; + } + //if(srdt.presetno > 
1) + // srdt.presetno = 1; + // else + // srdt.presetno++; + //srdt.ms_dev[devidx].SerialCmdidx = -1; + //Gm_CtrlPtzCmd(channel, MOVE_TO_PRESETNO+srdt.presetno); + //usleep(1000); + srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; + srdt.iLastGetPhotoNo = -1; + srdt.IsSleep = 0; + return 1; + case 10018: /* 设置预置点*/ + Gm_CtrlPtzCmd(channel, SET_PRESETNO + srdt.presetno); + usleep(100000); + srdt.ms_dev[devidx].SerialCmdidx = srdt.iLastGetPhotoNo; + srdt.iLastGetPhotoNo = -1; + return 1; + default: + imagesize = 0xFF; + packetsize = (uint16_t)srdt.ms_dev[devidx].SerialCmdidx; + cmdidx = 0x11; #if 0 - if(0 == srdt.IsSleep) - { - srdt.IsSleep++; - testComm(); - } + if (0 == srdt.IsSleep) + { + srdt.IsSleep++; + testComm(); + } #endif - if(lcurtime - srdt.ms_dev[devidx].FirstCmdTimeCnt > 35*1000) - { - srdt.ms_dev[devidx].SerialCmdidx = -1; - sprintf(szbuf, "读取第%d包图片数据35秒未收到!结束拍照!",packetsize); - DebugLog(devparam[devidx].commid, szbuf, 'I'); - return -1; - } - break; - } - MakeCameraPhotoCommand(devidx, cmdidx, imagesize, packetsize, imagequality, srdt.sendphototime); - //MakeCameraPhotoCommand(devidx, 2, imagesize, packetsize, imagequality); - srdt.curdevidx[devparam[devidx].commid] = devidx; - return 1; + if (lcurtime - srdt.ms_dev[devidx].FirstCmdTimeCnt > 35 * 1000) + { + srdt.ms_dev[devidx].SerialCmdidx = -1; + sprintf(szbuf, "读取第%d包图片数据35秒未收到!结束拍照!", packetsize); + DebugLog(devparam[devidx].commid, szbuf, 'I'); + return -1; + } + break; + } + MakeCameraPhotoCommand(devidx, cmdidx, imagesize, packetsize, imagequality, srdt.sendphototime); + //MakeCameraPhotoCommand(devidx, 2, imagesize, packetsize, imagequality); + srdt.curdevidx[devparam[devidx].commid] = devidx; + return 1; } /********************************************************************************* 生成 CameraPhoto命令 **********************************************************************************/ -void MakeCameraPhotoCommand( int portno, BYTE cmdidx, int OneParam, WORD TwoParam, BYTE Threep, 
int phototime ) -{ - int i, icurtime; - u_char *sendbuf; - //char szbuf[128]; - - sendbuf = serialport[devparam[portno].commid].PollCmd; - - icurtime = phototime; - i = 0; - sendbuf[i++] = 0x00; /* 强制等待时间*/ - sendbuf[i++] = 0x00; /* 强制等待时间*/ - sendbuf[i++] = 0x68; /* 起始字符*/ - sendbuf[i++] = (BYTE)0x00; /* length */ - sendbuf[i++] = (BYTE)0x00; /* length */ - sendbuf[i++] = 0x68; /* 起始字符*/ - sendbuf[i++] = (BYTE)devparam[portno].devaddr;/* 传感器地址*/ - sendbuf[i++] = cmdidx; /* 命令字*/ - switch(cmdidx) - { - case 0x02: /* */ - sendbuf[i-2] = 0xFF; - break; - case 0x03: /*设置传感器通讯参数(03H)*/ - sendbuf[i++] = 0x00; /* 波特率*/ - sendbuf[i++] = 0x00; - sendbuf[i++] = 0x00; - sendbuf[i++] = (BYTE)OneParam; - sendbuf[i++] = 0x08; /* 数据位*/ - sendbuf[i++] = 0x00; /* 校验位*/ - sendbuf[i++] = 0x01; /* 停止位*/ - break; - case 0x010: /* 拍摄图片并指定大小分包(10H)*/ - sendbuf[i++] = OneParam; /* 图片大小(Resolution)*/ - sendbuf[i++] = HIBYTE(TwoParam);/*包大小(PackageSize)*/ - sendbuf[i++] = LOBYTE(TwoParam); - sendbuf[i++] = HIBYTE(HIWORD(icurtime+8*60*60));/* 请求拍摄图片时间(PhotoTime)*/ - sendbuf[i++] = LOBYTE(HIWORD(icurtime+8*60*60)); - sendbuf[i++] = HIBYTE(LOWORD(icurtime+8*60*60)); - sendbuf[i++] = LOBYTE(LOWORD(icurtime+8*60*60)); - sendbuf[i++] = Threep;/*图像质量(ImageQuality)*/ - sendbuf[i++] = srdt.presetno;//netportparam.CurPresetno[srdt.ms_dev[portno].CameraChannel-1];/*拍照预置点(PresetNo)*/ - break; - case 0x11: /* 获取指定包数据(11H)*/ - sendbuf[i++] = HIBYTE(TwoParam);/*图片包号:(PackageNo)*/ - sendbuf[i++] = LOBYTE(TwoParam); - break; - case 0x15: /* 读取历史图片(15H)*/ - sendbuf[i++] = HIBYTE(TwoParam);/*包大小(PackageSize)*/ - sendbuf[i++] = LOBYTE(TwoParam); - break; - case 0x16: /* 通知摄像机图片读取完成或存储(16H)*/ - sendbuf[i++] = HIBYTE(HIWORD(OneParam));/* 需要保存或删除的图片拍摄时间*/ - sendbuf[i++] = LOBYTE(HIWORD(OneParam)); - sendbuf[i++] = HIBYTE(LOWORD(OneParam)); - sendbuf[i++] = LOBYTE(LOWORD(OneParam)); - sendbuf[i++] = (BYTE)TwoParam; /* 是否需要保存*/ - break; - } - sendbuf[i] = CalLpc((u_char *)&sendbuf[6],i-6); - i+= 1; - sendbuf[i++] 
= 0x16; /* 信息尾*/ - sendbuf[3] = (BYTE)((i-10)>>8); - sendbuf[4] = (BYTE)(i-10); - serialport[devparam[portno].commid].cmdlen = i; - //return; +void MakeCameraPhotoCommand(int portno, uint8_t cmdidx, int OneParam, uint16_t TwoParam, uint8_t Threep, int phototime) +{ + int i, icurtime; + u_char *sendbuf; + //char szbuf[128]; + + sendbuf = serialport[devparam[portno].commid].PollCmd; + + icurtime = phototime; + i = 0; + sendbuf[i++] = 0x00; /* 强制等待时间*/ + sendbuf[i++] = 0x00; /* 强制等待时间*/ + sendbuf[i++] = 0x68; /* 起始字符*/ + sendbuf[i++] = (uint8_t)0x00; /* length */ + sendbuf[i++] = (uint8_t)0x00; /* length */ + sendbuf[i++] = 0x68; /* 起始字符*/ + sendbuf[i++] = (uint8_t)devparam[portno].devaddr;/* 传感器地址*/ + sendbuf[i++] = cmdidx; /* 命令字*/ + switch (cmdidx) + { + case 0x02: /* */ + sendbuf[i - 2] = 0xFF; + break; + case 0x03: /*设置传感器通讯参数(03H)*/ + sendbuf[i++] = 0x00; /* 波特率*/ + sendbuf[i++] = 0x00; + sendbuf[i++] = 0x00; + sendbuf[i++] = (uint8_t)OneParam; + sendbuf[i++] = 0x08; /* 数据位*/ + sendbuf[i++] = 0x00; /* 校验位*/ + sendbuf[i++] = 0x01; /* 停止位*/ + break; + case 0x010: /* 拍摄图片并指定大小分包(10H)*/ + sendbuf[i++] = OneParam; /* 图片大小(Resolution)*/ + sendbuf[i++] = HIBYTE(TwoParam);/*包大小(PackageSize)*/ + sendbuf[i++] = LOBYTE(TwoParam); + sendbuf[i++] = HIBYTE(HIWORD(icurtime + 8 * 60 * 60));/* 请求拍摄图片时间(PhotoTime)*/ + sendbuf[i++] = LOBYTE(HIWORD(icurtime + 8 * 60 * 60)); + sendbuf[i++] = HIBYTE(LOWORD(icurtime + 8 * 60 * 60)); + sendbuf[i++] = LOBYTE(LOWORD(icurtime + 8 * 60 * 60)); + sendbuf[i++] = Threep;/*图像质量(ImageQuality)*/ + sendbuf[i++] = srdt.presetno;//netportparam.CurPresetno[srdt.ms_dev[portno].CameraChannel-1];/*拍照预置点(PresetNo)*/ + break; + case 0x11: /* 获取指定包数据(11H)*/ + sendbuf[i++] = HIBYTE(TwoParam);/*图片包号:(PackageNo)*/ + sendbuf[i++] = LOBYTE(TwoParam); + break; + case 0x15: /* 读取历史图片(15H)*/ + sendbuf[i++] = HIBYTE(TwoParam);/*包大小(PackageSize)*/ + sendbuf[i++] = LOBYTE(TwoParam); + break; + case 0x16: /* 通知摄像机图片读取完成或存储(16H)*/ + sendbuf[i++] = 
HIBYTE(HIWORD(OneParam));/* 需要保存或删除的图片拍摄时间*/ + sendbuf[i++] = LOBYTE(HIWORD(OneParam)); + sendbuf[i++] = HIBYTE(LOWORD(OneParam)); + sendbuf[i++] = LOBYTE(LOWORD(OneParam)); + sendbuf[i++] = (uint8_t)TwoParam; /* 是否需要保存*/ + break; + } + sendbuf[i] = CalLpc((u_char *)&sendbuf[6], i - 6); + i += 1; + sendbuf[i++] = 0x16; /* 信息尾*/ + sendbuf[3] = (uint8_t)((i - 10) >> 8); + sendbuf[4] = (uint8_t)(i - 10); + serialport[devparam[portno].commid].cmdlen = i; + //return; } // 准备发送云台指令 -int Gm_CtrlPtzCmd(u_char channel, DWORD ptzcmd) -{ - int i; - char szbuf[64]; - - srdt.PtzCmdType = ptzcmd; - // 查找装置序号 - for(i=0; i srdt.ms_dev[i].UseSerialidx+1) ||(MAX_SERIAL_PORT_NUM < srdt.ms_dev[i].UseSerialidx+1)) - { - sprintf(szbuf, "通道%d摄像机错误的使用了串口%d", channel, srdt.ms_dev[i].UseSerialidx+1); - LOGI("%s", szbuf); - return 1; - } - else - { + if ((2 > srdt.ms_dev[i].UseSerialidx + 1) || (MAX_SERIAL_PORT_NUM < srdt.ms_dev[i].UseSerialidx + 1)) + { + sprintf(szbuf, "通道%d摄像机错误的使用了串口%d", channel, srdt.ms_dev[i].UseSerialidx + 1); + LOGI("%s", szbuf); + return 1; + } + else + { #endif - sprintf(szbuf, "摄像机通道%d 使用串口%d", channel, devparam[i].commid+1); - DebugLog(devparam[i].commid, szbuf, 'I'); - //} - srdt.usecameradevidx = i; - // 查找串口序号 - // 1.打开串口电源 - //Gm_OpenSensorsPower(); - // 2.打开串口通讯 - //Gm_OpenSerialPort(i); - - srdt.SendStopPtzCmdTimeCnt = -1; - //return 1; - return Gm_Camera_Timer(); - //return 1; -} - -// 发送转动摄像机云台命令定时器 -int Gm_Camera_Timer() -{ - char szbuf[128]; - - if(PELCO_D_PROTOCOL == devparam[srdt.usecameradevidx].ProtocolIdx) - { - switch(srdt.PtzCmdType) - { - case P_Auto_Scan: - srdt.PtzCmdType = D_Auto_Scan; - break; - case P_IRIS_CLOSE: - srdt.PtzCmdType = D_IRIS_CLOSE; - break; - case P_IRIS_OPEN: - srdt.PtzCmdType = D_IRIS_OPEN; - break; - case P_FOCUS_NEAR: - srdt.PtzCmdType = D_FOCUS_NEAR; - break; - case P_FOCUS_FAR: - srdt.PtzCmdType = D_FOCUS_FAR; - break; - case P_ZOOM_WIDE: - srdt.PtzCmdType = D_ZOOM_WIDE; - break; - case P_ZOOM_TELE: - srdt.PtzCmdType = 
D_ZOOM_TELE; - break; - case P_MOVE_DOWN: - srdt.PtzCmdType = D_MOVE_DOWN; - break; - case P_MOVE_UP: - srdt.PtzCmdType = D_MOVE_UP; - break; - case P_MOVE_LEFT: - srdt.PtzCmdType = D_MOVE_LEFT; - break; - case P_MOVE_RIGHT: - srdt.PtzCmdType = D_MOVE_RIGHT; - break; - } - } - if(srdt.SendStopPtzCmdTimeCnt == -1) - { - if(serialport[srdt.camerauseserial].cmdlen > 0) - return -1; - if(PELCO_D_PROTOCOL == devparam[srdt.usecameradevidx].ProtocolIdx) - Gm_SendPelco_DCommand(srdt.PtzCmdType); - else - Gm_SendPelco_pCommand(srdt.PtzCmdType); - - if((SET_PRESETNO == (srdt.PtzCmdType & 0xFFFF0000)) - || (MOVE_TO_PRESETNO == (srdt.PtzCmdType & 0xFFFF0000))) - { - //srdt.sampling &= 0xFD; - return 1; - } - srdt.PtzCmdType = Cmd_Cancel; - srdt.SendStopPtzCmdTimeCnt = 0; - } - return 1; + sprintf(szbuf, "摄像机通道%d 使用串口%d", channel, devparam[i].commid + 1); + DebugLog(devparam[i].commid, szbuf, 'I'); + //} + srdt.usecameradevidx = i; + // 查找串口序号 + // 1.打开串口电源 + //Gm_OpenSensorsPower(); + // 2.打开串口通讯 + //Gm_OpenSerialPort(i); + + srdt.SendStopPtzCmdTimeCnt = -1; + //return 1; + return Gm_Camera_Timer(); + //return 1; + } + + // 发送转动摄像机云台命令定时器 + int Gm_Camera_Timer() + { + char szbuf[128]; + + if (PELCO_D_PROTOCOL == devparam[srdt.usecameradevidx].ProtocolIdx) + { + switch (srdt.PtzCmdType) + { + case P_Auto_Scan: + srdt.PtzCmdType = D_Auto_Scan; + break; + case P_IRIS_CLOSE: + srdt.PtzCmdType = D_IRIS_CLOSE; + break; + case P_IRIS_OPEN: + srdt.PtzCmdType = D_IRIS_OPEN; + break; + case P_FOCUS_NEAR: + srdt.PtzCmdType = D_FOCUS_NEAR; + break; + case P_FOCUS_FAR: + srdt.PtzCmdType = D_FOCUS_FAR; + break; + case P_ZOOM_WIDE: + srdt.PtzCmdType = D_ZOOM_WIDE; + break; + case P_ZOOM_TELE: + srdt.PtzCmdType = D_ZOOM_TELE; + break; + case P_MOVE_DOWN: + srdt.PtzCmdType = D_MOVE_DOWN; + break; + case P_MOVE_UP: + srdt.PtzCmdType = D_MOVE_UP; + break; + case P_MOVE_LEFT: + srdt.PtzCmdType = D_MOVE_LEFT; + break; + case P_MOVE_RIGHT: + srdt.PtzCmdType = D_MOVE_RIGHT; + break; + } + } + if 
(srdt.SendStopPtzCmdTimeCnt == -1) + { + if (serialport[srdt.camerauseserial].cmdlen > 0) + return -1; + if (PELCO_D_PROTOCOL == devparam[srdt.usecameradevidx].ProtocolIdx) + Gm_SendPelco_DCommand(srdt.PtzCmdType); + else + Gm_SendPelco_pCommand(srdt.PtzCmdType); + + if ((SET_PRESETNO == (srdt.PtzCmdType & 0xFFFF0000)) + || (MOVE_TO_PRESETNO == (srdt.PtzCmdType & 0xFFFF0000))) + { + //srdt.sampling &= 0xFD; + return 1; + } + srdt.PtzCmdType = Cmd_Cancel; + srdt.SendStopPtzCmdTimeCnt = 0; + } + return 1; #if 0 - //if(srdt.SendStopPtzCmdTimeCnt > PTZ_MOVETIME*1000/TIMER_CNT) - { - if(serialport[srdt.camerauseserial].cmdlen > 0) - return -1; - if(PELCO_D_PROTOCOL == devparam[srdt.usecameradevidx].ProtocolIdx) - Gm_SendPelco_DCommand(srdt.PtzCmdType); - else - Gm_SendPelco_pCommand(srdt.PtzCmdType); - srdt.SendStopPtzCmdTimeCnt = -1; - //srdt.sampling &= 0xFD; - return 1; - } - //else - // srdt.SendStopPtzCmdTimeCnt ++; - //return -1; + //if(srdt.SendStopPtzCmdTimeCnt > PTZ_MOVETIME*1000/TIMER_CNT) + { + if (serialport[srdt.camerauseserial].cmdlen > 0) + return -1; + if (PELCO_D_PROTOCOL == devparam[srdt.usecameradevidx].ProtocolIdx) + Gm_SendPelco_DCommand(srdt.PtzCmdType); + else + Gm_SendPelco_pCommand(srdt.PtzCmdType); + srdt.SendStopPtzCmdTimeCnt = -1; + //srdt.sampling &= 0xFD; + return 1; + } + //else + // srdt.SendStopPtzCmdTimeCnt ++; + //return -1; #endif -} - -/******************************************************************************** -* 生成 PELCO_P 命令 * -*********************************************************************************/ -void Gm_SendPelco_pCommand( DWORD cmdtype) -{ - int len; - BYTE commandbuf[32]; - char buf[128]; - - len = 0; - commandbuf[len++] = (BYTE)0xA0; - commandbuf[len++] = (BYTE)devparam[srdt.usecameradevidx].devaddr; - commandbuf[len++] = (BYTE)(cmdtype>>24); - commandbuf[len++] = (BYTE)(cmdtype>>16); - commandbuf[len++] = (BYTE)(cmdtype>>8); - commandbuf[len++] = (BYTE)(cmdtype); - commandbuf[len++] = (BYTE)0xAF; - 
commandbuf[len] = (BYTE)Gm_Pelco_pXORCheck(commandbuf, len); - len++; - serialport[srdt.camerauseserial].cmdlen = len; - Gm_SetSerialPortParam(srdt.camerauseserial); - //unsigned char sendbuf[] = {0x68,0x00,0x00,0x68,0x0ff,0x02,0x01,0x16}; - //len = GM_SerialComSend(sendbuf, sizeof(sendbuf), srdt.camerauseserial); - - len = GM_SerialComSend(commandbuf, len, srdt.camerauseserial); - if(len < 1) - { - DebugLog(srdt.camerauseserial, "发送Pelco_p命令失败", 'E'); - } - else - { - sprintf(buf, "发送串口%d 像机通道%d Pelco_P命令:", - srdt.camerauseserial+1, devparam[srdt.usecameradevidx].CameraChannel); - BytestreamLOG(srdt.camerauseserial, buf, commandbuf, len, 'D'); - } - ClearCmdFormPollCmdBuf(srdt.camerauseserial); -} - -BYTE Gm_Pelco_pXORCheck( BYTE *msg, int len ) -{ - int i; - BYTE checkvalue=0; - - if(len <= 0) - return checkvalue; - checkvalue = msg[0]; - for(i=1; i> 24); + commandbuf[len++] = (uint8_t)(cmdtype >> 16); + commandbuf[len++] = (uint8_t)(cmdtype >> 8); + commandbuf[len++] = (uint8_t)(cmdtype); + commandbuf[len++] = (uint8_t)0xAF; + commandbuf[len] = (uint8_t)Gm_Pelco_pXORCheck(commandbuf, len); + len++; + serialport[srdt.camerauseserial].cmdlen = len; + Gm_SetSerialPortParam(srdt.camerauseserial); + //unsigned char sendbuf[] = {0x68,0x00,0x00,0x68,0x0ff,0x02,0x01,0x16}; + //len = GM_SerialComSend(sendbuf, sizeof(sendbuf), srdt.camerauseserial); + + len = GM_SerialComSend(commandbuf, len, srdt.camerauseserial); + if (len < 1) + { + DebugLog(srdt.camerauseserial, "发送Pelco_p命令失败", 'E'); + } + else + { + sprintf(buf, "发送串口%d 像机通道%d Pelco_P命令:", + srdt.camerauseserial + 1, devparam[srdt.usecameradevidx].CameraChannel); + BytestreamLOG(srdt.camerauseserial, buf, commandbuf, len, 'D'); + } + ClearCmdFormPollCmdBuf(srdt.camerauseserial); + } + + uint8_t Gm_Pelco_pXORCheck(uint8_t *msg, int len) + { + int i; + uint8_t checkvalue = 0; + + if (len <= 0) + return checkvalue; + checkvalue = msg[0]; + for (i = 1; i < len; i++) + checkvalue ^= msg[i]; + return checkvalue; + } + + 
/******************************************************************************** + * 生成 PELCO_D 命令 * + *********************************************************************************/ + void Gm_SendPelco_DCommand(uint32_t cmdtype) + { + int len; + uint8_t commandbuf[32]; + char buf[128]; + + len = 0; #if 1 /* Pelco_D*/ - commandbuf[len++] = (BYTE)0xFF; - commandbuf[len++] = (BYTE)devparam[srdt.usecameradevidx].devaddr; - commandbuf[len++] = (BYTE)(cmdtype>>24); - commandbuf[len++] = (BYTE)(cmdtype>>16); - commandbuf[len++] = (BYTE)(cmdtype>>8); - commandbuf[len++] = (BYTE)(cmdtype); - commandbuf[len] = (BYTE)Gm_Pelco_DCheck(commandbuf, len); + commandbuf[len++] = (uint8_t)0xFF; + commandbuf[len++] = (uint8_t)devparam[srdt.usecameradevidx].devaddr; + commandbuf[len++] = (uint8_t)(cmdtype >> 24); + commandbuf[len++] = (uint8_t)(cmdtype >> 16); + commandbuf[len++] = (uint8_t)(cmdtype >> 8); + commandbuf[len++] = (uint8_t)(cmdtype); + commandbuf[len] = (uint8_t)Gm_Pelco_DCheck(commandbuf, len); #endif - len++; - serialport[srdt.camerauseserial].cmdlen = len; - Gm_SetSerialPortParam(srdt.camerauseserial); - len = GM_SerialComSend(commandbuf, len, srdt.camerauseserial); - if(len < 1) - { - DebugLog(srdt.camerauseserial, "发送Pelco_D命令失败", 'E'); - } - else - { - sprintf(buf, "发送串口%d 像机通道%d Pelco_D命令:", - srdt.camerauseserial+1, devparam[srdt.usecameradevidx].CameraChannel); - BytestreamLOG(srdt.camerauseserial, buf, commandbuf, len, 'D'); - } - ClearCmdFormPollCmdBuf(srdt.camerauseserial); - //serialport[srdt.camerauseserial].ForceWaitCnt = 10; - //serialport[srdt.camerauseserial].ForceWaitFlag = 1; -} - -// 计算Pelco_D校验 -BYTE Gm_Pelco_DCheck( BYTE *msg, int len ) -{ - int i; - BYTE checkvalue=0; - - if(len <= 0) - return checkvalue; - checkvalue = 0; - for(i=1; i MAX_SERIAL_PORT_NUM)) - return -1; - if(get_msec()-srdt.ms_dev[devidx].FirstCmdTimeCnt < 3*1000) - { - return -1; - } - //if(SLANT_PROTOCOL == devparam[devidx].ProtocolIdx) - // return -1; - switch (cmdno) - { - 
case 0: /* 正常采集数据*/ - MakeShxyProtocolPollCommand(devidx, 0x09); - srdt.curdevidx[devparam[devidx].commid] = devidx; - return 1; - case 1: /* 测试读取地址*/ - MakeShxyProtocolPollCommand(devidx, 0x02); - srdt.curdevidx[devparam[devidx].commid] = devidx; - return 1; - default: - break; - } - return -1; -} - -/********************************************************************************* - 生成下发命令 -**********************************************************************************/ -void MakeShxyProtocolPollCommand(int portno, BYTE cmdidx) -{ - int i, length=0; - int newaddr = 9, baud = 9600, stopbit = 1, parity=0; - //char buf[128]; - u_char *sendbuf; - - sendbuf = serialport[devparam[portno].commid].PollCmd; - - /* 测试变量*/ - cmdidx =cmdidx; - - i = 0; - sendbuf[i++] = 0x00; // 强制等待时间 - sendbuf[i++] = 0x00; // - sendbuf[i++] = 0x68; // 起始字符 - sendbuf[i++] = (BYTE)0x00; // length - sendbuf[i++] = (BYTE)0x00; // length - sendbuf[i++] = 0x68; - sendbuf[i++] = (BYTE)devparam[portno].devaddr; // 传感器地址 - sendbuf[i++] = cmdidx; // 命令信息0x06 - switch (cmdidx) - { - case 1: /* 设置传感器新地址*/ - sendbuf[i++] = newaddr; - length = 1; - break; - case 2: /* 广播读地址*/ - sendbuf[6] = 0xFF; - break; - case 3: /* 设置串口参数*/ - sendbuf[i++] = (u_char)(baud >> 24); - sendbuf[i++] = (u_char)(baud >> 16); - sendbuf[i++] = (u_char)(baud >> 8); - sendbuf[i++] = (u_char)baud; - sendbuf[i++] = 8; - sendbuf[i++] = parity; - sendbuf[i++] = stopbit; - length = 7; - break; - default: - break; - } - sendbuf[i] = CalLpc((u_char *)&sendbuf[6],i-6); - i+= 1; - sendbuf[3] = length; - sendbuf[4] = length; - sendbuf[i++] = 0x16; // 信息尾 - serialport[devparam[portno].commid].cmdlen = i; -} + len++; + serialport[srdt.camerauseserial].cmdlen = len; + Gm_SetSerialPortParam(srdt.camerauseserial); + len = GM_SerialComSend(commandbuf, len, srdt.camerauseserial); + if (len < 1) + { + DebugLog(srdt.camerauseserial, "发送Pelco_D命令失败", 'E'); + } + else + { + sprintf(buf, "发送串口%d 像机通道%d Pelco_D命令:", + srdt.camerauseserial + 1, 
devparam[srdt.usecameradevidx].CameraChannel); + BytestreamLOG(srdt.camerauseserial, buf, commandbuf, len, 'D'); + } + ClearCmdFormPollCmdBuf(srdt.camerauseserial); + //serialport[srdt.camerauseserial].ForceWaitCnt = 10; + //serialport[srdt.camerauseserial].ForceWaitFlag = 1; + } + + // 计算Pelco_D校验 + uint8_t Gm_Pelco_DCheck(uint8_t *msg, int len) + { + int i; + uint8_t checkvalue = 0; + + if (len <= 0) + return checkvalue; + checkvalue = 0; + for (i = 1; i < len; i++) + checkvalue += msg[i]; + return checkvalue; + } + + /********************************************************************************* + 寻找并生成下一条倾角命令 + **********************************************************************************/ + int FindNextShxyProtocolCommand(int devidx) + { + int cmdno = 0; + + //如果命令缓冲区仍有命令,则退出本函数 + if ((devparam[devidx].commid + 1 < 1) || (devparam[devidx].commid + 1 > MAX_SERIAL_PORT_NUM)) + return -1; + if (get_msec() - srdt.ms_dev[devidx].FirstCmdTimeCnt < 3 * 1000) + { + return -1; + } + //if(SLANT_PROTOCOL == devparam[devidx].ProtocolIdx) + // return -1; + switch (cmdno) + { + case 0: /* 正常采集数据*/ + MakeShxyProtocolPollCommand(devidx, 0x09); + srdt.curdevidx[devparam[devidx].commid] = devidx; + return 1; + case 1: /* 测试读取地址*/ + MakeShxyProtocolPollCommand(devidx, 0x02); + srdt.curdevidx[devparam[devidx].commid] = devidx; + return 1; + default: + break; + } + return -1; + } + + /********************************************************************************* + 生成下发命令 + **********************************************************************************/ + void MakeShxyProtocolPollCommand(int portno, uint8_t cmdidx) + { + int i, length = 0; + int newaddr = 9, baud = 9600, stopbit = 1, parity = 0; + //char buf[128]; + u_char *sendbuf; + + sendbuf = serialport[devparam[portno].commid].PollCmd; + + /* 测试变量*/ + cmdidx = cmdidx; + + i = 0; + sendbuf[i++] = 0x00; // 强制等待时间 + sendbuf[i++] = 0x00; // + sendbuf[i++] = 0x68; // 起始字符 + sendbuf[i++] = (uint8_t)0x00; // length + 
sendbuf[i++] = (uint8_t)0x00; // length + sendbuf[i++] = 0x68; + sendbuf[i++] = (uint8_t)devparam[portno].devaddr; // 传感器地址 + sendbuf[i++] = cmdidx; // 命令信息0x06 + switch (cmdidx) + { + case 1: /* 设置传感器新地址*/ + sendbuf[i++] = newaddr; + length = 1; + break; + case 2: /* 广播读地址*/ + sendbuf[6] = 0xFF; + break; + case 3: /* 设置串口参数*/ + sendbuf[i++] = (u_char)(baud >> 24); + sendbuf[i++] = (u_char)(baud >> 16); + sendbuf[i++] = (u_char)(baud >> 8); + sendbuf[i++] = (u_char)baud; + sendbuf[i++] = 8; + sendbuf[i++] = parity; + sendbuf[i++] = stopbit; + length = 7; + break; + default: + break; + } + sendbuf[i] = CalLpc((u_char *)&sendbuf[6], i - 6); + i += 1; + sendbuf[3] = length; + sendbuf[4] = length; + sendbuf[i++] = 0x16; // 信息尾 + serialport[devparam[portno].commid].cmdlen = i; + } + + unsigned char CalLpc(unsigned char *msg, int len) + { + int i; + u_char retval = 0; + + for (i = 0; i < len; i++) + retval += msg[i]; + return retval; + } + + /*************************************************************** + * 读上海欣影传感器协议数据 * + ***************************************************************/ + void ShxyProtocolRecvData(int devno, u_char *buf, int len)// 规约读数据处理 + { + int i, ictime; + //uint16_t crc, check; + //SERIAL_DEV_DEF *pPortParam; + SIO_PARAM_SERIAL_DEF *pPortParam; + + if ((devno < 0) || (devno > MAX_SERIAL_DEV_NUM)) + { + return; + } + pPortParam = &serialport[devparam[devno].commid]; -unsigned char CalLpc(unsigned char *msg, int len) -{ - int i; - u_char retval = 0; + for (i = 0; i < len; i++) + { + switch (pPortParam->m_iRevStatus) + { + case 0: // 0x68 + pPortParam->m_iRecvLen = 0; + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + if (0x68 == buf[i]) + pPortParam->m_iRevStatus++; + else + pPortParam->m_iRevStatus = 18; + break; + case 1: // len1 + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + pPortParam->m_iRevStatus++; + break; + case 2: // len2 + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + if (buf[i] == 
pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen - 2]) + { + pPortParam->m_iRevStatus++; + pPortParam->m_iNeedRevLength = buf[i] + 5; + } + else + pPortParam->m_iRevStatus = 18; + break; + case 3: // 0x68 + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + pPortParam->m_iNeedRevLength--; + if (0x68 == buf[i]) + pPortParam->m_iRevStatus++; + else + pPortParam->m_iRevStatus = 18; + break; + case 4: // 正确接收数据 + pPortParam->m_iNeedRevLength--; + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + if (pPortParam->m_iNeedRevLength > 0) + break; + if (buf[i] != 0x16) + { + pPortParam->m_iRevStatus = 18; + break; + } + + if (CheckShxyProtocolLpcError(pPortParam->m_au8RecvBuf, pPortParam->m_iRecvLen) == 1) + { + ShxyProtocolDataProcess(devno); + pPortParam->m_iRevStatus = 0; + pPortParam->RevCmdFlag = 1; + } + else + { + pPortParam->m_iRevStatus = 0; + } + pPortParam->m_iRecvLen = 0; + break; + case 255:// 错误接收数据 + default: + if (buf[i] == 0x68) + { + pPortParam->m_iRevStatus = 1; + pPortParam->m_iRecvLen = 1; + pPortParam->m_au8RecvBuf[0] = buf[i]; + } + else if (buf[i] == 0x16) + { + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + pPortParam->m_iRevStatus = 0; + pPortParam->m_iRecvLen = 0; + } + else + { + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + if (pPortParam->m_iRecvLen > 200) + { + pPortParam->m_iRecvLen = 0; + } + } + break; + } + } + } - for (i = 0 ; i < len; i++) - retval += msg[i]; - return retval; -} + //******************************************************************************** + // 检查检验和是否正确 + //******************************************************************************** + int CheckShxyProtocolLpcError(u_char* msg, int len) + { + int bRetval = 0; + int iCheckLen; -/*************************************************************** -* 读上海欣影传感器协议数据 * -***************************************************************/ -void ShxyProtocolRecvData(int devno, u_char *buf, int len)// 规约读数据处理 -{ - int i, 
ictime; - //WORD crc, check; - //SERIAL_DEV_DEF *pPortParam; - SIO_PARAM_SERIAL_DEF *pPortParam; - - if((devno<0) || (devno > MAX_SERIAL_DEV_NUM)) - { - return; - } - pPortParam = &serialport[devparam[devno].commid]; - - for(i=0; im_iRevStatus) - { - case 0: // 0x68 - pPortParam->m_iRecvLen = 0; - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - if(0x68 == buf[i]) - pPortParam->m_iRevStatus++; - else - pPortParam->m_iRevStatus = 18; - break; - case 1: // len1 - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - pPortParam->m_iRevStatus++; - break; - case 2: // len2 - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - if(buf[i] == pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen-2]) - { - pPortParam->m_iRevStatus++; - pPortParam->m_iNeedRevLength = buf[i]+5; - } - else - pPortParam->m_iRevStatus = 18; - break; - case 3: // 0x68 - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - pPortParam->m_iNeedRevLength--; - if(0x68 == buf[i]) - pPortParam->m_iRevStatus++; - else - pPortParam->m_iRevStatus = 18; - break; - case 4: // 正确接收数据 - pPortParam->m_iNeedRevLength--; - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - if(pPortParam->m_iNeedRevLength > 0) - break; - if(buf[i] != 0x16) - { - pPortParam->m_iRevStatus=18; - break; - } - - if(CheckShxyProtocolLpcError(pPortParam->m_au8RecvBuf, pPortParam->m_iRecvLen) == TRUE) - { - ShxyProtocolDataProcess(devno); - pPortParam->m_iRevStatus = 0; - pPortParam->RevCmdFlag = 1; - } - else - { - pPortParam->m_iRevStatus = 0; - } - pPortParam->m_iRecvLen = 0; - break; - case 255:// 错误接收数据 - default: - if(buf[i] == 0x68) - { - pPortParam->m_iRevStatus = 1; - pPortParam->m_iRecvLen = 1; - pPortParam->m_au8RecvBuf[0] = buf[i]; - } - else if(buf[i] == 0x16) - { - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - pPortParam->m_iRevStatus = 0; - pPortParam->m_iRecvLen = 0; - } - else - { - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - 
if(pPortParam->m_iRecvLen > 200) - { - pPortParam->m_iRecvLen = 0; - } - } - break; - } - } -} + if (0x68 == msg[0]) + { + if (msg[0] != msg[3]) + return bRetval; + if (msg[len - 1] != 0x16) + return bRetval; + if (msg[1] != msg[2]) + return bRetval; + iCheckLen = msg[1]; + if (CalLpc(&msg[4], iCheckLen + 2) != msg[len - 2]) + return bRetval; + bRetval = 1; + } + return bRetval; + } + + /********************************************************************************* + 上海欣影传感器协议数据处理 + **********************************************************************************/ + void ShxyProtocolDataProcess(int devno) + { + float fvalue, fcorvalue, *fvalua, frnb/*, fwind*/; + uint16_t uDevAddr; + uint8_t cmdidx; + int i, j, aipnt, datanum; + SERIAL_DEV_DEF *pPortParam; + SIO_PARAM_SERIAL_DEF *curserial; + char szbuf[64]; + + pPortParam = &srdt.ms_dev[devno]; + curserial = &serialport[devparam[devno].commid]; + + //取出装置地址,开始处理地址+++ + if (0x02 == curserial->m_au8RecvBuf[5]) + { + devparam[devno].devaddr = curserial->m_au8RecvBuf[4]; + return; + } -//******************************************************************************** -// 检查检验和是否正确 -//******************************************************************************** -int CheckShxyProtocolLpcError( u_char* msg, int len ) -{ - int bRetval = FALSE; - int iCheckLen; - - if(0x68 == msg[0]) - { - if ( msg[0] != msg[3] ) - return bRetval; - if ( msg[len-1] != 0x16 ) - return bRetval; - if ( msg[1] != msg[2] ) - return bRetval; - iCheckLen = msg[1]; - if ( CalLpc( &msg[4], iCheckLen+2 ) != msg[len-2] ) - return bRetval; - bRetval = TRUE; - } - return bRetval; -} + cmdidx = curserial->m_au8RecvBuf[5]; + aipnt = pPortParam->SameTypeDevIdx; + uDevAddr = curserial->m_au8RecvBuf[4]; -/********************************************************************************* - 上海欣影传感器协议数据处理 -**********************************************************************************/ -void ShxyProtocolDataProcess( int devno) -{ - float fvalue, 
fcorvalue, *fvalua, frnb/*, fwind*/; - WORD uDevAddr; - BYTE cmdidx; - int i, j, aipnt, datanum; - SERIAL_DEV_DEF *pPortParam; - SIO_PARAM_SERIAL_DEF *curserial; - char szbuf[64]; - - pPortParam = &srdt.ms_dev[devno]; - curserial = &serialport[devparam[devno].commid]; - - //取出装置地址,开始处理地址+++ - if(0x02 == curserial->m_au8RecvBuf[5]) - { - devparam[devno].devaddr = curserial->m_au8RecvBuf[4]; - return; - } - - cmdidx = curserial->m_au8RecvBuf[5]; - aipnt = pPortParam->SameTypeDevIdx; - uDevAddr = curserial->m_au8RecvBuf[4]; - - fvalua = &fvalue; - memset(szbuf, 0, sizeof(szbuf)); - if(0x06 == cmdidx) - { - if(0x08 !=curserial->m_au8RecvBuf[1]) - return; - pPortParam->recvdatacnt++; - if(pPortParam->recvdatacnt < 2) - return; - // ++++++++++++++++++++++++++++ - //g_SelfTest.SensorsFault |= (0x800<m_au8RecvBuf[9]; - *((BYTE*)fvalua+1) = curserial->m_au8RecvBuf[8]; - *((BYTE*)fvalua+2) = curserial->m_au8RecvBuf[7]; - *((BYTE*)fvalua+3) = curserial->m_au8RecvBuf[6]; - if((fvalue < -59) ||(fvalue > 59)) - { - frnb = (GeneratingRandomNumber()%101-50)/1000.0; - pPortParam->aiValue[0].EuValue *= (1+frnb); - } - else - pPortParam->aiValue[0].EuValue = fvalue; - pPortParam->aiValue[0].AiState = SER_SAMPLE; - //slantpntmsg[aipnt][0].EuValue = fvalue*slantpntmsg[aipnt][0].AiParam.fFactor\ + fvalua = &fvalue; + memset(szbuf, 0, sizeof(szbuf)); + if (0x06 == cmdidx) + { + if (0x08 != curserial->m_au8RecvBuf[1]) + return; + pPortParam->recvdatacnt++; + if (pPortParam->recvdatacnt < 2) + return; + // ++++++++++++++++++++++++++++ + //g_SelfTest.SensorsFault |= (0x800<m_au8RecvBuf[9]; + *((uint8_t*)fvalua + 1) = curserial->m_au8RecvBuf[8]; + *((uint8_t*)fvalua + 2) = curserial->m_au8RecvBuf[7]; + *((uint8_t*)fvalua + 3) = curserial->m_au8RecvBuf[6]; + if ((fvalue < -59) || (fvalue > 59)) + { + frnb = (GeneratingRandomNumber() % 101 - 50) / 1000.0; + pPortParam->aiValue[0].EuValue *= (1 + frnb); + } + else + pPortParam->aiValue[0].EuValue = fvalue; + pPortParam->aiValue[0].AiState = 
SER_SAMPLE; + //slantpntmsg[aipnt][0].EuValue = fvalue*slantpntmsg[aipnt][0].AiParam.fFactor\ // +slantpntmsg[aipnt][0].AiParam.EuValueDelta; - //slantpntmsg[aipnt][0].AiState = 1; - //if ((gDisSunRain & 0x20) == 0x20) - { - sprintf(szbuf, "倾角ID:%d slantangle X=%0.3f ", devparam[devno].devaddr, fvalue); - //DebugLog(devparam[devno].commid, szbuf, 'V'); - } - //XslantSec[aipnt][srdt.SectimesamplingCnt[0]] = (short)slantpntmsg[aipnt][0].EuValue; - //srdt.SectimesamplingCnt[0] += 1; - - *(BYTE*)fvalua = curserial->m_au8RecvBuf[13]; - *((BYTE*)fvalua+1) = curserial->m_au8RecvBuf[12]; - *((BYTE*)fvalua+2) = curserial->m_au8RecvBuf[11]; - *((BYTE*)fvalua+3) = curserial->m_au8RecvBuf[10]; - //if ((gDisSunRain & 0x20) == 0x20) - { - sprintf(szbuf, "%sY =%0.3f ", szbuf, fvalue); - DebugLog(devparam[devno].commid, szbuf, 'V'); - } - if((fvalue < -59) ||(fvalue > 59)) - { - frnb = (GeneratingRandomNumber()%101-50)/1000.0; - pPortParam->aiValue[1].EuValue *= (1+frnb); - //slantpntmsg[aipnt][1].EuValue *= (1+frnb); - } - else - pPortParam->aiValue[1].EuValue = fvalue; - pPortParam->aiValue[1].AiState = SER_SAMPLE; - /*slantpntmsg[aipnt][1].EuValue = fvalue*slantpntmsg[aipnt][1].AiParam.fFactor\ + //slantpntmsg[aipnt][0].AiState = 1; + //if ((gDisSunRain & 0x20) == 0x20) + { + sprintf(szbuf, "倾角ID:%d slantangle X=%0.3f ", devparam[devno].devaddr, fvalue); + //DebugLog(devparam[devno].commid, szbuf, 'V'); + } + //XslantSec[aipnt][srdt.SectimesamplingCnt[0]] = (short)slantpntmsg[aipnt][0].EuValue; + //srdt.SectimesamplingCnt[0] += 1; + + *(uint8_t*)fvalua = curserial->m_au8RecvBuf[13]; + *((uint8_t*)fvalua + 1) = curserial->m_au8RecvBuf[12]; + *((uint8_t*)fvalua + 2) = curserial->m_au8RecvBuf[11]; + *((uint8_t*)fvalua + 3) = curserial->m_au8RecvBuf[10]; + //if ((gDisSunRain & 0x20) == 0x20) + { + sprintf(szbuf, "%sY =%0.3f ", szbuf, fvalue); + DebugLog(devparam[devno].commid, szbuf, 'V'); + } + if ((fvalue < -59) || (fvalue > 59)) + { + frnb = (GeneratingRandomNumber() % 101 - 50) 
/ 1000.0; + pPortParam->aiValue[1].EuValue *= (1 + frnb); + //slantpntmsg[aipnt][1].EuValue *= (1+frnb); + } + else + pPortParam->aiValue[1].EuValue = fvalue; + pPortParam->aiValue[1].AiState = SER_SAMPLE; + /*slantpntmsg[aipnt][1].EuValue = fvalue*slantpntmsg[aipnt][1].AiParam.fFactor\ +slantpntmsg[aipnt][1].AiParam.EuValueDelta; - slantpntmsg[aipnt][1].AiState = 1;*/ - //YslantSec[aipnt][srdt.SectimesamplingCnt[1]] = (short)slantpntmsg[aipnt][1].EuValue; - srdt.SectimesamplingCnt[1] += 1; - } - datanum = curserial->m_au8RecvBuf[6]; - if((0x08 != cmdidx) && (0x09 != cmdidx)) - return; - - for(i = 0, j=7; (im_au8RecvBuf[1]); i++, j+=5 ) - { - if(0x08 == cmdidx) - fvalue = (curserial->m_au8RecvBuf[j+1]<<24)+(curserial->m_au8RecvBuf[j+2]<<16) - +(curserial->m_au8RecvBuf[j+3]<<8)+curserial->m_au8RecvBuf[j+4]; - else - { - *(BYTE*)fvalua = curserial->m_au8RecvBuf[j+4]; - *((BYTE*)fvalua+1) = curserial->m_au8RecvBuf[j+3]; - *((BYTE*)fvalua+2) = curserial->m_au8RecvBuf[j+2]; - *((BYTE*)fvalua+3) = curserial->m_au8RecvBuf[j+1]; - } - switch(curserial->m_au8RecvBuf[j]) - { - case 1: /*温度*/ - if((fvalue < -40) ||(fvalue > 85)) - { - frnb = (GeneratingRandomNumber()%101-50)/1000.0; - pPortParam->aiValue[AirTempNo].EuValue *= (1+frnb); - weatherpntmsg[AirTempNo].EuValue *= (1+frnb); - //weatherpntmsg[AirTempNo].AiState = SER_SAMPLE; - } - else - { - pPortParam->aiValue[AirTempNo].EuValue = fvalue;/*pPortParam->aiValue[0].AiParam.fFactor + pPortParam->aiValue[0].AiParam.EuValueDelta;*/ - weatherpntmsg[AirTempNo].EuValue = fvalue;/*weatherpntmsg[AirTempNo].AiParam.fFactor + weatherpntmsg[AirTempNo].AiParam.EuValueDelta;*/ - } - pPortParam->aiValue[AirTempNo].AiState = SER_SAMPLE; - weatherpntmsg[AirTempNo].AiState = SER_SAMPLE; - //g_SelfTest.SensorsFault |= (0x01); - //if ((gDisSunRain & 0x80) == 0x80) - { - sprintf(szbuf, "ID:%d 温度:%0.3f ", devparam[devno].devaddr, fvalue); - //DebugLog(devparam[devno].commid, szbuf, 'V'); - } - break; - case 2: /*气压*/ - if((fvalue < 550) 
||(fvalue > 1060)) - { - frnb = (GeneratingRandomNumber()%41-20)/10000.0; - pPortParam->aiValue[AtmosNo].EuValue *= (1+frnb); - weatherpntmsg[AtmosNo].EuValue *= (1+frnb); - } - else - { - pPortParam->aiValue[AtmosNo].EuValue = fvalue;/*pPortParam->aiValue[5].AiParam.fFactor + pPortParam->aiValue[5].AiParam.EuValueDelta;*/ - weatherpntmsg[AtmosNo].EuValue = fvalue;/*weatherpntmsg[AtmosNo].AiParam.fFactor + weatherpntmsg[AtmosNo].AiParam.EuValueDelta;*/ - } - pPortParam->aiValue[AtmosNo].AiState = SER_SAMPLE; - weatherpntmsg[AtmosNo].AiState = SER_SAMPLE; - //g_SelfTest.SensorsFault |= (0x10); - //if ((gDisSunRain & 0x80) == 0x80) - { - sprintf(szbuf, "气压:%0.3f ", fvalue); - DebugLog(devparam[devno].commid, szbuf, 'V'); - } - break; - case 3: /*湿度*/ - if((fvalue < 0) ||(fvalue > 100)) - { - frnb = (GeneratingRandomNumber()%41-20)/1000.0; - pPortParam->aiValue[HumidityNo].EuValue *= (1+frnb); - weatherpntmsg[HumidityNo].EuValue *= (1+frnb); - } - else - { - pPortParam->aiValue[HumidityNo].EuValue = fvalue;/*pPortParam->aiValue[1].AiParam.fFactor + pPortParam->aiValue[1].AiParam.EuValueDelta;*/ - weatherpntmsg[HumidityNo].EuValue = fvalue;/*weatherpntmsg[HumidityNo].AiParam.fFactor + weatherpntmsg[HumidityNo].AiParam.EuValueDelta;*/ - } - pPortParam->aiValue[HumidityNo].AiState = SER_SAMPLE; - weatherpntmsg[HumidityNo].AiState = SER_SAMPLE; - //g_SelfTest.SensorsFault |= (0x02); - //if ((gDisSunRain & 0x80) == 0x80) - { - sprintf(szbuf, "%s湿度:%0.3f ", szbuf, fvalue); - DebugLog(devparam[devno].commid, szbuf, 'V'); - } - break; - case 4: /*雨量*/ - break; - case 5: /*日照*/ - break; - case 6: /*风速*/ - if((fvalue < 0) ||(fvalue > 80)) - { - frnb = (GeneratingRandomNumber()%41-20)/1000.0; - pPortParam->aiValue[WindSpeedNo].EuValue *= (1+frnb); - weatherpntmsg[WindSpeedNo].EuValue *= (1+frnb); - } - else - { - pPortParam->aiValue[WindSpeedNo].EuValue = fvalue;/*pPortParam->aiValue[2].AiParam.fFactor + pPortParam->aiValue[2].AiParam.EuValueDelta;*/ - 
weatherpntmsg[WindSpeedNo].EuValue = fvalue;/*weatherpntmsg[WindSpeedNo].AiParam.fFactor + weatherpntmsg[WindSpeedNo].AiParam.EuValueDelta;*/ - } - pPortParam->aiValue[WindSpeedNo].AiState = SER_SAMPLE; - weatherpntmsg[WindSpeedNo].AiState = SER_SAMPLE; - //g_SelfTest.SensorsFault |= (0x04); - //if ((gDisSunRain & 0x10) == 0x10) - { - //fwind = fvalue/1000*0.95; - //if(fvalue/1000 > 25) - // fwind -= 1.2; - //sprintf(szbuf, "风速:%0.3f ", fwind); - sprintf(szbuf, "ID:%d 风速:%0.3f ", devparam[devno].devaddr, fvalue); - } - break; - case 7: /*风向*/ - if((fvalue/1000 < 0) ||(fvalue/1000 > 359.99)) - { - frnb = (GeneratingRandomNumber()%41-20)/1000.0; - pPortParam->aiValue[WindDirectionNo].EuValue *= (1+frnb); - weatherpntmsg[WindDirectionNo].EuValue *= (1+frnb); - } - else - { - pPortParam->aiValue[WindDirectionNo].EuValue = fvalue;/*pPortParam->aiValue[3].AiParam.fFactor + pPortParam->aiValue[3].AiParam.EuValueDelta;*/ - weatherpntmsg[WindDirectionNo].EuValue = fvalue;/*weatherpntmsg[WindDirectionNo].AiParam.fFactor + weatherpntmsg[WindDirectionNo].AiParam.EuValueDelta;*/ - } - pPortParam->aiValue[WindDirectionNo].AiState = SER_SAMPLE; - weatherpntmsg[WindDirectionNo].AiState = SER_SAMPLE; - //g_SelfTest.SensorsFault |= (0x08); - //if ((gDisSunRain & 0x10) == 0x10) - { - sprintf(szbuf, "%s 风向:%0.3f ", szbuf, fvalue); - DebugLog(devparam[devno].commid, szbuf, 'V'); - } - break; - case 8: /*拉力*/ - pPortParam->recvdatacnt++; - if(pPortParam->recvdatacnt < 2) - break; - - pPortParam->aiValue[0].EuValue = fvalue;/*pPortParam->aiValue[0].AiParam.fFactor\ + slantpntmsg[aipnt][1].AiState = 1;*/ + //YslantSec[aipnt][srdt.SectimesamplingCnt[1]] = (short)slantpntmsg[aipnt][1].EuValue; + srdt.SectimesamplingCnt[1] += 1; + } + datanum = curserial->m_au8RecvBuf[6]; + if ((0x08 != cmdidx) && (0x09 != cmdidx)) + return; + + for (i = 0, j = 7; (i < datanum) && (j < 6 + curserial->m_au8RecvBuf[1]); i++, j += 5) + { + if (0x08 == cmdidx) + fvalue = (curserial->m_au8RecvBuf[j + 1] << 24) + 
(curserial->m_au8RecvBuf[j + 2] << 16) + + (curserial->m_au8RecvBuf[j + 3] << 8) + curserial->m_au8RecvBuf[j + 4]; + else + { + *(uint8_t*)fvalua = curserial->m_au8RecvBuf[j + 4]; + *((uint8_t*)fvalua + 1) = curserial->m_au8RecvBuf[j + 3]; + *((uint8_t*)fvalua + 2) = curserial->m_au8RecvBuf[j + 2]; + *((uint8_t*)fvalua + 3) = curserial->m_au8RecvBuf[j + 1]; + } + switch (curserial->m_au8RecvBuf[j]) + { + case 1: /*温度*/ + if ((fvalue < -40) || (fvalue > 85)) + { + frnb = (GeneratingRandomNumber() % 101 - 50) / 1000.0; + pPortParam->aiValue[AirTempNo].EuValue *= (1 + frnb); + weatherpntmsg[AirTempNo].EuValue *= (1 + frnb); + //weatherpntmsg[AirTempNo].AiState = SER_SAMPLE; + } + else + { + pPortParam->aiValue[AirTempNo].EuValue = fvalue;/*pPortParam->aiValue[0].AiParam.fFactor + pPortParam->aiValue[0].AiParam.EuValueDelta;*/ + weatherpntmsg[AirTempNo].EuValue = fvalue;/*weatherpntmsg[AirTempNo].AiParam.fFactor + weatherpntmsg[AirTempNo].AiParam.EuValueDelta;*/ + } + pPortParam->aiValue[AirTempNo].AiState = SER_SAMPLE; + weatherpntmsg[AirTempNo].AiState = SER_SAMPLE; + //g_SelfTest.SensorsFault |= (0x01); + //if ((gDisSunRain & 0x80) == 0x80) + { + sprintf(szbuf, "ID:%d 温度:%0.3f ", devparam[devno].devaddr, fvalue); + //DebugLog(devparam[devno].commid, szbuf, 'V'); + } + break; + case 2: /*气压*/ + if ((fvalue < 550) || (fvalue > 1060)) + { + frnb = (GeneratingRandomNumber() % 41 - 20) / 10000.0; + pPortParam->aiValue[AtmosNo].EuValue *= (1 + frnb); + weatherpntmsg[AtmosNo].EuValue *= (1 + frnb); + } + else + { + pPortParam->aiValue[AtmosNo].EuValue = fvalue;/*pPortParam->aiValue[5].AiParam.fFactor + pPortParam->aiValue[5].AiParam.EuValueDelta;*/ + weatherpntmsg[AtmosNo].EuValue = fvalue;/*weatherpntmsg[AtmosNo].AiParam.fFactor + weatherpntmsg[AtmosNo].AiParam.EuValueDelta;*/ + } + pPortParam->aiValue[AtmosNo].AiState = SER_SAMPLE; + weatherpntmsg[AtmosNo].AiState = SER_SAMPLE; + //g_SelfTest.SensorsFault |= (0x10); + //if ((gDisSunRain & 0x80) == 0x80) + { + 
sprintf(szbuf, "气压:%0.3f ", fvalue); + DebugLog(devparam[devno].commid, szbuf, 'V'); + } + break; + case 3: /*湿度*/ + if ((fvalue < 0) || (fvalue > 100)) + { + frnb = (GeneratingRandomNumber() % 41 - 20) / 1000.0; + pPortParam->aiValue[HumidityNo].EuValue *= (1 + frnb); + weatherpntmsg[HumidityNo].EuValue *= (1 + frnb); + } + else + { + pPortParam->aiValue[HumidityNo].EuValue = fvalue;/*pPortParam->aiValue[1].AiParam.fFactor + pPortParam->aiValue[1].AiParam.EuValueDelta;*/ + weatherpntmsg[HumidityNo].EuValue = fvalue;/*weatherpntmsg[HumidityNo].AiParam.fFactor + weatherpntmsg[HumidityNo].AiParam.EuValueDelta;*/ + } + pPortParam->aiValue[HumidityNo].AiState = SER_SAMPLE; + weatherpntmsg[HumidityNo].AiState = SER_SAMPLE; + //g_SelfTest.SensorsFault |= (0x02); + //if ((gDisSunRain & 0x80) == 0x80) + { + sprintf(szbuf, "%s湿度:%0.3f ", szbuf, fvalue); + DebugLog(devparam[devno].commid, szbuf, 'V'); + } + break; + case 4: /*雨量*/ + break; + case 5: /*日照*/ + break; + case 6: /*风速*/ + if ((fvalue < 0) || (fvalue > 80)) + { + frnb = (GeneratingRandomNumber() % 41 - 20) / 1000.0; + pPortParam->aiValue[WindSpeedNo].EuValue *= (1 + frnb); + weatherpntmsg[WindSpeedNo].EuValue *= (1 + frnb); + } + else + { + pPortParam->aiValue[WindSpeedNo].EuValue = fvalue;/*pPortParam->aiValue[2].AiParam.fFactor + pPortParam->aiValue[2].AiParam.EuValueDelta;*/ + weatherpntmsg[WindSpeedNo].EuValue = fvalue;/*weatherpntmsg[WindSpeedNo].AiParam.fFactor + weatherpntmsg[WindSpeedNo].AiParam.EuValueDelta;*/ + } + pPortParam->aiValue[WindSpeedNo].AiState = SER_SAMPLE; + weatherpntmsg[WindSpeedNo].AiState = SER_SAMPLE; + //g_SelfTest.SensorsFault |= (0x04); + //if ((gDisSunRain & 0x10) == 0x10) + { + //fwind = fvalue/1000*0.95; + //if(fvalue/1000 > 25) + // fwind -= 1.2; + //sprintf(szbuf, "风速:%0.3f ", fwind); + sprintf(szbuf, "ID:%d 风速:%0.3f ", devparam[devno].devaddr, fvalue); + } + break; + case 7: /*风向*/ + if ((fvalue / 1000 < 0) || (fvalue / 1000 > 359.99)) + { + frnb = (GeneratingRandomNumber() % 
41 - 20) / 1000.0; + pPortParam->aiValue[WindDirectionNo].EuValue *= (1 + frnb); + weatherpntmsg[WindDirectionNo].EuValue *= (1 + frnb); + } + else + { + pPortParam->aiValue[WindDirectionNo].EuValue = fvalue;/*pPortParam->aiValue[3].AiParam.fFactor + pPortParam->aiValue[3].AiParam.EuValueDelta;*/ + weatherpntmsg[WindDirectionNo].EuValue = fvalue;/*weatherpntmsg[WindDirectionNo].AiParam.fFactor + weatherpntmsg[WindDirectionNo].AiParam.EuValueDelta;*/ + } + pPortParam->aiValue[WindDirectionNo].AiState = SER_SAMPLE; + weatherpntmsg[WindDirectionNo].AiState = SER_SAMPLE; + //g_SelfTest.SensorsFault |= (0x08); + //if ((gDisSunRain & 0x10) == 0x10) + { + sprintf(szbuf, "%s 风向:%0.3f ", szbuf, fvalue); + DebugLog(devparam[devno].commid, szbuf, 'V'); + } + break; + case 8: /*拉力*/ + pPortParam->recvdatacnt++; + if (pPortParam->recvdatacnt < 2) + break; + + pPortParam->aiValue[0].EuValue = fvalue;/*pPortParam->aiValue[0].AiParam.fFactor\ +pPortParam->aiValue[0].AiParam.EuValueDelta;*/ - //rallypntmsg[aipnt][0].EuValue = fvalue*rallypntmsg[aipnt][0].AiParam.fFactor\ - // +rallypntmsg[aipnt][0].AiParam.EuValueDelta; - pPortParam->aiValue[0].AiState = SER_SAMPLE; - //rallypntmsg[aipnt][0].AiState = 1; - sprintf(szbuf, "地址%d拉力:%0.3fKg ", devparam[devno].devaddr, fvalue); - DebugLog(devparam[devno].commid, szbuf, 'V'); - //} - break; - case 9: /*倾角传感器X轴倾角*/ - if((fvalue < -59) ||(fvalue > 59)) - { - frnb = (GeneratingRandomNumber()%101-50)/1000.0; - pPortParam->aiValue[0].EuValue *= (1+frnb); - //slantpntmsg[aipnt][0].EuValue *= (1+frnb); - } - else - { - pPortParam->aiValue[0].EuValue = fvalue;/*pPortParam->aiValue[0].AiParam.fFactor\ - +pPortParam->aiValue[0].AiParam.EuValueDelta;*/ - //slantpntmsg[aipnt][0].EuValue = fvalue*slantpntmsg[aipnt][0].AiParam.fFactor\ + //rallypntmsg[aipnt][0].EuValue = fvalue*rallypntmsg[aipnt][0].AiParam.fFactor\ + // +rallypntmsg[aipnt][0].AiParam.EuValueDelta; + pPortParam->aiValue[0].AiState = SER_SAMPLE; + //rallypntmsg[aipnt][0].AiState = 1; + 
sprintf(szbuf, "地址%d拉力:%0.3fKg ", devparam[devno].devaddr, fvalue); + DebugLog(devparam[devno].commid, szbuf, 'V'); + //} + break; + case 9: /*倾角传感器X轴倾角*/ + if ((fvalue < -59) || (fvalue > 59)) + { + frnb = (GeneratingRandomNumber() % 101 - 50) / 1000.0; + pPortParam->aiValue[0].EuValue *= (1 + frnb); + //slantpntmsg[aipnt][0].EuValue *= (1+frnb); + } + else + { + pPortParam->aiValue[0].EuValue = fvalue;/*pPortParam->aiValue[0].AiParam.fFactor\ + +pPortParam->aiValue[0].AiParam.EuValueDelta;*/ + //slantpntmsg[aipnt][0].EuValue = fvalue*slantpntmsg[aipnt][0].AiParam.fFactor\ //+slantpntmsg[aipnt][0].AiParam.EuValueDelta; - } - pPortParam->aiValue[0].AiState = SER_SAMPLE; - //slantpntmsg[aipnt][0].AiState = 1; - sprintf(szbuf, "倾角ID:%d slantangle X=%0.3f ", devparam[devno].devaddr, fvalue); - break; - case 10: /*倾角传感器Y轴倾角*/ - sprintf(szbuf, "%s Y =%0.3f ", szbuf, fvalue); - DebugLog(devparam[devno].commid, szbuf, 'V'); - - if((fvalue < -59) ||(fvalue > 59)) - { - frnb = (GeneratingRandomNumber()%101-50)/1000.0; - pPortParam->aiValue[1].EuValue *= (1+frnb); - } - else - { - pPortParam->aiValue[1].EuValue = fvalue;/*pPortParam->aiValue[1].AiParam.fFactor\ - +pPortParam->aiValue[1].AiParam.EuValueDelta;*/ - } - pPortParam->aiValue[1].AiState = SER_SAMPLE; - //slantpntmsg[aipnt][1].AiState = 1; - break; - - } - } -} + } + pPortParam->aiValue[0].AiState = SER_SAMPLE; + //slantpntmsg[aipnt][0].AiState = 1; + sprintf(szbuf, "倾角ID:%d slantangle X=%0.3f ", devparam[devno].devaddr, fvalue); + break; + case 10: /*倾角传感器Y轴倾角*/ + sprintf(szbuf, "%s Y =%0.3f ", szbuf, fvalue); + DebugLog(devparam[devno].commid, szbuf, 'V'); + + if ((fvalue < -59) || (fvalue > 59)) + { + frnb = (GeneratingRandomNumber() % 101 - 50) / 1000.0; + pPortParam->aiValue[1].EuValue *= (1 + frnb); + } + else + { + pPortParam->aiValue[1].EuValue = fvalue;/*pPortParam->aiValue[1].AiParam.fFactor\ + +pPortParam->aiValue[1].AiParam.EuValueDelta;*/ + } + pPortParam->aiValue[1].AiState = SER_SAMPLE; + 
//slantpntmsg[aipnt][1].AiState = 1; + break; -void delete_old_files(const char *path, int days) -{ - struct stat file_stat; - struct tm *file_tm; - time_t now = time(NULL); - DIR *dir = opendir(path); - struct dirent *entry; - char szbuf[1024]; - char fullpath[256]; - - memset(szbuf, 0, sizeof(szbuf)); - if (!dir) - { - sprintf(szbuf, "delete_old_files opendir %s error ", path); - DebugLog(8, szbuf, 'E'); - return; - } - - while ((entry = readdir(dir))) - { - memset(szbuf, 0, sizeof(szbuf)); - if (entry->d_type == DT_REG) - { // 只处理普通文件 - snprintf(fullpath, sizeof(fullpath), "%s/%s", path, entry->d_name); - - if (stat(fullpath, &file_stat) == -1) - { - perror("stat"); - sprintf(szbuf, "stat"); - DebugLog(8, szbuf, 'E'); - continue; - } - - localtime_r(&(file_stat.st_mtime), file_tm); - //file_tm = localtime(&(file_stat.st_mtime)); - - if (difftime(now, mktime(file_tm)) > days * 24 * 60 * 60) - { - if (unlink(fullpath) == -1) - { // 删除文件 - perror("unlink"); - } - } - } - } - - closedir(dir); -} -/********************************************************************************* - 把16进制和10进制ASCII字符串转换成int整数 -*********************************************************************************/ -int ATOI(char *buf) -{ - int i, ilen, iRetVal; - - if(NULL == buf) - return 0; - ilen = strlen(buf); - if(ilen > 2) - { - if((buf[0]=='0') && ((buf[1]=='x') || (buf[1]=='X'))) - { - iRetVal = 0; - for(i=2; id_type == DT_REG) + { // 只处理普通文件 + snprintf(fullpath, sizeof(fullpath), "%s/%s", path, entry->d_name); + + if (stat(fullpath, &file_stat) == -1) + { + perror("stat"); + sprintf(szbuf, "stat"); + DebugLog(8, szbuf, 'E'); + continue; + } + + localtime_r(&(file_stat.st_mtime), file_tm); + //file_tm = localtime(&(file_stat.st_mtime)); + + if (difftime(now, mktime(file_tm)) > days * 24 * 60 * 60) + { + if (unlink(fullpath) == -1) + { // 删除文件 + perror("unlink"); + } + } + } + } + + closedir(dir); + } + /********************************************************************************* 
+ 把16进制和10进制ASCII字符串转换成int整数 + *********************************************************************************/ + int ATOI(char *buf) + { + int i, ilen, iRetVal; + + if (NULL == buf) + return 0; + ilen = strlen(buf); + if (ilen > 2) + { + if ((buf[0] == '0') && ((buf[1] == 'x') || (buf[1] == 'X'))) + { + iRetVal = 0; + for (i = 2; i < ilen; i++) + { + iRetVal = (iRetVal << 4) + HexCharToInt(buf[i]); + } + } + else + { + iRetVal = atoi(buf); + } + } + else + { + iRetVal = atoi(buf); + } + return iRetVal; + } #if 0 -// 控制关闭传感器电源 -void Gm_CtrlCloseSensorsPower(int devidx) -{ - if((devidx<0) || (devidx > MAX_SERIAL_DEV_NUM-1)) - return; - srdt.ms_dev[devidx].uOpenPowerFlag = CLOSEPOWER; -} + // 控制关闭传感器电源 + void Gm_CtrlCloseSensorsPower(int devidx) + { + if ((devidx < 0) || (devidx > MAX_SERIAL_DEV_NUM - 1)) + return; + srdt.ms_dev[devidx].uOpenPowerFlag = CLOSEPOWER; + } -// 检查传感器电源是否应该关闭或打开 -void Gm_CheckSensorsPower(void) -{ - int i, j; - - for(i=0; i srdt.ms_dev[i].ProtocolIdx) ||(INVALID_PROTOCOL<=srdt.ms_dev[i].ProtocolIdx)) - continue; - if((PELCO_P_PROTOCOL == srdt.ms_dev[i].ProtocolIdx) ||(PELCO_D_PROTOCOL == srdt.ms_dev[i].ProtocolIdx) - ||( SERIALCAMERA_PROTOCOL == srdt.ms_dev[i].ProtocolIdx)) - continue; - // 需要传感器处于上电状态 - for(j=0; j rand()%(m-n+1)+n*/ - ictime = (int)time(NULL); - srand((DWORD)ictime); - randomdate = rand(); - return randomdate; -} + for (i = 0; i < MAX_SERIAL_DEV_NUM - 2; i++) + { + if ((1 > srdt.ms_dev[i].ProtocolIdx) || (INVALID_PROTOCOL <= srdt.ms_dev[i].ProtocolIdx)) + continue; + if ((PELCO_P_PROTOCOL == srdt.ms_dev[i].ProtocolIdx) || (PELCO_D_PROTOCOL == srdt.ms_dev[i].ProtocolIdx) + || (SERIALCAMERA_PROTOCOL == srdt.ms_dev[i].ProtocolIdx)) + continue; + // 需要传感器处于上电状态 + for (j = 0; j < MAX_SERIAL_DEV_NUM - 2; j++) + { + if ((srdt.ms_dev[i].PowerPort == srdt.ms_dev[j].PowerPort) && (srdt.ms_dev[j].uOpenPowerFlag == OPENPOWER)) + break; + } + if (j < MAX_SERIAL_DEV_NUM - 2) + continue; -/* 串口启动接口函数 开始*/ -void Collect_sensor_data() 
-{ + } + } +#endif + + int GeneratingRandomNumber(void) + { + int ictime, randomdate; + /* 生成随机数n-m -> rand()%(m-n+1)+n*/ + ictime = (int)time(NULL); + srand((uint32_t)ictime); + randomdate = rand(); + return randomdate; + } + + /* 串口启动接口函数 开始*/ + void Collect_sensor_data() + { #if 0 - int i; - - for(i=0; i datano) || (datano > OpticalRadiationNo)) - return -1; - data->EuValue = weatherpntmsg[datano].EuValue; - data->AiState = weatherpntmsg[datano].AiState; - if((SER_SAMPLEFAIL == data->AiState) || (SAMPLINGSUCCESS == data->AiState)) - { - weatherpntmsg[datano].AiState = SER_IDLE; - return 2; - } - return 1; -} - -int GetAirTempData(Data_DEF *airt) -{ - if(NULL == airt) - return -1; - airt->EuValue = weatherpntmsg[AirTempNo].EuValue; - airt->AiState = weatherpntmsg[AirTempNo].AiState; - if((SER_SAMPLEFAIL == airt->AiState) || (SAMPLINGSUCCESS == airt->AiState)) - { - weatherpntmsg[AirTempNo].AiState = SER_IDLE; - return 2; - } - return 1; -} - -int GetHumidityData(Data_DEF *airt) -{ - if(NULL == airt) - return -1; - airt->EuValue = weatherpntmsg[HumidityNo].EuValue; - airt->AiState = weatherpntmsg[HumidityNo].AiState; - if((SER_SAMPLEFAIL == airt->AiState) || (SAMPLINGSUCCESS == airt->AiState)) - { - weatherpntmsg[HumidityNo].AiState = SER_IDLE; - return 2; - } - return 1; -} - -int GetWindSpeedData(Data_DEF *airt) -{ - if(NULL == airt) - return -1; - airt->EuValue = weatherpntmsg[WindSpeedNo].EuValue; - airt->AiState = weatherpntmsg[WindSpeedNo].AiState; - if((SER_SAMPLEFAIL == airt->AiState) || (SAMPLINGSUCCESS == airt->AiState)) - { - weatherpntmsg[WindSpeedNo].AiState = SER_IDLE; - return 2; - } - return 1; -} - -int GetWindDirectionData(Data_DEF *airt) -{ - if(NULL == airt) - return -1; - airt->EuValue = weatherpntmsg[WindDirectionNo].EuValue; - airt->AiState = weatherpntmsg[WindDirectionNo].AiState; - if((SER_SAMPLEFAIL == airt->AiState) || (SAMPLINGSUCCESS == airt->AiState)) - { - weatherpntmsg[WindDirectionNo].AiState = SER_IDLE; - return 2; - } - return 1; 
-} - -int GetRainfallData(Data_DEF *airt) -{ - if(NULL == airt) - return -1; - airt->EuValue = weatherpntmsg[RainfallNo].EuValue; - airt->AiState = weatherpntmsg[RainfallNo].AiState; - if((SER_SAMPLEFAIL == airt->AiState) || (SAMPLINGSUCCESS == airt->AiState)) - { - weatherpntmsg[RainfallNo].AiState = SER_IDLE; - return 2; - } - return 1; -} - -int GetAtmosData(Data_DEF *airt) -{ - if(NULL == airt) - return -1; - airt->EuValue = weatherpntmsg[AtmosNo].EuValue; - airt->AiState = weatherpntmsg[AtmosNo].AiState; - if((SER_SAMPLEFAIL == airt->AiState) || (SAMPLINGSUCCESS == airt->AiState)) - { - weatherpntmsg[AtmosNo].AiState = SER_IDLE; - return 2; - } - return 1; -} - -int GetOpticalRadiationData(Data_DEF *airt) -{ - if(NULL == airt) - return -1; - airt->EuValue = weatherpntmsg[OpticalRadiationNo].EuValue; - airt->AiState = weatherpntmsg[OpticalRadiationNo].AiState; - if((SER_SAMPLEFAIL == airt->AiState) || (SAMPLINGSUCCESS == airt->AiState)) - { - weatherpntmsg[OpticalRadiationNo].AiState = SER_IDLE; - return 2; - } - return 1; -} - -int GetPullValue(int devno, Data_DEF *data) -{ - if(NULL == data) - return -1; - if((0>devno)||(MAX_SERIAL_DEV_NUM < devno)) - return -1; - if(RALLY_PROTOCOL != devparam[devno].ProtocolIdx) - return -1; - data->EuValue = srdt.ms_dev[devno].aiValue[0].EuValue; - data->AiState = srdt.ms_dev[devno].aiValue[0].AiState; - if((SER_SAMPLEFAIL == data->AiState) || (SAMPLINGSUCCESS == data->AiState)) - { - srdt.ms_dev[devno].aiValue[0].AiState = SER_IDLE; - return 2; - } - return 1; -} - -int GetAngleValue(int devno, Data_DEF *data, int Xy) -{ - if(NULL == data) - return -1; - if((0>devno)||(MAX_SERIAL_DEV_NUM < devno)) - return -1; - if(SLANT_PROTOCOL != devparam[devno].ProtocolIdx) - return -1; - if((0 > Xy) || (1 < Xy)) - return -1; - - data->EuValue = srdt.ms_dev[devno].aiValue[Xy].EuValue; - data->AiState = srdt.ms_dev[devno].aiValue[Xy].AiState; - if((SER_SAMPLEFAIL == data->AiState) || (SAMPLINGSUCCESS == data->AiState)) - { - 
srdt.ms_dev[devno].aiValue[Xy].AiState = SER_IDLE; - return 2; - } - return 1; -} + for (i = 0; i < MAX_SERIAL_DEV_NUM; i++) + { + devparam[i].IsNoInsta = sensorParam[i].IsNoInsta; + if (0 == sensorParam[i].IsNoInsta) + continue; + devparam[i].ProtocolIdx = sensorParam[i].SensorsType; + devparam[i].devaddr = sensorParam[i].devaddr; + //baudrate = getBaudrate(sensorParam[i].baudrate); + devparam[i].baudrate = getBaudrate(sensorParam[i].baudrate); + devparam[i].databit = sensorParam[i].databit; + devparam[i].stopbit = (int)(sensorParam[i].stopbit * 10); + devparam[i].CameraChannel = sensorParam[i].CameraChannel; + devparam[i].Phase = sensorParam[i].Phase; + memset(devparam[i].pathname, 0, sizeof(devparam[i].pathname)); + memmove(devparam[i].pathname, sensorParam[i].pathname, sizeof(devparam[i].pathname)); + } -int GetImage(int devno, IMAGE_DEF *photo) -{ - if(NULL == photo) - return -1; - if((0>devno)||(MAX_SERIAL_DEV_NUM < devno)) - return -1; - if((PELCO_D_PROTOCOL != devparam[devno].ProtocolIdx) && (PELCO_P_PROTOCOL != devparam[devno].ProtocolIdx) &&(SERIALCAMERA_PROTOCOL != devparam[devno].ProtocolIdx)) - return -1; - photo->presetno = srdt.ms_dev[devno].image.presetno; - photo->phototime = srdt.ms_dev[devno].image.phototime; - memset(photo->photoname, 0, sizeof(photo->photoname)); - memmove(photo->photoname, srdt.ms_dev[devno].image.photoname, sizeof(photo->photoname)); - photo->imagelen = srdt.ms_dev[devno].image.imagelen; - photo->state = srdt.ms_dev[devno].image.state; - if((SER_SAMPLEFAIL == photo->state) || (SAMPLINGSUCCESS == photo->state)) - { - srdt.ms_dev[devno].image.state = SER_IDLE; - return 2; - } - return 1; -} -/* 数据和图片采集数据返回函数 结束*/ \ No newline at end of file + FindDevUseSerialCommNo(); + for (;;) + { + Gm_CtrlPtzCmd(1, P_MOVE_LEFT); + sleep(3); + GM_AllSerialComRecv(); + Gm_CtrlPtzCmd(1, Cmd_Cancel); + sleep(10); + } +#endif + srdt.bImageSize = bImageSize; + srdt.presetno = presetno; + srdt.sendphototime = phototime; + 
GM_StartSerialCameraPhoto(1, cmdidx); + } + /* 串口启动接口函数 结束*/ + + /* 数据和图片采集数据返回函数 开始*/ + int GetWeatherData(Data_DEF *data, int datano) + { + int i; + + if (NULL == data) + return -1; + if ((AirTempNo > datano) || (datano > OpticalRadiationNo)) + return -1; + data->EuValue = weatherpntmsg[datano].EuValue; + data->AiState = weatherpntmsg[datano].AiState; + if ((SER_SAMPLEFAIL == data->AiState) || (SAMPLINGSUCCESS == data->AiState)) + { + weatherpntmsg[datano].AiState = SER_IDLE; + return 2; + } + return 1; + } + + int GetAirTempData(Data_DEF *airt) + { + if (NULL == airt) + return -1; + airt->EuValue = weatherpntmsg[AirTempNo].EuValue; + airt->AiState = weatherpntmsg[AirTempNo].AiState; + if ((SER_SAMPLEFAIL == airt->AiState) || (SAMPLINGSUCCESS == airt->AiState)) + { + weatherpntmsg[AirTempNo].AiState = SER_IDLE; + return 2; + } + return 1; + } + + int GetHumidityData(Data_DEF *airt) + { + if (NULL == airt) + return -1; + airt->EuValue = weatherpntmsg[HumidityNo].EuValue; + airt->AiState = weatherpntmsg[HumidityNo].AiState; + if ((SER_SAMPLEFAIL == airt->AiState) || (SAMPLINGSUCCESS == airt->AiState)) + { + weatherpntmsg[HumidityNo].AiState = SER_IDLE; + return 2; + } + return 1; + } + + int GetWindSpeedData(Data_DEF *airt) + { + if (NULL == airt) + return -1; + airt->EuValue = weatherpntmsg[WindSpeedNo].EuValue; + airt->AiState = weatherpntmsg[WindSpeedNo].AiState; + if ((SER_SAMPLEFAIL == airt->AiState) || (SAMPLINGSUCCESS == airt->AiState)) + { + weatherpntmsg[WindSpeedNo].AiState = SER_IDLE; + return 2; + } + return 1; + } + + int GetWindDirectionData(Data_DEF *airt) + { + if (NULL == airt) + return -1; + airt->EuValue = weatherpntmsg[WindDirectionNo].EuValue; + airt->AiState = weatherpntmsg[WindDirectionNo].AiState; + if ((SER_SAMPLEFAIL == airt->AiState) || (SAMPLINGSUCCESS == airt->AiState)) + { + weatherpntmsg[WindDirectionNo].AiState = SER_IDLE; + return 2; + } + return 1; + } + + int GetRainfallData(Data_DEF *airt) + { + if (NULL == airt) + return -1; + 
airt->EuValue = weatherpntmsg[RainfallNo].EuValue; + airt->AiState = weatherpntmsg[RainfallNo].AiState; + if ((SER_SAMPLEFAIL == airt->AiState) || (SAMPLINGSUCCESS == airt->AiState)) + { + weatherpntmsg[RainfallNo].AiState = SER_IDLE; + return 2; + } + return 1; + } + + int GetAtmosData(Data_DEF *airt) + { + if (NULL == airt) + return -1; + airt->EuValue = weatherpntmsg[AtmosNo].EuValue; + airt->AiState = weatherpntmsg[AtmosNo].AiState; + if ((SER_SAMPLEFAIL == airt->AiState) || (SAMPLINGSUCCESS == airt->AiState)) + { + weatherpntmsg[AtmosNo].AiState = SER_IDLE; + return 2; + } + return 1; + } + + int GetOpticalRadiationData(Data_DEF *airt) + { + if (NULL == airt) + return -1; + airt->EuValue = weatherpntmsg[OpticalRadiationNo].EuValue; + airt->AiState = weatherpntmsg[OpticalRadiationNo].AiState; + if ((SER_SAMPLEFAIL == airt->AiState) || (SAMPLINGSUCCESS == airt->AiState)) + { + weatherpntmsg[OpticalRadiationNo].AiState = SER_IDLE; + return 2; + } + return 1; + } + + int GetPullValue(int devno, Data_DEF *data) + { + if (NULL == data) + return -1; + if ((0 > devno) || (MAX_SERIAL_DEV_NUM < devno)) + return -1; + if (RALLY_PROTOCOL != devparam[devno].ProtocolIdx) + return -1; + data->EuValue = srdt.ms_dev[devno].aiValue[0].EuValue; + data->AiState = srdt.ms_dev[devno].aiValue[0].AiState; + if ((SER_SAMPLEFAIL == data->AiState) || (SAMPLINGSUCCESS == data->AiState)) + { + srdt.ms_dev[devno].aiValue[0].AiState = SER_IDLE; + return 2; + } + return 1; + } + + int GetAngleValue(int devno, Data_DEF *data, int Xy) + { + if (NULL == data) + return -1; + if ((0 > devno) || (MAX_SERIAL_DEV_NUM < devno)) + return -1; + if (SLANT_PROTOCOL != devparam[devno].ProtocolIdx) + return -1; + if ((0 > Xy) || (1 < Xy)) + return -1; + + data->EuValue = srdt.ms_dev[devno].aiValue[Xy].EuValue; + data->AiState = srdt.ms_dev[devno].aiValue[Xy].AiState; + if ((SER_SAMPLEFAIL == data->AiState) || (SAMPLINGSUCCESS == data->AiState)) + { + srdt.ms_dev[devno].aiValue[Xy].AiState = SER_IDLE; + 
return 2; + } + return 1; + } + + int GetImage(int devno, IMAGE_DEF *photo) + { + if (NULL == photo) + return -1; + if ((0 > devno) || (MAX_SERIAL_DEV_NUM < devno)) + return -1; + if ((PELCO_D_PROTOCOL != devparam[devno].ProtocolIdx) && (PELCO_P_PROTOCOL != devparam[devno].ProtocolIdx) && (SERIALCAMERA_PROTOCOL != devparam[devno].ProtocolIdx)) + return -1; + photo->presetno = srdt.ms_dev[devno].image.presetno; + photo->phototime = srdt.ms_dev[devno].image.phototime; + memset(photo->photoname, 0, sizeof(photo->photoname)); + memmove(photo->photoname, srdt.ms_dev[devno].image.photoname, sizeof(photo->photoname)); + photo->imagelen = srdt.ms_dev[devno].image.imagelen; + photo->state = srdt.ms_dev[devno].image.state; + if ((SER_SAMPLEFAIL == photo->state) || (SAMPLINGSUCCESS == photo->state)) + { + srdt.ms_dev[devno].image.state = SER_IDLE; + return 2; + } + return 1; + } + /* 数据和图片采集数据返回函数 结束*/ \ No newline at end of file diff --git a/app/src/main/cpp/SensorsProtocol.h b/app/src/main/cpp/SensorsProtocol.h deleted file mode 100644 index 3b40e8bb..00000000 --- a/app/src/main/cpp/SensorsProtocol.h +++ /dev/null @@ -1,467 +0,0 @@ -// -// Created by hyz on 2024/6/5. -// - -#ifndef WEATHERCOMM_H -#define WEATHERCOMM_H - -#include -#include "GPIOControl.h" -#include "termios.h" - -#ifndef DWORD -typedef unsigned int DWORD; -#endif -#ifndef WORD -typedef unsigned short WORD; -#endif -#ifndef BYTE -typedef unsigned char BYTE; -#endif -#ifndef LONG -typedef long long LONG; -#endif -#ifndef FALSE -#define FALSE 0 -#endif -#ifndef TRUE -#define TRUE 1 -#endif - -#define LOBYTE(w) ((unsigned char)(w)) -#define HIBYTE(w) ((unsigned char)(((unsigned short)(w) >> 8) & 0xFF)) - -#define LOWORD(l) ((WORD)(l)) -#define HIWORD(l) ((WORD)((DWORD)(l) >> 16)) - -#define min(a, b) ((a) < (b) ? 
(a) : (b)) -#define MAX_STRING_LEN 32 -#define IOT_PARAM_WRITE 0xAE -#define IOT_PARAM_READ 0xAF - -#define MAX_SERIAL_DEV_NUM 25 /* 最大接串口传感器数量*/ -#define MAX_SERIAL_PORT_NUM 5 -#define MAX_DEV_VALUE_NUM 12 /* 一台装置最大的采样值数量*/ - -#define WEATHER_PROTOCOL 1 /* 温湿度协议序号*/ -#define WIND_PROTOCOL 2 /* 风速风向协议序号*/ -#define SLANT_PROTOCOL 3 /* 倾斜角协议序号*/ -#define RALLY_PROTOCOL 4 /* 拉力协议序号*/ -#define PELCO_P_PROTOCOL 5 /* 摄像机Pelco_P协议序号*/ -#define PELCO_D_PROTOCOL 6 /* 摄像机Pelco_D协议序号*/ -#define SERIALCAMERA_PROTOCOL 8 /* 串口摄像机协议序号*/ -#define RESERVE2_PROTOCOL 17 /* 备用2协议序号*/ -#define RESERVE4_PROTOCOL 19 /* 备用4协议序号*/ -#define RESERVE5_PROTOCOL 20 /* 备用5协议序号*/ -#define INVALID_PROTOCOL 21 /* 无效协议序号*/ - -#define AirTempNo 0 /* 空气温度数据存储序号*/ -#define HumidityNo 1 /* 相对湿度数据存储序号*/ -#define WindSpeedNo 2 /* 风速数据存储序号*/ -#define WindDirectionNo 3 /* 风向数据存储序号*/ -#define RainfallNo 4 /* 雨量数据存储序号*/ -#define AtmosNo 5 /* 大气压数据存储序号*/ -#define OpticalRadiationNo 6 /* 日照(光辐射)数据存储序号*/ - -#define SER_IDLE 0 /* 传感器处于空闲状态,未启动采样*/ -#define SER_SAMPLE 1 /* 正在采样过程中*/ -#define SAMPLINGSUCCESS 2 /* 采样结束,正常读取到数据*/ -#define SER_STARTSAMPLE 3 /* 启动采样*/ -#define SER_SAMPLEFAIL -1 /* 采样失败,未采集到数据,传感器故障或未接*/ -#define PHOTO_SAVE_SUCC 5 /* 图片保存成功*/ - -#define WEATHER_DATA_NUM 8 /* 气象数据最大数量(一般最多是6要素)*/ -#define RALLY_DATA_NUM 2 /* 拉力数据最大数量(一般是1个)*/ -#define SLANTANGLE_DATA_NUM 3 /* 倾角数据最大数量(一般只有X轴和Y轴值)*/ - -#define PTZ_MOVETIME 1 // 云台移动等待时间为1秒 -#define MAX_CHANNEL_NUM 2 /* 视频通道最大通道*/ -#define MAX_PHOTO_FRAME_LEN 1024 /* 图片数据一包最大长度*/ -#define MAX_PHOTO_PACKET_NUM 1024 /* 图片最大包数(图片最大定为1MB)*/ - -#define RECVDATA_MAXLENTH 2048 /* 接收数据缓冲区最大值*/ -#define TIMER_CNT 50 // Poll命令定时器时间 5 ms -#define SENDDATA_MAXLENTH RECVDATA_MAXLENTH /* 正常发送数据缓冲区最大值*/ - -// 摄像机控制命令宏定义 -#define Cmd_Cancel 0x00000000 // 关闭功能 -#define SET_PRESETNO 0x00030000 // 设置预置点 -#define MOVE_TO_PRESETNO 0x00070000 // 调用预置点 - -/* 摄像机PELCO-P控制命令宏定义*/ -#define P_Auto_Scan 0x20000000 /* 自动扫描功能控制(1/0 打开/关闭该功能)*/ -#define P_IRIS_CLOSE 0x08000000 /* 光圈缩小(1 
有效)*/ -#define P_IRIS_OPEN 0x04000000 /* 光圈放大(1 有效)*/ -#define P_FOCUS_NEAR 0x02000000 /* 近距离聚焦(1 有效)*/ -#define P_FOCUS_FAR 0x01000000 /* 远距离聚焦(1 有效)*/ -#define P_ZOOM_WIDE 0x00400000 /* 远离物体(1 有效)*/ -#define P_ZOOM_TELE 0x00200000 /* 接近物体(1 有效)*/ -#define P_MOVE_DOWN 0x0010001f /* 向下移动镜头(1 有效)*/ -#define P_MOVE_UP 0x0008001f /* 向上移动镜头(1 有效)*/ -#define P_MOVE_LEFT 0x00041f00 /* 向左移动镜头(1 有效)*/ -#define P_MOVE_RIGHT 0x00021f00 /* 向右移动镜头(1 有效)*/ - -// 摄像机PELCO-D控制命令宏定义 -#define D_Auto_Scan 0x10000000 /* 自动扫描功能控制(1/0 打开/关闭该功能)*/ -#define D_IRIS_CLOSE 0x04000000 /* 光圈缩小(1 有效)*/ -#define D_IRIS_OPEN 0x02000000 /* 光圈放大(1 有效)*/ -#define D_FOCUS_NEAR 0x01000000 /* 近距离聚焦(1 有效)*/ -#define D_FOCUS_FAR 0x00800000 /* 远距离聚焦(1 有效)*/ -#define D_ZOOM_WIDE 0x00400000 /* 远离物体(1 有效)*/ -#define D_ZOOM_TELE 0x00200000 /* 接近物体(1 有效)*/ -#define D_MOVE_DOWN 0x0010002d /* 向下移动镜头(1 有效)*/ -#define D_MOVE_UP 0x0008002d /* 向上移动镜头(1 有效)*/ -#define D_MOVE_LEFT 0x00042d00 /* 向左移动镜头(1 有效)*/ -#define D_MOVE_RIGHT 0x00022d00 /* 向右移动镜头(1 有效)*/ - -/* 摄像机下发命令宏定义*/ -#define Take_Photo 0 /* 拍照*/ -#define Stop_Baud 10000 /* 设置球机波特率*/ -#define Stop_Cmd 10005 /* 取消或停止指令*/ -#define Auto_Scan 10006 /* 自动扫描功能控制(1/0 打开/关闭该功能)*/ -#define IRIS_CLOSE 10007 /* 光圈缩小(1 有效)*/ -#define IRIS_OPEN 10008 /* 光圈放大(1 有效)*/ -#define FOCUS_NEAR 10009 /* 近距离聚焦(1 有效)*/ -#define FOCUS_FAR 10010 /* 远距离聚焦(1 有效)*/ -#define ZOOM_WIDE 10011 /* 远离物体(1 有效)*/ -#define ZOOM_TELE 10012 /* 接近物体(1 有效)*/ -#define MOVE_DOWN 10013 /* 向下移动镜头(1 有效)*/ -#define MOVE_UP 10014 /* 向上移动镜头(1 有效)*/ -#define MOVE_LEFT 10015 /* 向左移动镜头(1 有效)*/ -#define MOVE_RIGHT 10016 /* 向右移动镜头(1 有效)*/ -#define MOVE_PRESETNO 10017 // 调用预置点 -#define SAVE_PRESETNO 10018 // 设置预置点 -#define SPEED_DOME_CAMERA 0 /* 球机摄像机*/ -#define SERIAL_CAMERA 2 /* 串口摄像机a*/ - -#define COLLECT_DATA 0 /* 调试使用*/ - -#define LOGE(fmt, args...) __android_log_print(ANDROID_LOG_ERROR, "serial_port_comm", fmt, ##args) /* 红色*/ -#define LOGI(fmt, args...) 
__android_log_print(ANDROID_LOG_INFO, "Sensors_Protocol", fmt, ##args) /* 草绿色*/ -#define LOGV(fmt, args...) __android_log_print(ANDROID_LOG_VERBOSE, "serial_port_comm", fmt, ##args)/* 白色*/ -#define LOGW(fmt, args...) __android_log_print(ANDROID_LOG_WARN, "Sensors_Protocol", fmt, ##args) /* 黄色*/ -#define LOGD(fmt, args...) __android_log_print(ANDROID_LOG_DEBUG, "Sensors_Protocol", fmt, ##args) /* 蓝色*/ -#define HexCharToInt( c ) (((c) >= '0') && ((c) <= '9') ? (c) - '0' : ((c) >= 'a') && ((c) <= 'f') ? (c) - 'a' + 10 :((c) >= 'A') && ((c) <= 'F') ? (c) - 'A' + 10 : 0 ) - -typedef struct -{ - int cmd; - int value; - int result; - LONG value2; - char str[MAX_STRING_LEN]; -}IOT_PARAM; - -//SDS包类型结构 -typedef struct -{ - BYTE PortIdx; // 信息类型 - WORD MsgType; // 信息类型 - int MsgLen; // 信息长度 - u_char MsgData[RECVDATA_MAXLENTH]; -} RTUMSG; - -typedef struct -{ - float fFactor; // 数据系数 - float EuValueDelta; // 数据工程值偏移 -} AI_PARAM; - -typedef struct -{ - AI_PARAM AiParam; // 数据点配置参数 - int AiState; // 数据标识(-1:采样失败;0:没有采样;1:正在采样;2:采样结束;3:启动采样;) - float EuValue; // 数据工程值 -} AI_DEF; - -typedef struct -{ - BYTE AiState; // 数据标识(-1:采样失败;0:没有采样;1:正在采样;2:采样结束;3:启动采样;) - float EuValue; // 数据工程值 -} Data_DEF; - -typedef struct -{ - int imagelen; // 整个图片大小 - int phototime; // 拍照时间 - u_char presetno; // 拍照预置点 - char photoname[512]; // 图片存储名称和路径 - int state;// 标识(-1:拍照失败;0:没有拍照;1:正在取图;2:拍照成功;3:启动拍照;) -} IMAGE_DEF; - -typedef struct -{ - int imagelen; // 整个图片大小 - int imagenum; // 整个图片的总包数 - int phototime; // 拍照时间 - u_char presetno; // 拍照预置点 - char photoname[512]; // 图片存储名称和路径 - u_char buf[MAX_PHOTO_PACKET_NUM][MAX_PHOTO_FRAME_LEN]; // 图片数据缓存 - int ilen[MAX_PHOTO_PACKET_NUM]; // 相对应的每包图片数据的长度 - int state;// 标识(-1:拍照失败;0:没有拍照;1:正在取图;2:拍照成功;3:启动拍照;) -} PHOTO_DEF; - -// 上层调用采集传感器参数 -typedef struct -{ - unsigned int baudrate; /* 波特率*/ - int databit; /* 数据位*/ - float stopbit; /* 停止位*/ - char parity; /* 校验位*/ - char pathname[64]; /* 串口文件名及路径*/ - //int commNo; /* 约定的串口序号,例如我们PC机上显示的COM1。。。*/ - u_char 
SensorsType; /* 传感器类型索引,大于 0*/ - int devaddr; /* 装置(传感器)使用的地址*/ - u_char IsNoInsta; /* 装置没有安装或者已经坏了(1:正常, 0:无效,坏了或没有安装)*/ - u_char CameraChannel; /* 像机的通道号*/ - u_char Phase; /* 传感器所安装相别,指拉力和倾角11表示A1....*/ -} SENSOR_PARAM; - -// 需要配置的串口装置参数 -typedef struct -{ - unsigned int baudrate; /* 波特率*/ - int databit; /* 数据位*/ - int stopbit; /* 停止位*/ - char parity; /* 校验位*/ - char pathname[64]; /* 串口文件名及路径*/ - int commid; /* 串口序号 注意:从0开始*/ - u_char ProtocolIdx; /* 规约索引,大于 0*/ - int devaddr; /* 装置使用的地址*/ - u_char IsNoInsta; /* 装置没有安装或者已经坏了(1:正常, 0:无效,坏了或没有安装)*/ - u_char CameraChannel; /* 像机的通道号*/ - u_char Phase; /* 传感器所安装相别,指拉力和倾角11表示A1....*/ -} SERIAL_PARAM; - -typedef struct -{ - int m_iRevStatus; /* */ - int m_iRecvLen; /* */ - int m_iNeedRevLength; /* */ - int iRecvTime; /* */ - u_char m_au8RecvBuf[RECVDATA_MAXLENTH];/* */ - int fd; /* 串口打开的文件句柄*/ - u_char PollCmd[SENDDATA_MAXLENTH]; - int cmdlen; // 发送缓冲区命令长度 - //******************** Poll Cmd **************************** - u_char Retry; /* 重试命令次数 */ - u_char RetryCnt; /* 重试命令计数*/ - LONG RetryTime; /* 重试命令时间 */ - LONG RetryTimeCnt; /* 重试命令时间计数*/ - LONG WaitTime; /* 命令间隔时间 */ - LONG WaitTimeCnt; /* 命令间隔时间计数*/ - u_char ForceWaitFlag; /* 强制等待标志*/ - u_short ForceWaitCnt; /* 强制等待计数*/ - u_char ReSendCmdFlag; /* 重发命令标志 */ - u_char SendCmdFlag; /* 命令发送标志 */ - u_char RevCmdFlag; /* 命令正常接收标志*/ - //********************************************************** - LONG lsendtime; /* 命令发送绝对时间计时(毫秒)*/ -} SIO_PARAM_SERIAL_DEF; - -//串口相关装置所有参数集中定义 -typedef struct -{ - //******************** 端口基本信息 ************************ - u_char IsNeedSerial; /* 是否需要使用串口通讯*/ - int CmdWaitTime; /* 没有使用*/ - u_char UseSerialidx; /* 使用的串口序号*/ - int SerialCmdidx; /* 正在使用的串口发送命令的命令序号(-1:表示没有命令发送) - 摄像机使用命令序号存储*/ - int enrecvtime; /* 发送加密命令后接收到应答计时*/ - LONG FirstCmdTimeCnt; /* 串口读取数据起始时间*/ - u_char nextcmd; /* 第二次发送读取气象雨量命令 */ - u_char SameTypeDevIdx; /* 相同类型装置顺序排列序号(从0开始)*/ - u_char uOpenPowerFlag; /* 传感器上电标志(0:不需要打开; 1:需要打开)*/ - int recvdatacnt; /* 接收到有效数据*/ - 
PHOTO_DEF image; /* 临时存储图片数据*/ - AI_DEF aiValue[MAX_DEV_VALUE_NUM]; /* 传感器采样值*/ -} SERIAL_DEV_DEF; - -//串口相关装置所有参数集中定义 -typedef struct -{ - u_char clcyesampling; /* 正在进行采样(0:没有进行采样;1:正在进行采样;)*/ - u_char camerauseserial; /* 摄像机使用那个串口*/ - DWORD PtzCmdType; /* 云台指令类型*/ - int usecameradevidx; /* 有像机指令需要执行*/ - /* 执行指令的装置序号(-1:表示没有需要执行的指令;)*/ - int SendStopPtzCmdTimeCnt; /* 发送云台停止指令*/ - u_char serialstatus[MAX_SERIAL_PORT_NUM]; /* 串口是否可以使用状态分别对应串口1、2、3*/ - SERIAL_DEV_DEF ms_dev[MAX_SERIAL_DEV_NUM]; /* 装置所接传感器数量*/ - int UseingSerialdev[MAX_SERIAL_PORT_NUM]; /* 正在使用串口通讯的装置序号(-1,表示串口空闲)*/ - int curdevidx[MAX_SERIAL_PORT_NUM]; /* 当前正在通讯的装置序号(-1表示没有装置需要通讯)*/ - u_char IsReadWireTem; /* 是否在开始读取测温数据(0:表示没有;1:是)*/ - //int proruntime; /* 程序运行时间*/ - int IsSleep; /* 是否使程序休眠(1:不休眠;2:休眠)*/ - int tempsamplingstartime; /* 测温启动距离采样启动时间间隔*/ - int tempsamplingsucctime; /* 测温启动距离采样成功时间间隔*/ - int samplingtimeSec; /* 高速采样数据秒级时间控制*/ - int SectimesamplingCnt[3]; /* 高速采样数据秒级采样数*/ - int SunshineSensorsFault; /* 控制日照传感器故障发送*/ - int TempSensorsFault; /* 控制测温传感器故障发送*/ - int FirstSensorsFault; /* 第一次检测传感器故障发送*/ - int SensorsIsUse; /* 传感器是否启用与自检位置匹配*/ - int sequsampling; /* 顺序采样控制序号-1:无采样;其他对应相应装置序号*/ - - int imagepacketnum; /* 串口摄像机拍照图片总包数*/ - int historyimagenum[MAX_CHANNEL_NUM]; /* 球机保存的历史图片数量*/ -#if 1 - //int sendflag; /* 临时上送泄露电流值标志*/ - int sendphototime; /* 临时上送图片数据统计*/ - int sendphotocmdcnt; /* 一次拍照过程中发送拍照指令计数*/ - int photographtime; /* 图片拍摄的时间*/ - int iLastGetPhotoNo; /* 设置串口摄像机参数时暂存拍照命令序号*/ - u_char bImageSize; /* 用于临时存储接收上层命令的图片大小*/ - u_char presetno; /* 用于临时存储接收上层命令的预置点*/ - char filedir[512]; /* 用于摄像机拍照之后暂时存放的路径*/ -#endif - u_char errorPhotoNoCnt; /* 串口摄像机拍照时回应错误包号计数(如:召第6包回应第3包)*/ - u_char RephotographCnt; /* 串口摄像机重拍计数(只在读照片数据应答出错时才重拍)*/ -} SRDT_DEF; - -static void PortDataProcess( void ); -static LONG get_msec(); -int serial_port_comm(); -static int weather_comm(SERIAL_PARAM weatherport); - -static void setRS485Enable(bool z); -static void set485WriteMode(); -static void set485ReadMode(); 
-static void set12VEnable(bool z); -static void setCam3V3Enable(bool enabled); - -// 串口相关的所有函数定义 -/* 打开串口电源*/ -void Gm_OpenSerialPower(); - -// 打开传感器电源 -void Gm_OpenSensorsPower(); -// 关闭传感器电源 -void Gm_CloseSensorsPower(int port); - -// 打开串口通讯 -void Gm_OpenSerialPort(int devidx); - -// 关闭串口通讯 -void Gm_CloseSerialPort(); -void DebugLog(int commid, char *szbuf, char flag); -int SaveLogTofile(int commid, char *szbuf); -// 功能说明:串口发送数据 返回实际发送的字节数 -int GM_SerialComSend(const unsigned char * cSendBuf, LONG nSendLen, int commid); -void Gm_InitSerialComm(SENSOR_PARAM *sensorParam, char *filedir); -// 启动串口通讯 -void GM_StartSerialComm(); -// 启动使用串口拍照 -void GM_StartSerialCameraPhoto(BYTE channel, int cmdidx); -void delete_old_files(const char *path, int days); - -// 串口轮询通讯定时器 -int GM_SerialTimer(); -//轮询所有串口和传感器是否需要生成下发命令 -void Gm_FindAllSensorsCommand(); -//检查所有传感器是否采集完毕,采集完毕的关闭传感器电源 -void GM_IsCloseSensors(); -//检查所有串口是否有数据接收,有则启动接收 -void GM_AllSerialComRecv(); -//判断是否需要关闭定时器 -int GM_CloseTimer(); -void testComm(); -void Gm_InitSerialComm_Test(); -// 串口接收数据处理 -void SerialDataProcess(int devidx, u_char *buf, int len); - -void CameraRecvData(int commid, u_char *buf, int len); - -// 串口摄像机数据处理 -void CameraPhotoPortDataProcess( int port); - -// 发送命令 -void SendCmdFormPollCmdBuf( int port ); - -// 清除发送命令的所有标识 -void ClearCmdAllFlag(int commid); - -// 下发串口拍照指令控制 -int FindNextCameraPhotoCommand(int devidx); -// 生成 CameraPhoto命令 -void MakeCameraPhotoCommand( int portno, BYTE cmdidx, int OneParam, WORD TwoParam, BYTE Threep, int phototime); - -// 清除命令缓冲区 -void ClearCmdFormPollCmdBuf(int port); - -// 准备发送云台指令 -int Gm_CtrlPtzCmd(u_char channel, DWORD ptzcmd); - -// 发送转动摄像机云台命令定时器 -int Gm_Camera_Timer(); - -// 生成 PELCO_P 命令 * -void Gm_SendPelco_pCommand( DWORD cmdtype); - -// 计算Pelco_p校验 -BYTE Gm_Pelco_pXORCheck( BYTE *msg, int len ); -// 生成 PELCO_D 命令 * -void Gm_SendPelco_DCommand( DWORD cmdtype); - -// 计算Pelco_D校验 -BYTE Gm_Pelco_DCheck( BYTE *msg, int len ); -// 查询传感器电源状态 -char 
Gm_GetSensorsPowerState(int port); - -// 通过传感器使用的航空头查找传感器使用的串口序号 -void FindDevUseSerialCommNo(); - -// 寻找并生成下一条倾角命令 -int FindNextShxyProtocolCommand( int devidx ); -// 倾角命令校验码计算 -unsigned char CalLpc(unsigned char *msg, int len); -// 读上海欣影传感器协议数据 -void ShxyProtocolRecvData(int commid, u_char *buf, int len); -// 检查检验和是否正确 -int CheckShxyProtocolLpcError( u_char* msg, int len ); - -// 把16进制和10进制ASCII字符串转换成int整数 -int ATOI(char *buf); - -//生成倾角命令 -void MakeShxyProtocolPollCommand(int portno, BYTE cmdidx); -// 上海欣影传感器协议数据处理 -void ShxyProtocolDataProcess( int commid); -// 控制关闭传感器电源 -//void Gm_CtrlCloseSensorsPower(int devidx); -// 检查传感器电源是否应该关闭或打开 -//void Gm_CheckSensorsPower(void); -int SaveImageDataTofile(int devno); -void Collect_sensor_data(); - -void CameraPhotoCmd(int phototime, u_char channel, int cmdidx, u_char bImageSize, u_char presetno); - - -/* 数据和图片采集数据返回函数 开始*/ -int GetWeatherData(Data_DEF *data, int datano); - -int GetAirTempData(Data_DEF *airt); - -int GetHumidityData(Data_DEF *airt); - -int GetWindSpeedData(Data_DEF *airt); - -int GetWindDirectionData(Data_DEF *airt); - -int GetRainfallData(Data_DEF *airt); - -int GetAtmosData(Data_DEF *airt); - -int GetOpticalRadiationData(Data_DEF *airt); - -int GetPullValue(int devno, Data_DEF *data); - -int GetAngleValue(int devno, Data_DEF *data, int Xy); - -int GetImage(int devno, IMAGE_DEF *photo); -/* 数据和图片采集数据返回函数 结束*/ -// 生成一个随机整数 -int GeneratingRandomNumber(); - -#endif //WEATHERCOMM_H diff --git a/app/src/main/cpp/serialComm.cpp b/app/src/main/cpp/SerialComm.cpp similarity index 99% rename from app/src/main/cpp/serialComm.cpp rename to app/src/main/cpp/SerialComm.cpp index b300d1c8..4222349b 100644 --- a/app/src/main/cpp/serialComm.cpp +++ b/app/src/main/cpp/SerialComm.cpp @@ -15,7 +15,7 @@ #include #include #include "GPIOControl.h" -#include "serialComm.h" +#include "SerialComm.h" static void set_baudrate (struct termios *opt, unsigned int baudrate) diff --git a/app/src/main/cpp/serialComm.h 
b/app/src/main/cpp/SerialComm.h similarity index 100% rename from app/src/main/cpp/serialComm.h rename to app/src/main/cpp/SerialComm.h diff --git a/app/src/main/cpp/camera2/Camera2Helper.h b/app/src/main/cpp/camera2/Camera2Helper.h index d75746eb..b3a3b33a 100644 --- a/app/src/main/cpp/camera2/Camera2Helper.h +++ b/app/src/main/cpp/camera2/Camera2Helper.h @@ -17,6 +17,11 @@ #ifndef __CAMERA2_HELPER_H__ #define __CAMERA2_HELPER_H__ +#include +#include +#include +#include "mat.h" + template class RangeValue { @@ -103,4 +108,107 @@ private: }; +inline void ConvertYUV21ToMat(const uint8_t* nv21, int nv21_width, int nv21_height, int orgWidth, int orgHeight, + int sensorOrientation, bool front, int rotation, cv::Mat& rgb) +{ + int w = 0; + int h = 0; + int rotate_type = 0; + cv::Mat nv21_rotated; + const unsigned char* yuv420data = nv21; + + if (rotation != 0) + { + int co = 0; + if (front) + { + co = (sensorOrientation + (rotation - 1) * 90) % 360; + co = (360 - co) % 360; + } + else + { + co = (sensorOrientation - (rotation - 1) * 90 + 360) % 360; + } + + // XYLOG(XYLOG_SEVERITY_DEBUG, "Orientation=%d Facing=%d", co, camera_facing); + + // int co = 0; + if (co == 0) + { + w = nv21_width; + h = nv21_height; + rotate_type = front ? 2 : 1; + } + else if (co == 90) + { + w = nv21_height; + h = nv21_width; + + int tmp = orgWidth; + orgWidth = orgHeight; + orgHeight = tmp; + + rotate_type = front ? 5 : 6; + } + else if (co == 180) + { + w = nv21_width; + h = nv21_height; + rotate_type = front ? 4 : 3; + } + else if (co == 270) + { + w = nv21_height; + h = nv21_width; + + int tmp = orgWidth; + orgWidth = orgHeight; + orgHeight = tmp; + + rotate_type = front ? 
7 : 8; + } + + nv21_rotated.create(h + h / 2, w, CV_8UC1); + ncnn::kanna_rotate_yuv420sp(nv21, nv21_width, nv21_height, nv21_rotated.data, w, h, rotate_type); + yuv420data = nv21_rotated.data; + } + else + { + w = nv21_width; + h = nv21_height; + } + + // nv21_rotated to rgb + if (w == orgWidth && h == orgHeight) + { + rgb.create(h, w, CV_8UC3); + // ncnn::yuv420sp2rgb(nv21_rotated.data, w, h, rgb.data); + ncnn::yuv420sp2rgb_nv12(yuv420data, w, h, rgb.data); + } + else + { + cv::Mat org(h, w, CV_8UC3); + ncnn::yuv420sp2rgb_nv12(yuv420data, w, h, org.data); + if (w * orgHeight == h * orgWidth) // Same Ratio + { + cv::resize(org, rgb, cv::Size(orgWidth, orgHeight)); + } + else + { + // Crop image + if (w > orgWidth && h >= orgHeight) + { + int left = (w - orgWidth) / 2; + int top = (h - orgHeight) / 2; + rgb = org(cv::Range(top, top + orgHeight), cv::Range(left, left + orgWidth)); + } + else + { + rgb = org; + } + } + } +} + + #endif /* __CAMERA2_HELPER_H__ */ diff --git a/app/src/main/cpp/camera2/OpenCVHdr.h b/app/src/main/cpp/camera2/OpenCVHdr.h index f79ef57c..dc246719 100644 --- a/app/src/main/cpp/camera2/OpenCVHdr.h +++ b/app/src/main/cpp/camera2/OpenCVHdr.h @@ -9,6 +9,7 @@ using namespace std; using namespace cv; +// https://zhuanlan.zhihu.com/p/38176640 void Debevec(vectorexposureImages, vectorexposureTimes, Mat& output); void Robertson(vectorexposureImages, vectorexposureTimes, Mat& output); diff --git a/app/src/main/cpp/camera2/camera_listeners.cpp b/app/src/main/cpp/camera2/camera_listeners.cpp index 4a3ef4a4..d299b293 100644 --- a/app/src/main/cpp/camera2/camera_listeners.cpp +++ b/app/src/main/cpp/camera2/camera_listeners.cpp @@ -1,3 +1,4 @@ + /* * Copyright (C) 2017 The Android Open Source Project * diff --git a/app/src/main/cpp/camera2/ndkcamera.cpp b/app/src/main/cpp/camera2/ndkcamera.cpp index 837b5c73..e644d18c 100644 --- a/app/src/main/cpp/camera2/ndkcamera.cpp +++ b/app/src/main/cpp/camera2/ndkcamera.cpp @@ -26,965 +26,1832 @@ #include 
"Camera2Helper.h" #include #include +#include "DngCreator.h" static void onAvailabilityCallback(void* context, const char* cameraId) { - ((NdkCamera*)context)->onAvailabilityCallback(cameraId); - // ALOGI("CameraStatus::onAvailability CameraId: %s", cameraId); - XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::onAvailability CameraId: %s", cameraId); + ((NdkCamera*)context)->onAvailabilityCallback(cameraId); + // ALOGI("CameraStatus::onAvailability CameraId: %s", cameraId); + XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::onAvailability CameraId: %s", cameraId); } static void onUnavailabilityCallback(void* context, const char* cameraId) { - ((NdkCamera*)context)->onUnavailabilityCallback(cameraId); - XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::onUnavailability CameraId: %s", cameraId); + ((NdkCamera*)context)->onUnavailabilityCallback(cameraId); + XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::onUnavailability CameraId: %s", cameraId); } static void onDisconnected(void* context, ACameraDevice* device) { - ((NdkCamera*)context)->onDisconnected(device); - XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::onDisconnected CameraId: %s", ACameraDevice_getId(device)); + ((NdkCamera*)context)->onDisconnected(device); + XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::onDisconnected CameraId: %s", ACameraDevice_getId(device)); } static void onError(void* context, ACameraDevice* device, int error) { - if (ACAMERA_ERROR_CAMERA_DEVICE == error) - { - - } - - XYLOG(XYLOG_SEVERITY_ERROR, "CameraStatus::onError CameraId: %s err=%d", ACameraDevice_getId(device), error); - std::string msg = "NdkCamera error code=" + std::to_string(error); - ((NdkCamera*)context)->on_error(msg); - // __android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onError %p %d", device, error); + ((NdkCamera*)context)->onError(device, error); } static void onImageAvailable(void* context, AImageReader* reader) { - ((NdkCamera*)context)->onImageAvailable(reader); + NdkCamera* pThis = reinterpret_cast(context); + 
pThis->onImageAvailable(reader); } static void onSessionActive(void* context, ACameraCaptureSession *session) { - ALOGD("onSessionActive %p", session); + ALOGD("onSessionActive %p", session); } static void onSessionReady(void* context, ACameraCaptureSession *session) { - ALOGD("onSessionReady %p", session); - ((NdkCamera*)context)->onSessionReady(session); + ALOGD("onSessionReady %p", session); + ((NdkCamera*)context)->onSessionReady(session); } static void onSessionClosed(void* context, ACameraCaptureSession *session) { - XYLOG(XYLOG_SEVERITY_INFO, "onSessionClosed %p", session); + XYLOG(XYLOG_SEVERITY_INFO, "onSessionClosed %p", session); } void onCaptureFailed(void* context, ACameraCaptureSession* session, ACaptureRequest* request, ACameraCaptureFailure* failure) { - XYLOG(XYLOG_SEVERITY_WARNING, "onCaptureFailed session=%p request=%p reason=%d", session, request, failure->reason); + // XYLOG(XYLOG_SEVERITY_WARNING, "onCaptureFailed session=%p request=%p reason=%d", session, request, failure->reason); + ((NdkCamera*)context)->onCaptureFailed(session, request, failure); } void onCaptureSequenceCompleted(void* context, ACameraCaptureSession* session, int sequenceId, int64_t frameNumber) { - ALOGD("onCaptureSequenceCompleted %p %d %ld", session, sequenceId, frameNumber); + ALOGD("onCaptureSequenceCompleted %p sequenceId=%d frameNumber=%ld", session, sequenceId, frameNumber); } void onCaptureSequenceAborted(void* context, ACameraCaptureSession* session, int sequenceId) { - ALOGD("onCaptureSequenceAborted %p %d", session, sequenceId); + ALOGD("onCaptureSequenceAborted %p sequenceId=%d", session, sequenceId); } void onCaptureProgressed(void* context, ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result) { - ((NdkCamera*)context)->onCaptureProgressed(session, request, result); + ((NdkCamera*)context)->onCaptureProgressed(session, request, result); } void onCaptureCompleted(void* context, ACameraCaptureSession* session, 
ACaptureRequest* request, const ACameraMetadata* result) { - ((NdkCamera*)context)->onCaptureCompleted(session, request, result); + ((NdkCamera*)context)->onCaptureCompleted(session, request, result); } NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params) { - camera_facing = 0; - camera_orientation = 0; + camera_facing = 0; + camera_orientation = 0; - m_params = params; - m_firstFrame = true; - mWidth = width; - mHeight = height; + m_params = params; + m_firstFrame = true; + m_photoTaken = false; + mWidth = width; + mHeight = height; - m_imagesCaptured = ~0; + mCaptureTriggered = false; - maxFrameDuration = 0; - afSupported = false; - awbMode = ACAMERA_CONTROL_AWB_MODE_AUTO; - aeLockAvailable = false; - awbLockAvailable = false; + maxFrameDuration = 0; + afSupported = false; + awbMode = ACAMERA_CONTROL_AWB_MODE_AUTO; + aeLockAvailable = false; + awbLockAvailable = false; - sceneModeSupported = false; + sceneModeSupported = false; - numberOfPrecaptures = 0; - m_precaptureStartTime = 0; + numberOfPrecaptures = 0; + m_precaptureStartTime = 0; - activeArraySize[0] = 0; - activeArraySize[1] = 0; + activeArraySize[0] = 0; + activeArraySize[1] = 0; - maxRegions[0] = 0; - maxRegions[1] = 0; - maxRegions[2] = 0; + maxRegions[0] = 0; + maxRegions[1] = 0; + maxRegions[2] = 0; - camera_manager_cb.context = this; - camera_manager_cb.onCameraAvailable = ::onAvailabilityCallback; - camera_manager_cb.onCameraUnavailable = ::onUnavailabilityCallback; + camera_manager_cb.context = this; + camera_manager_cb.onCameraAvailable = ::onAvailabilityCallback; + camera_manager_cb.onCameraUnavailable = ::onUnavailabilityCallback; - camera_device = 0; - image_reader = 0; - image_reader_surface = 0; - image_reader_target = 0; - capture_request = 0; - capture_session_output_container = 0; - capture_session_output = 0; - capture_session = 0; - captureSequenceId = 0; + mPreviewImageReader = NULL; + mPreviewImageWindow = NULL; + mPreviewOutputTarget = NULL; - 
lightDetected = false; + mImageReader = NULL; + mImageWindow = NULL; + mOutputTarget = NULL; - mResult = { 0 }; + camera_device = 0; + + capture_session_output_container = 0; + capture_session = 0; + + lightDetected = false; + + mResult = { 0 }; + mLdr = ~0; } NdkCamera::~NdkCamera() { - close(); + close(); } int NdkCamera::selfTest(const std::string& cameraId, int32_t& maxResolutionX, int32_t& maxResolutionY) { - camera_manager.Create(); - // ACameraManager_registerAvailabilityCallback(camera_manager, &camera_manager_cb); - - // find camera - bool foundIt = false; - // DisplayDimension disp(mWidth, mHeight); - // DisplayDimension foundRes = disp; - camera_status_t status = ACAMERA_OK; - - ACameraIdList* cameraIdList = NULL; - status = ACameraManager_getCameraIdList(camera_manager, &cameraIdList); - if (status != ACAMERA_OK) - { - return 1; - } + camera_manager.Create(); + // ACameraManager_registerAvailabilityCallback(camera_manager, &camera_manager_cb); + + // find camera + bool foundIt = false; + // DisplayDimension disp(mWidth, mHeight); + // DisplayDimension foundRes = disp; + camera_status_t status = ACAMERA_OK; + + ACameraIdList* cameraIdList = NULL; + status = ACameraManager_getCameraIdList(camera_manager, &cameraIdList); + if (status != ACAMERA_OK) + { + return 1; + } + + for (int i = 0; i < cameraIdList->numCameras; ++i) + { + const char *id = cameraIdList->cameraIds[i]; + if (cameraId.compare(id) == 0) { + foundIt = true; + break; + } + } + ACameraManager_deleteCameraIdList(cameraIdList); + if (!foundIt) + { + return 2; + } + + ACameraMetadata * camera_metadata = 0; + status = ACameraManager_getCameraCharacteristics(camera_manager, cameraId.c_str(), &camera_metadata); + if (status != ACAMERA_OK) + { + return 3; + } + + { + ACameraMetadata_const_entry e = { 0 }; + camera_status_t status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &e); + // format of the data: format, width, height, input?, type int32 + 
+ maxResolutionX = 0; + maxResolutionY = 0; + + for (int i = 0; i < e.count; i += 4) + { + int32_t input = e.data.i32[i + 3]; + int32_t format = e.data.i32[i + 0]; + if (input) continue; + + if (format == AIMAGE_FORMAT_YUV_420_888/* || format == AIMAGE_FORMAT_JPEG*/) + { + if (e.data.i32[i + 1] * e.data.i32[i + 2] > (maxResolutionX * maxResolutionY)) + { + maxResolutionX = e.data.i32[i + 1]; + maxResolutionY = e.data.i32[i + 2]; + } + } + } + } + + return 0; +} - for (int i = 0; i < cameraIdList->numCameras; ++i) - { - const char *id = cameraIdList->cameraIds[i]; - if (cameraId.compare(id) == 0) { - foundIt = true; - break; - } - } - ACameraManager_deleteCameraIdList(cameraIdList); - if (!foundIt) - { - return 2; - } +int NdkCamera::open(const std::string& cameraId) { + XYLOG(XYLOG_SEVERITY_DEBUG, "DBG::try open %s", cameraId.c_str()); + + // camera_facing = _camera_facing; + + camera_manager.Create(); + // ACameraManager_registerAvailabilityCallback(camera_manager, &camera_manager_cb); + + // find camera + bool foundIt = false; + DisplayDimension disp(mWidth, mHeight); + DisplayDimension foundRes = disp; + camera_status_t status = ACAMERA_OK; + + ALOGD("Start ACameraManager_getCameraIdList"); + { + ACameraIdList *camera_id_list = 0; + for (int retry = 0; retry < 100; retry++) + { + status = ACameraManager_getCameraIdList(camera_manager, &camera_id_list); + AASSERT(status == ACAMERA_OK, "ACameraManager_getCameraIdList return error, %d", status); + + for (int i = 0; i < camera_id_list->numCameras; ++i) { + const char *id = camera_id_list->cameraIds[i]; + if (cameraId.compare(id) == 0) { + foundIt = true; + break; + } + } + ACameraManager_deleteCameraIdList(camera_id_list); + if (foundIt) + { + break; + } + std::this_thread::sleep_for(std::chrono::milliseconds(16)); + } + + ALOGD("End ACameraManager_getCameraIdList"); + + // ACameraManager_unregisterAvailabilityCallback(camera_manager, &camera_manager_cb); + if (!foundIt) + { + XYLOG(XYLOG_SEVERITY_ERROR, "Camera Not 
Found on ID: %s", cameraId.c_str()); + return 1; + } + + mCameraId = cameraId; + + ACameraMetadata * camera_metadata = 0; + status = ACameraManager_getCameraCharacteristics(camera_manager, cameraId.c_str(), &camera_metadata); + AASSERT(status == ACAMERA_OK, "ACameraManager_getCameraCharacteristics return error, %d", status); + + mCharacteristics = std::shared_ptr(camera_metadata, ACameraMetadata_free); + { + ACameraMetadata_const_entry e = { 0 }; + status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &e); + // format of the data: format, width, height, input?, type int32 + + // DisplayDimension foundRes(4000, 4000); + // DisplayDimension maxJPG(0, 0); + + foundIt = false; + DisplayDimension temp; + + for (int i = 0; i < e.count; i += 4) + { + int32_t input = e.data.i32[i + 3]; + if (input) continue; + int32_t format = e.data.i32[i + 0]; + + if (format == AIMAGE_FORMAT_YUV_420_888/* || format == AIMAGE_FORMAT_JPEG*/) + { + DisplayDimension res(e.data.i32[i + 1], e.data.i32[i + 2]); + // XYLOG(XYLOG_SEVERITY_DEBUG, "CameraId=%s CX=%d CY=%d", cameraId.c_str(), res.width(), res.height()); + if (!disp.IsSameRatio(res)) + { + + if (res.width() >= mWidth && res.height() >= mHeight) + { + temp = res; + } + continue; + } + + if (/*format == AIMAGE_FORMAT_YUV_420_888 && */res > disp) + { + foundIt = true; + foundRes = res; + } + } + } + + if (!foundIt) + { + foundRes = temp; + foundIt = true; + } + } + + if (!foundIt || foundRes.width() == 0 || foundRes.height() == 0) + { + // ACameraMetadata_free(camera_metadata); + XYLOG(XYLOG_SEVERITY_ERROR, "Camera RES(%d, %d) Not Found on ID: %s", mWidth, mHeight, cameraId.c_str()); + return 1; + } + + // foundRes.Flip(); + + // query faceing + acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT; + { + ACameraMetadata_const_entry e = { 0 }; + status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_LENS_FACING, &e); + AASSERT(status == ACAMERA_OK, 
"ACameraMetadata_getConstEntry::ACAMERA_LENS_FACING return error, %d", status); + if (status == ACAMERA_OK) + { + facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0]; + } + } + camera_facing = facing; + + // query orientation + int orientation = 0; + { + ACameraMetadata_const_entry e = { 0 }; + status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_ORIENTATION, &e); + AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_SENSOR_ORIENTATION return error, %d", status); + if (status == ACAMERA_OK) + { + orientation = (int)e.data.i32[0]; + } + } + camera_orientation = orientation; + + { + ACameraMetadata_const_entry e = { 0 }; + status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE, &e); + } + + { + ACameraMetadata_const_entry e = { 0 }; + status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AF_AVAILABLE_MODES, &e); + // AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_CONTROL_AF_AVAILABLE_MODES return error, %d", status); +#ifdef _DEBUG + std::string afModes; + for (int idx = 0; idx < e.count; idx++) + { + afModes += std::to_string(e.data.u8[idx]) + " "; - ACameraMetadata * camera_metadata = 0; - status = ACameraManager_getCameraCharacteristics(camera_manager, cameraId.c_str(), &camera_metadata); - if (status != ACAMERA_OK) + } + XYLOG(XYLOG_SEVERITY_DEBUG, "Available AF Mode: ", afModes.c_str()); +#endif + afSupported = (status == ACAMERA_OK) && !(e.count == 0 || (e.count == 1 && e.data.u8[0] == ACAMERA_CONTROL_AF_MODE_OFF)); + } + + { + ACameraMetadata_const_entry e = { 0 }; + status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AWB_AVAILABLE_MODES, &e); + // AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_CONTROL_AF_AVAILABLE_MODES return error, %d", status); + if (status == ACAMERA_OK) + { + for (int idx = 0; idx < e.count; idx++) + { + if (m_params.awbMode == e.data.u8[idx]) + { + awbMode = 
m_params.awbMode; + break; + } + // unsigned int m = e.data.u8[idx]; + // XYLOG(XYLOG_SEVERITY_DEBUG, "Available AWB Mode %u", m); + } + } + // awbSupported = (status == ACAMERA_OK) && !(e.count == 0 || (e.count == 1 && e.data.u8[0] == ACAMERA_CONTROL_AWB_MODE_OFF)); + } + + if (!afSupported) + { + XYLOG(XYLOG_SEVERITY_ERROR, "AF not Supported"); + } + + { + ACameraMetadata_const_entry val = { 0 }; + status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE, &val); + // AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE return error, %d", status); + if (status == ACAMERA_OK) + { + exposureRange.min_ = val.data.i64[0]; + if (exposureRange.min_ < kMinExposureTime) + { + exposureRange.min_ = kMinExposureTime; + } + exposureRange.max_ = val.data.i64[1]; + if (exposureRange.max_ > kMaxExposureTime) + { + exposureRange.max_ = kMaxExposureTime; + } + // exposureTime = exposureRange.value(2); + } + else + { + ALOGW("Unsupported ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE"); + exposureRange.min_ = exposureRange.max_ = 0l; + // exposureTime_ = 0l; + } + } + + { + ACameraMetadata_const_entry e = { 0 }; + status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AE_LOCK_AVAILABLE, &e); + // AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_CONTROL_AF_AVAILABLE_MODES return error, %d", status); + aeLockAvailable = (status == ACAMERA_OK) ? (*e.data.u8 == ACAMERA_CONTROL_AE_LOCK_AVAILABLE_TRUE) : false; + } + + { + ACameraMetadata_const_entry e = { 0 }; + status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AWB_LOCK_AVAILABLE, &e); + awbLockAvailable = (status == ACAMERA_OK) ? 
(*e.data.u8 == ACAMERA_CONTROL_AWB_LOCK_AVAILABLE_TRUE) : false; + } + + { + ACameraMetadata_const_entry val = { 0 }; + status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_ZOOM_RATIO_RANGE, &val); + if (status == ACAMERA_OK) + { + float zoomRatioMin = val.data.f[0]; + float zoomRatioMax = val.data.f[1]; + + ALOGI("Zoom Ratio Range: [%f,%f]", zoomRatioMin, zoomRatioMax); + } + } + + { + ACameraMetadata_const_entry val = { 0 }; + status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AE_COMPENSATION_RANGE, &val); + if (status == ACAMERA_OK) + { + aeCompensationRange.min_ = val.data.i32[0]; + aeCompensationRange.max_ = val.data.i32[1]; + + XYLOG(XYLOG_SEVERITY_DEBUG, "AE_COMPENSATION_RANGE [%d,%d]", aeCompensationRange.min_, aeCompensationRange.max_); + } + else + { + ALOGW("Unsupported ACAMERA_CONTROL_AE_COMPENSATION_RANGE"); + aeCompensationRange.min_ = aeCompensationRange.max_ = 0l; + } + } + + { + ACameraMetadata_const_entry val = { 0 }; + status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AE_COMPENSATION_STEP, &val); + if (status == ACAMERA_OK) + { + aeCompensationStep = val.data.r[0]; + + XYLOG(XYLOG_SEVERITY_DEBUG, "AE_COMPENSATION_STEP num=%d den=%d", aeCompensationStep.numerator, aeCompensationStep.denominator); + } + } + + { + ACameraMetadata_const_entry e = { 0 }; + status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION, &e); + maxFrameDuration = (status == ACAMERA_OK) ? 
*e.data.i64 : 0; + } + + { + ACameraMetadata_const_entry val = { 0 }; + status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE, &val); + if (status == ACAMERA_OK) + { + sensitivityRange.min_ = val.data.i32[0]; + sensitivityRange.max_ = val.data.i32[1]; + } + else + { + ALOGW("failed for ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE"); + sensitivityRange.min_ = sensitivityRange.max_ = 0; + } + } + + { + ACameraMetadata_const_entry val = { 0 }; + status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, &val); + if (status == ACAMERA_OK) + { + activeArraySize[0] = val.data.i32[2]; + activeArraySize[1] = val.data.i32[3]; + } + } + + { + ACameraMetadata_const_entry val = { 0 }; + status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_MAX_REGIONS, &val); + if (status == ACAMERA_OK) + { + maxRegions[0] = val.data.i32[0]; + maxRegions[1] = val.data.i32[1]; + maxRegions[2] = val.data.i32[2]; + } + } + + { + ACameraMetadata_const_entry e = { 0 }; + status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AVAILABLE_SCENE_MODES, &e); + if (status == ACAMERA_OK) + { + for (int i = 0; i < e.count; i++) + { + if (m_params.sceneMode == e.data.u8[i]) + { + sceneModeSupported = true; + break; + } + } + } + } + + // ACameraMetadata_free(camera_metadata); + } + + // open camera + { + ACameraDevice_StateCallbacks camera_device_state_callbacks; + camera_device_state_callbacks.context = this; + camera_device_state_callbacks.onDisconnected = ::onDisconnected; + camera_device_state_callbacks.onError = ::onError; + + status = ACameraManager_openCamera(camera_manager, cameraId.c_str(), &camera_device_state_callbacks, &camera_device); + if (status != ACAMERA_OK) + { + XYLOG(XYLOG_SEVERITY_ERROR, "Failed to open camera %s res=%d", cameraId.c_str(), status); + return 1; + } + } + + XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::Open %s Orientation=%d width=%d height=%d", cameraId.c_str(), 
camera_orientation, foundRes.width(), foundRes.height()); + + status = ACaptureSessionOutputContainer_create(&capture_session_output_container); + + uint32_t burstCaptures = getBurstCaptures(); + if (burstCaptures == 0) { - return 3; + burstCaptures = 1; } - { - ACameraMetadata_const_entry e = {0}; - camera_status_t status = ACameraMetadata_getConstEntry(camera_metadata,ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &e); - // format of the data: format, width, height, input?, type int32 - - maxResolutionX = 0; - maxResolutionY = 0; - - for (int i = 0; i < e.count; i += 4) - { - int32_t input = e.data.i32[i + 3]; - int32_t format = e.data.i32[i + 0]; - if (input) continue; + // setup imagereader and its surface + media_status_t mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), AIMAGE_FORMAT_YUV_420_888, 5, &mPreviewImageReader); + if (mstatus == AMEDIA_OK) + { + AImageReader_ImageListener listener; + listener.context = this; + listener.onImageAvailable = ::onImageAvailable; + mstatus = AImageReader_setImageListener(mPreviewImageReader, &listener); + mstatus = AImageReader_getWindow(mPreviewImageReader, &mPreviewImageWindow); + ANativeWindow_acquire(mPreviewImageWindow); + } + status = ACameraOutputTarget_create(mPreviewImageWindow, &mPreviewOutputTarget); + + status = ACaptureSessionOutput_create(mPreviewImageWindow, &mPreviewSessionOutput); + status = ACaptureSessionOutputContainer_add(capture_session_output_container, mPreviewSessionOutput); + + mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), getOutputFormat(), burstCaptures + 2, &mImageReader); + if (mstatus == AMEDIA_OK) + { + AImageReader_ImageListener listener; + listener.context = this; + listener.onImageAvailable = ::onImageAvailable; + mstatus = AImageReader_setImageListener(mImageReader, &listener); + mstatus = AImageReader_getWindow(mImageReader, &mImageWindow); + ANativeWindow_acquire(mImageWindow); + } + status = ACameraOutputTarget_create(mImageWindow, 
&mOutputTarget); + + status = ACaptureSessionOutput_create(mImageWindow, &mSessionOutput); + status = ACaptureSessionOutputContainer_add(capture_session_output_container, mSessionOutput); + + + CaptureRequest *request = CreateRequest(true); + mCaptureRequests.push_back(request); +#if 0 + for (int idx = 0; idx <= burstCaptures; idx++) + { + CaptureRequest *request = new CaptureRequest(); + std::memset(request, 0, sizeof(CaptureRequest)); + + bool isPreviewRequest = (idx == PREVIEW_REQUEST_IDX); + + request->pThis = this; + request->imageReader = isPreviewRequest ? mPreviewImageReader : mImageReader; + request->imageWindow = isPreviewRequest ? mPreviewImageWindow : mImageWindow; + request->imageTarget = isPreviewRequest ? mPreviewOutputTarget : mOutputTarget; + request->templateId = isPreviewRequest ? TEMPLATE_PREVIEW : (ACameraDevice_request_template)m_params.requestTemplate; + + // capture request + status = ACameraDevice_createCaptureRequest(camera_device, request->templateId, &request->request); + ACaptureRequest_setUserContext(request->request, request); + + // uint8_t ctrlMode = sceneModeSupported ? ACAMERA_CONTROL_MODE_USE_SCENE_MODE : ACAMERA_CONTROL_MODE_AUTO; + uint8_t ctrlMode = ACAMERA_CONTROL_MODE_AUTO; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_MODE, 1, &ctrlMode); + + uint8_t captureIntent = isPreviewRequest ? 
ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW : ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_CAPTURE_INTENT, 1, &captureIntent); + + uint8_t flashMode = ACAMERA_FLASH_MODE_OFF; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_FLASH_MODE, 1, &flashMode); + + uint8_t nrMode = ACAMERA_NOISE_REDUCTION_MODE_HIGH_QUALITY; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_NOISE_REDUCTION_MODE, 1, &nrMode); + + uint8_t edgeMode = ACAMERA_EDGE_MODE_FAST; + // status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_EDGE_MODE, 1, &edgeMode); + + if (afSupported && m_params.autoFocus) + { + if (!m_params.zoom) + { + if (maxRegions[2] > 0) + { + int32_t centerX = activeArraySize[0] >> 1; + int32_t centerY = activeArraySize[1] >> 1; + + int32_t sizeX = activeArraySize[0] >> 4; + int32_t sizeY = activeArraySize[1] >> 4; + + int32_t afRegions[] = { centerX - sizeX, centerY - sizeY, centerX + sizeX, centerY + sizeY, 1000 }; + // status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_CONTROL_AF_REGIONS, 5, afRegions); + if (status == ACAMERA_OK) + { +#ifdef _DEBUG + int aa = 0; +#endif + } + } + + // uint8_t afMode = ACAMERA_CONTROL_AF_MODE_CONTINUOUS_VIDEO; + uint8_t afMode = ACAMERA_CONTROL_AF_MODE_CONTINUOUS_PICTURE; + // uint8_t afMode = ACAMERA_CONTROL_AF_MODE_AUTO; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_MODE, 1, &afMode); + + // uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_CANCEL; + // status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig); + + // trig = ACAMERA_CONTROL_AF_TRIGGER_START; + // status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig); + } + } + else + { + uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_START; + // status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig); + } + + if (m_params.sceneMode != 0) + { + 
uint8_t sceneMode = m_params.sceneMode; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_SCENE_MODE, 1, &sceneMode); + } + + if (m_params.autoExposure) + { + uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_ON; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode); + // ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity_); + + if ((aeCompensationRange.min_ != 0 || aeCompensationRange.max_ != 0) && m_params.compensation != 0) + { + int32_t compensation = m_params.compensation; + if (compensation < aeCompensationRange.min_) + { + compensation = aeCompensationRange.min_; + } + if (compensation > aeCompensationRange.max_) + { + compensation = aeCompensationRange.max_; + } + // int32_t aeCompensation = aeCompensationRange.max_; + status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, 1, &compensation); + if (status != ACAMERA_OK) + { + int aa = 0; + } + } + + if (maxRegions[0] > 0) + { + int32_t aeRegions[] = { 0, 0, activeArraySize[0] - 1, activeArraySize[1] - 1, 1000 }; + // status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_CONTROL_AE_REGIONS, 5, aeRegions); + if (status == ACAMERA_OK) + { +#ifdef _DEBUG + int aa = 0; +#endif + } + } - if (format == AIMAGE_FORMAT_YUV_420_888/* || format == AIMAGE_FORMAT_JPEG*/) + if (isPreviewRequest) { - if (e.data.i32[i + 1] * e.data.i32[i + 2] > (maxResolutionX * maxResolutionY)) + if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED)) + { + uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_ON; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock); + + XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AE"); + mResult.aeLockSetted = 1; + } + else { - maxResolutionX = e.data.i32[i + 1]; - maxResolutionY = e.data.i32[i + 2]; + uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_OFF; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, 
&aeLock); + XYLOG(XYLOG_SEVERITY_DEBUG, "AE_Lock Not Supported"); } + + uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger); + XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d", (int)status); + m_precaptureStartTime = m_startTime; + + // ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLockOff); } - } - } + } + else + { + uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode); + + if (m_params.sensitivity > 0) + { + int32_t sensitivity = m_params.sensitivity; + status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity); + } + if (m_params.exposureTime > 0) + { + int64_t exposureTime = m_params.exposureTime; + status = ACaptureRequest_setEntry_i64(request->request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime); + } + + int64_t frameDuration = maxFrameDuration / 2; + // status = ACaptureRequest_setEntry_i64(request->request, ACAMERA_SENSOR_FRAME_DURATION, 1, &frameDuration); + } + + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AWB_MODE, 1, &awbMode); + if ((awbMode == ACAMERA_CONTROL_AWB_MODE_AUTO) && awbLockAvailable && (m_params.wait3ALocked & WAIT_AWB_LOCKED)) + { + uint8_t awbLock = ACAMERA_CONTROL_AWB_LOCK_ON; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AWB_LOCK, 1, &awbLock); + mResult.awbLockSetted = 1; + + XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AWB AWBS=%u", (unsigned int)mResult.awbState); + } - return 0; -} +#if 0 + uint8_t antiBandingMode = ACAMERA_CONTROL_AE_ANTIBANDING_MODE_60HZ; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_ANTIBANDING_MODE, 1, &antiBandingMode); + uint8_t flicker = ACAMERA_STATISTICS_SCENE_FLICKER_60HZ; + status = 
ACaptureRequest_setEntry_u8(request->request, ACAMERA_STATISTICS_SCENE_FLICKER, 1, &flicker); +#endif -int NdkCamera::open(const std::string& cameraId) { - XYLOG(XYLOG_SEVERITY_DEBUG, "DBG::try open %s", cameraId.c_str()); + if (m_params.zoom) + { + float zoomRatio = m_params.zoomRatio; + // uint8_t afMode = ACAMERA_CONTROL_AF_MODE_AUTO; + status = ACaptureRequest_setEntry_float(request->request, ACAMERA_CONTROL_ZOOM_RATIO, 1, &zoomRatio); + if (status != ACAMERA_OK) + { + } + } + + status = ACaptureRequest_addTarget(request->request, request->imageTarget); + + status = ACaptureSessionOutput_create(request->imageWindow, &request->sessionOutput); + status = ACaptureSessionOutputContainer_add(capture_session_output_container, request->sessionOutput); + } +#endif + + // capture session + ACameraCaptureSession_stateCallbacks camera_capture_session_state_callbacks; + camera_capture_session_state_callbacks.context = this; + camera_capture_session_state_callbacks.onActive = onSessionActive; + camera_capture_session_state_callbacks.onReady = ::onSessionReady; + camera_capture_session_state_callbacks.onClosed = onSessionClosed; + status = ACameraDevice_createCaptureSession(camera_device, capture_session_output_container, &camera_capture_session_state_callbacks, &capture_session); + + ACameraCaptureSession_captureCallbacks capture_session_capture_callbacks; + capture_session_capture_callbacks.context = this; + capture_session_capture_callbacks.onCaptureStarted = 0; + capture_session_capture_callbacks.onCaptureProgressed = ::onCaptureProgressed; + capture_session_capture_callbacks.onCaptureCompleted = ::onCaptureCompleted; + capture_session_capture_callbacks.onCaptureFailed = ::onCaptureFailed; + capture_session_capture_callbacks.onCaptureSequenceCompleted = onCaptureSequenceCompleted; + capture_session_capture_callbacks.onCaptureSequenceAborted = onCaptureSequenceAborted; + capture_session_capture_callbacks.onCaptureBufferLost = 0; + + status = 
ACameraCaptureSession_setRepeatingRequest(capture_session, &capture_session_capture_callbacks, 1, &(mCaptureRequests[PREVIEW_REQUEST_IDX]->request), &(mCaptureRequests[PREVIEW_REQUEST_IDX]->sessionSequenceId)); + + ALOGW("Preview Request: seqId=%d", mCaptureRequests[PREVIEW_REQUEST_IDX]->sessionSequenceId); - // camera_facing = _camera_facing; + m_startTime = GetMicroTimeStamp(); - camera_manager.Create(); - // ACameraManager_registerAvailabilityCallback(camera_manager, &camera_manager_cb); + m_precaptureStartTime = m_startTime; - // find camera - bool foundIt = false; - DisplayDimension disp(mWidth, mHeight); - DisplayDimension foundRes = disp; + return status == ACAMERA_OK ? 0 : 1; +} + +NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest) +{ camera_status_t status = ACAMERA_OK; - ALOGD("Start ACameraManager_getCameraIdList"); - { - ACameraIdList *camera_id_list = 0; - for (int retry = 0; retry < 100; retry++) - { - status = ACameraManager_getCameraIdList(camera_manager, &camera_id_list); - AASSERT(status == ACAMERA_OK, "ACameraManager_getCameraIdList return error, %d", status); - - for (int i = 0; i < camera_id_list->numCameras; ++i) { - const char *id = camera_id_list->cameraIds[i]; - if (cameraId.compare(id) == 0) { - foundIt = true; - break; - } - } - ACameraManager_deleteCameraIdList(camera_id_list); - if (foundIt) - { - break; - } - std::this_thread::sleep_for(std::chrono::milliseconds(16)); - } + CaptureRequest *request = new CaptureRequest(); + std::memset(request, 0, sizeof(CaptureRequest)); - ALOGD("End ACameraManager_getCameraIdList"); + request->pThis = this; + request->imageReader = isPreviewRequest ? mPreviewImageReader : mImageReader; + request->imageWindow = isPreviewRequest ? mPreviewImageWindow : mImageWindow; + request->imageTarget = isPreviewRequest ? mPreviewOutputTarget : mOutputTarget; + request->sessionOutput = isPreviewRequest ? mPreviewSessionOutput : mSessionOutput; + request->templateId = isPreviewRequest ? 
TEMPLATE_PREVIEW : (ACameraDevice_request_template)m_params.requestTemplate; - // ACameraManager_unregisterAvailabilityCallback(camera_manager, &camera_manager_cb); - if (!foundIt) - { - XYLOG(XYLOG_SEVERITY_ERROR, "Camera Not Found on ID: %s", cameraId.c_str()); - return 1; - } + // mCaptureRequests.push_back(request); - mCameraId = cameraId; + // capture request + status = ACameraDevice_createCaptureRequest(camera_device, request->templateId, &request->request); + ACaptureRequest_setUserContext(request->request, request); - ACameraMetadata * camera_metadata = 0; - status = ACameraManager_getCameraCharacteristics(camera_manager, cameraId.c_str(), &camera_metadata); - AASSERT(status == ACAMERA_OK, "ACameraManager_getCameraCharacteristics return error, %d", status); + // uint8_t ctrlMode = sceneModeSupported ? ACAMERA_CONTROL_MODE_USE_SCENE_MODE : ACAMERA_CONTROL_MODE_AUTO; + uint8_t ctrlMode = ACAMERA_CONTROL_MODE_AUTO; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_MODE, 1, &ctrlMode); - { - ACameraMetadata_const_entry e = {0}; - status = ACameraMetadata_getConstEntry(camera_metadata,ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &e); - // format of the data: format, width, height, input?, type int32 + uint8_t captureIntent = isPreviewRequest ? 
ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW : ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_CAPTURE_INTENT, 1, &captureIntent); - // DisplayDimension foundRes(4000, 4000); - // DisplayDimension maxJPG(0, 0); + uint8_t flashMode = ACAMERA_FLASH_MODE_OFF; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_FLASH_MODE, 1, &flashMode); - foundIt = false; - DisplayDimension temp; + uint8_t nrMode = ACAMERA_NOISE_REDUCTION_MODE_HIGH_QUALITY; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_NOISE_REDUCTION_MODE, 1, &nrMode); - for (int i = 0; i < e.count; i += 4) - { - int32_t input = e.data.i32[i + 3]; - if (input) continue; - int32_t format = e.data.i32[i + 0]; + uint8_t edgeMode = ACAMERA_EDGE_MODE_FAST; + // status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_EDGE_MODE, 1, &edgeMode); - if (format == AIMAGE_FORMAT_YUV_420_888/* || format == AIMAGE_FORMAT_JPEG*/) - { - DisplayDimension res(e.data.i32[i + 1], e.data.i32[i + 2]); - // XYLOG(XYLOG_SEVERITY_DEBUG, "CameraId=%s CX=%d CY=%d", cameraId.c_str(), res.width(), res.height()); - if (!disp.IsSameRatio(res)) - { + if (afSupported && m_params.autoFocus) + { + if (!m_params.zoom) + { + if (maxRegions[2] > 0) + { + int32_t centerX = activeArraySize[0] >> 1; + int32_t centerY = activeArraySize[1] >> 1; - if (res.width() >= mWidth && res.height() >= mHeight) - { - temp = res; - } - continue; - } + int32_t sizeX = activeArraySize[0] >> 4; + int32_t sizeY = activeArraySize[1] >> 4; - if (/*format == AIMAGE_FORMAT_YUV_420_888 && */res > disp) - { - foundIt = true; - foundRes = res; - } - } - } + int32_t afRegions[] = { centerX - sizeX, centerY - sizeY, centerX + sizeX, centerY + sizeY, 1000 }; + // status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_CONTROL_AF_REGIONS, 5, afRegions); + if (status == ACAMERA_OK) + { +#ifdef _DEBUG + int aa = 0; +#endif + } + } + + // uint8_t afMode = 
ACAMERA_CONTROL_AF_MODE_CONTINUOUS_VIDEO; + uint8_t afMode = ACAMERA_CONTROL_AF_MODE_CONTINUOUS_PICTURE; + // uint8_t afMode = ACAMERA_CONTROL_AF_MODE_AUTO; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_MODE, 1, &afMode); + + // uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_CANCEL; + // status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig); + + // trig = ACAMERA_CONTROL_AF_TRIGGER_START; + // status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig); + } + } + else + { + uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_START; + // status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig); + } + + if (m_params.sceneMode != 0) + { + uint8_t sceneMode = m_params.sceneMode; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_SCENE_MODE, 1, &sceneMode); + } + + if (m_params.autoExposure) + { + uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_ON; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode); + // ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity_); + + if ((aeCompensationRange.min_ != 0 || aeCompensationRange.max_ != 0) && m_params.compensation != 0) + { + int32_t compensation = m_params.compensation; + if (compensation < aeCompensationRange.min_) + { + compensation = aeCompensationRange.min_; + } + if (compensation > aeCompensationRange.max_) + { + compensation = aeCompensationRange.max_; + } + // int32_t aeCompensation = aeCompensationRange.max_; + status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, 1, &compensation); + if (status != ACAMERA_OK) + { + int aa = 0; + } + } + + if (maxRegions[0] > 0) + { + int32_t aeRegions[] = { 0, 0, activeArraySize[0] - 1, activeArraySize[1] - 1, 1000 }; + // status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_CONTROL_AE_REGIONS, 5, aeRegions); + if 
(status == ACAMERA_OK) + { +#ifdef _DEBUG + int aa = 0; +#endif + } + } + + if (isPreviewRequest) + { + if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED)) + { + uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_ON; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock); + + XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AE"); + mResult.aeLockSetted = 1; + } + else + { + uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_OFF; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock); + XYLOG(XYLOG_SEVERITY_DEBUG, "AE_Lock Not Supported"); + } + + uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger); + XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d", (int)status); + m_precaptureStartTime = m_startTime; + + // ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLockOff); + } + } + else + { + uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode); + + if (m_params.sensitivity > 0) + { + int32_t sensitivity = m_params.sensitivity; + status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity); + } + if (m_params.exposureTime > 0) + { + int64_t exposureTime = m_params.exposureTime; + status = ACaptureRequest_setEntry_i64(request->request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime); + } + + int64_t frameDuration = maxFrameDuration / 2; + // status = ACaptureRequest_setEntry_i64(request->request, ACAMERA_SENSOR_FRAME_DURATION, 1, &frameDuration); + } + + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AWB_MODE, 1, &awbMode); + if ((awbMode == ACAMERA_CONTROL_AWB_MODE_AUTO) && awbLockAvailable && (m_params.wait3ALocked & WAIT_AWB_LOCKED)) + { + uint8_t awbLock = 
ACAMERA_CONTROL_AWB_LOCK_ON; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AWB_LOCK, 1, &awbLock); + mResult.awbLockSetted = 1; + + XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AWB AWBS=%u", (unsigned int)mResult.awbState); + } - if (!foundIt) - { - foundRes = temp; - foundIt = true; - } - } +#if 0 + uint8_t antiBandingMode = ACAMERA_CONTROL_AE_ANTIBANDING_MODE_60HZ; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_ANTIBANDING_MODE, 1, &antiBandingMode); + uint8_t flicker = ACAMERA_STATISTICS_SCENE_FLICKER_60HZ; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_STATISTICS_SCENE_FLICKER, 1, &flicker); +#endif - if (!foundIt || foundRes.width() == 0 || foundRes.height() == 0) - { - ACameraMetadata_free(camera_metadata); - XYLOG(XYLOG_SEVERITY_ERROR, "Camera RES(%d, %d) Not Found on ID: %s", mWidth, mHeight, cameraId.c_str()); - return 1; - } + if (m_params.zoom) + { + float zoomRatio = m_params.zoomRatio; + // uint8_t afMode = ACAMERA_CONTROL_AF_MODE_AUTO; + status = ACaptureRequest_setEntry_float(request->request, ACAMERA_CONTROL_ZOOM_RATIO, 1, &zoomRatio); + if (status != ACAMERA_OK) + { + } + } - // foundRes.Flip(); + status = ACaptureRequest_addTarget(request->request, request->imageTarget); - // query faceing - acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT; - { - ACameraMetadata_const_entry e = {0}; - status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_LENS_FACING, &e); - AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_LENS_FACING return error, %d", status); - if (status == ACAMERA_OK) - { - facing = (acamera_metadata_enum_android_lens_facing_t) e.data.u8[0]; - } - } - camera_facing = facing; + // status = ACaptureSessionOutput_create(request->imageWindow, &request->sessionOutput); + // status = ACaptureSessionOutputContainer_add(capture_session_output_container, request->sessionOutput); - // query orientation - int orientation = 0; 
- { - ACameraMetadata_const_entry e = {0}; - status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_ORIENTATION, &e); - AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_SENSOR_ORIENTATION return error, %d", status); - if (status == ACAMERA_OK) - { - orientation = (int) e.data.i32[0]; - } - } - camera_orientation = orientation; + return request; +} - { - ACameraMetadata_const_entry e = {0}; - status = ACameraMetadata_getConstEntry(camera_metadata,ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE, &e); - } +void NdkCamera::close() +{ + XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::try close %s", mCameraId.c_str()); + camera_status_t res = ACAMERA_OK; + + mCaptureFrames.clear(); + + if ((ACameraManager *)camera_manager != NULL) + { + // res = ACameraManager_unregisterAvailabilityCallback(camera_manager, &camera_manager_cb); + } + + if (capture_session) + { + // res = ACameraCaptureSession_stopRepeating(capture_session); + ACameraCaptureSession_close(capture_session); + capture_session = 0; + } + + for (auto it = mCaptureRequests.begin(); it != mCaptureRequests.end(); ++it) + { + CaptureRequest* request = *it; + + if (request->request) + { + res = ACaptureRequest_removeTarget(request->request, request->imageTarget); + ACaptureRequest_free(request->request); + request->request = 0; + } + + /* + if (request->imageTarget) + { + ACameraOutputTarget_free(request->imageTarget); + request->imageTarget = 0; + } + */ + + delete request; + } + mCaptureRequests.clear(); + + if (mPreviewOutputTarget != NULL) + { + ACameraOutputTarget_free(mPreviewOutputTarget); + mPreviewOutputTarget = 0; + } + + if (mPreviewImageWindow != NULL) + { + ANativeWindow_release(mPreviewImageWindow); + mPreviewImageWindow = 0; + } + + if (mPreviewImageReader != NULL) + { + // AImageReader_setImageListener(image_reader, NULL); + //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::AImageReader_delete %s", mCameraId.c_str()); + AImageReader_delete(mPreviewImageReader); + 
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::End AImageReader_delete %s", mCameraId.c_str()); + + mPreviewImageReader = 0; + } + + if (mOutputTarget != NULL) + { + ACameraOutputTarget_free(mOutputTarget); + mOutputTarget = 0; + } + + if (mImageWindow != NULL) + { + ANativeWindow_release(mImageWindow); + mImageWindow = 0; + } + + if (mImageReader != NULL) + { + // AImageReader_setImageListener(image_reader, NULL); + //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::AImageReader_delete %s", mCameraId.c_str()); + AImageReader_delete(mImageReader); + //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::End AImageReader_delete %s", mCameraId.c_str()); + + mImageReader = 0; + } + + if (mPreviewSessionOutput != NULL) + { + if (capture_session_output_container) + { + ACaptureSessionOutputContainer_remove(capture_session_output_container, mPreviewSessionOutput); + } + ACaptureSessionOutput_free(mPreviewSessionOutput); + mPreviewSessionOutput = 0; + } + + if (mSessionOutput != NULL) + { + if (capture_session_output_container) + { + ACaptureSessionOutputContainer_remove(capture_session_output_container, mSessionOutput); + } + ACaptureSessionOutput_free(mSessionOutput); + mSessionOutput = 0; + } + + if (capture_session_output_container) + { + ACaptureSessionOutputContainer_free(capture_session_output_container); + capture_session_output_container = 0; + } + + if (camera_device) + { + //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::close device %s, %p", mCameraId.c_str(), camera_device); + ACameraDevice_close(camera_device); + //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::closed device %s, %p", mCameraId.c_str(), camera_device); + camera_device = 0; + } + + //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::closed %s", mCameraId.c_str()); +} + +void NdkCamera::onImageAvailable(AImageReader* reader) +{ + AImage* image = 0; + media_status_t mstatus = AMEDIA_OK; + if (reader == mPreviewImageReader) + { + mstatus = AImageReader_acquireLatestImage(reader, &image); + if (mstatus != AMEDIA_OK) { - 
ACameraMetadata_const_entry e = {0}; - status = ACameraMetadata_getConstEntry(camera_metadata,ACAMERA_CONTROL_AF_AVAILABLE_MODES, &e); - // AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_CONTROL_AF_AVAILABLE_MODES return error, %d", status); -#ifdef _DEBUG - std::string afModes; - for (int idx = 0; idx < e.count; idx++) + // https://stackoverflow.com/questions/67063562 + if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE) { - afModes += std::to_string(e.data.u8[idx]) + " "; - + XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireLatestImage error: %d", mstatus); } - XYLOG(XYLOG_SEVERITY_DEBUG, "Available AF Mode: ", afModes.c_str()); -#endif - afSupported = (status == ACAMERA_OK) && !(e.count == 0 || (e.count == 1 && e.data.u8[0] == ACAMERA_CONTROL_AF_MODE_OFF)); + return; } - { - ACameraMetadata_const_entry e = {0}; - status = ACameraMetadata_getConstEntry(camera_metadata,ACAMERA_CONTROL_AWB_AVAILABLE_MODES, &e); - // AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_CONTROL_AF_AVAILABLE_MODES return error, %d", status); - if (status == ACAMERA_OK) - { - for (int idx = 0; idx < e.count; idx++) - { - if (m_params.awbMode == e.data.u8[idx]) + if (mLdr == ~0) + { + uint8_t* y_data = 0; + int y_len = 0; + AImage_getPlaneData(image, 0, &y_data, &y_len); + +#if __cplusplus >= 201703L + uint64_t avgY = std::reduce(y_data, y_data + y_len, 0); +#else + uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0); +#endif + avgY = avgY / (uint64_t)y_len; + mLdr = avgY; + } + + AImage_delete(image); + return; + } + else + { + while (1) + { + mstatus = AImageReader_acquireNextImage(reader, &image); + if (mstatus != AMEDIA_OK) + { + // https://stackoverflow.com/questions/67063562 + if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE) + { + if (mCaptureFrames.size() < m_params.burstCaptures) { - awbMode = m_params.awbMode; - break; + XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireNextImage error: %d", mstatus); } - // unsigned int m = 
e.data.u8[idx]; - // XYLOG(XYLOG_SEVERITY_DEBUG, "Available AWB Mode %u", m); - } - } - // awbSupported = (status == ACAMERA_OK) && !(e.count == 0 || (e.count == 1 && e.data.u8[0] == ACAMERA_CONTROL_AWB_MODE_OFF)); - } + } + break; + } - if (!afSupported) - { - XYLOG(XYLOG_SEVERITY_ERROR, "AF not Supported"); - } + m_photoTaken = true; + m_locker.lock(); + mCaptureFrames.push_back(std::shared_ptr(image, AImage_delete)); + m_locker.unlock(); - { - ACameraMetadata_const_entry val = {0}; - status = ACameraMetadata_getConstEntry(camera_metadata,ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE, &val); - // AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE return error, %d", status); - if (status == ACAMERA_OK) - { - exposureRange.min_ = val.data.i64[0]; - if (exposureRange.min_ < kMinExposureTime) - { - exposureRange.min_ = kMinExposureTime; - } - exposureRange.max_ = val.data.i64[1]; - if (exposureRange.max_ > kMaxExposureTime) - { - exposureRange.max_ = kMaxExposureTime; - } - // exposureTime = exposureRange.value(2); - } - else - { - ALOGW("Unsupported ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE"); - exposureRange.min_ = exposureRange.max_ = 0l; - // exposureTime_ = 0l; - } - } + ALOGD("Capture Image Received"); + } - { - ACameraMetadata_const_entry e = {0}; - status = ACameraMetadata_getConstEntry(camera_metadata,ACAMERA_CONTROL_AE_LOCK_AVAILABLE, &e); - // AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_CONTROL_AF_AVAILABLE_MODES return error, %d", status); - aeLockAvailable = (status == ACAMERA_OK) ? 
(*e.data.u8 == ACAMERA_CONTROL_AE_LOCK_AVAILABLE_TRUE) : false; - } + bool captureCompleted = false; + size_t expectedTimes = mCaptureRequests.size() - 1; + m_locker.lock(); + captureCompleted = mCaptureResults.size() >= expectedTimes && mCaptureFrames.size() >= expectedTimes; + m_locker.unlock(); + if (captureCompleted) { - ACameraMetadata_const_entry e = {0}; - status = ACameraMetadata_getConstEntry(camera_metadata,ACAMERA_CONTROL_AWB_LOCK_AVAILABLE, &e); - awbLockAvailable = (status == ACAMERA_OK) ? (*e.data.u8 == ACAMERA_CONTROL_AWB_LOCK_AVAILABLE_TRUE) : false; + onBurstCapture(mCharacteristics, mCaptureResults, mLdr, mCaptureFrames); } + } +} - { - ACameraMetadata_const_entry val = {0}; - status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_ZOOM_RATIO_RANGE, &val); - if (status == ACAMERA_OK) - { - float zoomRatioMin = val.data.f[0]; - float zoomRatioMax = val.data.f[1]; +void NdkCamera::on_error(const std::string& msg) +{ +} - ALOGI("Zoom Ratio Range: [%f,%f]", zoomRatioMin, zoomRatioMax); - } - } +void NdkCamera::onDisconnected(ACameraDevice* device) +{ +} - { - ACameraMetadata_const_entry val = {0}; - status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AE_COMPENSATION_RANGE, &val); - if (status == ACAMERA_OK) - { - aeCompensationRange.min_ = val.data.i32[0]; - aeCompensationRange.max_ = val.data.i32[1]; +bool NdkCamera::on_image(cv::Mat& rgb) +{ + return false; +} - XYLOG(XYLOG_SEVERITY_DEBUG, "AE_COMPENSATION_RANGE [%d,%d]", aeCompensationRange.min_, aeCompensationRange.max_); - } - else - { - ALOGW("Unsupported ACAMERA_CONTROL_AE_COMPENSATION_RANGE"); - aeCompensationRange.min_ = aeCompensationRange.max_ = 0l; - } - } +bool NdkCamera::onBurstCapture(std::shared_ptr characteristics, std::vector >& results, uint32_t ldr, std::vector >& frames) +{ + return false; +} - { - ACameraMetadata_const_entry val = {0}; - status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AE_COMPENSATION_STEP, &val); - if 
(status == ACAMERA_OK) - { - aeCompensationStep = val.data.r[0]; +void NdkCamera::on_image(const unsigned char* nv21, int nv21_width, int nv21_height) +{ + // ALOGW("nv21 size: %d x %d", nv21_width, nv21_height); + // rotate nv21 + int w = 0; + int h = 0; + int rotate_type = 0; + cv::Mat nv21_rotated; + const unsigned char* yuv420data = nv21; + // TODO !!!??? + /* + if (camera_->GetSensorOrientation(&facing, &angle)) { + if (facing == ACAMERA_LENS_FACING_FRONT) { + imageRotation = (angle + rotation_) % 360; + imageRotation = (360 - imageRotation) % 360; + } else { + imageRotation = (angle - rotation_ + 360) % 360; + } + } + */ + + int orgWidth = mWidth; + int orgHeight = mHeight; + // int co = camera_orientation > 0 ? camera_orientation + 90 : camera_orientation; + if (m_params.orientation != 0) + { + int co = 0; + if (camera_facing == ACAMERA_LENS_FACING_FRONT) + { + co = (camera_orientation + (m_params.orientation - 1) * 90) % 360; + co = (360 - co) % 360; + } + else + { + co = (camera_orientation - (m_params.orientation - 1) * 90 + 360) % 360; + } + + XYLOG(XYLOG_SEVERITY_DEBUG, "Orientation=%d Facing=%d", co, camera_facing); + + // int co = 0; + if (co == 0) + { + w = nv21_width; + h = nv21_height; + rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 2 : 1; + } + else if (co == 90) + { + w = nv21_height; + h = nv21_width; + + orgWidth = mHeight; + orgHeight = mWidth; + + rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 5 : 6; + } + else if (co == 180) + { + w = nv21_width; + h = nv21_height; + rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 4 : 3; + } + else if (co == 270) + { + w = nv21_height; + h = nv21_width; + + orgWidth = mHeight; + orgHeight = mWidth; + + rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 
7 : 8; + } + + nv21_rotated.create(h + h / 2, w, CV_8UC1); + ncnn::kanna_rotate_yuv420sp(nv21, nv21_width, nv21_height, nv21_rotated.data, w, h, rotate_type); + yuv420data = nv21_rotated.data; + } + else + { + w = nv21_width; + h = nv21_height; + XYLOG(XYLOG_SEVERITY_DEBUG, "NO Orientation Facing=%d", camera_facing); + } + + // nv21_rotated to rgb + cv::Mat rgb; + if (w == orgWidth && h == orgHeight) + { + rgb.create(h, w, CV_8UC3); + // ncnn::yuv420sp2rgb(nv21_rotated.data, w, h, rgb.data); + ncnn::yuv420sp2rgb_nv12(yuv420data, w, h, rgb.data); + } + else + { + cv::Mat org(h, w, CV_8UC3); + ncnn::yuv420sp2rgb_nv12(yuv420data, w, h, org.data); + if (w * orgHeight == h * orgWidth) // Same Ratio + { + cv::resize(org, rgb, cv::Size(orgWidth, orgHeight)); + } + else + { + // Crop image + if (w > orgWidth && h >= orgHeight) + { + int left = (w - orgWidth) / 2; + int top = (h - orgHeight) / 2; + rgb = org(cv::Range(top, top + orgHeight), cv::Range(left, left + orgWidth)); + } + else + { + rgb = org; + } + } + } + on_image(rgb); +} - XYLOG(XYLOG_SEVERITY_DEBUG, "AE_COMPENSATION_STEP num=%d den=%d", aeCompensationStep.numerator, aeCompensationStep.denominator); - } - } +void NdkCamera::onSessionReady(ACameraCaptureSession *session) +{ +} - { - ACameraMetadata_const_entry e = {0}; - status = ACameraMetadata_getConstEntry(camera_metadata,ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION, &e); - maxFrameDuration = (status == ACAMERA_OK) ? 
*e.data.i64 : 0; - } +void NdkCamera::onCaptureProgressed(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result) +{ +} - { - ACameraMetadata_const_entry val = {0}; - status = ACameraMetadata_getConstEntry(camera_metadata,ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE, &val); - if (status == ACAMERA_OK) - { - sensitivityRange.min_ = val.data.i32[0]; - sensitivityRange.max_ = val.data.i32[1]; - } - else - { - ALOGW("failed for ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE"); - sensitivityRange.min_ = sensitivityRange.max_ = 0; - } - } +void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result) +{ + void* context = NULL; + ACaptureRequest_getUserContext(request, &context); + CaptureRequest* pCaptureRequest = reinterpret_cast(context); + + if (pCaptureRequest->request == mCaptureRequests[PREVIEW_REQUEST_IDX]->request) + { + if (mCaptureTriggered) + { + return; + } + + bool readyForCapture = true; + camera_status_t status = ACAMERA_ERROR_BASE; + unsigned long long ts = GetMicroTimeStamp(); + + uint8_t aeState = ACAMERA_CONTROL_AE_STATE_INACTIVE; + uint8_t awbState = ACAMERA_CONTROL_AWB_STATE_INACTIVE; + uint8_t afState = ACAMERA_CONTROL_AF_STATE_INACTIVE; + + ACameraMetadata_const_entry val = { 0 }; + val = { 0 }; + status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_STATE, &val); + aeState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AE_STATE_INACTIVE; + + val = { 0 }; + status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AWB_STATE, &val); + awbState = (status == ACAMERA_OK) ? val.data.u8[0] : ACAMERA_CONTROL_AWB_STATE_INACTIVE; + + val = { 0 }; + status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AF_STATE, &val); + afState = (status == ACAMERA_OK) ? 
*(val.data.u8) : ACAMERA_CONTROL_AF_STATE_INACTIVE; + + // ALOGW("Preview State AFS=%u AES=%u AWBS=%u Time=%u", + // (unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, (unsigned int)(ts - m_startTime)); + + // Check if timeout + if (ts - m_startTime < m_params.focusTimeout) + { + if (afSupported && (m_params.autoFocus != 0)) + { + /* + if (afState == ACAMERA_CONTROL_AF_STATE_PASSIVE_FOCUSED) + { + // Will lock it + if (mResult.afLockSetted == 0) + { + uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_START; + status = ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig); + + mResult.afLockSetted = 1; + //XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger AF AFS=%u", (uint32_t)mResult.afState); + readyForCapture = false; + } + } + */ + + if (afState != ACAMERA_CONTROL_AF_STATE_PASSIVE_FOCUSED && + afState != ACAMERA_CONTROL_AF_STATE_FOCUSED_LOCKED && + afState != ACAMERA_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) + // if (afState != ACAMERA_CONTROL_AF_STATE_INACTIVE) + { + //XYLOG(XYLOG_SEVERITY_DEBUG, "AF Enabled And Focused"); + readyForCapture = false; + } + } + + if (m_params.autoExposure != 0) + { + if (aeState == ACAMERA_CONTROL_AE_STATE_PRECAPTURE) + { + uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL; + status = ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger); + + aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START; + status = ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger); + //XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d AES=%u", (int)status, (unsigned int)mResult.aeState); + + readyForCapture = false; + numberOfPrecaptures = 0; + m_precaptureStartTime = ts; + } + + if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED)) + { + if (aeState != ACAMERA_CONTROL_AE_STATE_LOCKED) { + readyForCapture = false; + } + else + { +#if 0 + //XYLOG(XYLOG_SEVERITY_DEBUG, "AE Locked"); +#endif + } 
+ } + else + { + if (aeState != ACAMERA_CONTROL_AE_STATE_CONVERGED && + aeState != ACAMERA_CONTROL_AE_STATE_FLASH_REQUIRED && + aeState != ACAMERA_CONTROL_AE_STATE_LOCKED) { + readyForCapture = false; + } + else { +#if 0 + XYLOG(XYLOG_SEVERITY_DEBUG, "AWB CONVERGED Or Locked"); +#endif + } + } + } + + if (awbMode == ACAMERA_CONTROL_AWB_MODE_AUTO) + { + if (awbLockAvailable && (m_params.wait3ALocked & WAIT_AWB_LOCKED)) { + if (awbState != ACAMERA_CONTROL_AWB_STATE_LOCKED) + { + readyForCapture = false; + } + else + { +#if 0 + //XYLOG(XYLOG_SEVERITY_DEBUG, "AWB Locked"); +#endif + } + } + else + { + if (awbState != ACAMERA_CONTROL_AWB_STATE_CONVERGED && + awbState != ACAMERA_CONTROL_AWB_STATE_LOCKED) + { + readyForCapture = false; + } + else + { +#if 0 + XYLOG(XYLOG_SEVERITY_DEBUG, "AE CONVERGED Or Locked"); +#endif + } + } + } + } + else + { +#if 0 + XYLOG(XYLOG_SEVERITY_WARNING, "Prepare Capture Timeout for 3A And will Capture AFS=%u AES=%u AWBS=%u Time=%u", + (unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, (unsigned int)(ts - m_startTime)); +#endif + } - { - ACameraMetadata_const_entry val = {0}; - status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, &val); - if (status == ACAMERA_OK) - { - activeArraySize[0] = val.data.i32[2]; - activeArraySize[1] = val.data.i32[3]; - } - } + if (readyForCapture/* && mCaptureRequests.size() > 1*/) + { + ALOGW("Ready for Capture AFS=%u AES=%u AWBS=%u Time=%u", + (unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, (unsigned int)(ts - m_startTime)); - { - ACameraMetadata_const_entry val = {0}; - status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_MAX_REGIONS, &val); - if (status == ACAMERA_OK) + uint32_t burstCaptures = getBurstCaptures(); + if (burstCaptures == 0) { - maxRegions[0] = val.data.i32[0]; - maxRegions[1] = val.data.i32[1]; - maxRegions[2] = val.data.i32[2]; + burstCaptures = 1; } - } + std::vector requests; + int 
sequenceId = 0; + requests.reserve(burstCaptures); + + for (int idx = 0; idx < burstCaptures; idx++) + { + CaptureRequest* request = CreateRequest(false); + mCaptureRequests.push_back(request); + // CopyPreviewRequest(mCaptureRequests[idx]->request, result); + requests.push_back(request->request); + } + + // ALOGW("Will Stop Repeating Request"); + status = ACameraCaptureSession_stopRepeating(capture_session); + // ALOGW("Finished Repeating Request"); + + ACameraCaptureSession_captureCallbacks capture_session_capture_cb; + capture_session_capture_cb.context = this; + capture_session_capture_cb.onCaptureStarted = 0; + capture_session_capture_cb.onCaptureProgressed = ::onCaptureProgressed; + capture_session_capture_cb.onCaptureCompleted = ::onCaptureCompleted; + capture_session_capture_cb.onCaptureFailed = ::onCaptureFailed; + capture_session_capture_cb.onCaptureSequenceCompleted = onCaptureSequenceCompleted; + capture_session_capture_cb.onCaptureSequenceAborted = onCaptureSequenceAborted; + capture_session_capture_cb.onCaptureBufferLost = 0; + + int numberOfRequests = requests.size(); + status = ACameraCaptureSession_capture(capture_session, &capture_session_capture_cb, + numberOfRequests, &requests[0], &sequenceId); + + ALOGW("Capture num = %d sequenceId=%d", numberOfRequests, sequenceId); + for (int idx = 1; idx < mCaptureRequests.size(); idx++) + { + mCaptureRequests[idx]->sessionSequenceId = sequenceId; + } + + mCaptureTriggered = true; + } + } + else + { + uint64_t tid = getThreadIdOfULL(); + ALOGW("Capture Result sequenceId=%d TID=%lld", pCaptureRequest->sessionSequenceId, (long long)tid); + + ACameraMetadata* pCopy = ACameraMetadata_copy(result); + bool captureCompleted = false; + size_t expectedTimes = mCaptureRequests.size() - 1; + + m_locker.lock(); + mCaptureResults.push_back(std::shared_ptr(pCopy, ACameraMetadata_free)); + captureCompleted = mCaptureResults.size() >= expectedTimes && mCaptureFrames.size() >= expectedTimes; + m_locker.unlock(); + if 
(captureCompleted) { - ACameraMetadata_const_entry e = {0}; - status = ACameraMetadata_getConstEntry(camera_metadata,ACAMERA_CONTROL_AVAILABLE_SCENE_MODES, &e); - if (status == ACAMERA_OK) - { - for (int i = 0; i < e.count; i++) - { - if (m_params.sceneMode == e.data.u8[i]) - { - sceneModeSupported = true; - break; - } - } - } + onBurstCapture(mCharacteristics, mCaptureResults, mLdr, mCaptureFrames); } + } +} - ACameraMetadata_free(camera_metadata); - } +void NdkCamera::CopyPreviewRequest(ACaptureRequest* request, const ACameraMetadata* previewResult) +{ + camera_status_t status = ACAMERA_ERROR_BASE; + + ACameraMetadata_const_entry val = { 0 }; + status = ACameraMetadata_getConstEntry(previewResult, ACAMERA_SENSOR_EXPOSURE_TIME, &val); + int64_t exTime = (status == ACAMERA_OK) ? val.data.i64[0] : -1; + val = {0}; + status = ACameraMetadata_getConstEntry(previewResult, ACAMERA_SENSOR_SENSITIVITY, &val); + int32_t sensitivity = (status == ACAMERA_OK) ? *(val.data.i32) : 0; + + if (exTime != -1 && sensitivity != 0) + { + uint8_t aeModeOff = ACAMERA_CONTROL_AE_MODE_OFF; + ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_MODE, 1, &aeModeOff); + ACaptureRequest_setEntry_i64(request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exTime); + ACaptureRequest_setEntry_i32(request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity); + } + + /* + val = { 0 }; + float focusDistance = NAN; + status = ACameraMetadata_getConstEntry(result, ACAMERA_LENS_FOCUS_DISTANCE, &val); + if (status == ACAMERA_OK) + { + focusDistance = *val.data.f; + } + */ +} - // open camera - { - ACameraDevice_StateCallbacks camera_device_state_callbacks; - camera_device_state_callbacks.context = this; - camera_device_state_callbacks.onDisconnected = ::onDisconnected; - camera_device_state_callbacks.onError = onError; +void NdkCamera::onCaptureFailed(ACameraCaptureSession* session, ACaptureRequest* request, ACameraCaptureFailure* failure) +{ + XYLOG(XYLOG_SEVERITY_WARNING, "onCaptureFailed session=%p request=%p 
reason=%d", session, request, failure->reason); + + char msg[32] = { 0 }; + snprintf(msg, sizeof(msg), "CaptureFailed reason=%d PhotoTaken=%d", failure->reason, m_photoTaken ? 1 : 0); + if (!m_photoTaken) + { + on_error(msg); + } +} - status = ACameraManager_openCamera(camera_manager, cameraId.c_str(), &camera_device_state_callbacks, &camera_device); - if (status != ACAMERA_OK) - { - XYLOG(XYLOG_SEVERITY_ERROR, "Failed to open camera %s res=%d", cameraId.c_str(), status); - return 1; - } - } +void NdkCamera::onError(ACameraDevice* device, int error) +{ + if (ACAMERA_ERROR_CAMERA_DEVICE == error) + { + } + + XYLOG(XYLOG_SEVERITY_ERROR, "CameraStatus::onError CameraId: %s err=%d PhotoTaken=%d", ACameraDevice_getId(device), error, m_photoTaken ? 1 : 0); + if (!m_photoTaken) + { + std::string msg = "NdkCamera error code=" + std::to_string(error); + on_error(msg); + } +} - XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::Open %s Orientation=%d width=%d height=%d", cameraId.c_str(), camera_orientation, foundRes.width(), foundRes.height()); +void NdkCamera::onAvailabilityCallback(const char* cameraId) +{ + std::string s(cameraId); + m_locker.lock(); + m_availableCameras.insert(s); + m_locker.unlock(); - // setup imagereader and its surface - { - // media_status_t mstatus = AImageReader_new(foundRes.width(), foundRes.height(), AIMAGE_FORMAT_YUV_420_888, /*maxImages*/2, &image_reader); - media_status_t mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), AIMAGE_FORMAT_YUV_420_888, /*maxImages*/2, &image_reader); +} +void NdkCamera::onUnavailabilityCallback(const char* cameraId) +{ + std::string s(cameraId); + m_locker.lock(); + m_availableCameras.erase(s); + m_locker.unlock(); +} - if (mstatus == AMEDIA_OK) - { - AImageReader_ImageListener listener; - listener.context = this; - listener.onImageAvailable = ::onImageAvailable; - mstatus = AImageReader_setImageListener(image_reader, &listener); - mstatus = AImageReader_getWindow(image_reader, 
&image_reader_surface); - // ANativeWindow_setBuffersGeometry(image_reader_surface, width, height,WINDOW_FORMAT_RGBX_8888); - ANativeWindow_acquire(image_reader_surface); - } - } +bool NdkCamera::IsCameraAvailable(const std::string& cameraId) +{ + bool existed = false; + m_locker.lock(); + existed = (m_availableCameras.find(cameraId) != m_availableCameras.cend()); + m_locker.unlock(); - m_imagesCaptured = 0; + return existed; +} - // capture request - { - ACameraDevice_request_template templateId = ((afSupported && m_params.autoFocus) || - m_params.autoExposure) ? TEMPLATE_PREVIEW - : TEMPLATE_STILL_CAPTURE; +int32_t NdkCamera::getOutputFormat() const +{ + return m_params.burstRawCapture ? AIMAGE_FORMAT_RAW16 : AIMAGE_FORMAT_YUV_420_888; +} - templateId = (ACameraDevice_request_template)m_params.requestTemplate; - status = ACameraDevice_createCaptureRequest(camera_device, templateId, &capture_request); +int32_t NdkCamera::getBurstCaptures() const +{ + return m_params.burstRawCapture ? m_params.burstCaptures : 1; +} - int32_t fpsRange[2] = {1,10}; - status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_CONTROL_AE_TARGET_FPS_RANGE,2,fpsRange); - } +void NdkCamera::CreateSession(ANativeWindow* previewWindow, + ANativeWindow* jpgWindow, bool manualPreview, + int32_t imageRotation, int32_t width, int32_t height) { + media_status_t status; + + /* + // Create output from this app's ANativeWindow, and add into output container + requests[PREVIEW_REQUEST_IDX].outputNativeWindow = previewWindow; + requests[PREVIEW_REQUEST_IDX].templateId = TEMPLATE_PREVIEW; + //requests_[JPG_CAPTURE_REQUEST_IDX].outputNativeWindow_ = jpgWindow; + //requests_[JPG_CAPTURE_REQUEST_IDX].template_ = TEMPLATE_STILL_CAPTURE; + + ACaptureSessionOutputContainer_create(&capture_session_output_container); + + for (auto& req : requests) { + if (!req.outputNativeWindow) continue; + + ANativeWindow_acquire(req.outputNativeWindow); + ACaptureSessionOutput_create(req.outputNativeWindow, 
&req.sessionOutput); + ACaptureSessionOutputContainer_add(capture_session_output_container, req.sessionOutput); + ACameraOutputTarget_create(req.outputNativeWindow, &req.target); + ACameraDevice_createCaptureRequest(camera_device, req.templateId, &req.request); + ACaptureRequest_addTarget(req.request, req.target); + + // To capture images + media_status_t mstatus = AImageReader_new(width, height, getOutputFormat(), 1, &req.imageReader); + + if (mstatus == AMEDIA_OK) { + AImageReader_ImageListener listener; + listener.context = this; + listener.onImageAvailable = ::onImageAvailable; + mstatus = AImageReader_setImageListener(req.imageReader, &listener); + } + // req.imageReader = createJpegReader(); + status = AImageReader_getWindow(req.imageReader, &req.imageWindow); + ANativeWindow_acquire(req.outputNativeWindow); + + ACameraOutputTarget_create(req.imageWindow, &req.imageTarget); + ACaptureRequest_addTarget(req.request, req.imageTarget); + ACaptureSessionOutput_create(req.imageWindow, &req.imageOutput); + ACaptureSessionOutputContainer_add(capture_session_output_container, req.imageOutput); + + //ACameraOutputTarget_create(imageWindow, &imageTarget); + //ACaptureRequest_addTarget(req.request_, imageTarget); + //ACaptureSessionOutput_create(imageWindow, &imageOutput); + //ACaptureSessionOutputContainer_add(outputContainer_, imageOutput); + } + + // Create a capture session for the given preview request + ACameraCaptureSession_stateCallbacks camera_capture_session_state_callbacks; + camera_capture_session_state_callbacks.context = this; + camera_capture_session_state_callbacks.onActive = onSessionActive; + camera_capture_session_state_callbacks.onReady = ::onSessionReady; + camera_capture_session_state_callbacks.onClosed = onSessionClosed; + + ACameraDevice_createCaptureSession(camera_device, capture_session_output_container, &camera_capture_session_state_callbacks, &capture_session); + + if (jpgWindow) { + 
ACaptureRequest_setEntry_i32(requests[JPG_CAPTURE_REQUEST_IDX].request, + ACAMERA_JPEG_ORIENTATION, 1, &imageRotation); + } + + if (!manualPreview) { + return; + } + // + // Only preview request is in manual mode, JPG is always in Auto mode + // JPG capture mode could also be switch into manual mode and control + // the capture parameters, this sample leaves JPG capture to be auto mode + // (auto control has better effect than author's manual control) + + + //uint8_t aeModeOff = ACAMERA_CONTROL_AE_MODE_OFF; + //ACaptureRequest_setEntry_u8(requests[PREVIEW_REQUEST_IDX].request, + // ACAMERA_CONTROL_AE_MODE, 1, &aeModeOff)); + //ACaptureRequest_setEntry_i32(requests[PREVIEW_REQUEST_IDX].request, + // ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity)); + //ACaptureRequest_setEntry_i64(requests[PREVIEW_REQUEST_IDX].request, + // ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime)); + */ +} - uint8_t ctrlMode = sceneModeSupported ? ACAMERA_CONTROL_MODE_USE_SCENE_MODE : ACAMERA_CONTROL_MODE_AUTO; - status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_MODE, 1, &ctrlMode); +void NdkCamera::CreateSession(ANativeWindow* previewWindow) { + CreateSession(previewWindow, NULL, false, 0, 1920, 1080); +} - uint8_t flashMode = ACAMERA_FLASH_MODE_OFF; - status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_FLASH_MODE, 1, &flashMode); +void NdkCamera::DestroySession() +{ + /* + for (auto& req : requests) + { + if (!req.outputNativeWindow) continue; - uint8_t nrMode = ACAMERA_NOISE_REDUCTION_MODE_FAST; - status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_NOISE_REDUCTION_MODE, 1, &nrMode); + ACaptureRequest_removeTarget(req.request, req.target); + ACaptureRequest_free(req.request); + ACameraOutputTarget_free(req.target); - uint8_t edgeMode = ACAMERA_EDGE_MODE_FAST; - status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_EDGE_MODE, 1, &edgeMode); + ACaptureSessionOutputContainer_remove(capture_session_output_container, req.sessionOutput); + 
ACaptureSessionOutput_free(req.sessionOutput); - if (afSupported && m_params.autoFocus) - { - if (!m_params.zoom) - { - if (maxRegions[2] > 0) - { - int32_t centerX = activeArraySize[0] >> 1; - int32_t centerY = activeArraySize[1] >> 1; + ANativeWindow_release(req.outputNativeWindow); - int32_t sizeX = activeArraySize[0] >> 4; - int32_t sizeY = activeArraySize[1] >> 4; + AImageReader_delete(req.imageReader); + req.imageReader = nullptr; + } + */ +} - int32_t afRegions[] = {centerX - sizeX, centerY - sizeY, centerX + sizeX, centerY + sizeY, 1000}; - // status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_CONTROL_AF_REGIONS, 5, afRegions); - if (status == ACAMERA_OK) - { - // m_imagesCaptured = ~0; -#ifdef _DEBUG - int aa = 0; -#endif - } - } +void NdkCamera::writeJpegFile(AImage *image, const char* path) +{ + int planeCount; + media_status_t status = AImage_getNumberOfPlanes(image, &planeCount); + // ASSERT(status == AMEDIA_OK && planeCount == 1, + // "Error: getNumberOfPlanes() planeCount = %d", planeCount); + uint8_t *data = nullptr; + int len = 0; + AImage_getPlaneData(image, 0, &data, &len); + + FILE *file = fopen(path, "wb"); + if (file) { + if (data && len) + { + fwrite(data, 1, len, file); + } + fclose(file); + } +} + +void NdkCamera::writeRawFile(AImage *image, ACameraMetadata* characteristics, ACameraMetadata* result, const char* path) +{ + // dngCreator. 
+ int32_t width; + int32_t height; + AImage_getWidth(image, &width); + AImage_getHeight(image, &height); - // uint8_t afMode = ACAMERA_CONTROL_AF_MODE_CONTINUOUS_VIDEO; - uint8_t afMode = ACAMERA_CONTROL_AF_MODE_CONTINUOUS_PICTURE; - // uint8_t afMode = ACAMERA_CONTROL_AF_MODE_AUTO; - status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AF_MODE, 1, &afMode); + int planeCount; + media_status_t status = AImage_getNumberOfPlanes(image, &planeCount); + // ASSERT(status == AMEDIA_OK && planeCount == 1, + // "Error: getNumberOfPlanes() planeCount = %d", planeCount); + uint8_t *data = nullptr; + int len = 0; + AImage_getPlaneData(image, 0, &data, &len); - // uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_CANCEL; - // status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig); + DngCreator dngCreator(characteristics, result); - // trig = ACAMERA_CONTROL_AF_TRIGGER_START; - // status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig); - } - if (status == ACAMERA_OK) - { - m_imagesCaptured = ~0; - } - } - else - { - uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_START; - // status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig); - // m_imagesCaptured = (status == ACAMERA_OK) ? 
~0 : 0; - } + std::vector dngFile; - // std::this_thread::sleep_for(std::chrono::milliseconds(128)); + // std::vector& out, const uint8_t* rawBuffer, size_t bufferLen, uint32_t width, uint32_t height, long offset); + dngCreator.writeInputBuffer(dngFile, data, len, width, height, 0); - if (m_params.sceneMode != 0) + if (dngFile.empty()) { - uint8_t sceneMode = m_params.sceneMode; - status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_SCENE_MODE, 1, &sceneMode); + return; } + FILE *file = fopen(path, "wb"); + if (file) { + if (data && len) + { + fwrite(&dngFile[0], 1, dngFile.size(), file); + } + fclose(file); + } +} - { - if (m_params.autoExposure) - { - uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_ON; - status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode); - // ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity_); - - if ((aeCompensationRange.min_ != 0 || aeCompensationRange.max_ != 0) && m_params.compensation != 0) - { - int32_t compensation = m_params.compensation; - if (compensation < aeCompensationRange.min_) - { - compensation = aeCompensationRange.min_; - } - if (compensation > aeCompensationRange.max_) - { - compensation = aeCompensationRange.max_; - } - // int32_t aeCompensation = aeCompensationRange.max_; - status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, 1, &compensation); - if (status != ACAMERA_OK) - { - int aa = 0; - } - } - - if (maxRegions[0] > 0) - { - int32_t aeRegions[] = {0, 0, activeArraySize[0] - 1, activeArraySize[1] - 1, 1000}; - // status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_CONTROL_AE_REGIONS, 5, aeRegions); - if (status == ACAMERA_OK) - { - // m_imagesCaptured = ~0; -#ifdef _DEBUG - int aa = 0; -#endif - } - } - - if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED)) - { - uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_ON; - status = ACaptureRequest_setEntry_u8(capture_request, 
ACAMERA_CONTROL_AE_LOCK, 1,&aeLock); - - XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AE"); - mResult.aeLockSetted = 1; - } - else - { - XYLOG(XYLOG_SEVERITY_DEBUG, "AE_Lock Not Supported"); - } - - uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START; - status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger); - XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d", (int)status); - m_precaptureStartTime = m_startTime; - if (status == ACAMERA_OK) - { - m_imagesCaptured = ~0; - } - - - // ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLockOff); - } - else - { - uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF; - status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode); - - if (m_params.sensitivity > 0) - { - int32_t sensitivity = m_params.sensitivity; - status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity); - } - if (m_params.exposureTime > 0) - { - int64_t exposureTime = m_params.exposureTime; - - status = ACaptureRequest_setEntry_i64(capture_request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime); - } - - int64_t frameDuration = maxFrameDuration / 2; - status = ACaptureRequest_setEntry_i64(capture_request, ACAMERA_SENSOR_FRAME_DURATION, 1, &frameDuration); - - } - // TODO: - // m_imagesCaptured = 0; - - { - status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AWB_MODE, 1, &awbMode); - - if ((awbMode == ACAMERA_CONTROL_AWB_MODE_AUTO) && awbLockAvailable && (m_params.wait3ALocked & WAIT_AWB_LOCKED)) - { - uint8_t awbLock = ACAMERA_CONTROL_AWB_LOCK_ON; - status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AWB_LOCK, 1, &awbLock); - mResult.awbLockSetted = 1; - - XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AWB AWBS=%u", (unsigned int)mResult.awbState); - } - } - -#if 0 - uint8_t antiBandingMode = ACAMERA_CONTROL_AE_ANTIBANDING_MODE_60HZ; - status = 
ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_ANTIBANDING_MODE, 1, &antiBandingMode); - - uint8_t flicker = ACAMERA_STATISTICS_SCENE_FLICKER_60HZ; - status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_STATISTICS_SCENE_FLICKER, 1, &flicker); -#endif - - status = ACameraOutputTarget_create(image_reader_surface, &image_reader_target); - status = ACaptureRequest_addTarget(capture_request, image_reader_target); - } - - if (m_params.zoom) - { - float zoomRatio = m_params.zoomRatio; - // uint8_t afMode = ACAMERA_CONTROL_AF_MODE_AUTO; - status = ACaptureRequest_setEntry_float(capture_request, ACAMERA_CONTROL_ZOOM_RATIO, 1, &zoomRatio); - if (status != ACAMERA_OK) - { -#ifdef _DEBUG - int aa = 0; -#endif - } - } - - // capture session - { - ACameraCaptureSession_stateCallbacks camera_capture_session_state_callbacks; - camera_capture_session_state_callbacks.context = this; - camera_capture_session_state_callbacks.onActive = onSessionActive; - camera_capture_session_state_callbacks.onReady = ::onSessionReady; - camera_capture_session_state_callbacks.onClosed = onSessionClosed; - - status = ACaptureSessionOutputContainer_create(&capture_session_output_container); - - status = ACaptureSessionOutput_create(image_reader_surface, &capture_session_output); - - status = ACaptureSessionOutputContainer_add(capture_session_output_container, capture_session_output); - - status = ACameraDevice_createCaptureSession(camera_device, capture_session_output_container, &camera_capture_session_state_callbacks, &capture_session); - - ACameraCaptureSession_captureCallbacks camera_capture_session_capture_callbacks; - camera_capture_session_capture_callbacks.context = this; - camera_capture_session_capture_callbacks.onCaptureStarted = 0; - camera_capture_session_capture_callbacks.onCaptureProgressed = ::onCaptureProgressed; - camera_capture_session_capture_callbacks.onCaptureCompleted = ::onCaptureCompleted; - camera_capture_session_capture_callbacks.onCaptureFailed = 
onCaptureFailed; - camera_capture_session_capture_callbacks.onCaptureSequenceCompleted = onCaptureSequenceCompleted; - camera_capture_session_capture_callbacks.onCaptureSequenceAborted = onCaptureSequenceAborted; - camera_capture_session_capture_callbacks.onCaptureBufferLost = 0; - - if (m_imagesCaptured != 0) - { - status = ACameraCaptureSession_setRepeatingRequest(capture_session, &camera_capture_session_capture_callbacks, 1, &capture_request, &captureSequenceId); - } - else - { - status = ACameraCaptureSession_capture(capture_session, &camera_capture_session_capture_callbacks, 1, &capture_request, &captureSequenceId); - } - - m_startTime = GetMicroTimeStamp(); - - m_precaptureStartTime = m_startTime; - } - - return status == ACAMERA_OK ? 0 : 1; -} - -void NdkCamera::close() -{ - XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::try close %s", mCameraId.c_str()); - camera_status_t res = ACAMERA_OK; - - if ((ACameraManager *)camera_manager != NULL) - { - // res = ACameraManager_unregisterAvailabilityCallback(camera_manager, &camera_manager_cb); - } - - if (capture_session) - { - // res = ACameraCaptureSession_stopRepeating(capture_session); - ACameraCaptureSession_close(capture_session); - capture_session = 0; - } - - if (capture_request) - { - res = ACaptureRequest_removeTarget(capture_request, image_reader_target); - ACaptureRequest_free(capture_request); - capture_request = 0; - } - - if (image_reader_target) - { - ACameraOutputTarget_free(image_reader_target); - image_reader_target = 0; - } - - if (capture_session_output) - { - if (capture_session_output_container) - { - ACaptureSessionOutputContainer_remove(capture_session_output_container, capture_session_output); - } - ACaptureSessionOutput_free(capture_session_output); - capture_session_output = 0; - } - - if (capture_session_output_container) - { - ACaptureSessionOutputContainer_free(capture_session_output_container); - capture_session_output_container = 0; - } - - if (camera_device) - { - 
XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::close device %s, %p", mCameraId.c_str(), camera_device); - ACameraDevice_close(camera_device); - XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::closed device %s, %p", mCameraId.c_str(), camera_device); - camera_device = 0; - } - - if (image_reader_surface) - { - ANativeWindow_release(image_reader_surface); - image_reader_surface = 0; - } - - if (image_reader != NULL) - { - // AImageReader_setImageListener(image_reader, NULL); - XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::AImageReader_delete %s", mCameraId.c_str()); - - AImageReader_delete(image_reader); - XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::End AImageReader_delete %s", mCameraId.c_str()); - - image_reader = 0; - } - XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::closed %s", mCameraId.c_str()); -} - -void NdkCamera::onImageAvailable(AImageReader* reader) -{ - ALOGD("onImageAvailable %p", reader); - - AImage* image = 0; - media_status_t mstatus = AImageReader_acquireLatestImage(reader, &image); - - if (mstatus != AMEDIA_OK) - { - // error - // https://stackoverflow.com/questions/67063562 - if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE) - { - XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireLatestImage error: %d", mstatus); - } - return; - } - - uint8_t* y_data = 0; - int y_len = 0; -#if 0 - if (!lightDetected) - { - AImage_getPlaneData(image, 0, &y_data, &y_len); - - lightDetected = true; - -#if __cplusplus >= 201703L - uint64_t avgY = std::reduce(y_data, y_data + y_len, 0); -#else - uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0); -#endif - avgY = avgY / (uint64_t)y_len; - mResult.avgY = avgY; -#if 1 - if (avgY < 50) - { - if (m_params.autoExposure) - { - uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF; - camera_status_t status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode); - - int32_t sensitivity = (avgY < 5) ? 
2000 : (mResult.sensitivity * 60.0 / avgY); - status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity); - - int64_t exposureTime = (avgY < 5) ? 200 * 1000000 : (mResult.exposureTime * 120.0 / avgY); - status = ACaptureRequest_setEntry_i64(capture_request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime); - - XYLOG(XYLOG_SEVERITY_WARNING, "YUV Light: %u EXPO:%lld => %lld ISO: %u => %u", (uint32_t)avgY, - mResult.exposureTime, exposureTime, mResult.sensitivity, sensitivity); - } - AImage_delete(image); - return; - } -#endif - } -#endif - - if (m_imagesCaptured == ~0 || m_imagesCaptured >= 1) - { - // XYLOG(XYLOG_SEVERITY_DEBUG, "m_imagesCaptured=%u wait for next image", m_imagesCaptured); - // Not Ready Or Taken - AImage_delete(image); - /* if (m_imagesCaptured != ~0) - { - XYLOG(XYLOG_SEVERITY_DEBUG, "Skip Image index=%u", m_imagesCaptured); - m_imagesCaptured++; - } - */ - return; - } - - mResult.duration = GetMicroTimeStamp() - m_startTime; - - int32_t format; - AImage_getFormat(image, &format); - - // ASSERT(format == AIMAGE_FORMAT_YUV_420_888); - - int32_t width = 0; - int32_t height = 0; - AImage_getWidth(image, &width); - AImage_getHeight(image, &height); +bool NdkCamera::convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& width, int32_t& height) +{ + media_status_t status; + status = AImage_getWidth(image, &width); + status = AImage_getHeight(image, &height); int32_t y_pixelStride = 0; int32_t u_pixelStride = 0; @@ -1000,44 +1867,30 @@ void NdkCamera::onImageAvailable(AImageReader* reader) AImage_getPlaneRowStride(image, 1, &u_rowStride); AImage_getPlaneRowStride(image, 2, &v_rowStride); - // uint8_t* y_data = 0; - uint8_t* u_data = 0; - uint8_t* v_data = 0; - // int y_len = 0; + uint8_t *y_data = 0; + uint8_t *u_data = 0; + uint8_t *v_data = 0; + int y_len = 0; int u_len = 0; int v_len = 0; AImage_getPlaneData(image, 0, &y_data, &y_len); AImage_getPlaneData(image, 1, &u_data, &u_len); 
AImage_getPlaneData(image, 2, &v_data, &v_len); -#if 1 -#if __cplusplus >= 201703L - uint64_t avgY = std::reduce(y_data, y_data + y_len, 0); -#else - uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0); -#endif - mResult.avgY = avgY / y_len; -#endif - - mFinalResult = mResult; - - if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width) - { + if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && + u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && + v_rowStride == width) { // already nv21 :) - on_image((unsigned char*)y_data, (int)width, (int)height); - } - else - { + // on_image((unsigned char*)y_data, (int)width, (int)height); + } else { // construct nv21 - unsigned char* nv21 = new unsigned char[width * height + width * height / 2]; + unsigned char *nv21 = new unsigned char[width * height + width * height / 2]; { // Y - unsigned char* yptr = nv21; - for (int y=0; yGetSensorOrientation(&facing, &angle)) { - if (facing == ACAMERA_LENS_FACING_FRONT) { - imageRotation = (angle + rotation_) % 360; - imageRotation = (360 - imageRotation) % 360; - } else { - imageRotation = (angle - rotation_ + 360) % 360; - } - } - */ - - int orgWidth = mWidth; - int orgHeight = mHeight; - // int co = camera_orientation > 0 ? 
camera_orientation + 90 : camera_orientation; - if (m_params.orientation != 0) - { - int co = 0; - if (camera_facing == ACAMERA_LENS_FACING_FRONT) - { - co = (camera_orientation + (m_params.orientation - 1) * 90) % 360; - co = (360 - co) % 360; - } - else - { - co = (camera_orientation - (m_params.orientation - 1) * 90 + 360) % 360; - } - - XYLOG(XYLOG_SEVERITY_DEBUG, "Orientation=%d Facing=%d", co, camera_facing); - - // int co = 0; - if (co == 0) - { - w = nv21_width; - h = nv21_height; - rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 2 : 1; - } - else if (co == 90) - { - w = nv21_height; - h = nv21_width; - - orgWidth = mHeight; - orgHeight = mWidth; - - rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 5 : 6; - } - else if (co == 180) - { - w = nv21_width; - h = nv21_height; - rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 4 : 3; - } - else if (co == 270) - { - w = nv21_height; - h = nv21_width; - - orgWidth = mHeight; - orgHeight = mWidth; - - rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 
7 : 8; - } - - nv21_rotated.create(h + h / 2, w, CV_8UC1); - ncnn::kanna_rotate_yuv420sp(nv21, nv21_width, nv21_height, nv21_rotated.data, w, h, rotate_type); - yuv420data = nv21_rotated.data; - } - else - { - w = nv21_width; - h = nv21_height; - XYLOG(XYLOG_SEVERITY_DEBUG, "NO Orientation Facing=%d", camera_facing); - } - - // nv21_rotated to rgb - cv::Mat rgb; - if (w == orgWidth && h == orgHeight) - { - rgb.create(h, w, CV_8UC3); - // ncnn::yuv420sp2rgb(nv21_rotated.data, w, h, rgb.data); - ncnn::yuv420sp2rgb_nv12(yuv420data, w, h, rgb.data); - } - else - { - cv::Mat org(h, w, CV_8UC3); - ncnn::yuv420sp2rgb_nv12(yuv420data, w, h, org.data); - if (w * orgHeight == h * orgWidth) // Same Ratio - { - cv::resize(org, rgb, cv::Size(orgWidth, orgHeight)); - } - else - { - // Crop image - if (w > orgWidth && h >= orgHeight) - { - int left = (w - orgWidth) / 2; - int top = (h - orgHeight) / 2; - rgb = org(cv::Range(top, top + orgHeight), cv::Range(left, left + orgWidth)); - } - else - { - rgb = org; - } - } - } - on_image(rgb); -} - -void NdkCamera::onSessionReady(ACameraCaptureSession *session) -{ - return; - - camera_status_t status = ACAMERA_OK; - - ACameraCaptureSession_captureCallbacks camera_capture_session_capture_callbacks; - camera_capture_session_capture_callbacks.context = this; - camera_capture_session_capture_callbacks.onCaptureStarted = 0; - camera_capture_session_capture_callbacks.onCaptureProgressed = ::onCaptureProgressed; - camera_capture_session_capture_callbacks.onCaptureCompleted = ::onCaptureCompleted; - camera_capture_session_capture_callbacks.onCaptureFailed = onCaptureFailed; - camera_capture_session_capture_callbacks.onCaptureSequenceCompleted = onCaptureSequenceCompleted; - camera_capture_session_capture_callbacks.onCaptureSequenceAborted = onCaptureSequenceAborted; - camera_capture_session_capture_callbacks.onCaptureBufferLost = 0; - - if (m_imagesCaptured != 0) - { - status = ACameraCaptureSession_setRepeatingRequest(capture_session, 
&camera_capture_session_capture_callbacks, 1, &capture_request, &captureSequenceId); - } - else - { - status = ACameraCaptureSession_capture(capture_session, &camera_capture_session_capture_callbacks, 1, &capture_request, &captureSequenceId); - } -} - -void NdkCamera::onCaptureProgressed(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result) -{ - -} - -void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result) -{ -// CALL_REQUEST(setEntry_i64(requests_[PREVIEW_REQUEST_IDX].request_, - // ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime_)); - - // ACameraMetadata_getConstEntry(result, ) - ACameraMetadata_const_entry val = { 0 }; camera_status_t status = ACAMERA_ERROR_BASE; - mResult.afState = ACAMERA_CONTROL_AF_STATE_INACTIVE; + ACameraMetadata_const_entry val = { 0 }; val = { 0 }; status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_STATE, &val); - mResult.aeState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AE_STATE_INACTIVE; + captureResult.aeState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AE_STATE_INACTIVE; val = { 0 }; status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AWB_STATE, &val); - mResult.awbState = (status == ACAMERA_OK) ? val.data.u8[0] : ACAMERA_CONTROL_AWB_STATE_INACTIVE; + captureResult.awbState = (status == ACAMERA_OK) ? val.data.u8[0] : ACAMERA_CONTROL_AWB_STATE_INACTIVE; val = { 0 }; status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AF_STATE, &val); - mResult.afState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AF_STATE_INACTIVE; - - val = { 0 }; - status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AWB_MODE, &val); - mResult.awbMode = (status == ACAMERA_OK) ? 
*(val.data.u8) : ACAMERA_CONTROL_AWB_MODE_OFF; - - if (afSupported && (m_params.autoFocus != 0)) - { - if (mResult.afState == ACAMERA_CONTROL_AF_STATE_PASSIVE_FOCUSED) - { - uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_START; - status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig); - XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger AF AFS=%u", (uint32_t)mResult.afState); - } - } - - unsigned long long ts = GetMicroTimeStamp(); -#if 0 - XYLOG(XYLOG_SEVERITY_DEBUG, "3ASTATE: AES=%u AWBS=%u AFS=%u", (uint32_t)mResult.aeState, (uint32_t)mResult.awbState, (uint32_t)mResult.afState); -#endif - - if (m_params.autoExposure != 0) - { - if (mResult.aeState == ACAMERA_CONTROL_AE_STATE_SEARCHING) - { - numberOfPrecaptures ++; - if (numberOfPrecaptures > 8 && ((ts - m_precaptureStartTime) > 2000)) - { - uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL; - status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger); - - aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START; - status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger); - - XYLOG(XYLOG_SEVERITY_WARNING, "Retrigger PRECAPTURE status=%d AES=%u", (int)status, (unsigned int)mResult.aeState); - numberOfPrecaptures = 0; - m_precaptureStartTime = ts; - } - } - else - { - numberOfPrecaptures = 0; - m_precaptureStartTime = ts; - } - if (mResult.aeState == ACAMERA_CONTROL_AE_STATE_PRECAPTURE) - { - uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL; - status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger); - - aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START; - status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger); - XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d AES=%u", 
(int)status, (unsigned int)mResult.aeState); - - numberOfPrecaptures = 0; - m_precaptureStartTime = ts; - } - } - - if (!lightDetected) - { - val = { 0 }; - status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_EXPOSURE_TIME, &val); - int64_t exTime = (status == ACAMERA_OK) ? val.data.i64[0] : -1; - mResult.exposureTime = exTime; - - val = {0}; - status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_SENSITIVITY, &val); - mResult.sensitivity = (status == ACAMERA_OK) ? (*(val.data.i32)) : 0; - } - - if (m_imagesCaptured == ~0) - { - if (ts - m_startTime >= m_params.focusTimeout * 2) - { - XYLOG(XYLOG_SEVERITY_WARNING, "Prepare Capture Timeout for 3A And will Capture AFS=%u AES=%u AWBS=%u Time=%u", - (unsigned int)mResult.afState, (unsigned int)mResult.aeState, (unsigned int)mResult.awbState, (unsigned int)(ts - m_startTime)); - m_imagesCaptured = 0; - } - else - { - if (awbMode == ACAMERA_CONTROL_AWB_MODE_AUTO) - { - if (awbLockAvailable && (m_params.wait3ALocked & WAIT_AWB_LOCKED)) - { - if (mResult.awbState != ACAMERA_CONTROL_AWB_STATE_LOCKED) - { - return; - } - else - { -#if 0 - XYLOG(XYLOG_SEVERITY_DEBUG, "AWB Locked"); -#endif - } - } - else - { - if (mResult.awbState != ACAMERA_CONTROL_AWB_STATE_CONVERGED && mResult.awbState != ACAMERA_CONTROL_AWB_STATE_LOCKED) - { - return; - } - else - { -#if 0 - XYLOG(XYLOG_SEVERITY_DEBUG, "AWB CONVERGED Or Locked"); -#endif - } - } - } - - if (m_params.autoExposure != 0) - { - if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED)) - { - if (mResult.aeState != ACAMERA_CONTROL_AE_STATE_LOCKED) - { - return; - } - else - { -#if 0 - XYLOG(XYLOG_SEVERITY_DEBUG, "AE Locked"); -#endif - } - } - else - { - if (mResult.aeState != ACAMERA_CONTROL_AE_STATE_CONVERGED && mResult.aeState != ACAMERA_CONTROL_AE_STATE_FLASH_REQUIRED && mResult.aeState != ACAMERA_CONTROL_AE_STATE_LOCKED) - { - return; - } - else - { -#if 0 - XYLOG(XYLOG_SEVERITY_DEBUG, "AE CONVERGED Or Locked"); -#endif - } - } - } - - if 
(afSupported && (m_params.autoFocus != 0)) - { - // if (mResult.afState == ACAMERA_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED || mResult.afState == ACAMERA_CONTROL_AF_STATE_PASSIVE_UNFOCUSED) - if (mResult.afState == ACAMERA_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) - { - // uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL; - // status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger); - - // aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START; - // status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger); - // XYLOG(XYLOG_SEVERITY_INFO, "onCaptureCompleted New Focus Trigger AFS=%u AES=%u Time=%u", (unsigned int)mResult.afState, (unsigned int)mResult.aeState); - return; - } - - ALOGD("onCaptureCompleted 1 AFS=%u AES=%u", (unsigned int)mResult.afState, (unsigned int)mResult.aeState); - if (mResult.afState == ACAMERA_CONTROL_AF_STATE_PASSIVE_FOCUSED || mResult.afState == ACAMERA_CONTROL_AF_STATE_FOCUSED_LOCKED) - // if (afState != ACAMERA_CONTROL_AF_STATE_INACTIVE) - { - XYLOG(XYLOG_SEVERITY_DEBUG, "AF Enabled And Focused"); - m_imagesCaptured = 0; - } - } - else - { - XYLOG(XYLOG_SEVERITY_DEBUG, "AF Not Supported Or AF Not Enabled"); - m_imagesCaptured = 0; - } - } - } - - if (m_imagesCaptured != 0 || camera_facing == 2) - { - return; - } + captureResult.afState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AF_STATE_INACTIVE; val = { 0 }; status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_EXPOSURE_TIME, &val); int64_t exTime = (status == ACAMERA_OK) ? val.data.i64[0] : -1; - mResult.exposureTime = exTime; + captureResult.exposureTime = exTime; val = {0}; status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AF_MODE, &val); - mResult.autoFocus = (status == ACAMERA_OK) ? *(val.data.u8) : 0; + captureResult.autoFocus = (status == ACAMERA_OK) ? 
*(val.data.u8) : 0; val = { 0 }; status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_MODE, &val); uint8_t aeMode = (status == ACAMERA_OK) ? val.data.u8[0] : 0; - mResult.autoExposure = aeMode; + captureResult.autoExposure = aeMode; val = { 0 }; status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_FRAME_DURATION, &val); int64_t frameDuration = (status == ACAMERA_OK) ? val.data.i64[0] : 0; - mResult.frameDuration = frameDuration; + captureResult.frameDuration = frameDuration; val = { 0 }; float focusDistance = NAN; - if (afSupported && (m_params.autoFocus != 0)) - { - status = ACameraMetadata_getConstEntry(result, ACAMERA_LENS_FOCUS_DISTANCE, &val); - if (status == ACAMERA_OK) - { - focusDistance = *val.data.f; - } - } - mResult.FocusDistance = focusDistance; + status = ACameraMetadata_getConstEntry(result, ACAMERA_LENS_FOCUS_DISTANCE, &val); + if (status == ACAMERA_OK) + { + focusDistance = *val.data.f; + } + captureResult.FocusDistance = focusDistance; val = { 0 }; status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_ZOOM_RATIO, &val); if (status == ACAMERA_OK) { - mResult.zoomRatio = *val.data.f; + captureResult.zoomRatio = *val.data.f; } - /* - val = { 0 }; - status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AF_STATE, &val); - mResult.afState = *(val.data.u8); - */ - -#if 0 - ALOGD("onCaptureCompleted 2 AFS=%u AES=%u", (unsigned int)mResult.afState, (unsigned int)mResult.aeState); -#endif - val = {0}; status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_SENSITIVITY, &val); - mResult.sensitivity = (status == ACAMERA_OK) ? *(val.data.i32) : 0; + captureResult.sensitivity = (status == ACAMERA_OK) ? *(val.data.i32) : 0; val = {0}; status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_SCENE_MODE, &val); - mResult.sceneMode = (status == ACAMERA_OK) ? 
*(val.data.u8) : 0; - - val = {0}; - status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AF_MODE, &val); - mResult.autoFocus = (status == ACAMERA_OK) ? *(val.data.u8) : 0; + captureResult.sceneMode = (status == ACAMERA_OK) ? *(val.data.u8) : 0; val = {0}; status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, &val); - mResult.compensation = (status == ACAMERA_OK) ? *(val.data.i32) : 0; - -#if 0 - ALOGD("onCaptureCompleted EXPO=%lld, FD=%f camera id=%s, AE=%s AFS=%u AES=%u", exTime, focusDistance, mCameraId.c_str(), ((aeMode == 1) ? "ON" : "OFF"), mResult.afState, mResult.aeState); -#endif - // __android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onCaptureCompleted %p %p %p", session, request, result); -} - -void NdkCamera::onAvailabilityCallback(const char* cameraId) -{ - std::string s(cameraId); - m_locker.lock(); - m_availableCameras.insert(s); - m_locker.unlock(); - -} -void NdkCamera::onUnavailabilityCallback(const char* cameraId) -{ - std::string s(cameraId); - m_locker.lock(); - m_availableCameras.erase(s); - m_locker.unlock(); -} - -bool NdkCamera::IsCameraAvailable(const std::string& cameraId) -{ - bool existed = false; - m_locker.lock(); - existed = (m_availableCameras.find(cameraId) != m_availableCameras.cend()); - m_locker.unlock(); - - return existed; -} + captureResult.compensation = (status == ACAMERA_OK) ? 
*(val.data.i32) : 0; +} \ No newline at end of file diff --git a/app/src/main/cpp/camera2/ndkcamera.h b/app/src/main/cpp/camera2/ndkcamera.h index 733f577b..96d5ec07 100644 --- a/app/src/main/cpp/camera2/ndkcamera.h +++ b/app/src/main/cpp/camera2/ndkcamera.h @@ -38,6 +38,8 @@ static const uint64_t kMaxExposureTime = static_cast(250000000); #define WAIT_AWB_LOCKED 2 #define WAIT_AF_LOCKED 4 +#define PREVIEW_REQUEST_IDX 0 + class CameraManager { public: @@ -79,6 +81,7 @@ public: unsigned int orientation:3; unsigned int zoom : 1; unsigned int wait3ALocked : 3; + unsigned int burstRawCapture : 1; unsigned int reserved : 18; int64_t exposureTime; unsigned int sensitivity; @@ -86,6 +89,7 @@ public: float zoomRatio; uint8_t requestTemplate; uint8_t awbMode; + uint8_t burstCaptures; unsigned short focusTimeout; // milli-seconds 65535 }; @@ -102,8 +106,8 @@ public: int32_t compensation; uint8_t sceneMode; uint8_t awbMode; + uint16_t avgY; float zoomRatio; - uint8_t avgY; uint64_t duration; int64_t frameDuration; @@ -112,6 +116,28 @@ public: uint8_t afLockSetted : 1; }; + struct CaptureRequest + { + /* For image capture */ + NdkCamera* pThis; + AImageReader* imageReader; + ANativeWindow* imageWindow; + ACameraOutputTarget* imageTarget; + + ACaptureSessionOutput* sessionOutput; + + ACaptureRequest* request; + ACameraDevice_request_template templateId; + int sessionSequenceId; + }; + + struct CaptureResult + { + ACameraMetadata* result; + AImage* image; + int sequenceId; + }; + NdkCamera(int32_t width, int32_t height, const CAMERA_PARAMS& params); virtual ~NdkCamera(); @@ -120,35 +146,56 @@ public: void close(); int selfTest(const std::string& cameraId, int32_t& maxResolutionX, int32_t& maxResolutionY); + static void writeJpegFile(AImage *image, const char* path); + static void writeRawFile(AImage *image, ACameraMetadata* characteristics, ACameraMetadata* result, const char* path); void onAvailabilityCallback(const char* cameraId); void onUnavailabilityCallback(const char* 
cameraId); - void onImageAvailable(AImageReader* reader); + virtual void onImageAvailable(AImageReader* reader); + virtual int32_t getOutputFormat() const; + virtual int32_t getBurstCaptures() const; + + void CreateSession(ANativeWindow* previewWindow, ANativeWindow* jpgWindow, bool manaulPreview, int32_t imageRotation, int32_t width, int32_t height); + void CreateSession(ANativeWindow* previewWindow); + + CaptureRequest* CreateRequest(bool isPreviewRequest); + + void DestroySession(); + virtual bool on_image(cv::Mat& rgb); virtual void on_error(const std::string& msg); virtual void on_image(const unsigned char* nv21, int nv21_width, int nv21_height); virtual void onDisconnected(ACameraDevice* device); + virtual bool onBurstCapture(std::shared_ptr characteristics, std::vector >& results, uint32_t ldr, std::vector >& frames); void onCaptureProgressed(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result); void onCaptureCompleted(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result); + void onCaptureFailed(ACameraCaptureSession* session, ACaptureRequest* request, ACameraCaptureFailure* failure); void onSessionReady(ACameraCaptureSession *session); + void onError(ACameraDevice* device, int error); + + void CopyPreviewRequest(ACaptureRequest* request, const ACameraMetadata* previewResult); - const CAPTURE_RESULT& getCaptureResult() const + uint32_t GetLdr() const { - return mFinalResult; + return mLdr; } bool IsCameraAvailable(const std::string& cameraId); + static bool convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& width, int32_t& height); + static void EnumCameraResult(ACameraMetadata* result, CAPTURE_RESULT& captureResult); + protected: std::mutex m_locker; std::set m_availableCameras; -protected: CAMERA_PARAMS m_params; + DisplayDimension foundRes; int camera_facing; int camera_orientation; bool m_firstFrame; + bool m_photoTaken; int32_t mWidth; int32_t mHeight; std::string mCameraId; 
@@ -173,10 +220,9 @@ protected: int32_t activeArraySize[2]; int32_t maxRegions[3]; - unsigned int m_imagesCaptured; + bool mCaptureTriggered; CAPTURE_RESULT mResult; - CAPTURE_RESULT mFinalResult; unsigned long long m_startTime; protected: @@ -185,15 +231,35 @@ protected: CameraManager camera_manager; ACameraDevice* camera_device; - AImageReader* image_reader; - ANativeWindow* image_reader_surface; - ACameraOutputTarget* image_reader_target; - ACaptureRequest* capture_request; + ACaptureSessionOutputContainer* capture_session_output_container; - ACaptureSessionOutput* capture_session_output; + + AImageReader* mPreviewImageReader; + ANativeWindow* mPreviewImageWindow; + ACameraOutputTarget* mPreviewOutputTarget; + ACaptureSessionOutput* mPreviewSessionOutput; + + AImageReader* mImageReader; + ANativeWindow* mImageWindow; + ACameraOutputTarget* mOutputTarget; + ACaptureSessionOutput* mSessionOutput; + + std::shared_ptr mCharacteristics; + std::vector mCaptureRequests; + + std::shared_ptr mPreviewResults; + std::vector > mCaptureResults; + uint32_t mLdr; + std::vector > mCaptureFrames; + ACameraCaptureSession* capture_session; - int captureSequenceId; + // AImageReader* image_reader; + // ANativeWindow* image_reader_surface; + // ACameraOutputTarget* image_reader_target; + // ACaptureRequest* capture_request; + // ACaptureSessionOutput* capture_session_output; + }; #endif // NDKCAMERA_H diff --git a/app/src/main/cpp/hdrplus/include/hdrplus/align.h b/app/src/main/cpp/hdrplus/include/hdrplus/align.h new file mode 100644 index 00000000..41ae831e --- /dev/null +++ b/app/src/main/cpp/hdrplus/include/hdrplus/align.h @@ -0,0 +1,38 @@ +#pragma once + +#include +#include // std::pair +#include // all opencv header +#include "hdrplus/burst.h" + +namespace hdrplus +{ + +class align +{ + public: + align() = default; + ~align() = default; + + /** + * @brief Run alignment on burst of images + * + * @param burst_images collection of burst images + * @param aligements alignment in 
pixel value pair. + * Outer most vector is per alternative image. + * Inner most two vector is for horizontal & vertical tiles + */ + void process( const hdrplus::burst& burst_images, \ + std::vector>>>& aligements ); + + private: + // From original image to coarse image + const std::vector inv_scale_factors = { 1, 2, 4, 4 }; + const std::vector distances = { 1, 2, 2, 2 }; // L1 / L2 distance + const std::vector grayimg_search_radious = { 1, 4, 4, 4 }; + const std::vector grayimg_tile_sizes = { 16, 16, 16, 8 }; + const int num_levels = 4; +}; + + +} // namespace hdrplus diff --git a/app/src/main/cpp/hdrplus/include/hdrplus/bayer_image.h b/app/src/main/cpp/hdrplus/include/hdrplus/bayer_image.h new file mode 100644 index 00000000..93230cf2 --- /dev/null +++ b/app/src/main/cpp/hdrplus/include/hdrplus/bayer_image.h @@ -0,0 +1,36 @@ +#pragma once + +#include +#include +#include // std::pair +#include // std::shared_ptr +#include // all opencv header +#include + +namespace hdrplus +{ + +class bayer_image +{ + public: + explicit bayer_image( const std::string& bayer_image_path ); + explicit bayer_image( const std::vector& bayer_image_content ); + ~bayer_image() = default; + + std::pair get_noise_params() const; + + std::shared_ptr libraw_processor; + cv::Mat raw_image; + cv::Mat grayscale_image; + int width; + int height; + int white_level; + std::vector black_level_per_channel; + float iso; + + private: + float baseline_lambda_shot = 3.24 * pow( 10, -4 ); + float baseline_lambda_read = 4.3 * pow( 10, -6 ); +}; + +} // namespace hdrplus diff --git a/app/src/main/cpp/hdrplus/include/hdrplus/burst.h b/app/src/main/cpp/hdrplus/include/hdrplus/burst.h new file mode 100644 index 00000000..bde3c04f --- /dev/null +++ b/app/src/main/cpp/hdrplus/include/hdrplus/burst.h @@ -0,0 +1,44 @@ +#pragma once + +#include +#include +#include // all opencv header +#include "hdrplus/bayer_image.h" + +namespace hdrplus +{ + +class burst +{ + public: + explicit burst( const std::string&
burst_path, const std::string& reference_image_path ); + explicit burst(const std::vector& burst_paths, int reference_image_index); + explicit burst( const std::vector >& bayer_image_contents, int reference_image_index ); + + ~burst() = default; + + // Reference image index in the array + int reference_image_idx; + + // Source bayer images & grayscale unpadded image + std::vector bayer_images; + + // Image padded to upper level tile size (16*2) + // Use for alignment, merging, and finishing + std::vector bayer_images_pad; + + // Padding information + std::vector padding_info_bayer; + + // Image padded to upper level tile size (16) + // Use for alignment, merging, and finishing + std::vector grayscale_images_pad; + + // number of image (including reference) in burst + int num_images; + + // Bayer image after merging, stored as cv::Mat + cv::Mat merged_bayer_image; +}; + +} // namespace hdrplus diff --git a/app/src/main/cpp/hdrplus/include/hdrplus/finish.h b/app/src/main/cpp/hdrplus/include/hdrplus/finish.h new file mode 100644 index 00000000..7fc68ae1 --- /dev/null +++ b/app/src/main/cpp/hdrplus/include/hdrplus/finish.h @@ -0,0 +1,240 @@ +#pragma once + +#include // all opencv header +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace hdrplus +{ + uint16_t uGammaCompress_1pix(float x, float threshold,float gainMin,float gainMax,float exponent); + uint16_t uGammaDecompress_1pix(float x, float threshold,float gainMin,float gainMax,float exponent); + cv::Mat uGammaCompress_(cv::Mat m,float threshold,float gainMin,float gainMax,float exponent); + cv::Mat uGammaDecompress_(cv::Mat m,float threshold,float gainMin,float gainMax,float exponent); + cv::Mat gammasRGB(cv::Mat img, bool mode); + + +class finish +{ + public: + cv::Mat mergedBayer; // merged image from Merge Module + std::string burstPath; // path to burst images + std::vector rawPathList; // a list or array of the path to all burst imgs under burst Path + int 
refIdx; // index of the reference img + Parameters params; + cv::Mat rawReference; + // LibRaw libraw_processor_finish; + bayer_image* refBayer; + + std::string mergedImgPath; + finish() = default; + +// please use this initialization after merging part finish + finish(std::string burstPath, cv::Mat mergedBayer,int refIdx){ + this->refIdx = refIdx; + this->burstPath = burstPath; + this->mergedBayer = mergedBayer; + } + +// for local testing only + finish(std::string burstPath, std::string mergedBayerPath,int refIdx){ + this->refIdx = refIdx; + this->burstPath = burstPath; + this->mergedBayer = loadFromCSV(mergedBayerPath, CV_16UC1);// + load_rawPathList(burstPath); + refBayer= new bayer_image(this->rawPathList[refIdx]); + this->rawReference = refBayer->raw_image;//;grayscale_image + + // initialize parameters in libraw_processor_finish + setLibRawParams(); + showParams(); + + std::cout<<"Finish init() finished!"<& libraw_ptr, cv::Mat B); + + // postprocess + // cv::Mat postprocess(std::shared_ptr& libraw_ptr); + + void showImg(cv::Mat img) + { + int ch = CV_MAT_CN(CV_8UC1); + + // cv::Mat tmp(4208,3120,CV_16UC1); + cv::Mat tmp(img); + // u_int16_t* ptr_tmp = (u_int16_t*)tmp.data; + // u_int16_t* ptr_img = (u_int16_t*)img.data; + // // col major to row major + // for(int r = 0; r < tmp.rows; r++) { + // for(int c = 0; c < tmp.cols; c++) { + // *(ptr_tmp+r*tmp.cols+c) = *(ptr_img+c*tmp.rows+r)/2048.0*255.0; + // } + // } + // std::cout<<"height="<mergedBayer.size()<rawPathList){ + std::cout< dvals; + std::stringstream ss(line); + std::string val; + // int count=0; + while (getline(ss, val, ',')) + { + dvals.push_back(stod(val));//*255.0/2048.0 + // count++; + } + // std::cout<d_name; // current filepath that ptr points to + if (ptr->d_type != 8 && ptr->d_type != 4) { // not normal file or dir + return; + } + // only need normal files + if (ptr->d_type == 8) { + if (strcmp(ptr->d_name, ".") != 0 && strcmp(ptr->d_name, "..") != 0) { + if (strstr(ptr->d_name, ".dng")) { 
+ rawPathList.emplace_back(sub_file); + } + } + } + } + // close root dir + closedir(pDir); + } + + void setLibRawParams(){ + refBayer->libraw_processor->imgdata.params.user_qual = params.rawpyArgs.demosaic_algorithm; + refBayer->libraw_processor->imgdata.params.half_size = params.rawpyArgs.half_size; + refBayer->libraw_processor->imgdata.params.use_camera_wb = params.rawpyArgs.use_camera_wb; + refBayer->libraw_processor->imgdata.params.use_auto_wb = params.rawpyArgs.use_auto_wb; + refBayer->libraw_processor->imgdata.params.no_auto_bright = params.rawpyArgs.no_auto_bright; + refBayer->libraw_processor->imgdata.params.output_color = params.rawpyArgs.output_color; + refBayer->libraw_processor->imgdata.params.gamm[0] = params.rawpyArgs.gamma[0]; + refBayer->libraw_processor->imgdata.params.gamm[1] = params.rawpyArgs.gamma[1]; + refBayer->libraw_processor->imgdata.params.output_bps = params.rawpyArgs.output_bps; + + // libraw_processor_finish.imgdata.params.user_qual = params.rawpyArgs.demosaic_algorithm; + // libraw_processor_finish.imgdata.params.half_size = params.rawpyArgs.half_size; + // libraw_processor_finish.imgdata.params.use_camera_wb = params.rawpyArgs.use_camera_wb; + // libraw_processor_finish.imgdata.params.use_auto_wb = params.rawpyArgs.use_auto_wb; + // libraw_processor_finish.imgdata.params.no_auto_bright = params.rawpyArgs.no_auto_bright; + // libraw_processor_finish.imgdata.params.output_color = params.rawpyArgs.output_color; + // libraw_processor_finish.imgdata.params.gamm[0] = params.rawpyArgs.gamma[0]; + // libraw_processor_finish.imgdata.params.gamm[1] = params.rawpyArgs.gamma[1]; + // libraw_processor_finish.imgdata.params.output_bps = params.rawpyArgs.output_bps; + } + + +}; + +} // namespace hdrplus diff --git a/app/src/main/cpp/hdrplus/include/hdrplus/hdrplus_pipeline.h b/app/src/main/cpp/hdrplus/include/hdrplus/hdrplus_pipeline.h new file mode 100644 index 00000000..3d6f478c --- /dev/null +++ 
b/app/src/main/cpp/hdrplus/include/hdrplus/hdrplus_pipeline.h @@ -0,0 +1,29 @@ +#pragma once + +#include +#include // all opencv header +#include "hdrplus/burst.h" +#include "hdrplus/align.h" +#include "hdrplus/merge.h" +#include "hdrplus/finish.h" + +namespace hdrplus +{ + +class hdrplus_pipeline +{ + private: + hdrplus::align align_module; + hdrplus::merge merge_module; + hdrplus::finish finish_module; + + public: + void run_pipeline( const std::string& burst_path, const std::string& reference_image_path ); + bool run_pipeline( const std::vector& burst_paths, int reference_image_index, cv::Mat& finalImg ); + bool run_pipeline( const std::vector >& burst_contents, int reference_image_index, cv::Mat& finalImg ); + + hdrplus_pipeline() = default; + ~hdrplus_pipeline() = default; +}; + +} // namespace hdrplus diff --git a/app/src/main/cpp/hdrplus/include/hdrplus/merge.h b/app/src/main/cpp/hdrplus/include/hdrplus/merge.h new file mode 100644 index 00000000..77319e7a --- /dev/null +++ b/app/src/main/cpp/hdrplus/include/hdrplus/merge.h @@ -0,0 +1,184 @@ +#pragma once + +#include +#include // all opencv header +#include +#include "hdrplus/burst.h" + +#define TILE_SIZE 16 +#define TEMPORAL_FACTOR 75 +#define SPATIAL_FACTOR 0.1 + +namespace hdrplus +{ + +class merge +{ + public: + int offset = TILE_SIZE / 2; + float baseline_lambda_shot = 3.24 * pow( 10, -4 ); + float baseline_lambda_read = 4.3 * pow( 10, -6 ); + + merge() = default; + ~merge() = default; + + /** + * @brief Run merge on burst of images + * + * @param burst_images collection of burst images + * @param alignments alignment in pixel value pair. + * Outer most vector is per alternative image.
+ * Inner most two vector is for horizontal & vertical tiles + */ + void process( hdrplus::burst& burst_images, \ + std::vector>>>& alignments); + + + /* + std::vector get_other_tiles(); //return the other tile list T_1 to T_n + + std::vector vector_math(string operation, reference_tile, other_tile_list); //for loop allowing operations across single element and list + + std::vector scalar_vector_math(string operation, scalar num, std::vector tile_list); //for loop allowing operations across single element and list + + std::vector average_vector(std::vector tile_list); //take average of vector elements + + */ + + private: + float tileRMS(cv::Mat tile) { + cv::Mat squared; + cv::multiply(tile, tile, squared); + return sqrt(cv::mean(squared)[0]); + } + + std::vector getNoiseVariance(std::vector tiles, float lambda_shot, float lambda_read) { + std::vector noise_variance; + for (auto tile : tiles) { + noise_variance.push_back(lambda_shot * tileRMS(tile) + lambda_read); + } + return noise_variance; + } + + cv::Mat cosineWindow1D(cv::Mat input, int window_size = TILE_SIZE) { + cv::Mat output = input.clone(); + for (int i = 0; i < input.cols; ++i) { + output.at(0, i) = 1. / 2. - 1. / 2. * cos(2 * M_PI * (input.at(0, i) + 1 / 2.) 
/ window_size); + } + return output; + } + + cv::Mat cosineWindow2D(cv::Mat tile) { + int window_size = tile.rows; // Assuming square tile + cv::Mat output_tile = tile.clone(); + + cv::Mat window = cv::Mat::zeros(1, window_size, CV_32F); + for(int i = 0; i < window_size; ++i) { + window.at(i) = i; + } + + cv::Mat window_x = cosineWindow1D(window, window_size); + window_x = cv::repeat(window_x, window_size, 1); + cv::Mat window_2d = window_x.mul(window_x.t()); + + cv::Mat window_applied; + cv::multiply(tile, window_2d, window_applied, 1, CV_32F); + return window_applied; + } + + cv::Mat cat2Dtiles(std::vector> tiles) { + std::vector rows; + for (auto row_tiles : tiles) { + cv::Mat row; + cv::hconcat(row_tiles, row); + rows.push_back(row); + } + cv::Mat img; + cv::vconcat(rows, img); + return img; + } + + void circshift(cv::Mat &out, const cv::Point &delta) + { + cv::Size sz = out.size(); + + // error checking + assert(sz.height > 0 && sz.width > 0); + + // no need to shift + if ((sz.height == 1 && sz.width == 1) || (delta.x == 0 && delta.y == 0)) + return; + + // delta transform + int x = delta.x; + int y = delta.y; + if (x > 0) x = x % sz.width; + if (y > 0) y = y % sz.height; + if (x < 0) x = x % sz.width + sz.width; + if (y < 0) y = y % sz.height + sz.height; + + + // in case of multiple dimensions + std::vector planes; + split(out, planes); + + for (size_t i = 0; i < planes.size(); i++) + { + // vertical + cv::Mat tmp0, tmp1, tmp2, tmp3; + cv::Mat q0(planes[i], cv::Rect(0, 0, sz.width, sz.height - y)); + cv::Mat q1(planes[i], cv::Rect(0, sz.height - y, sz.width, y)); + q0.copyTo(tmp0); + q1.copyTo(tmp1); + tmp0.copyTo(planes[i](cv::Rect(0, y, sz.width, sz.height - y))); + tmp1.copyTo(planes[i](cv::Rect(0, 0, sz.width, y))); + + // horizontal + cv::Mat q2(planes[i], cv::Rect(0, 0, sz.width - x, sz.height)); + cv::Mat q3(planes[i], cv::Rect(sz.width - x, 0, x, sz.height)); + q2.copyTo(tmp2); + q3.copyTo(tmp3); + tmp2.copyTo(planes[i](cv::Rect(x, 0, sz.width - x, 
sz.height))); + tmp3.copyTo(planes[i](cv::Rect(0, 0, x, sz.height))); + } + + cv::merge(planes, out); + } + + void fftshift(cv::Mat &out) + { + cv::Size sz = out.size(); + cv::Point pt(0, 0); + pt.x = (int) floor(sz.width / 2.0); + pt.y = (int) floor(sz.height / 2.0); + circshift(out, pt); + } + + void ifftshift(cv::Mat &out) + { + cv::Size sz = out.size(); + cv::Point pt(0, 0); + pt.x = (int) ceil(sz.width / 2.0); + pt.y = (int) ceil(sz.height / 2.0); + circshift(out, pt); + } + + std::vector getReferenceTiles(cv::Mat reference_image); + + cv::Mat mergeTiles(std::vector tiles, int rows, int cols); + + cv::Mat processChannel( hdrplus::burst& burst_images, \ + std::vector>>>& alignments, \ + cv::Mat channel_image, \ + std::vector alternate_channel_i_list,\ + float lambda_shot, \ + float lambda_read); + + //temporal denoise + std::vector temporal_denoise(std::vector tiles, std::vector> alt_tiles, std::vector noise_variance, float temporal_factor); + std::vector spatial_denoise(std::vector tiles, int num_alts, std::vector noise_variance, float spatial_factor); + + +}; + +} // namespace hdrplus diff --git a/app/src/main/cpp/hdrplus/include/hdrplus/params.h b/app/src/main/cpp/hdrplus/include/hdrplus/params.h new file mode 100644 index 00000000..6bf17447 --- /dev/null +++ b/app/src/main/cpp/hdrplus/include/hdrplus/params.h @@ -0,0 +1,69 @@ +#pragma once + +#include +#include +#include // std::shared_ptr +#include // all opencv header +#include + +namespace hdrplus +{ +class RawpyArgs{ + public: + int demosaic_algorithm = 3;// 3 - AHD interpolation <->int user_qual + bool half_size = false; + bool use_camera_wb = true; + bool use_auto_wb = false; + bool no_auto_bright = true; + int output_color = LIBRAW_COLORSPACE_sRGB; + int gamma[2] = {1,1}; //# gamma correction not applied by rawpy (not quite understand) + int output_bps = 16; +}; + +class Options{ + public: + std::string input = ""; + std::string output = ""; + std::string mode = "full"; //'full' 'align' 'merge' 
'finish' + int reference = 0; + float temporalfactor=75.0; + float spatialfactor = 0.1; + int ltmGain=-1; + double gtmContrast=0.075; + int verbose=2; // (0, 1, 2, 3, 4, 5) + +}; + +class Tuning{ + public: + std::string ltmGain = "auto"; + double gtmContrast = 0.075; + std::vector sharpenAmount{1,0.5,0.5}; + std::vector sharpenSigma{1,2,4}; + std::vector sharpenThreshold{0.02,0.04,0.06}; +}; + +class Parameters{ + public: + std::unordered_map flags; + + RawpyArgs rawpyArgs; + Options options; + Tuning tuning; + + Parameters() + { + const char* keys[] = {"writeReferenceImage", "writeGammaReference", "writeMergedImage", "writeGammaMerged", + "writeShortExposure", "writeLongExposure", "writeFusedExposure", "writeLTMImage", + "writeLTMGamma", "writeGTMImage", "writeReferenceFinal", "writeFinalImage"}; + for (int idx = 0; idx < sizeof(keys) / sizeof(const char*); idx++) { + flags[keys[idx]] = true; + } + } + +}; + +cv::Mat postprocess(std::shared_ptr& libraw_ptr, RawpyArgs rawpyArgs); +void setParams(std::shared_ptr& libraw_ptr, RawpyArgs rawpyArgs); + +} // namespace hdrplus diff --git a/app/src/main/cpp/hdrplus/include/hdrplus/utility.h b/app/src/main/cpp/hdrplus/include/hdrplus/utility.h new file mode 100644 index 00000000..0b8c7e9b --- /dev/null +++ b/app/src/main/cpp/hdrplus/include/hdrplus/utility.h @@ -0,0 +1,326 @@ +#pragma once + +#include +#include // std::runtime_error +#include // all opencv header +#include + +// https://stackoverflow.com/questions/63404539/portable-loop-unrolling-with-template-parameter-in-c-with-gcc-icc +/// Helper macros for stringification +#define TO_STRING_HELPER(X) #X +#define TO_STRING(X) TO_STRING_HELPER(X) + +// Define loop unrolling depending on the compiler +#if defined(__ICC) || defined(__ICL) + #define UNROLL_LOOP(n) _Pragma(TO_STRING(unroll (n))) +#elif defined(__clang__) + #define UNROLL_LOOP(n) _Pragma(TO_STRING(unroll (n))) +#elif defined(__GNUC__) && !defined(__clang__) + #define UNROLL_LOOP(n) _Pragma(TO_STRING(GCC 
unroll (n))) +#elif defined(_MSC_BUILD) + #pragma message ("Microsoft Visual C++ (MSVC) detected: Loop unrolling not supported!") + #define UNROLL_LOOP(n) +#else + #warning "Unknown compiler: Loop unrolling not supported!" + #define UNROLL_LOOP(n) +#endif + + +namespace hdrplus +{ + + +template +cv::Mat box_filter_kxk( const cv::Mat& src_image ) +{ + const T* src_image_ptr = (T*)src_image.data; + int src_height = src_image.size().height; + int src_width = src_image.size().width; + int src_step = src_image.step1(); + + if ( kernel <= 0 ) + { +#ifdef __ANDROID__ + return cv::Mat(); +#else + throw std::runtime_error(std::string( __FILE__ ) + "::" + __func__ + " box filter only support kernel size >= 1"); +#endif + } + + // int(src_height / kernel) = floor(src_height / kernel) + // When input size is not multiplier of kernel, take floor + cv::Mat dst_image( src_height / kernel, src_width / kernel, src_image.type() ); + T* dst_image_ptr = (T*)dst_image.data; + int dst_height = dst_image.size().height; + int dst_width = dst_image.size().width; + int dst_step = dst_image.step1(); + + for ( int row_i = 0; row_i < dst_height; ++row_i ) + { + for ( int col_i = 0; col_i < dst_width; col_i++ ) + { + // Take ceiling for rounding + T box_sum = T( 0 ); + + UNROLL_LOOP( kernel ) + for ( int kernel_row_i = 0; kernel_row_i < kernel; ++kernel_row_i ) + { + UNROLL_LOOP( kernel ) + for ( int kernel_col_i = 0; kernel_col_i < kernel; ++kernel_col_i ) + { + box_sum += src_image_ptr[ ( row_i * kernel + kernel_row_i ) * src_step + ( col_i * kernel + kernel_col_i ) ]; + } + } + + // Average by taking ceiling + T box_avg = box_sum / T( kernel * kernel ); + dst_image_ptr[ row_i * dst_step + col_i ] = box_avg; + } + } + + return dst_image; +} + + +template +cv::Mat downsample_nearest_neighbour( const cv::Mat& src_image ) +{ + const T* src_image_ptr = (T*)src_image.data; + int src_height = src_image.size().height; + int src_width = src_image.size().width; + int src_step = src_image.step1(); + 
 // int(src_height / kernel) = floor(src_height / kernel) + // When input size is not multiplier of kernel, take floor + cv::Mat dst_image = cv::Mat( src_height / kernel, src_width / kernel, src_image.type() ); + T* dst_image_ptr = (T*)dst_image.data; + int dst_height = dst_image.size().height; + int dst_width = dst_image.size().width; + int dst_step = dst_image.step1(); + + // -O3 should be enough to optimize below code + for ( int row_i = 0; row_i < dst_height; row_i++ ) + { + UNROLL_LOOP( 32 ) + for ( int col_i = 0; col_i < dst_width; col_i++ ) + { + dst_image_ptr[ row_i * dst_step + col_i ] = \ + src_image_ptr[ (row_i * kernel) * src_step + (col_i * kernel) ]; + } + } + + return dst_image; +} + + +template< typename T > +void print_cvmat( cv::Mat image ) +{ + const T* img_ptr = (const T*)image.data; + int height = image.size().height; + int width = image.size().width; + int step = image.step1(); + int chns = image.channels(); + + printf("print_cvmat()::Image of size height = %d, width = %d, step = %d\n", \ + height, width, step ); + + if ( chns == 1 ) + { + for ( int row_i = 0; row_i < height; ++row_i ) + { + int row_i_offset = row_i * step; + for ( int col_i = 0; col_i < width; ++col_i ) + { + printf("%3.d ", img_ptr[ row_i_offset + col_i ]); + //printf("%3.d ", int( image.at( row_i, col_i ) ) ); + } + printf("\n"); + } + } + else if ( chns == 3 ) + { + for ( int row_i = 0; row_i < height; ++row_i ) + { + int row_i_offset = row_i * step; + for ( int col_i = 0; col_i < width; ++col_i ) + { + printf("[%3.d, %3.d, %3.d] ", img_ptr[ row_i_offset + col_i * 3 + 0 ], \ + img_ptr[ row_i_offset + col_i * 3 + 1 ], \ + img_ptr[ row_i_offset + col_i * 3 + 2 ] ); + } + printf("\n"); + } + } + else + { +#ifdef __ANDROID__ +#else + throw std::runtime_error("cv::Mat number of channel currently not support to print\n"); +#endif + } +} + + +/** + * @brief Extract RGB channel separately from bayer image + * + * @tparam T data type of bayer image. + * @return vector of RGB image.
OpenCV internally maintains reference count. + * Thus this step won't create deep copy overhead. + * + * @example extract_rgb_from_bayer( bayer_img, rgb_vector_container ); + */ +template +void extract_rgb_from_bayer( const cv::Mat& bayer_img, \ + cv::Mat& img_ch1, cv::Mat& img_ch2, cv::Mat& img_ch3, cv::Mat& img_ch4 ) +{ + const T* bayer_img_ptr = (const T*)bayer_img.data; + int bayer_width = bayer_img.size().width; + int bayer_height = bayer_img.size().height; + int bayer_step = bayer_img.step1(); + + if ( bayer_width % 2 != 0 || bayer_height % 2 != 0 ) + { +#ifdef __ANDROID__ +#else + throw std::runtime_error("Bayer image data size incorrect, must be multiplier of 2\n"); +#endif + } + + // RGB image is half the size of bayer image + int rgb_width = bayer_width / 2; + int rgb_height = bayer_height / 2; + img_ch1.create( rgb_height, rgb_width, bayer_img.type() ); + img_ch2.create( rgb_height, rgb_width, bayer_img.type() ); + img_ch3.create( rgb_height, rgb_width, bayer_img.type() ); + img_ch4.create( rgb_height, rgb_width, bayer_img.type() ); + int rgb_step = img_ch1.step1(); + + T* img_ch1_ptr = (T*)img_ch1.data; + T* img_ch2_ptr = (T*)img_ch2.data; + T* img_ch3_ptr = (T*)img_ch3.data; + T* img_ch4_ptr = (T*)img_ch4.data; + + #pragma omp parallel for + for ( int rgb_row_i = 0; rgb_row_i < rgb_height; rgb_row_i++ ) + { + int rgb_row_i_offset = rgb_row_i * rgb_step; + + // Every RGB row corresponding to two Bayer image row + int bayer_row_i_offset0 = ( rgb_row_i * 2 + 0 ) * bayer_step; // For RG + int bayer_row_i_offset1 = ( rgb_row_i * 2 + 1 ) * bayer_step; // For GB + + for ( int rgb_col_j = 0; rgb_col_j < rgb_width; rgb_col_j++ ) + { + // img_ch1/2/3/4 : (0,0), (1,0), (0,1), (1,1) + int bayer_col_i_offset0 = rgb_col_j * 2 + 0; + int bayer_col_i_offset1 = rgb_col_j * 2 + 1; + + img_ch1_ptr[ rgb_row_i_offset + rgb_col_j ] = bayer_img_ptr[ bayer_row_i_offset0 + bayer_col_i_offset0 ]; + img_ch3_ptr[ rgb_row_i_offset + rgb_col_j ] = bayer_img_ptr[ bayer_row_i_offset0
+ bayer_col_i_offset1 ]; + img_ch2_ptr[ rgb_row_i_offset + rgb_col_j ] = bayer_img_ptr[ bayer_row_i_offset1 + bayer_col_i_offset0 ]; + img_ch4_ptr[ rgb_row_i_offset + rgb_col_j ] = bayer_img_ptr[ bayer_row_i_offset1 + bayer_col_i_offset1 ]; + } + } +} + + +/** + * @brief Convert RGB image to gray image through same weight linear combination. + * Also support implicit data type conversion. + * + * @tparam RGB_DTYPE rgb image type (e.g. uint16_t) + * @tparam GRAY_DTYPE gray image type (e.g. uint16_t) + * @tparam GRAY_CVTYPE opencv gray image type + */ +template< typename RGB_DTYPE, typename GRAY_DTYPE, int GRAY_CVTYPE > +cv::Mat rgb_2_gray( const cv::Mat& rgb_img ) +{ + const RGB_DTYPE* rgb_img_ptr = (const RGB_DTYPE*)rgb_img.data; + int img_width = rgb_img.size().width; + int img_height = rgb_img.size().height; + int rgb_img_step = rgb_img.step1(); + + // Create output gray cv::Mat + cv::Mat gray_img( img_height, img_width, GRAY_CVTYPE ); + GRAY_DTYPE* gray_img_ptr = (GRAY_DTYPE*)gray_img.data; + int gray_img_step = gray_img.step1(); + + #pragma omp parallel for + for ( int row_i = 0; row_i < img_height; row_i++ ) + { + int rgb_row_i_offset = row_i * rgb_img_step; + int gray_row_i_offset = row_i * gray_img_step; + + UNROLL_LOOP( 32 ) // multiplier of cache line size + for ( int col_j = 0; col_j < img_width; col_j++ ) + { + GRAY_DTYPE avg_ij(0); + + avg_ij += rgb_img_ptr[ rgb_row_i_offset + (col_j * 3 + 0) ]; + avg_ij += rgb_img_ptr[ rgb_row_i_offset + (col_j * 3 + 1) ]; + avg_ij += rgb_img_ptr[ rgb_row_i_offset + (col_j * 3 + 2) ]; + + avg_ij /= 3; + + gray_img_ptr[ gray_row_i_offset + col_j ] = avg_ij; + } + } + + // OpenCV use reference count. 
 Thus return won't create deep copy + return gray_img; +} + + +template +void print_tile( const cv::Mat& img, int tile_size, int start_idx_row, int start_idx_col ) +{ + const T* img_ptr = (T*)img.data; + int src_step = img.step1(); + + for ( int row = start_idx_row; row < tile_size + start_idx_row; ++row ) + { + const T* img_ptr_row = img_ptr + row * src_step; + for ( int col = start_idx_col; col < tile_size + start_idx_col; ++col ) + { + printf("%u ", img_ptr_row[ col ] ); + } + printf("\n"); + } + printf("\n"); +} + + +template< typename T> +void print_img( const cv::Mat& img, int img_height = -1, int img_width = -1 ) +{ + const T* img_ptr = (T*)img.data; + if ( img_height == -1 && img_width == -1 ) + { + img_height = img.size().height; + img_width = img.size().width; + } + else + { + img_height = std::min( img.size().height, img_height ); + img_width = std::min( img.size().width, img_width ); + } + printf("Image size (h=%d, w=%d), Print range (h=0-%d, w=0-%d)]\n", \ + img.size().height, img.size().width, img_height, img_width ); + + int img_step = img.step1(); + + for ( int row = 0; row < img_height; ++row ) + { + const T* img_ptr_row = img_ptr + row * img_step; + for ( int col = 0; col < img_width; ++col ) + { + printf("%u ", img_ptr_row[ col ]); + } + printf("\n"); + } + printf("\n"); +} + +} // namespace hdrplus diff --git a/app/src/main/cpp/hdrplus/src/align.cpp b/app/src/main/cpp/hdrplus/src/align.cpp new file mode 100644 index 00000000..cc2d9219 --- /dev/null +++ b/app/src/main/cpp/hdrplus/src/align.cpp @@ -0,0 +1,992 @@ +#include +#include +#include +#include +#include // std::make_pair +#include // std::runtime_error +#include // all opencv header +#include +#include "hdrplus/align.h" +#include "hdrplus/burst.h" +#include "hdrplus/utility.h" + +namespace hdrplus +{ + +// Function declaration +static void build_per_grayimg_pyramid( \ + std::vector& images_pyramid, \ + const cv::Mat& src_image, \ + const std::vector& inv_scale_factors ); + + +template< int
pyramid_scale_factor_prev_curr, int tilesize_scale_factor_prev_curr, int tile_size > +static void build_upsampled_prev_aligement( \ + const std::vector>>& src_alignment, \ + std::vector>>& dst_alignment, \ + int num_tiles_h, int num_tiles_w, \ + const cv::Mat& ref_img, const cv::Mat& alt_img, \ + bool consider_nbr = false ); + + +template< typename data_type, typename return_type, int tile_size > +static unsigned long long l1_distance( const cv::Mat& img1, const cv::Mat& img2, \ + int img1_tile_row_start_idx, int img1_tile_col_start_idx, \ + int img2_tile_row_start_idx, int img2_tile_col_start_idx ); + + +template< typename data_type, typename return_type, int tile_size > +static return_type l2_distance( const cv::Mat& img1, const cv::Mat& img2, \ + int img1_tile_row_start_idx, int img1_tile_col_start_idx, \ + int img2_tile_row_start_idx, int img2_tile_col_start_idx ); + + +static void align_image_level( \ + const cv::Mat& ref_img, \ + const cv::Mat& alt_img, \ + std::vector>>& prev_aligement, \ + std::vector>>& curr_alignment, \ + int scale_factor_prev_curr, \ + int curr_tile_size, \ + int prev_tile_size, \ + int search_radiou, \ + int distance_type ); + + +// Function Implementations + + +// static function only visible within file +static void build_per_grayimg_pyramid( \ + std::vector& images_pyramid, \ + const cv::Mat& src_image, \ + const std::vector& inv_scale_factors ) +{ + // #ifndef NDEBUG + // printf("%s::%s build_per_grayimg_pyramid start with scale factor : ", __FILE__, __func__ ); + // for ( int i = 0; i < inv_scale_factors.size(); ++i ) + // { + // printf("%d ", inv_scale_factors.at( i )); + // } + // printf("\n"); + // #endif + + images_pyramid.resize( inv_scale_factors.size() ); + + for ( size_t i = 0; i < inv_scale_factors.size(); ++i ) + { + cv::Mat blur_image; + cv::Mat downsample_image; + + switch ( inv_scale_factors[ i ] ) + { + case 1: + images_pyramid[ i ] = src_image.clone(); + // cv::Mat use reference count, will not create deep copy + 
downsample_image = src_image; + break; + case 2: + // printf("(2) downsample with gaussian sigma %.2f", inv_scale_factors[ i ] * 0.5 ); + // // Gaussian blur + cv::GaussianBlur( images_pyramid.at( i-1 ), blur_image, cv::Size(0, 0), inv_scale_factors[ i ] * 0.5 ); + + // // Downsample + downsample_image = downsample_nearest_neighbour( blur_image ); + // downsample_image = downsample_nearest_neighbour( images_pyramid.at( i-1 ) ); + + // Add + images_pyramid.at( i ) = downsample_image.clone(); + + break; + case 4: + // printf("(4) downsample with gaussian sigma %.2f", inv_scale_factors[ i ] * 0.5 ); + cv::GaussianBlur( images_pyramid.at( i-1 ), blur_image, cv::Size(0, 0), inv_scale_factors[ i ] * 0.5 ); + downsample_image = downsample_nearest_neighbour( blur_image ); + // downsample_image = downsample_nearest_neighbour( images_pyramid.at( i-1 ) ); + images_pyramid.at( i ) = downsample_image.clone(); + break; + default: +#ifdef __ANDROID__ + +#else + throw std::runtime_error("inv scale factor " + std::to_string( inv_scale_factors[ i ]) + "invalid" ); +#endif + } + } +} + + +static bool operator!=( const std::pair& lhs, const std::pair& rhs ) +{ + return lhs.first != rhs.first || lhs.second != rhs.second; +} + + +template< int pyramid_scale_factor_prev_curr, int tilesize_scale_factor_prev_curr, int tile_size > +static void build_upsampled_prev_aligement( \ + const std::vector>>& src_alignment, \ + std::vector>>& dst_alignment, \ + int num_tiles_h, int num_tiles_w, \ + const cv::Mat& ref_img, const cv::Mat& alt_img, \ + bool consider_nbr ) +{ + int src_num_tiles_h = src_alignment.size(); + int src_num_tiles_w = src_alignment[ 0 ].size(); + + constexpr int repeat_factor = pyramid_scale_factor_prev_curr / tilesize_scale_factor_prev_curr; + + // printf("build_upsampled_prev_aligement with scale factor %d, repeat factor %d, tile size factor %d\n", \ + // pyramid_scale_factor_prev_curr, repeat_factor, tilesize_scale_factor_prev_curr ); + + int dst_num_tiles_main_h = 
src_num_tiles_h * repeat_factor; + int dst_num_tiles_main_w = src_num_tiles_w * repeat_factor; + + if ( dst_num_tiles_main_h > num_tiles_h || dst_num_tiles_main_w > num_tiles_w ) + { +#ifdef __ANDROID__ + return; +#else + throw std::runtime_error("current level number of tiles smaller than upsampled tiles\n"); +#endif + } + + // Allocate data for dst_alignment + // NOTE: number of tiles h, number of tiles w might be different from dst_num_tiles_main_h, dst_num_tiles_main_w + // For tiles between num_tile_h and dst_num_tiles_main_h, use (0,0) + dst_alignment.resize( num_tiles_h, std::vector>( num_tiles_w, std::pair(0, 0) ) ); + + // Upsample alignment + #pragma omp parallel for collapse(2) + for ( int row_i = 0; row_i < src_num_tiles_h; row_i++ ) + { + for ( int col_i = 0; col_i < src_num_tiles_w; col_i++ ) + { + // Scale alignment + std::pair align_i = src_alignment[ row_i ][ col_i ]; + align_i.first *= pyramid_scale_factor_prev_curr; + align_i.second *= pyramid_scale_factor_prev_curr; + + // repeat + UNROLL_LOOP( repeat_factor ) + for ( int repeat_row_i = 0; repeat_row_i < repeat_factor; ++repeat_row_i ) + { + int repeat_row_i_offset = row_i * repeat_factor + repeat_row_i; + UNROLL_LOOP( repeat_factor ) + for ( int repeat_col_i = 0; repeat_col_i < repeat_factor; ++repeat_col_i ) + { + int repeat_col_i_offset = col_i * repeat_factor + repeat_col_i; + dst_alignment[ repeat_row_i_offset ][ repeat_col_i_offset ] = align_i; + } + } + } + } + + if ( consider_nbr ) + { + // Copy consurtctor + std::vector>> upsampled_alignment{ dst_alignment }; + + // Distance function + unsigned long long (*distance_func_ptr)(const cv::Mat&, const cv::Mat&, int, int, int, int) = \ + &l1_distance; + + #pragma omp parallel for collapse(2) + for ( int tile_row_i = 0; tile_row_i < num_tiles_h; tile_row_i++ ) + { + for ( int tile_col_i = 0; tile_col_i < num_tiles_w; tile_col_i++ ) + { + const auto& curr_align_i = upsampled_alignment[ tile_row_i ][ tile_col_i ]; + + // Container for nbr 
alignment pair + std::vector> nbrs_align_i; + + // Consider 4 neighbour's alignment + // Only compute distance if alignment is different + if ( tile_col_i > 0 ) + { + const auto& nbr1_align_i = upsampled_alignment[ tile_row_i + 0 ][ tile_col_i - 1 ]; + if ( curr_align_i != nbr1_align_i ) nbrs_align_i.emplace_back( nbr1_align_i ); + } + + if ( tile_col_i < num_tiles_w - 1 ) + { + const auto& nbr2_align_i = upsampled_alignment[ tile_row_i + 0 ][ tile_col_i + 1 ]; + if ( curr_align_i != nbr2_align_i ) nbrs_align_i.emplace_back( nbr2_align_i ); + } + + if ( tile_row_i > 0 ) + { + const auto& nbr3_align_i = upsampled_alignment[ tile_row_i - 1 ][ tile_col_i + 0 ]; + if ( curr_align_i != nbr3_align_i ) nbrs_align_i.emplace_back( nbr3_align_i ); + } + + if ( tile_row_i < num_tiles_h - 1 ) + { + const auto& nbr4_align_i = upsampled_alignment[ tile_row_i + 1 ][ tile_col_i + 0 ]; + if ( curr_align_i != nbr4_align_i ) nbrs_align_i.emplace_back( nbr4_align_i ); + } + + // If there is a nbr alignment that need to be considered. Compute distance + if ( ! 
nbrs_align_i.empty() ) + { + int ref_tile_row_start_idx_i = tile_row_i * tile_size / 2; + int ref_tile_col_start_idx_i = tile_col_i * tile_size / 2; + + // curr_align_i's distance + auto curr_align_i_distance = distance_func_ptr( + ref_img, alt_img, \ + ref_tile_row_start_idx_i, \ + ref_tile_col_start_idx_i, \ + ref_tile_row_start_idx_i + curr_align_i.first, \ + ref_tile_col_start_idx_i + curr_align_i.second ); + + for ( const auto& nbr_align_i : nbrs_align_i ) + { + auto nbr_align_i_distance = distance_func_ptr( + ref_img, alt_img, \ + ref_tile_row_start_idx_i, \ + ref_tile_col_start_idx_i, \ + ref_tile_row_start_idx_i + nbr_align_i.first, \ + ref_tile_col_start_idx_i + nbr_align_i.second ); + + if ( nbr_align_i_distance < curr_align_i_distance ) + { + #ifdef NDEBUG + printf("tile [%d, %d] update align, prev align (%d, %d) curr align (%d, %d), prev distance %d curr distance %d\n", \ + tile_row_i, tile_col_i, \ + curr_align_i.first, curr_align_i.second, \ + nbr_align_i.first, nbr_align_i.second, \ + int(curr_align_i_distance), int(nbr_align_i_distance) ); + #endif + + dst_alignment[ tile_row_i ][ tile_col_i ] = nbr_align_i; + curr_align_i_distance = nbr_align_i_distance; + } + } + } + } + } + + } +} + + +// Set tilesize as template argument for better compiler optimization result. +template< typename data_type, typename return_type, int tile_size > +static unsigned long long l1_distance( const cv::Mat& img1, const cv::Mat& img2, \ + int img1_tile_row_start_idx, int img1_tile_col_start_idx, \ + int img2_tile_row_start_idx, int img2_tile_col_start_idx ) +{ + #define CUSTOME_ABS( x ) ( x ) > 0 ? 
( x ) : - ( x ) + + const data_type* img1_ptr = (const data_type*)img1.data; + const data_type* img2_ptr = (const data_type*)img2.data; + + int img1_step = img1.step1(); + int img2_step = img2.step1(); + + int img1_width = img1.size().width; + int img1_height = img1.size().height; + + int img2_width = img2.size().width; + int img2_height = img2.size().height; + + // Range check for safety + if ( img1_tile_row_start_idx < 0 || img1_tile_row_start_idx > img1_height - tile_size ) + { +#ifdef __ANDROID__ + return 0; +#else + throw std::runtime_error("l1 distance img1_tile_row_start_idx" + std::to_string( img1_tile_row_start_idx ) + \ + " out of valid range (0, " + std::to_string( img1_height - tile_size ) + ")\n" ); +#endif + } + + if ( img1_tile_col_start_idx < 0 || img1_tile_col_start_idx > img1_width - tile_size ) + { +#ifdef __ANDROID__ + return 0; +#else + throw std::runtime_error("l1 distance img1_tile_col_start_idx" + std::to_string( img1_tile_col_start_idx ) + \ + " out of valid range (0, " + std::to_string( img1_width - tile_size ) + ")\n" ); +#endif + } + + if ( img2_tile_row_start_idx < 0 || img2_tile_row_start_idx > img2_height - tile_size ) + { +#ifdef __ANDROID__ + return 0; +#else + throw std::runtime_error("l1 distance img2_tile_row_start_idx out of valid range\n"); +#endif + } + + if ( img2_tile_col_start_idx < 0 || img2_tile_col_start_idx > img2_width - tile_size ) + { +#ifdef __ANDROID__ + return 0; +#else + throw std::runtime_error("l1 distance img2_tile_col_start_idx out of valid range\n"); +#endif + } + + return_type sum(0); + + UNROLL_LOOP( tile_size ) + for ( int row_i = 0; row_i < tile_size; ++row_i ) + { + const data_type* img1_ptr_row_i = img1_ptr + (img1_tile_row_start_idx + row_i) * img1_step + img1_tile_col_start_idx; + const data_type* img2_ptr_row_i = img2_ptr + (img2_tile_row_start_idx + row_i) * img2_step + img2_tile_col_start_idx; + + UNROLL_LOOP( tile_size ) + for ( int col_i = 0; col_i < tile_size; ++col_i ) + { + data_type l1 = 
CUSTOME_ABS( img1_ptr_row_i[ col_i ] - img2_ptr_row_i[ col_i ] ); + sum += l1; + } + } + + #undef CUSTOME_ABS + + return sum; +} + + +template< typename data_type, typename return_type, int tile_size > +static return_type l2_distance( const cv::Mat& img1, const cv::Mat& img2, \ + int img1_tile_row_start_idx, int img1_tile_col_start_idx, \ + int img2_tile_row_start_idx, int img2_tile_col_start_idx ) +{ + #define CUSTOME_ABS( x ) ( x ) > 0 ? ( x ) : - ( x ) + + const data_type* img1_ptr = (const data_type*)img1.data; + const data_type* img2_ptr = (const data_type*)img2.data; + + int img1_step = img1.step1(); + int img2_step = img2.step1(); + + int img1_width = img1.size().width; + int img1_height = img1.size().height; + + int img2_width = img2.size().width; + int img2_height = img2.size().height; + + // Range check for safety + if ( img1_tile_row_start_idx < 0 || img1_tile_row_start_idx > img1_height - tile_size ) + { +#ifdef __ANDROID__ + return 0; +#else + throw std::runtime_error("l2 distance img1_tile_row_start_idx" + std::to_string( img1_tile_row_start_idx ) + \ + " out of valid range (0, " + std::to_string( img1_height - tile_size ) + ")\n" ); +#endif + } + + if ( img1_tile_col_start_idx < 0 || img1_tile_col_start_idx > img1_width - tile_size ) + { +#ifdef __ANDROID__ + return 0; +#else + throw std::runtime_error("l2 distance img1_tile_col_start_idx" + std::to_string( img1_tile_col_start_idx ) + \ + " out of valid range (0, " + std::to_string( img1_width - tile_size ) + ")\n" ); +#endif + } + + if ( img2_tile_row_start_idx < 0 || img2_tile_row_start_idx > img2_height - tile_size ) + { +#ifdef __ANDROID__ + return 0; +#else + throw std::runtime_error("l2 distance img2_tile_row_start_idx out of valid range\n"); +#endif + } + + if ( img2_tile_col_start_idx < 0 || img2_tile_col_start_idx > img2_width - tile_size ) + { +#ifdef __ANDROID__ + return 0; +#else + throw std::runtime_error("l2 distance img2_tile_col_start_idx out of valid range\n"); +#endif + } + + // 
printf("Search two tile with ref : \n"); + // print_tile( img1, tile_size, img1_tile_row_start_idx, img1_tile_col_start_idx ); + // printf("Search two tile with alt :\n"); + // print_tile( img2, tile_size, img2_tile_row_start_idx, img2_tile_col_start_idx ); + + return_type sum(0); + + UNROLL_LOOP( tile_size ) + for ( int row_i = 0; row_i < tile_size; ++row_i ) + { + const data_type* img1_ptr_row_i = img1_ptr + (img1_tile_row_start_idx + row_i) * img1_step + img1_tile_col_start_idx; + const data_type* img2_ptr_row_i = img2_ptr + (img2_tile_row_start_idx + row_i) * img2_step + img2_tile_col_start_idx; + + UNROLL_LOOP( tile_size ) + for ( int col_i = 0; col_i < tile_size; ++col_i ) + { + data_type l1 = CUSTOME_ABS( img1_ptr_row_i[ col_i ] - img2_ptr_row_i[ col_i ] ); + sum += ( l1 * l1 ); + } + } + + #undef CUSTOME_ABS + + return sum; +} + + +template +static cv::Mat extract_img_tile( const cv::Mat& img, int img_tile_row_start_idx, int img_tile_col_start_idx ) +{ + const T* img_ptr = (const T*)img.data; + int img_width = img.size().width; + int img_height = img.size().height; + int img_step = img.step1(); + + if ( img_tile_row_start_idx < 0 || img_tile_row_start_idx > img_height - tile_size ) + { +#ifdef __ANDROID__ + return cv::Mat(); +#else + throw std::runtime_error("extract_img_tile img_tile_row_start_idx " + std::to_string( img_tile_row_start_idx ) + \ + " out of valid range (0, " + std::to_string( img_height - tile_size ) + ")\n" ); +#endif + } + + if ( img_tile_col_start_idx < 0 || img_tile_col_start_idx > img_width - tile_size ) + { +#ifdef __ANDROID__ + return cv::Mat(); +#else + throw std::runtime_error("extract_img_tile img_tile_col_start_idx " + std::to_string( img_tile_col_start_idx ) + \ + " out of valid range (0, " + std::to_string( img_width - tile_size ) + ")\n" ); +#endif + } + + cv::Mat img_tile( tile_size, tile_size, img.type() ); + T* img_tile_ptr = (T*)img_tile.data; + int img_tile_step = img_tile.step1(); + + UNROLL_LOOP( tile_size ) + for ( int 
row_i = 0; row_i < tile_size; ++row_i ) + { + const T* img_ptr_row_i = img_ptr + img_step * ( img_tile_row_start_idx + row_i ); + T* img_tile_ptr_row_i = img_tile_ptr + img_tile_step * row_i; + + UNROLL_LOOP( tile_size ) + for ( int col_i = 0; col_i < tile_size; ++col_i ) + { + img_tile_ptr_row_i[ col_i ] = img_ptr_row_i[ img_tile_col_start_idx + col_i ]; + } + } + + return img_tile; +} + + +void align_image_level( \ + const cv::Mat& ref_img, \ + const cv::Mat& alt_img, \ + std::vector>>& prev_aligement, \ + std::vector>>& curr_alignment, \ + int scale_factor_prev_curr, \ + int curr_tile_size, \ + int prev_tile_size, \ + int search_radiou, \ + int distance_type ) +{ + // Every align image level share the same distance function. + // Use function ptr to reduce if else overhead inside for loop + unsigned long long (*distance_func_ptr)(const cv::Mat&, const cv::Mat&, int, int, int, int) = nullptr; + + if ( distance_type == 1 ) // l1 distance + { + if ( curr_tile_size == 8 ) + { + distance_func_ptr = &l1_distance; + } + else if ( curr_tile_size == 16 ) + { + distance_func_ptr = &l1_distance; + } + } + else if ( distance_type == 2 ) // l2 distance + { + if ( curr_tile_size == 8 ) + { + distance_func_ptr = &l2_distance; + } + else if ( curr_tile_size == 16 ) + { + distance_func_ptr = &l2_distance; + } + } + + // Every level share the same upsample function + void (*upsample_alignment_func_ptr)(const std::vector>>&, \ + std::vector>>&, \ + int, int, const cv::Mat&, const cv::Mat&, bool) = nullptr; + if ( scale_factor_prev_curr == 2 ) + { + if ( curr_tile_size / prev_tile_size == 2 ) + { + if ( curr_tile_size == 8 ) + { + upsample_alignment_func_ptr = &build_upsampled_prev_aligement<2, 2, 8>; + } + else if ( curr_tile_size == 16 ) + { + upsample_alignment_func_ptr = &build_upsampled_prev_aligement<2, 2, 16>; + } + else + { +#ifdef __ANDROID__ + return; +#else + throw std::runtime_error("Something wrong with upsampling function setting\n"); +#endif + } + + } + else if ( 
curr_tile_size / prev_tile_size == 1 ) + { + if ( curr_tile_size == 8 ) + { + upsample_alignment_func_ptr = &build_upsampled_prev_aligement<2, 1, 8>; + } + else if ( curr_tile_size == 16 ) + { + upsample_alignment_func_ptr = &build_upsampled_prev_aligement<2, 1, 16>; + } + else + { +#ifdef __ANDROID__ + return; +#else + throw std::runtime_error("Something wrong with upsampling function setting\n"); +#endif + } + } + else + { +#ifdef __ANDROID__ + return; +#else + throw std::runtime_error("Something wrong with upsampling function setting\n"); +#endif + } + } + else if ( scale_factor_prev_curr == 4 ) + { + if ( curr_tile_size / prev_tile_size == 2 ) + { + if ( curr_tile_size == 8 ) + { + upsample_alignment_func_ptr = &build_upsampled_prev_aligement<4, 2, 8>; + } + else if ( curr_tile_size == 16 ) + { + upsample_alignment_func_ptr = &build_upsampled_prev_aligement<4, 2, 16>; + } + else + { +#ifdef __ANDROID__ + return; +#else + throw std::runtime_error("Something wrong with upsampling function setting\n"); +#endif + } + + } + else if ( curr_tile_size / prev_tile_size == 1 ) + { + if ( curr_tile_size == 8 ) + { + upsample_alignment_func_ptr = &build_upsampled_prev_aligement<4, 1, 8>; + } + else if ( curr_tile_size == 16 ) + { + upsample_alignment_func_ptr = &build_upsampled_prev_aligement<4, 1, 16>; + } + else + { +#ifdef __ANDROID__ + return; +#else + throw std::runtime_error("Something wrong with upsampling function setting\n"); +#endif + } + } + else + { +#ifdef __ANDROID__ + return; +#else + throw std::runtime_error("Something wrong with upsampling function setting\n"); +#endif + } + } + + + // Function to extract reference image tile for memory cache + cv::Mat (*extract_ref_img_tile)(const cv::Mat&, int, int) = nullptr; + if ( curr_tile_size == 8 ) + { + extract_ref_img_tile = &extract_img_tile; + } + else if ( curr_tile_size == 16 ) + { + extract_ref_img_tile = &extract_img_tile; + } + + // Function to extract search image tile for memory cache + cv::Mat 
(*extract_alt_img_search)(const cv::Mat&, int, int) = nullptr; + if ( curr_tile_size == 8 ) + { + if ( search_radiou == 1 ) + { + extract_alt_img_search = &extract_img_tile; + } + else if ( search_radiou == 4 ) + { + extract_alt_img_search = &extract_img_tile; + } + } + else if ( curr_tile_size == 16 ) + { + if ( search_radiou == 1 ) + { + extract_alt_img_search = &extract_img_tile; + } + else if ( search_radiou == 4 ) + { + extract_alt_img_search = &extract_img_tile; + } + } + + int num_tiles_h = ref_img.size().height / (curr_tile_size / 2) - 1; + int num_tiles_w = ref_img.size().width / (curr_tile_size / 2 ) - 1; + + /* Upsample pervious layer alignment */ + std::vector>> upsampled_prev_aligement; + + // Coarsest level + // prev_alignment is invalid / empty, construct alignment as (0,0) + if ( prev_tile_size == -1 ) + { + upsampled_prev_aligement.resize( num_tiles_h, \ + std::vector>( num_tiles_w, std::pair(0, 0) ) ); + } + // Upsample previous level alignment + else + { + upsample_alignment_func_ptr( prev_aligement, upsampled_prev_aligement, \ + num_tiles_h, num_tiles_w, ref_img, alt_img, false ); + + // printf("\n!!!!!Upsampled previous alignment\n"); + // for ( int tile_row = 0; tile_row < int(upsampled_prev_aligement.size()); tile_row++ ) + // { + // for ( int tile_col = 0; tile_col < int(upsampled_prev_aligement.at(0).size()); tile_col++ ) + // { + // const auto tile_start = upsampled_prev_aligement.at( tile_row ).at( tile_col ); + // printf("up tile (%d, %d) -> start idx (%d, %d)\n", \ + // tile_row, tile_col, tile_start.first, tile_start.second); + // } + // } + + } + + #ifndef NDEBUG + printf("%s::%s start: \n", __FILE__, __func__ ); + printf(" scale_factor_prev_curr %d, tile_size %d, prev_tile_size %d, search_radiou %d, distance L%d, \n", \ + scale_factor_prev_curr, curr_tile_size, prev_tile_size, search_radiou, distance_type ); + printf(" ref img size h=%d w=%d, alt img size h=%d w=%d, \n", \ + ref_img.size().height, ref_img.size().width, 
alt_img.size().height, alt_img.size().width ); + printf(" num tile h (upsampled) %d, num tile w (upsampled) %d\n", num_tiles_h, num_tiles_w); + #endif + + // allocate memory for current alignmenr + curr_alignment.resize( num_tiles_h, std::vector>( num_tiles_w, std::pair(0, 0) ) ); + + /* Pad alternative image */ + cv::Mat alt_img_pad; + cv::copyMakeBorder( alt_img, \ + alt_img_pad, \ + search_radiou, search_radiou, search_radiou, search_radiou, \ + cv::BORDER_CONSTANT, cv::Scalar( UINT_LEAST16_MAX ) ); + + // printf("Reference image h=%d, w=%d: \n", ref_img.size().height, ref_img.size().width ); + // print_img( ref_img ); + + // printf("Alter image pad h=%d, w=%d: \n", alt_img_pad.size().height, alt_img_pad.size().width ); + // print_img( alt_img_pad ); + + // printf("!! enlarged tile size %d\n", curr_tile_size + 2 * search_radiou ); + + int alt_tile_row_idx_max = alt_img_pad.size().height - ( curr_tile_size + 2 * search_radiou ); + int alt_tile_col_idx_max = alt_img_pad.size().width - ( curr_tile_size + 2 * search_radiou ); + + // Dlete below distance vector, this is for debug only + std::vector> distances( num_tiles_h, std::vector( num_tiles_w, 0 )); + + /* Iterate through all reference tile & compute distance */ + #pragma omp parallel for collapse(2) + for ( int ref_tile_row_i = 0; ref_tile_row_i < num_tiles_h; ref_tile_row_i++ ) + { + for ( int ref_tile_col_i = 0; ref_tile_col_i < num_tiles_w; ref_tile_col_i++ ) + { + // Upper left index of reference tile + int ref_tile_row_start_idx_i = ref_tile_row_i * curr_tile_size / 2; + int ref_tile_col_start_idx_i = ref_tile_col_i * curr_tile_size / 2; + + // printf("\nRef img tile [%d, %d] -> start idx [%d, %d] (row, col)\n", \ + // ref_tile_row_i, ref_tile_col_i, ref_tile_row_start_idx_i, ref_tile_col_start_idx_i ); + // printf("\nRef img tile [%d, %d]\n", ref_tile_row_i, ref_tile_col_i ); + // print_tile( ref_img, curr_tile_size, ref_tile_row_start_idx_i, ref_tile_col_start_idx_i ); + + // Upsampled alignment at this 
tile + // Alignment are relative displacement in pixel value + int prev_alignment_row_i = upsampled_prev_aligement.at( ref_tile_row_i ).at( ref_tile_col_i ).first; + int prev_alignment_col_i = upsampled_prev_aligement.at( ref_tile_row_i ).at( ref_tile_col_i ).second; + + // Alternative image tile start idx + int alt_tile_row_start_idx_i = ref_tile_row_start_idx_i + prev_alignment_row_i; + int alt_tile_col_start_idx_i = ref_tile_col_start_idx_i + prev_alignment_col_i; + + // Ensure alternative image tile within range + if ( alt_tile_row_start_idx_i < 0 ) + alt_tile_row_start_idx_i = 0; + if ( alt_tile_col_start_idx_i < 0 ) + alt_tile_col_start_idx_i = 0; + if ( alt_tile_row_start_idx_i > alt_tile_row_idx_max ) + { + // int before = alt_tile_row_start_idx_i; + alt_tile_row_start_idx_i = alt_tile_row_idx_max; + // printf("@@ change start x from %d to %d\n", before, alt_tile_row_idx_max); + } + if ( alt_tile_col_start_idx_i > alt_tile_col_idx_max ) + { + // int before = alt_tile_col_start_idx_i; + alt_tile_col_start_idx_i = alt_tile_col_idx_max; + // printf("@@ change start y from %d to %d\n", before, alt_tile_col_idx_max ); + } + + // Explicitly caching reference image tile + cv::Mat ref_img_tile_i = extract_ref_img_tile( ref_img, ref_tile_row_start_idx_i, ref_tile_col_start_idx_i ); + cv::Mat alt_img_search_i = extract_alt_img_search( alt_img_pad, alt_tile_row_start_idx_i, alt_tile_col_start_idx_i ); + + // Because alternative image is padded with search radious. 
+ // Using same coordinate with reference image will automatically considered search radious * 2 + // printf("Alt image tile [%d, %d]-> start idx [%d, %d]\n", \ + // ref_tile_row_i, ref_tile_col_i, alt_tile_row_start_idx_i, alt_tile_col_start_idx_i ); + // printf("\nAlt image tile [%d, %d]\n", ref_tile_row_i, ref_tile_col_i ); + // print_tile( alt_img_pad, curr_tile_size + 2 * search_radiou, alt_tile_row_start_idx_i, alt_tile_col_start_idx_i ); + + // Search based on L1/L2 distance + unsigned long long min_distance_i = ULONG_LONG_MAX; + int min_distance_row_i = -1; + int min_distance_col_i = -1; + for ( int search_row_j = 0; search_row_j < ( search_radiou * 2 + 1 ); search_row_j++ ) + { + for ( int search_col_j = 0; search_col_j < ( search_radiou * 2 + 1 ); search_col_j++ ) + { + // printf("\n--->tile at [%d, %d] search (%d, %d)\n", \ + // ref_tile_row_i, ref_tile_col_i, search_row_j - search_radiou, search_col_j - search_radiou ); + + // unsigned long long distance_j = distance_func_ptr( ref_img, alt_img_pad, \ + // ref_tile_row_start_idx_i, ref_tile_col_start_idx_i, \ + // alt_tile_row_start_idx_i + search_row_j, alt_tile_col_start_idx_i + search_col_j ); + + // unsigned long long distance_j = distance_func_ptr( ref_img_tile_i, alt_img_pad, \ + // 0, 0, \ + // alt_tile_row_start_idx_i + search_row_j, alt_tile_col_start_idx_i + search_col_j ); + + unsigned long long distance_j = distance_func_ptr( ref_img_tile_i, alt_img_search_i, \ + 0, 0, \ + search_row_j, search_col_j ); + + // printf("<---tile at [%d, %d] search (%d, %d), new dis %llu, old dis %llu\n", \ + // ref_tile_row_i, ref_tile_col_i, search_row_j - search_radiou, search_col_j - search_radiou, distance_j, min_distance_i ); + + // If this is smaller distance + if ( distance_j < min_distance_i ) + { + min_distance_i = distance_j; + min_distance_col_i = search_col_j; + min_distance_row_i = search_row_j; + } + + // If same value, choose the one closer to the original tile location + if ( distance_j == 
min_distance_i && min_distance_row_i != -1 && min_distance_col_i != -1 ) + { + int prev_distance_row_2_ref = min_distance_row_i - search_radiou; + int prev_distance_col_2_ref = min_distance_col_i - search_radiou; + int curr_distance_row_2_ref = search_row_j - search_radiou; + int curr_distance_col_2_ref = search_col_j - search_radiou; + + int prev_distance_2_ref_sqr = prev_distance_row_2_ref * prev_distance_row_2_ref + prev_distance_col_2_ref * prev_distance_col_2_ref; + int curr_distance_2_ref_sqr = curr_distance_row_2_ref * curr_distance_row_2_ref + curr_distance_col_2_ref * curr_distance_col_2_ref; + + // previous min distance idx is farther away from ref tile start location + if ( prev_distance_2_ref_sqr > curr_distance_2_ref_sqr ) + { + // printf("@@@ Same distance %d, choose closer one (%d, %d) instead of (%d, %d)\n", \ + // distance_j, search_row_j, search_col_j, min_distance_row_i, min_distance_col_i); + min_distance_col_i = search_col_j; + min_distance_row_i = search_row_j; + } + } + } + } + + // printf("tile at (%d, %d) alignment (%d, %d)\n", \ + // ref_tile_row_i, ref_tile_col_i, min_distance_row_i, min_distance_col_i ); + + int alignment_row_i = prev_alignment_row_i + min_distance_row_i - search_radiou; + int alignment_col_i = prev_alignment_col_i + min_distance_col_i - search_radiou; + + std::pair alignment_i( alignment_row_i, alignment_col_i ); + + // Add min_distance_i's corresbonding idx as min + curr_alignment.at( ref_tile_row_i ).at( ref_tile_col_i ) = alignment_i; + distances.at( ref_tile_row_i ).at( ref_tile_col_i ) = min_distance_i; + } + } + + // printf("\n!!!!!Min distance for each tile \n"); + // for ( int tile_row = 0; tile_row < num_tiles_h; tile_row++ ) + // { + // for ( int tile_col = 0; tile_col < num_tiles_w; ++tile_col ) + // { + // printf("tile (%d, %d) distance %u\n", \ + // tile_row, tile_col, distances.at( tile_row).at(tile_col ) ); + // } + // } + + // printf("\n!!!!!Alignment at current level\n"); + // for ( int tile_row = 0; 
tile_row < num_tiles_h; tile_row++ ) + // { + // for ( int tile_col = 0; tile_col < num_tiles_w; tile_col++ ) + // { + // const auto tile_start = curr_alignment.at( tile_row ).at( tile_col ); + // printf("tile (%d, %d) -> start idx (%d, %d)\n", \ + // tile_row, tile_col, tile_start.first, tile_start.second); + // } + // } + +} + + + +void align::process( const hdrplus::burst& burst_images, \ + std::vector>>>& images_alignment ) +{ + #ifndef NDEBUG + printf("%s::%s align::process start\n", __FILE__, __func__ ); fflush(stdout); + #endif + + images_alignment.clear(); + images_alignment.resize( burst_images.num_images ); + + // image pyramid per image, per pyramid level + std::vector> per_grayimg_pyramid; + + // printf("!!!!! ref bayer padded\n"); + // print_img( burst_images.bayer_images_pad.at( burst_images.reference_image_idx) ); + // exit(1); + + // printf("!!!!! ref gray padded\n"); + // print_img( burst_images.grayscale_images_pad.at( burst_images.reference_image_idx) ); + // exit(1); + + per_grayimg_pyramid.resize( burst_images.num_images ); + + #pragma omp parallel for + for ( int img_idx = 0; img_idx < burst_images.num_images; ++img_idx ) + { + // per_grayimg_pyramid[ img_idx ][ 0 ] is the original image + // per_grayimg_pyramid[ img_idx ][ 3 ] is the coarsest image + build_per_grayimg_pyramid( per_grayimg_pyramid.at( img_idx ), \ + burst_images.grayscale_images_pad.at( img_idx ), \ + this->inv_scale_factors ); + } + + // #ifndef NDEBUG + // printf("%s::%s build image pyramid of size : ", __FILE__, __func__ ); + // for ( int level_i = 0; level_i < num_levels; ++level_i ) + // { + // printf("(%d, %d) ", per_grayimg_pyramid[ 0 ][ level_i ].size().height, + // per_grayimg_pyramid[ 0 ][ level_i ].size().width ); + // } + // printf("\n"); fflush(stdout); + // #endif + + // print image pyramid + // for ( int level_i; level_i < num_levels; ++level_i ) + // { + // printf("\n\n!!!!! 
ref gray pyramid level %d img : \n" , level_i ); + // print_img( per_grayimg_pyramid[ burst_images.reference_image_idx ][ level_i ] ); + // } + // exit(-1); + + // Align every image + const std::vector& ref_grayimg_pyramid = per_grayimg_pyramid[ burst_images.reference_image_idx ]; + std::vector>> curr_alignment; + std::vector>> prev_alignment; + for ( int img_idx = 0; img_idx < burst_images.num_images; ++img_idx ) + { + // Do not align with reference image + if ( img_idx == burst_images.reference_image_idx ) + continue; + + const std::vector& alt_grayimg_pyramid = per_grayimg_pyramid[ img_idx ]; + + // Align every level from coarse to grain + // level 0 : finest level, the original image + // level 3 : coarsest level + curr_alignment.clear(); + prev_alignment.clear(); + for ( int level_i = num_levels - 1; level_i >= 0; level_i-- ) // 3,2,1,0 + { + // make curr alignment as previous alignment + prev_alignment.swap( curr_alignment ); + curr_alignment.clear(); + + // printf("\n\n########################align level %d\n", level_i ); + align_image_level( + ref_grayimg_pyramid[ level_i ], // reference image at current level + alt_grayimg_pyramid[ level_i ], // alternative image at current level + prev_alignment, // previous layer alignment + curr_alignment, // current layer alignment + ( level_i == ( num_levels - 1 ) ? -1 : inv_scale_factors[ level_i + 1 ] ), // scale factor between previous layer and current layer. -1 if current layer is the coarsest layer, [-1, 4, 4, 2] + grayimg_tile_sizes[ level_i ], // current level tile size + ( level_i == ( num_levels - 1 ) ? 
-1 : grayimg_tile_sizes[ level_i + 1 ] ), // previous level tile size + grayimg_search_radious[ level_i ], // search radious + distances[ level_i ] ); // L1/L2 distance + + // printf("@@@Alignment at level %d is h=%d, w=%d", level_i, curr_alignment.size(), curr_alignment.at(0).size() ); + + + } // for pyramid level + + // Alignment at grayscale image + images_alignment.at( img_idx ).swap( curr_alignment ); + + // printf("\n!!!!!Alternative Image Alignment\n"); + // for ( int tile_row = 0; tile_row < images_alignment.at( img_idx ).size(); tile_row++ ) + // { + // for ( int tile_col = 0; tile_col < images_alignment.at( img_idx ).at(0).size(); tile_col++ ) + // { + // const auto tile_start = images_alignment.at( img_idx ).at( tile_row ).at( tile_col ); + // printf("tile (%d, %d) -> start idx (%d, %d)\n", \ + // tile_row, tile_col, tile_start.first, tile_start.second); + // } + // } + + } // for alternative image + +} + +} // namespace hdrplus diff --git a/app/src/main/cpp/hdrplus/src/bayer_image.cpp b/app/src/main/cpp/hdrplus/src/bayer_image.cpp new file mode 100644 index 00000000..0d84199a --- /dev/null +++ b/app/src/main/cpp/hdrplus/src/bayer_image.cpp @@ -0,0 +1,166 @@ +#include +#include +#include +#include // std::pair, std::makr_pair +#include // std::shared_ptr +#include // std::runtime_error +#include // all opencv header +#include +#include // exiv2 +#include "hdrplus/bayer_image.h" +#include "hdrplus/utility.h" // box_filter_kxk +namespace hdrplus +{ + +bayer_image::bayer_image( const std::string& bayer_image_path ) +{ + libraw_processor = std::make_shared(); + + // Open RAW image file + int return_code; + if ( ( return_code = libraw_processor->open_file( bayer_image_path.c_str() ) ) != LIBRAW_SUCCESS ) + { + libraw_processor->recycle(); +#ifdef __ANDROID__ + return; +#else + throw std::runtime_error("Error opening file " + bayer_image_path + " " + libraw_strerror( return_code )); +#endif + } + + // Unpack the raw image + if ( ( return_code = 
libraw_processor->unpack() ) != LIBRAW_SUCCESS ) + { +#ifdef __ANDROID__ + return; +#else + throw std::runtime_error("Error unpack file " + bayer_image_path + " " + libraw_strerror( return_code )); +#endif + } + + // Get image basic info + width = int( libraw_processor->imgdata.rawdata.sizes.raw_width ); + height = int( libraw_processor->imgdata.rawdata.sizes.raw_height ); + + // Read exif tags + Exiv2::Image::AutoPtr image = Exiv2::ImageFactory::open(bayer_image_path); + assert(image.get() != 0); + image->readMetadata(); + Exiv2::ExifData &exifData = image->exifData(); + if (exifData.empty()) { + std::string error(bayer_image_path); + error += ": No Exif data found in the file"; + std::cout << error << std::endl; + } + + white_level = exifData["Exif.Image.WhiteLevel"].toLong(); + black_level_per_channel.resize( 4 ); + black_level_per_channel.at(0) = exifData["Exif.Image.BlackLevel"].toLong(0); + black_level_per_channel.at(1) = exifData["Exif.Image.BlackLevel"].toLong(1); + black_level_per_channel.at(2) = exifData["Exif.Image.BlackLevel"].toLong(2); + black_level_per_channel.at(3) = exifData["Exif.Image.BlackLevel"].toLong(3); + iso = exifData["Exif.Image.ISOSpeedRatings"].toLong(); + + // Create CV mat + // https://answers.opencv.org/question/105972/de-bayering-a-cr2-image/ + // https://www.libraw.org/node/2141 + raw_image = cv::Mat( height, width, CV_16U, libraw_processor->imgdata.rawdata.raw_image ).clone(); // changed the order of width and height + + // 2x2 box filter + grayscale_image = box_filter_kxk( raw_image ); + + #ifndef NDEBUG + printf("%s::%s read bayer image %s with\n width %zu\n height %zu\n iso %.3f\n white level %d\n black level %d %d %d %d\n", \ + __FILE__, __func__, bayer_image_path.c_str(), width, height, iso, white_level, \ + black_level_per_channel[0], black_level_per_channel[1], black_level_per_channel[2], black_level_per_channel[3] ); + fflush( stdout ); + #endif +} + +bayer_image::bayer_image( const std::vector& bayer_image_content ) +{ + 
libraw_processor = std::make_shared(); + + // Open RAW image file + int return_code; + if ( ( return_code = libraw_processor->open_buffer( (void *)(&bayer_image_content[0]), bayer_image_content.size() ) ) != LIBRAW_SUCCESS ) + { + libraw_processor->recycle(); +#ifdef __ANDROID__ + return; +#else + throw std::runtime_error("Error opening file " + bayer_image_path + " " + libraw_strerror( return_code )); +#endif + } + + // Unpack the raw image + if ( ( return_code = libraw_processor->unpack() ) != LIBRAW_SUCCESS ) + { +#ifdef __ANDROID__ + return; +#else + throw std::runtime_error("Error unpack file " + bayer_image_path + " " + libraw_strerror( return_code )); +#endif + } + + // Get image basic info + width = int( libraw_processor->imgdata.rawdata.sizes.raw_width ); + height = int( libraw_processor->imgdata.rawdata.sizes.raw_height ); + + // Read exif tags + Exiv2::Image::AutoPtr image = Exiv2::ImageFactory::open(&bayer_image_content[0], bayer_image_content.size()); + assert(image.get() != 0); + image->readMetadata(); + Exiv2::ExifData &exifData = image->exifData(); + if (exifData.empty()) { + std::string error = "No Exif data found in the file"; + std::cout << error << std::endl; + } + + white_level = exifData["Exif.Image.WhiteLevel"].toLong(); + black_level_per_channel.resize( 4 ); + black_level_per_channel.at(0) = exifData["Exif.Image.BlackLevel"].toLong(0); + black_level_per_channel.at(1) = exifData["Exif.Image.BlackLevel"].toLong(1); + black_level_per_channel.at(2) = exifData["Exif.Image.BlackLevel"].toLong(2); + black_level_per_channel.at(3) = exifData["Exif.Image.BlackLevel"].toLong(3); + iso = exifData["Exif.Image.ISOSpeedRatings"].toLong(); + + // Create CV mat + // https://answers.opencv.org/question/105972/de-bayering-a-cr2-image/ + // https://www.libraw.org/node/2141 + raw_image = cv::Mat( height, width, CV_16U, libraw_processor->imgdata.rawdata.raw_image ).clone(); // changed the order of width and height + + // 2x2 box filter + grayscale_image = 
box_filter_kxk( raw_image ); + +#ifndef NDEBUG + printf("%s::%s read bayer image with\n width %zu\n height %zu\n iso %.3f\n white level %d\n black level %d %d %d %d\n", \ + __FILE__, __func__, width, height, iso, white_level, \ + black_level_per_channel[0], black_level_per_channel[1], black_level_per_channel[2], black_level_per_channel[3] ); + fflush( stdout ); +#endif +} + +std::pair bayer_image::get_noise_params() const +{ + // Set ISO to 100 if not positive + double iso_ = iso <= 0 ? 100 : iso; + + // Calculate shot noise and read noise parameters w.r.t ISO 100 + double lambda_shot_p = iso_ / 100.0f * baseline_lambda_shot; + double lambda_read_p = (iso_ / 100.0f) * (iso_ / 100.0f) * baseline_lambda_read; + + double black_level = (black_level_per_channel[0] + \ + black_level_per_channel[1] + \ + black_level_per_channel[2] + \ + black_level_per_channel[3]) / 4.0; + + // Rescale shot and read noise to normal range + double lambda_shot = lambda_shot_p * (white_level - black_level); + double lambda_read = lambda_read_p * (white_level - black_level) * (white_level - black_level); + + // return pair + return std::make_pair(lambda_shot, lambda_read); +} + +} diff --git a/app/src/main/cpp/hdrplus/src/burst.cpp b/app/src/main/cpp/hdrplus/src/burst.cpp new file mode 100644 index 00000000..08ab324c --- /dev/null +++ b/app/src/main/cpp/hdrplus/src/burst.cpp @@ -0,0 +1,251 @@ +#include +#include +#include +#include // all opencv header +#include "hdrplus/burst.h" +#include "hdrplus/utility.h" + +namespace hdrplus +{ + +burst::burst( const std::string& burst_path, const std::string& reference_image_path ) +{ + std::vector bayer_image_paths; + // Search through the input path directory to get all input image path + if ( burst_path.at( burst_path.size() - 1) == '/') + cv::glob( burst_path + "*.dng", bayer_image_paths, false ); + else + cv::glob( burst_path + "/*.dng", bayer_image_paths, false ); + + #ifndef NDEBUG + for ( const auto& bayer_img_path_i : bayer_image_paths ) + { + 
printf("img i path %s\n", bayer_img_path_i.c_str()); fflush(stdout); + } + printf("ref img path %s\n", reference_image_path.c_str()); fflush(stdout); + #endif + + // Number of images + num_images = bayer_image_paths.size(); + + // Find reference image path in input directory + // reference image path need to be absolute path + reference_image_idx = -1; + for ( size_t i = 0; i < bayer_image_paths.size(); ++i ) + { + if ( bayer_image_paths[ i ] == reference_image_path ) + { + reference_image_idx = i; + } + } + + if ( reference_image_idx == -1 ) + { + return; + // throw std::runtime_error("Error unable to locate reference image " + reference_image_path ); + } + + #ifndef NDEBUG + for ( const auto& bayer_image_path_i : bayer_image_paths ) + { + printf("%s::%s Find image %s\n", \ + __FILE__, __func__, bayer_image_path_i.c_str()); + } + + printf("%s::%s reference image idx %d\n", \ + __FILE__, __func__, reference_image_idx ); + #endif + + // Get source bayer image + // Downsample original bayer image by 2x2 box filter + for ( const auto& bayer_image_path_i : bayer_image_paths ) + { + bayer_images.emplace_back( bayer_image_path_i ); + } + + // Pad information + int tile_size_bayer = 32; + int padding_top = tile_size_bayer / 2; + int padding_bottom = tile_size_bayer / 2 + \ + ( (bayer_images[ 0 ].height % tile_size_bayer) == 0 ? \ + 0 : tile_size_bayer - bayer_images[ 0 ].height % tile_size_bayer ); + int padding_left = tile_size_bayer / 2; + int padding_right = tile_size_bayer / 2 + \ + ( (bayer_images[ 0 ].width % tile_size_bayer) == 0 ? 
\ + 0 : tile_size_bayer - bayer_images[ 0 ].width % tile_size_bayer ); + padding_info_bayer = std::vector{ padding_top, padding_bottom, padding_left, padding_right }; + + // Pad bayer image + for ( const auto& bayer_image_i : bayer_images ) + { + cv::Mat bayer_image_pad_i; + cv::copyMakeBorder( bayer_image_i.raw_image, \ + bayer_image_pad_i, \ + padding_top, padding_bottom, padding_left, padding_right, \ + cv::BORDER_REFLECT ); + + // cv::Mat use internal reference count + bayer_images_pad.emplace_back( bayer_image_pad_i ); + grayscale_images_pad.emplace_back( box_filter_kxk( bayer_image_pad_i ) ); + } + + #ifndef NDEBUG + printf("%s::%s Pad bayer image from (%d, %d) -> (%d, %d)\n", \ + __FILE__, __func__, \ + bayer_images[ 0 ].height, \ + bayer_images[ 0 ].width, \ + bayer_images_pad[ 0 ].size().height, \ + bayer_images_pad[ 0 ].size().width ); + printf("%s::%s pad top %d, buttom %d, left %d, right %d\n", \ + __FILE__, __func__, \ + padding_top, padding_bottom, padding_left, padding_right ); + #endif +} + +burst::burst( const std::vector& bayer_image_paths, int reference_image_index ) +{ + // Number of images + num_images = bayer_image_paths.size(); + + // Find reference image path in input directory + // reference image path need to be absolute path + reference_image_idx = -1; + if ( reference_image_index >= 0 && reference_image_index < bayer_image_paths.size() ) + { + reference_image_idx = reference_image_index; + } + + if ( reference_image_idx == -1 ) + { + return; + // throw std::runtime_error("Error reference image index is out of range " ); + } + + #ifndef NDEBUG + for ( const auto& bayer_image_path_i : bayer_image_paths ) + { + printf("%s::%s Find image %s\n", \ + __FILE__, __func__, bayer_image_path_i.c_str()); + } + + printf("%s::%s reference image idx %d\n", \ + __FILE__, __func__, reference_image_idx ); + #endif + + // Get source bayer image + // Downsample original bayer image by 2x2 box filter + for ( const auto& bayer_image_path_i : bayer_image_paths 
) + { + bayer_images.emplace_back( bayer_image_path_i ); + } + + // Pad information + int tile_size_bayer = 32; + int padding_top = tile_size_bayer / 2; + int padding_bottom = tile_size_bayer / 2 + \ + ( (bayer_images[ 0 ].height % tile_size_bayer) == 0 ? \ + 0 : tile_size_bayer - bayer_images[ 0 ].height % tile_size_bayer ); + int padding_left = tile_size_bayer / 2; + int padding_right = tile_size_bayer / 2 + \ + ( (bayer_images[ 0 ].width % tile_size_bayer) == 0 ? \ + 0 : tile_size_bayer - bayer_images[ 0 ].width % tile_size_bayer ); + padding_info_bayer = std::vector{ padding_top, padding_bottom, padding_left, padding_right }; + + // Pad bayer image + for ( const auto& bayer_image_i : bayer_images ) + { + cv::Mat bayer_image_pad_i; + cv::copyMakeBorder( bayer_image_i.raw_image, \ + bayer_image_pad_i, \ + padding_top, padding_bottom, padding_left, padding_right, \ + cv::BORDER_REFLECT ); + + // cv::Mat use internal reference count + bayer_images_pad.emplace_back( bayer_image_pad_i ); + grayscale_images_pad.emplace_back( box_filter_kxk( bayer_image_pad_i ) ); + } + + #ifndef NDEBUG + printf("%s::%s Pad bayer image from (%d, %d) -> (%d, %d)\n", \ + __FILE__, __func__, \ + bayer_images[ 0 ].height, \ + bayer_images[ 0 ].width, \ + bayer_images_pad[ 0 ].size().height, \ + bayer_images_pad[ 0 ].size().width ); + printf("%s::%s pad top %d, buttom %d, left %d, right %d\n", \ + __FILE__, __func__, \ + padding_top, padding_bottom, padding_left, padding_right ); + #endif +} + +burst::burst( const std::vector >& bayer_image_contents, int reference_image_index ) +{ + // Number of images + num_images = bayer_image_contents.size(); + + // Find reference image path in input directory + // reference image path need to be absolute path + reference_image_idx = -1; + if ( reference_image_index >= 0 && reference_image_index < bayer_image_contents.size() ) + { + reference_image_idx = reference_image_index; + } + + if ( reference_image_idx == -1 ) + { + return; + // throw 
std::runtime_error("Error reference image index is out of range " ); + } + +#ifndef NDEBUG + printf("%s::%s reference image idx %d\n", \ + __FILE__, __func__, reference_image_idx ); +#endif + + // Get source bayer image + // Downsample original bayer image by 2x2 box filter + for ( const auto& bayer_image_content : bayer_image_contents ) + { + bayer_images.emplace_back( bayer_image_content ); + } + + // Pad information + int tile_size_bayer = 32; + int padding_top = tile_size_bayer / 2; + int padding_bottom = tile_size_bayer / 2 + \ + ( (bayer_images[ 0 ].height % tile_size_bayer) == 0 ? \ + 0 : tile_size_bayer - bayer_images[ 0 ].height % tile_size_bayer ); + int padding_left = tile_size_bayer / 2; + int padding_right = tile_size_bayer / 2 + \ + ( (bayer_images[ 0 ].width % tile_size_bayer) == 0 ? \ + 0 : tile_size_bayer - bayer_images[ 0 ].width % tile_size_bayer ); + padding_info_bayer = std::vector{ padding_top, padding_bottom, padding_left, padding_right }; + + // Pad bayer image + for ( const auto& bayer_image_i : bayer_images ) + { + cv::Mat bayer_image_pad_i; + cv::copyMakeBorder( bayer_image_i.raw_image, \ + bayer_image_pad_i, \ + padding_top, padding_bottom, padding_left, padding_right, \ + cv::BORDER_REFLECT ); + + // cv::Mat use internal reference count + bayer_images_pad.emplace_back( bayer_image_pad_i ); + grayscale_images_pad.emplace_back( box_filter_kxk( bayer_image_pad_i ) ); + } + +#ifndef NDEBUG + printf("%s::%s Pad bayer image from (%d, %d) -> (%d, %d)\n", \ + __FILE__, __func__, \ + bayer_images[ 0 ].height, \ + bayer_images[ 0 ].width, \ + bayer_images_pad[ 0 ].size().height, \ + bayer_images_pad[ 0 ].size().width ); + printf("%s::%s pad top %d, buttom %d, left %d, right %d\n", \ + __FILE__, __func__, \ + padding_top, padding_bottom, padding_left, padding_right ); +#endif +} + +} // namespace hdrplus diff --git a/app/src/main/cpp/hdrplus/src/finish.cpp b/app/src/main/cpp/hdrplus/src/finish.cpp new file mode 100644 index 00000000..42f82fb2 --- 
/dev/null +++ b/app/src/main/cpp/hdrplus/src/finish.cpp @@ -0,0 +1,784 @@ +#include +#include // all opencv header +#include "hdrplus/finish.h" +#include "hdrplus/utility.h" +#include + +#ifdef __ANDROID__ +#define DBG_OUTPUT_ROOT "/sdcard/com.xypower.mpapp/tmp/" +#else +#define DBG_OUTPUT_ROOT "" +#endif +// #include + +namespace hdrplus +{ + + + cv::Mat convert16bit2_8bit_(cv::Mat ans){ + if(ans.type()==CV_16UC3){ + cv::MatIterator_ it, end; + for( it = ans.begin(), end = ans.end(); it != end; ++it) + { + // std::cout< it, end; + for( it = ans.begin(), end = ans.end(); it != end; ++it) + { + // std::cout< it, end; + for( it = ans.begin(), end = ans.end(); it != end; ++it) + { + // std::cout<1){ + x = 1; + } + } + + x*=USHRT_MAX; + + return (uint16_t)x; + } + + uint16_t uGammaDecompress_1pix(float x, float threshold,float gainMin,float gainMax,float exponent){ + // Normalize pixel val + x/=65535.0; + // check the val against the threshold + if(x<=threshold){ + x = x/gainMin; + }else{ + x = pow((x+gainMax-1)/gainMax,exponent); + } + // clip + if(x<0){ + x=0; + }else{ + if(x>1){ + x = 1; + } + } + x*=65535; + + return (uint16_t)x; + } + + cv::Mat uGammaCompress_(cv::Mat m,float threshold,float gainMin,float gainMax,float exponent){ + if(m.type()==CV_16UC3){ + cv::MatIterator_ it, end; + for( it = m.begin(), end = m.end(); it != end; ++it) + { + (*it)[0] =uGammaCompress_1pix((*it)[0],threshold,gainMin,gainMax,exponent); + (*it)[1] =uGammaCompress_1pix((*it)[1],threshold,gainMin,gainMax,exponent); + (*it)[2] =uGammaCompress_1pix((*it)[2],threshold,gainMin,gainMax,exponent); + } + }else if(m.type()==CV_16UC1){ + u_int16_t* ptr = (u_int16_t*)m.data; + int end = m.rows*m.cols; + for(int i=0;i it, end; + for( it = m.begin(), end = m.end(); it != end; ++it) + { + (*it)[0] =uGammaDecompress_1pix((*it)[0],threshold,gainMin,gainMax,exponent); + (*it)[1] =uGammaDecompress_1pix((*it)[1],threshold,gainMin,gainMax,exponent); + (*it)[2] 
=uGammaDecompress_1pix((*it)[2],threshold,gainMin,gainMax,exponent); + } + }else if(m.type()==CV_16UC1){ + u_int16_t* ptr = (u_int16_t*)m.data; + int end = m.rows*m.cols; + for(int i=0;i it, end; + for( it = img.begin(), end = img.end(); it != end; ++it) + { + uint32_t tmp = (*it)[0]+(*it)[1]+(*it)[2]; + uint16_t avg_val = tmp/3; + *(ptr+idx) = avg_val; + idx++; + } + + return processedImg; + } + + double getMean(cv::Mat img){ + u_int16_t* ptr = (u_int16_t*)img.data; + int max_idx = img.rows*img.cols*img.channels(); + double sum=0; + for(int i=0;iUSHRT_MAX){ + *(ptr+i) = USHRT_MAX; + }else{ + *(ptr+i)=(u_int16_t)tmp; + } + } + return img; + } + + double getSaturated(cv::Mat img, double threshold){ + threshold *= USHRT_MAX; + double count=0; + u_int16_t* ptr = (u_int16_t*)img.data; + int max_idx = img.rows*img.cols*img.channels(); + for(int i=0;ithreshold){ + count++; + } + } + return count/(double)max_idx; + + } + + cv::Mat meanGain_(cv::Mat img,int gain){ + if(img.channels()!=3){ + std::cout<<"unsupport img type in meanGain_()"< it, end; + for( it = img.begin(), end = img.end(); it != end; ++it) + { + double sum = 0; + // R + double tmp = (*it)[0]*gain; + if(tmp<0) tmp=0; + if(tmp>USHRT_MAX) tmp = USHRT_MAX; + sum+=tmp; + + // G + tmp = (*it)[1]*gain; + if(tmp<0) tmp=0; + if(tmp>USHRT_MAX) tmp = USHRT_MAX; + sum+=tmp; + + // B + tmp = (*it)[2]*gain; + if(tmp<0) tmp=0; + if(tmp>USHRT_MAX) tmp = USHRT_MAX; + sum+=tmp; + + // put into processedImg + uint16_t avg_val = sum/3; + *(ptr+idx) = avg_val; + idx++; + } + return processedImg; + } + + } + + cv::Mat applyScaling_(cv::Mat mergedImage, cv::Mat shortGray, cv::Mat fusedGray){ + cv::Mat result = mergedImage.clone(); + u_int16_t* ptr_shortg = (u_int16_t*)shortGray.data; + u_int16_t* ptr_fusedg = (u_int16_t*)fusedGray.data; + int count = 0; + cv::MatIterator_ it, end; + for( it = result.begin(), end = result.end(); it != end; ++it) + { + double s = 1; + if(*(ptr_shortg+count)!=0){ + s = *(ptr_fusedg+count); + 
s/=*(ptr_shortg+count); + } + for(int c=0;cUSHRT_MAX){ + (*it)[c] = USHRT_MAX; + }else{ + (*it)[c] = tmp; + } + } + } + return result; + } + + void localToneMap(cv::Mat& mergedImage, Options options, cv::Mat& shortg, + cv::Mat& longg, cv::Mat& fusedg, int& gain){ + std::cout<<"HDR Tone Mapping..."<(mergedImage); //mean_(mergedImage); + std::cout<<"--- Compute grayscale image"< (1 - sSMean) / 2; // only works if burst underexposed + saturated = getSaturated(longSg,0.95); + if(options.verbose==4){ + + } + } + + }else{ + if(options.ltmGain>0){ + gain = options.ltmGain; + } + } + std::cout<<"--- Compute gain"< mergeMertens = cv::createMergeMertens(); + std::cout<<"--- Create Mertens"< src_expos; + src_expos.push_back(convert16bit2_8bit_(shortg.clone())); + src_expos.push_back(convert16bit2_8bit_(longg.clone())); + mergeMertens->process(src_expos, fusedg); + fusedg = fusedg*USHRT_MAX; + fusedg.convertTo(fusedg, CV_16UC1); + std::cout<<"--- Apply Mertens"<1){ + x = 1; + } + u_int16_t result = x*USHRT_MAX; + return result; + } + + cv::Mat enhanceContrast(cv::Mat image, Options options){ + if(options.gtmContrast>=0 && options.gtmContrast<=1){ + u_int16_t* ptr = (u_int16_t*)image.data; + int end = image.rows*image.cols*image.channels(); + for(int idx = 0;idxUSHRT_MAX) r = USHRT_MAX; + *(ptr_r+idx) = (u_int16_t)r; + } + return result; + } + + cv::Mat sharpenTriple(cv::Mat image, Tuning tuning, Options options){ + // sharpen the image using unsharp masking + std::vector amounts = tuning.sharpenAmount; + std::vector sigmas = tuning.sharpenSigma; + std::vector thresholds = tuning.sharpenThreshold; + // Compute all Gaussian blur + cv::Mat blur0,blur1,blur2; + cv::GaussianBlur(image,blur0,cv::Size(0,0),sigmas[0]); + cv::GaussianBlur(image,blur1,cv::Size(0,0),sigmas[1]); + cv::GaussianBlur(image,blur2,cv::Size(0,0),sigmas[2]); + std::cout<<" --- gaussian blur"< dvals; + for(int c = 0; c < mergedImg.cols; c++) { + dvals.push_back(*(ptr+r*mergedImg.cols+c)); + } + cv::Mat 
mline(dvals, true); + cv::transpose(mline, mline); + m.push_back(mline); + } + int ch = CV_MAT_CN(opencv_type); + + m = m.reshape(ch); + m.convertTo(m, opencv_type); + + return m; + + } + + void show20_20(cv::Mat m){ + u_int16_t* ptr = (u_int16_t*)m.data; + for(int i=0;i<20;i++){ + for(int j=0;j<20;j++){ + std::cout<<*(ptr+i*m.cols+j)<<", "; + } + std::cout<refIdx = burst_images.reference_image_idx; + // this->burstPath = burstPath; + // std::cout<<"processMerged:"<mergedBayer = loadFromCSV(DBG_OUTPUT_ROOT "merged.csv", CV_16UC1); +#ifndef HDRPLUS_NO_DETAILED_OUTPUT + // this->mergedBayer = processMergedMat(mergedB,CV_16UC1);//loadFromCSV("merged.csv", CV_16UC1); + // std::cout<<"processMerged:"<mergedBayer); + // this->mergedBayer = loadFromCSV(DBG_OUTPUT_ROOT "merged.csv", CV_16UC1); + // this->mergedBayer = processMergedMat(burst_images.merged_bayer_image, CV_16UC1); +#else + // this->mergedBayer = loadFromCSV(DBG_OUTPUT_ROOT "merged.csv", CV_16UC1); + // this->mergedBayer = processMergedMat(burst_images.merged_bayer_image, CV_16UC1); + // std::cout<<"processMerged:"<mergedBayer); + // load_rawPathList(burstPath); + +// read in ref img + // bayer_image* ref = new bayer_image(rawPathList[refIdx]); + bayer_image* ref = new bayer_image(burst_images.bayer_images[burst_images.reference_image_idx]); + cv::Mat processedRefImage = postprocess(ref->libraw_processor,params.rawpyArgs); + + std::cout<<"size ref: "<refIdx]); + mergedImg->libraw_processor->imgdata.rawdata.raw_image = (uint16_t*)this->mergedBayer.data; + // copy_mat_16U_3(mergedImg->libraw_processor->imgdata.rawdata.raw_image,this->mergedBayer); + cv::Mat processedMerge = postprocess(mergedImg->libraw_processor,params.rawpyArgs); + +// write merged image +#ifndef HDRPLUS_NO_DETAILED_OUTPUT + if(params.flags["writeMergedImage"]){ + std::cout<<"writing Merged img ..."<& libraw_ptr, cv::Mat B){ + u_int16_t* ptr_A = (u_int16_t*)libraw_ptr->imgdata.rawdata.raw_image; + u_int16_t* ptr_B = (u_int16_t*)B.data; + 
for(int r = 0; r < B.rows; r++) { + for(int c = 0; c < B.cols; c++) { + *(ptr_A+r*B.cols+c) = *(ptr_B+r*B.cols+c); + } + } + + } + + +} // namespace hdrplus diff --git a/app/src/main/cpp/hdrplus/src/hdrplus_pipeline.cpp b/app/src/main/cpp/hdrplus/src/hdrplus_pipeline.cpp new file mode 100644 index 00000000..90307a8b --- /dev/null +++ b/app/src/main/cpp/hdrplus/src/hdrplus_pipeline.cpp @@ -0,0 +1,105 @@ +#include +#include +#include +#include // std::pair +#include // all opencv header +#include "hdrplus/hdrplus_pipeline.h" +#include "hdrplus/burst.h" +#include "hdrplus/align.h" +#include "hdrplus/merge.h" +#include "hdrplus/finish.h" +#include + +#ifdef __ANDROID__ +#include +#endif + +namespace hdrplus +{ + +void hdrplus_pipeline::run_pipeline( \ + const std::string& burst_path, \ + const std::string& reference_image_path ) +{ + // Create burst of images + burst burst_images( burst_path, reference_image_path ); + + std::vector>>> alignments; + + // Run align + align_module.process( burst_images, alignments ); + + // Run merging + merge_module.process( burst_images, alignments ); + + + // Run finishing + cv::Mat finalImg; + finish_module.process( burst_images, finalImg); +} + +bool hdrplus_pipeline::run_pipeline( \ + const std::vector& burst_paths, \ + int reference_image_index, cv::Mat& finalImg ) +{ + // Create burst of images + burst burst_images( burst_paths, reference_image_index ); + std::vector>>> alignments; +#ifdef __ANDROID__ + ALOGI("Finish loading images"); +#endif + + // Run align + align_module.process( burst_images, alignments ); +#ifdef __ANDROID__ + ALOGI("Finish align"); +#endif + + // Run merging + merge_module.process( burst_images, alignments ); +#ifdef __ANDROID__ + ALOGI("Finish merging"); +#endif + + // Run finishing + finish_module.process( burst_images, finalImg); +#ifdef __ANDROID__ + ALOGI("Finish process"); +#endif + + return true; +} + +bool hdrplus_pipeline::run_pipeline( \ + const std::vector >& burst_contents, \ + int 
reference_image_index, cv::Mat& finalImg ) +{ + // Create burst of images + burst burst_images( burst_contents, reference_image_index ); + std::vector>>> alignments; +#ifdef __ANDROID__ + ALOGI("Finish loading images"); +#endif + + // Run align + align_module.process( burst_images, alignments ); +#ifdef __ANDROID__ + ALOGI("Finish align"); +#endif + + // Run merging + merge_module.process( burst_images, alignments ); +#ifdef __ANDROID__ + ALOGI("Finish merging"); +#endif + + // Run finishing + finish_module.process( burst_images, finalImg); +#ifdef __ANDROID__ + ALOGI("Finish process"); +#endif + + return true; +} + +} // namespace hdrplus diff --git a/app/src/main/cpp/hdrplus/src/merge.cpp b/app/src/main/cpp/hdrplus/src/merge.cpp new file mode 100644 index 00000000..bdf5c400 --- /dev/null +++ b/app/src/main/cpp/hdrplus/src/merge.cpp @@ -0,0 +1,338 @@ +#include // all opencv header +#include +#include +#include "hdrplus/merge.h" +#include "hdrplus/burst.h" +#include "hdrplus/utility.h" + +namespace hdrplus +{ + + void merge::process(hdrplus::burst& burst_images, \ + std::vector>>>& alignments) + { + // 4.1 Noise Parameters and RMS + // Noise parameters calculated from baseline ISO noise parameters + double lambda_shot, lambda_read; + std::tie(lambda_shot, lambda_read) = burst_images.bayer_images[burst_images.reference_image_idx].get_noise_params(); + + // 4.2-4.4 Denoising and Merging + + // Get padded bayer image + cv::Mat reference_image = burst_images.bayer_images_pad[burst_images.reference_image_idx]; + cv::imwrite("ref.jpg", reference_image); + + // Get raw channels + std::vector channels(4); + hdrplus::extract_rgb_from_bayer(reference_image, channels[0], channels[1], channels[2], channels[3]); + + std::vector processed_channels(4); + // For each channel, perform denoising and merge + for (int i = 0; i < 4; ++i) { + // Get channel mat + cv::Mat channel_i = channels[i]; + // cv::imwrite("ref" + std::to_string(i) + ".jpg", channel_i); + + //we should be getting 
the individual channel in the same place where we call the processChannel function with the reference channel in its arguments + //possibly we could add another argument in the processChannel function which is the channel_i for the alternate image. maybe using a loop to cover all the other images + + //create list of channel_i of alternate images: + std::vector alternate_channel_i_list; + for (int j = 0; j < burst_images.num_images; j++) { + if (j != burst_images.reference_image_idx) { + + //get alternate image + cv::Mat alt_image = burst_images.bayer_images_pad[j]; + std::vector alt_channels(4); + hdrplus::extract_rgb_from_bayer(alt_image, alt_channels[0], alt_channels[1], alt_channels[2], alt_channels[3]); + + alternate_channel_i_list.push_back(alt_channels[i]); + } + } + + // Apply merging on the channel + cv::Mat merged_channel = processChannel(burst_images, alignments, channel_i, alternate_channel_i_list, lambda_shot, lambda_read); + // cv::imwrite("merged" + std::to_string(i) + ".jpg", merged_channel); + + // Put channel raw data back to channels + merged_channel.convertTo(processed_channels[i], CV_16U); + } + + // Write all channels back to a bayer mat + cv::Mat merged(reference_image.rows, reference_image.cols, CV_16U); + int x, y; + for (y = 0; y < reference_image.rows; ++y){ + uint16_t* row = merged.ptr(y); + if (y % 2 == 0){ + uint16_t* i0 = processed_channels[0].ptr(y / 2); + uint16_t* i1 = processed_channels[1].ptr(y / 2); + + for (x = 0; x < reference_image.cols;){ + //R + row[x] = i0[x / 2]; + x++; + + //G1 + row[x] = i1[x / 2]; + x++; + } + } + else { + uint16_t* i2 = processed_channels[2].ptr(y / 2); + uint16_t* i3 = processed_channels[3].ptr(y / 2); + + for(x = 0; x < reference_image.cols;){ + //G2 + row[x] = i2[x / 2]; + x++; + + //B + row[x] = i3[x / 2]; + x++; + } + } + } + + // Remove padding + std::vector padding = burst_images.padding_info_bayer; + cv::Range horizontal = cv::Range(padding[2], reference_image.cols - padding[3]); + cv::Range 
vertical = cv::Range(padding[0], reference_image.rows - padding[1]); + burst_images.merged_bayer_image = merged(vertical, horizontal); + cv::imwrite("merged.jpg", burst_images.merged_bayer_image); + } + + std::vector merge::getReferenceTiles(cv::Mat reference_image) { + std::vector reference_tiles; + for (int y = 0; y < reference_image.rows - offset; y += offset) { + for (int x = 0; x < reference_image.cols - offset; x += offset) { + cv::Mat tile = reference_image(cv::Rect(x, y, TILE_SIZE, TILE_SIZE)); + reference_tiles.push_back(tile); + } + } + return reference_tiles; + } + + cv::Mat merge::mergeTiles(std::vector tiles, int num_rows, int num_cols) { + // 1. get all four subsets: original (evenly split), horizontal overlapped, + // vertical overlapped, 2D overlapped + std::vector> tiles_original; + std::vector row; + for (int y = 0; y < num_rows / offset - 1; y += 2) { + row.clear(); + for (int x = 0; x < num_cols / offset - 1; x += 2) { + row.push_back(tiles[y * (num_cols / offset - 1) + x]); + } + tiles_original.push_back(row); + } + + std::vector> tiles_horizontal; + // std::vector row; + for (int y = 0; y < num_rows / offset - 1; y += 2) { + row.clear(); + for (int x = 1; x < num_cols / offset - 1; x += 2) { + row.push_back(tiles[y * (num_cols / offset - 1) + x]); + } + tiles_horizontal.push_back(row); + } + + std::vector> tiles_vertical; + // std::vector row; + for (int y = 1; y < num_rows / offset - 1; y += 2) { + row.clear(); + for (int x = 0; x < num_cols / offset - 1; x += 2) { + row.push_back(tiles[y * (num_cols / offset - 1) + x]); + } + tiles_vertical.push_back(row); + } + + std::vector> tiles_2d; + // std::vector row; + for (int y = 1; y < num_rows / offset - 1; y += 2) { + row.clear(); + for (int x = 1; x < num_cols / offset - 1; x += 2) { + row.push_back(tiles[y * (num_cols / offset - 1) + x]); + } + tiles_2d.push_back(row); + } + + // 2. 
Concatenate the four subsets + cv::Mat img_original = cat2Dtiles(tiles_original); + cv::Mat img_horizontal = cat2Dtiles(tiles_horizontal); + cv::Mat img_vertical = cat2Dtiles(tiles_vertical); + cv::Mat img_2d = cat2Dtiles(tiles_2d); + + // 3. Add the four subsets together + img_original(cv::Rect(offset, 0, num_cols - TILE_SIZE, num_rows)) += img_horizontal; + img_original(cv::Rect(0, offset, num_cols, num_rows - TILE_SIZE)) += img_vertical; + img_original(cv::Rect(offset, offset, num_cols - TILE_SIZE, num_rows - TILE_SIZE)) += img_2d; + + return img_original; + } + + cv::Mat merge::processChannel(hdrplus::burst& burst_images, \ + std::vector>>>& alignments, \ + cv::Mat channel_image, \ + std::vector alternate_channel_i_list,\ + float lambda_shot, \ + float lambda_read) { + // Get tiles of the reference image + std::vector reference_tiles = getReferenceTiles(channel_image); + + // Get noise variance (sigma**2 = lambda_shot * tileRMS + lambda_read) + std::vector noise_variance = getNoiseVariance(reference_tiles, lambda_shot, lambda_read); + + // Apply FFT on reference tiles (spatial to frequency) + std::vector reference_tiles_DFT; + for (auto ref_tile : reference_tiles) { + cv::Mat ref_tile_DFT; + ref_tile.convertTo(ref_tile_DFT, CV_32F); + cv::dft(ref_tile_DFT, ref_tile_DFT, cv::DFT_COMPLEX_OUTPUT); + reference_tiles_DFT.push_back(ref_tile_DFT); + } + + // Acquire alternate tiles and apply FFT on them as well + std::vector> alt_tiles_list(reference_tiles.size()); + int num_tiles_row = alternate_channel_i_list[0].rows / offset - 1; + int num_tiles_col = alternate_channel_i_list[0].cols / offset - 1; + std::vector alt_tiles; + for (int y = 0; y < num_tiles_row; ++y) { + for (int x = 0; x < num_tiles_col; ++x) { + alt_tiles.clear(); + // Get reference tile location + int top_left_y = y * offset; + int top_left_x = x * offset; + + for (int i = 0; i < alternate_channel_i_list.size(); ++i) { + // Get alignment displacement + int displacement_y, displacement_x; + 
std::tie(displacement_y, displacement_x) = alignments[i + 1][y][x]; + // Get tile + cv::Mat alt_tile = alternate_channel_i_list[i](cv::Rect(top_left_x + displacement_x, top_left_y + displacement_y, TILE_SIZE, TILE_SIZE)); + // Apply FFT + cv::Mat alt_tile_DFT; + alt_tile.convertTo(alt_tile_DFT, CV_32F); + cv::dft(alt_tile_DFT, alt_tile_DFT, cv::DFT_COMPLEX_OUTPUT); + alt_tiles.push_back(alt_tile_DFT); + } + alt_tiles_list[y * num_tiles_col + x] = alt_tiles; + } + } + + // 4.2 Temporal Denoising + reference_tiles_DFT = temporal_denoise(reference_tiles_DFT, alt_tiles_list, noise_variance, TEMPORAL_FACTOR); + + // 4.3 Spatial Denoising + reference_tiles_DFT = spatial_denoise(reference_tiles_DFT, alternate_channel_i_list.size(), noise_variance, SPATIAL_FACTOR); + //now reference tiles are temporally and spatially denoised + + // Apply IFFT on reference tiles (frequency to spatial) + std::vector denoised_tiles; + for (auto dft_tile : reference_tiles_DFT) { + cv::Mat denoised_tile; + cv::divide(dft_tile, TILE_SIZE * TILE_SIZE, dft_tile); + cv::dft(dft_tile, denoised_tile, cv::DFT_INVERSE | cv::DFT_REAL_OUTPUT); + denoised_tiles.push_back(denoised_tile); + } + reference_tiles = denoised_tiles; + + // 4.4 Cosine Window Merging + // Process tiles through 2D cosine window + std::vector windowed_tiles; + for (auto tile : reference_tiles) { + windowed_tiles.push_back(cosineWindow2D(tile)); + } + + // Merge tiles + return mergeTiles(windowed_tiles, channel_image.rows, channel_image.cols); + } + + std::vector merge::temporal_denoise(std::vector tiles, std::vector> alt_tiles, std::vector noise_variance, float temporal_factor) { + // goal: temporially denoise using the weiner filter + // input: + // 1. array of 2D dft tiles of the reference image + // 2. array of 2D dft tiles of the aligned alternate image + // 3. estimated noise variance + // 4. 
temporal factor + // return: merged image patches dft + + // calculate noise scaling + double temporal_noise_scaling = (TILE_SIZE * TILE_SIZE * (2.0 / 16)) * TEMPORAL_FACTOR; + + // loop across tiles + std::vector denoised; + for (int i = 0; i < tiles.size(); ++i) { + // sum of pairwise denoising + cv::Mat tile_sum = tiles[i].clone(); + double coeff = temporal_noise_scaling * noise_variance[i]; + + // Ref tile + cv::Mat tile = tiles[i]; + // Alt tiles + std::vector alt_tiles_i = alt_tiles[i]; + + for (int j = 0; j < alt_tiles_i.size(); ++j) { + // Alt tile + cv::Mat alt_tile = alt_tiles_i[j]; + // Tile difference + cv::Mat diff = tile - alt_tile; + + // Calculate absolute difference + cv::Mat complexMats[2]; + cv::split(diff, complexMats); // planes[0] = Re(DFT(I)), planes[1] = Im(DFT(I)) + cv::magnitude(complexMats[0], complexMats[1], complexMats[0]); // planes[0] = magnitude + cv::Mat absolute_diff = complexMats[0].mul(complexMats[0]); + + // find shrinkage operator A + cv::Mat shrinkage; + cv::divide(absolute_diff, absolute_diff + coeff, shrinkage); + cv::merge(std::vector{shrinkage, shrinkage}, shrinkage); + + // Interpolation + tile_sum += alt_tile + diff.mul(shrinkage); + } + // Average by num of frames + cv::divide(tile_sum, alt_tiles_i.size() + 1, tile_sum); + denoised.push_back(tile_sum); + } + + return denoised; + } + + std::vector merge::spatial_denoise(std::vector tiles, int num_alts, std::vector noise_variance, float spatial_factor) { + + double spatial_noise_scaling = (TILE_SIZE * TILE_SIZE * (1.0 / 16)) * spatial_factor; + + // Calculate |w| using ifftshift + cv::Mat row_distances = cv::Mat::zeros(1, TILE_SIZE, CV_32F); + for(int i = 0; i < TILE_SIZE; ++i) { + row_distances.at(i) = i - offset; + } + row_distances = cv::repeat(row_distances.t(), 1, TILE_SIZE); + cv::Mat col_distances = row_distances.t(); + cv::Mat distances; + cv::sqrt(row_distances.mul(row_distances) + col_distances.mul(col_distances), distances); + ifftshift(distances); + + 
std::vector denoised; + // Loop through all tiles + for (int i = 0; i < tiles.size(); ++i) { + cv::Mat tile = tiles[i]; + float coeff = noise_variance[i] / (num_alts + 1) * spatial_noise_scaling; + + // Calculate absolute difference + cv::Mat complexMats[2]; + cv::split(tile, complexMats); // planes[0] = Re(DFT(I)), planes[1] = Im(DFT(I)) + cv::magnitude(complexMats[0], complexMats[1], complexMats[0]); // planes[0] = magnitude + cv::Mat absolute_diff = complexMats[0].mul(complexMats[0]); + + // Division + cv::Mat scale; + cv::divide(absolute_diff, absolute_diff + distances * coeff, scale); + cv::merge(std::vector{scale, scale}, scale); + denoised.push_back(tile.mul(scale)); + } + return denoised; + } + + +} // namespace hdrplus \ No newline at end of file diff --git a/app/src/main/cpp/hdrplus/src/params.cpp b/app/src/main/cpp/hdrplus/src/params.cpp new file mode 100644 index 00000000..889bff1e --- /dev/null +++ b/app/src/main/cpp/hdrplus/src/params.cpp @@ -0,0 +1,53 @@ +#include +#include // all opencv header +#include + +namespace hdrplus +{ + +void setParams(std::shared_ptr& libraw_ptr, RawpyArgs rawpyArgs){ + libraw_ptr->imgdata.params.user_qual = rawpyArgs.demosaic_algorithm; + libraw_ptr->imgdata.params.half_size = rawpyArgs.half_size; + libraw_ptr->imgdata.params.use_camera_wb = rawpyArgs.use_camera_wb; + libraw_ptr->imgdata.params.use_auto_wb = rawpyArgs.use_auto_wb; + libraw_ptr->imgdata.params.no_auto_bright = rawpyArgs.no_auto_bright; + libraw_ptr->imgdata.params.output_color = rawpyArgs.output_color; + libraw_ptr->imgdata.params.gamm[0] = rawpyArgs.gamma[0]; + libraw_ptr->imgdata.params.gamm[1] = rawpyArgs.gamma[1]; + libraw_ptr->imgdata.params.output_bps = rawpyArgs.output_bps; +} + +cv::Mat postprocess(std::shared_ptr& libraw_ptr, RawpyArgs rawpyArgs){ + std::cout<<"postprocessing..."<dcraw_process(); + int errorcode; + + libraw_processed_image_t *ret_img = libraw_ptr->dcraw_make_mem_image(&errorcode); + + int opencv_type = CV_16UC3; // 16bit RGB + 
if(ret_img->colors==1){ // grayscale + if(ret_img->bits == 8){ // uint8 + opencv_type = CV_8UC1; + }else{ // uint16 + opencv_type = CV_16UC1; + } + }else{// RGB + if(ret_img->bits == 8){ //8bit + opencv_type = CV_8UC3; + }else{ // 16bit + opencv_type = CV_16UC3; + } + } + + cv::Mat processedImg(ret_img->height,ret_img->width,opencv_type,ret_img->data); + + std::cout<<"postprocess finished!"< + +#include +// #include + +#include +#include +#include + +namespace android { +namespace img_utils { + +/** + * Utility class that accumulates written bytes into a buffer. + */ +class ANDROID_API ByteArrayOutput : public Output { + public: + + ByteArrayOutput(); + + virtual ~ByteArrayOutput(); + + /** + * Open this ByteArrayOutput. + * + * Returns OK on success, or a negative error code. + */ + virtual status_t open(); + + /** + * Write bytes from the given buffer. The number of bytes given in the count + * argument will be written. Bytes will be written from the given buffer starting + * at the index given in the offset argument. + * + * Returns OK on success, or a negative error code. + */ + virtual status_t write(const uint8_t* buf, size_t offset, size_t count); + + /** + * Close this ByteArrayOutput. + * + * Returns OK on success, or a negative error code. + */ + virtual status_t close(); + + /** + * Get current size of the array of bytes written. + */ + virtual size_t getSize() const; + + /** + * Get pointer to array of bytes written. It is not valid to use this pointer if + * open, write, or close is called after this method. 
+ */ + virtual const uint8_t* getArray() const; + + protected: + std::vector mByteArray; +}; + +} /*namespace img_utils*/ +} /*namespace android*/ + +#endif /*IMG_UTILS_BYTE_ARRAY_OUTPUT_H*/ diff --git a/app/src/main/cpp/img_utils/include/img_utils/DngUtils.h b/app/src/main/cpp/img_utils/include/img_utils/DngUtils.h new file mode 100644 index 00000000..8819f87b --- /dev/null +++ b/app/src/main/cpp/img_utils/include/img_utils/DngUtils.h @@ -0,0 +1,232 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef IMG_UTILS_DNG_UTILS_H +#define IMG_UTILS_DNG_UTILS_H + +#include +#include + +#include +#include +#include + +#include +#include + +namespace android { +namespace img_utils { + +#define NELEMS(x) ((int) (sizeof(x) / sizeof((x)[0]))) +#define CLAMP(x, low, high) (((x) > (high)) ? (high) : (((x) < (low)) ? (low) : (x))) + +/** + * Utility class for building values for the OpcodeList tags specified + * in the Adobe DNG 1.4 spec. + */ +class ANDROID_API OpcodeListBuilder : public LightRefBase { + public: + // Note that the Adobe DNG 1.4 spec for Bayer phase (defined for the + // FixBadPixelsConstant and FixBadPixelsList opcodes) is incorrect. It's + // inconsistent with the DNG SDK (cf. dng_negative::SetBayerMosaic and + // dng_opcode_FixBadPixelsList::IsGreen), and Adobe confirms that the + // spec should be updated to match the SDK. 
+ enum CfaLayout { + CFA_GRBG = 0, + CFA_RGGB, + CFA_BGGR, + CFA_GBRG, + CFA_NONE, + }; + + OpcodeListBuilder(); + virtual ~OpcodeListBuilder(); + + /** + * Get the total size of this opcode list in bytes. + */ + virtual size_t getSize() const; + + /** + * Get the number of opcodes defined in this list. + */ + virtual uint32_t getCount() const; + + /** + * Write the opcode list into the given buffer. This buffer + * must be able to hold at least as many elements as returned + * by calling the getSize() method. + * + * Returns OK on success, or a negative error code. + */ + virtual status_t buildOpList(/*out*/ uint8_t* buf) const; + + /** + * Add GainMap opcode(s) for the given metadata parameters. The given + * CFA layout must match the layout of the shading map passed into the + * lensShadingMap parameter. + * + * Returns OK on success, or a negative error code. + */ + virtual status_t addGainMapsForMetadata(uint32_t lsmWidth, + uint32_t lsmHeight, + uint32_t activeAreaTop, + uint32_t activeAreaLeft, + uint32_t activeAreaBottom, + uint32_t activeAreaRight, + CfaLayout cfa, + const float* lensShadingMap); + + /** + * Add a GainMap opcode with the given fields. The mapGains array + * must have mapPointsV * mapPointsH * mapPlanes elements. + * + * Returns OK on success, or a negative error code. + */ + virtual status_t addGainMap(uint32_t top, + uint32_t left, + uint32_t bottom, + uint32_t right, + uint32_t plane, + uint32_t planes, + uint32_t rowPitch, + uint32_t colPitch, + uint32_t mapPointsV, + uint32_t mapPointsH, + double mapSpacingV, + double mapSpacingH, + double mapOriginV, + double mapOriginH, + uint32_t mapPlanes, + const float* mapGains); + + /** + * Add WarpRectilinear opcode for the given metadata parameters. + * + * Returns OK on success, or a negative error code. 
+ */ + virtual status_t addWarpRectilinearForMetadata(const float* kCoeffs, + uint32_t activeArrayWidth, + uint32_t activeArrayHeight, + float opticalCenterX, + float opticalCenterY); + + /** + * Add a WarpRectilinear opcode. + * + * numPlanes - Number of planes included in this opcode. + * opticalCenterX, opticalCenterY - Normalized x,y coordinates of the sensor optical + * center relative to the top,left pixel of the produced images (e.g. [0.5, 0.5] + * gives a sensor optical center in the image center. + * kCoeffs - A list of coefficients for the polynomial equation representing the distortion + * correction. For each plane, 6 coefficients must be included: + * {k_r0, k_r1, k_r2, k_r3, k_t0, k_t1}. See the DNG 1.4 specification for an + * outline of the polynomial used here. + * + * Returns OK on success, or a negative error code. + */ + virtual status_t addWarpRectilinear(uint32_t numPlanes, + double opticalCenterX, + double opticalCenterY, + const double* kCoeffs); + + + /** + * Add FixBadPixelsList opcode for the given metadata parameters. + * + * Returns OK on success, or a negative error code. + */ + virtual status_t addBadPixelListForMetadata(const uint32_t* hotPixels, + uint32_t xyPairCount, + uint32_t colorFilterArrangement); + + /** + * Add FixBadPixelsList opcode. + * + * bayerPhase - 0=top-left of image is red, 1=top-left of image is green pixel in red row, + * 2=top-left of image is green pixel in blue row, 3=top-left of image is + * blue. + * badPointCount - number of (x,y) pairs of bad pixels are given in badPointRowColPairs. + * badRectCount - number of (top, left, bottom, right) tuples are given in + * badRectTopLeftBottomRightTuples + * + * Returns OK on success, or a negative error code. 
+ */ + virtual status_t addBadPixelList(uint32_t bayerPhase, + uint32_t badPointCount, + uint32_t badRectCount, + const uint32_t* badPointRowColPairs, + const uint32_t* badRectTopLeftBottomRightTuples); + + // TODO: Add other Opcode methods + protected: + static const uint32_t FLAG_OPTIONAL = 0x1u; + static const uint32_t FLAG_OPTIONAL_FOR_PREVIEW = 0x2u; + + // Opcode IDs + enum { + WARP_RECTILINEAR_ID = 1, + FIX_BAD_PIXELS_LIST = 5, + GAIN_MAP_ID = 9, + }; + + // LSM mosaic indices + enum { + LSM_R_IND = 0, + LSM_GE_IND = 1, + LSM_GO_IND = 2, + LSM_B_IND = 3, + }; + + uint32_t mCount; + ByteArrayOutput mOpList; + EndianOutput mEndianOut; + + status_t addOpcodePreamble(uint32_t opcodeId); + + private: + /** + * Add Bayer GainMap opcode(s) for the given metadata parameters. + * CFA layout must match the layout of the shading map passed into the + * lensShadingMap parameter. + * + * Returns OK on success, or a negative error code. + */ + status_t addBayerGainMapsForMetadata(uint32_t lsmWidth, + uint32_t lsmHeight, + uint32_t activeAreaWidth, + uint32_t activeAreaHeight, + CfaLayout cfa, + const float* lensShadingMap); + + /** + * Add Bayer GainMap opcode(s) for the given metadata parameters. + * CFA layout must match the layout of the shading map passed into the + * lensShadingMap parameter. + * + * Returns OK on success, or a negative error code. 
+ */ + status_t addMonochromeGainMapsForMetadata(uint32_t lsmWidth, + uint32_t lsmHeight, + uint32_t activeAreaWidth, + uint32_t activeAreaHeight, + const float* lensShadingMap); +}; + +} /*namespace img_utils*/ +} /*namespace android*/ + +#endif /*IMG_UTILS_DNG_UTILS_H*/ diff --git a/app/src/main/cpp/img_utils/include/img_utils/EndianUtils.h b/app/src/main/cpp/img_utils/include/img_utils/EndianUtils.h new file mode 100644 index 00000000..bfa42e97 --- /dev/null +++ b/app/src/main/cpp/img_utils/include/img_utils/EndianUtils.h @@ -0,0 +1,250 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef IMG_UTILS_ENDIAN_UTILS +#define IMG_UTILS_ENDIAN_UTILS + +#include + +#include +#include +#include +#include +#include + +namespace android { +namespace img_utils { + +/** + * Endianness types supported. + */ +enum ANDROID_API Endianness { + UNDEFINED_ENDIAN, // Default endianness will be used. + BIG, + LITTLE +}; + +/** + * Convert from the native device endianness to big endian. + */ +template +T convertToBigEndian(T in); + +/** + * Convert from the native device endianness to little endian. + */ +template +T convertToLittleEndian(T in); + +/** + * A utility class for writing to an Output with the given endianness. + */ +class ANDROID_API EndianOutput : public Output { + public: + /** + * Wrap the given Output. Calling write methods will result in + * writes to this output. 
+ */ + explicit EndianOutput(Output* out, Endianness end=LITTLE); + + virtual ~EndianOutput(); + + /** + * Call open on the wrapped output. + */ + virtual status_t open(); + + /** + * Call close on the wrapped output. + */ + virtual status_t close(); + + /** + * Set the endianness to use when writing. + */ + virtual void setEndianness(Endianness end); + + /** + * Get the currently configured endianness. + */ + virtual Endianness getEndianness() const; + + /** + * Get the current number of bytes written by this EndianOutput. + */ + virtual uint32_t getCurrentOffset() const; + + + // TODO: switch write methods to uint32_t instead of size_t, + // the max size of a TIFF files is bounded + + /** + * The following methods will write elements from given input buffer to the output. + * Count elements in the buffer will be written with the endianness set for this + * EndianOutput. If the given offset is greater than zero, that many elements will + * be skipped in the buffer before writing. + * + * Returns OK on success, or a negative error code. 
+ */ + virtual status_t write(const uint8_t* buf, size_t offset, size_t count); + + virtual status_t write(const int8_t* buf, size_t offset, size_t count); + + virtual status_t write(const uint16_t* buf, size_t offset, size_t count); + + virtual status_t write(const int16_t* buf, size_t offset, size_t count); + + virtual status_t write(const uint32_t* buf, size_t offset, size_t count); + + virtual status_t write(const int32_t* buf, size_t offset, size_t count); + + virtual status_t write(const uint64_t* buf, size_t offset, size_t count); + + virtual status_t write(const int64_t* buf, size_t offset, size_t count); + + virtual status_t write(const float* buf, size_t offset, size_t count); + + virtual status_t write(const double* buf, size_t offset, size_t count); + + protected: + template + inline status_t writeHelper(const T* buf, size_t offset, size_t count); + + uint32_t mOffset; + Output* mOutput; + Endianness mEndian; +}; + +template +inline status_t EndianOutput::writeHelper(const T* buf, size_t offset, size_t count) { + assert(offset <= count); + status_t res = OK; + size_t size = sizeof(T); + switch(mEndian) { + case BIG: { + for (size_t i = offset; i < count; ++i) { + T tmp = convertToBigEndian(buf[offset + i]); + if ((res = mOutput->write(reinterpret_cast(&tmp), 0, size)) + != OK) { + return res; + } + mOffset += size; + } + break; + } + case LITTLE: { + for (size_t i = offset; i < count; ++i) { + T tmp = convertToLittleEndian(buf[offset + i]); + if ((res = mOutput->write(reinterpret_cast(&tmp), 0, size)) + != OK) { + return res; + } + mOffset += size; + } + break; + } + default: { + return BAD_VALUE; + } + } + return res; +} + +template<> +inline uint8_t convertToBigEndian(uint8_t in) { + return in; +} + +template<> +inline int8_t convertToBigEndian(int8_t in) { + return in; +} + +template<> +inline uint16_t convertToBigEndian(uint16_t in) { + return htobe16(in); +} + +template<> +inline int16_t convertToBigEndian(int16_t in) { + return htobe16(in); +} + 
+template<> +inline uint32_t convertToBigEndian(uint32_t in) { + return htobe32(in); +} + +template<> +inline int32_t convertToBigEndian(int32_t in) { + return htobe32(in); +} + +template<> +inline uint64_t convertToBigEndian(uint64_t in) { + return htobe64(in); +} + +template<> +inline int64_t convertToBigEndian(int64_t in) { + return htobe64(in); +} + +template<> +inline uint8_t convertToLittleEndian(uint8_t in) { + return in; +} + +template<> +inline int8_t convertToLittleEndian(int8_t in) { + return in; +} + +template<> +inline uint16_t convertToLittleEndian(uint16_t in) { + return htole16(in); +} + +template<> +inline int16_t convertToLittleEndian(int16_t in) { + return htole16(in); +} + +template<> +inline uint32_t convertToLittleEndian(uint32_t in) { + return htole32(in); +} + +template<> +inline int32_t convertToLittleEndian(int32_t in) { + return htole32(in); +} + +template<> +inline uint64_t convertToLittleEndian(uint64_t in) { + return htole64(in); +} + +template<> +inline int64_t convertToLittleEndian(int64_t in) { + return htole64(in); +} + +} /*namespace img_utils*/ +} /*namespace android*/ + +#endif /*IMG_UTILS_ENDIAN_UTILS*/ + diff --git a/app/src/main/cpp/img_utils/include/img_utils/FileInput.h b/app/src/main/cpp/img_utils/include/img_utils/FileInput.h new file mode 100644 index 00000000..66afaff5 --- /dev/null +++ b/app/src/main/cpp/img_utils/include/img_utils/FileInput.h @@ -0,0 +1,76 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef IMG_UTILS_FILE_INPUT_H +#define IMG_UTILS_FILE_INPUT_H + +#include + +#include +#include +#include +#include +#include + +namespace android { +namespace img_utils { + +/** + * Utility class for reading from a file. + */ +class ANDROID_API FileInput : public Input { + public: + /** + * Create a file input for the given path. + */ + explicit FileInput(String8 path); + + virtual ~FileInput(); + + /** + * Open a file descriptor to the path given in the constructor. + * + * Returns OK on success, or a negative error code. + */ + virtual status_t open(); + + /** + * Read bytes from the file into the given buffer. At most, the number + * of bytes given in the count argument will be read. Bytes will be written + * into the given buffer starting at the index given in the offset argument. + * + * Returns the number of bytes read, or NOT_ENOUGH_DATA if at the end of the file. If an + * error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA. + */ + virtual ssize_t read(uint8_t* buf, size_t offset, size_t count); + + /** + * Close the file descriptor to the path given in the constructor. + * + * Returns OK on success, or a negative error code. + */ + virtual status_t close(); + private: + FILE *mFp; + String8 mPath; + bool mOpen; +}; + +} /*namespace img_utils*/ +} /*namespace android*/ + + +#endif /*IMG_UTILS_INPUT_H*/ diff --git a/app/src/main/cpp/img_utils/include/img_utils/FileOutput.h b/app/src/main/cpp/img_utils/include/img_utils/FileOutput.h new file mode 100644 index 00000000..3d4cf764 --- /dev/null +++ b/app/src/main/cpp/img_utils/include/img_utils/FileOutput.h @@ -0,0 +1,46 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef IMG_UTILS_FILE_OUTPUT_H +#define IMG_UTILS_FILE_OUTPUT_H + +#include +#include +#include +#include +#include +#include + +namespace android { +namespace img_utils { + +class ANDROID_API FileOutput : public Output { + public: + explicit FileOutput(String8 path); + virtual ~FileOutput(); + virtual status_t open(); + virtual status_t write(const uint8_t* buf, size_t offset, size_t count); + virtual status_t close(); + private: + FILE *mFp; + String8 mPath; + bool mOpen; +}; + +} /*namespace img_utils*/ +} /*namespace android*/ + +#endif /*IMG_UTILS_FILE_OUTPUT_H*/ diff --git a/app/src/main/cpp/img_utils/include/img_utils/Input.h b/app/src/main/cpp/img_utils/include/img_utils/Input.h new file mode 100644 index 00000000..6a03647f --- /dev/null +++ b/app/src/main/cpp/img_utils/include/img_utils/Input.h @@ -0,0 +1,71 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef IMG_UTILS_INPUT_H +#define IMG_UTILS_INPUT_H + +#include +#include +#include + +namespace android { +namespace img_utils { + +/** + * Utility class used as a source of bytes. + */ +class ANDROID_API Input { + public: + virtual ~Input(); + + /** + * Open this Input. + * + * Returns OK on success, or a negative error code. + */ + virtual status_t open(); + + /** + * Read bytes into the given buffer. At most, the number of bytes given in the + * count argument will be read. Bytes will be written into the given buffer starting + * at the index given in the offset argument. + * + * Returns the number of bytes read, or NOT_ENOUGH_DATA if at the end of the file. If an + * error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA. + */ + virtual ssize_t read(uint8_t* buf, size_t offset, size_t count) = 0; + + /** + * Skips bytes in the input. + * + * Returns the number of bytes skipped, or NOT_ENOUGH_DATA if at the end of the file. If an + * error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA. + */ + virtual ssize_t skip(size_t count); + + /** + * Close the Input. It is not valid to call open on a previously closed Input. + * + * Returns OK on success, or a negative error code. + */ + virtual status_t close(); +}; + +} /*namespace img_utils*/ +} /*namespace android*/ + + +#endif /*IMG_UTILS_INPUT_H*/ diff --git a/app/src/main/cpp/img_utils/include/img_utils/Orderable.h b/app/src/main/cpp/img_utils/include/img_utils/Orderable.h new file mode 100644 index 00000000..87253a4c --- /dev/null +++ b/app/src/main/cpp/img_utils/include/img_utils/Orderable.h @@ -0,0 +1,57 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef IMG_UTILS_ORDERABLE +#define IMG_UTILS_ORDERABLE + +#include +#include + +namespace android { +namespace img_utils { + +#define COMPARE_DEF(op) \ +inline bool operator op (const Orderable& orderable) const; + +/** + * Subclasses of Orderable can be compared and sorted. This is + * intended to be used to create sorted arrays of TIFF entries + * and IFDs. + */ +class ANDROID_API Orderable { + public: + virtual ~Orderable(); + + /** + * Comparison operatotors are based on the value returned + * from this method. + */ + virtual uint32_t getComparableValue() const = 0; + + COMPARE_DEF(>) + COMPARE_DEF(<) + COMPARE_DEF(>=) + COMPARE_DEF(<=) + COMPARE_DEF(==) + COMPARE_DEF(!=) +}; + +#undef COMPARE_DEF + +} /*namespace img_utils*/ +} /*namespace android*/ + +#endif /*IMG_UTILS_ORDERABLE*/ diff --git a/app/src/main/cpp/img_utils/include/img_utils/Output.h b/app/src/main/cpp/img_utils/include/img_utils/Output.h new file mode 100644 index 00000000..35fae239 --- /dev/null +++ b/app/src/main/cpp/img_utils/include/img_utils/Output.h @@ -0,0 +1,61 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef IMG_UTILS_OUTPUT_H +#define IMG_UTILS_OUTPUT_H + +#include +#include +#include + +namespace android { +namespace img_utils { + +/** + * Utility class used to output bytes. + */ +class ANDROID_API Output { + public: + virtual ~Output(); + + /** + * Open this Output. + * + * Returns OK on success, or a negative error code. + */ + virtual status_t open(); + + /** + * Write bytes from the given buffer. The number of bytes given in the count + * argument will be written. Bytes will be written from the given buffer starting + * at the index given in the offset argument. + * + * Returns OK on success, or a negative error code. + */ + virtual status_t write(const uint8_t* buf, size_t offset, size_t count) = 0; + + /** + * Close this Output. It is not valid to call open on a previously closed Output. + * + * Returns OK on success, or a negative error code. + */ + virtual status_t close(); +}; + +} /*namespace img_utils*/ +} /*namespace android*/ + +#endif /*IMG_UTILS_OUTPUT_H*/ diff --git a/app/src/main/cpp/img_utils/include/img_utils/Pair.h b/app/src/main/cpp/img_utils/include/img_utils/Pair.h new file mode 100644 index 00000000..d651cacc --- /dev/null +++ b/app/src/main/cpp/img_utils/include/img_utils/Pair.h @@ -0,0 +1,44 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef IMG_UTILS_PAIR_H +#define IMG_UTILS_PAIR_H + +#include + +namespace android { +namespace img_utils { + +/** + * Generic pair utility class. Nothing special here. + */ +template +class ANDROID_API Pair { + public: + F first; + S second; + + Pair() {} + + Pair(const Pair& o) : first(o.first), second(o.second) {} + + Pair(const F& f, const S& s) : first(f), second(s) {} +}; + +} /*namespace img_utils*/ +} /*namespace android*/ + +#endif /*IMG_UTILS_PAIR_H*/ diff --git a/app/src/main/cpp/img_utils/include/img_utils/SortedEntryVector.h b/app/src/main/cpp/img_utils/include/img_utils/SortedEntryVector.h new file mode 100644 index 00000000..f059a82a --- /dev/null +++ b/app/src/main/cpp/img_utils/include/img_utils/SortedEntryVector.h @@ -0,0 +1,53 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef IMG_UTILS_SORTED_ENTRY_VECTOR_H +#define IMG_UTILS_SORTED_ENTRY_VECTOR_H + +#include + +#include +#include + +namespace android { +namespace img_utils { + +/** + * Subclass of SortedVector that has been extended to + * do comparisons/lookups based on the tag ID of the entries. + */ +class SortedEntryVector : public SortedVector > { + public: + virtual ~SortedEntryVector(); + + /** + * Returns the index of the entry with the given tag ID, or + * -1 if none exists. + */ + ssize_t indexOfTag(uint16_t tag) const; + + protected: + /** + * Compare tag ID. + */ + virtual int do_compare(const void* lhs, const void* rhs) const; +}; + + +} /*namespace img_utils*/ +} /*namespace android*/ + +#endif /*IMG_UTILS_SORTED_ENTRY_VECTOR_H*/ diff --git a/app/src/main/cpp/img_utils/include/img_utils/StripSource.h b/app/src/main/cpp/img_utils/include/img_utils/StripSource.h new file mode 100644 index 00000000..b5c6b609 --- /dev/null +++ b/app/src/main/cpp/img_utils/include/img_utils/StripSource.h @@ -0,0 +1,53 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef IMG_UTILS_STRIP_SOURCE_H +#define IMG_UTILS_STRIP_SOURCE_H + +#include + +#include +#include + +#include + +namespace android { +namespace img_utils { + +/** + * This class acts as a data source for strips set in a TiffIfd. 
+ */ +class ANDROID_API StripSource { + public: + virtual ~StripSource(); + + /** + * Write count bytes to the stream. + * + * Returns OK on success, or a negative error code. + */ + virtual status_t writeToStream(Output& stream, uint32_t count) = 0; + + /** + * Return the source IFD. + */ + virtual uint32_t getIfd() const = 0; +}; + +} /*namespace img_utils*/ +} /*namespace android*/ + +#endif /*IMG_UTILS_STRIP_SOURCE_H*/ diff --git a/app/src/main/cpp/img_utils/include/img_utils/TagDefinitions.h b/app/src/main/cpp/img_utils/include/img_utils/TagDefinitions.h new file mode 100644 index 00000000..1cc98669 --- /dev/null +++ b/app/src/main/cpp/img_utils/include/img_utils/TagDefinitions.h @@ -0,0 +1,1404 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef IMG_UTILS_TIFF_TAG_DEFINITION_H +#define IMG_UTILS_TIFF_TAG_DEFINITION_H + +#include +#include +#include +#include + +namespace android { +namespace img_utils { + +/** + * Tag definitions contain information about standard TIFF compatible tags. + */ +typedef struct TagDefinition { + // The tag name. + const char* tagName; + // The specified tag ID. + const uint16_t tagId; + // The default type for this tag. This must be a valid TIFF type. + const TagType defaultType; + // The default Image File Directory (IFD) for this tag. + const uint32_t defaultIfd; + // The valid count for this tag, or 0 if the count is not fixed. 
+ const uint32_t fixedCount; + // The endianness of the tag value, or UNDEFINED_ENDIAN if there is no fixed endian + const Endianness fixedEndian; +} TagDefinition_t; + +/** + * Convenience defines for tag ids. + */ +enum { + TAG_RAWTOPREVIEWGAIN = 0xC7A8u, + TAG_NEWRAWIMAGEDIGEST = 0xC7A7u, + TAG_ORIGINALDEFAULTCROPSIZE = 0xC793u, + TAG_ORIGINALBESTQUALITYFINALSIZE = 0xC792u, + TAG_ORIGINALDEFAULTFINALSIZE = 0xC791u, + TAG_PROFILEHUESATMAPENCODING = 0xC7A3u, + TAG_PROFILELOOKTABLEENCODING = 0xC7A4u, + TAG_BASELINEEXPOSUREOFFSET = 0xC7A5u, + TAG_DEFAULTBLACKRENDER = 0xC7A6u, + TAG_DEFAULTUSERCROP = 0xC7B5u, + TAG_NOISEPROFILE = 0xC761u, + TAG_OPCODELIST3 = 0xC74Eu, + TAG_OPCODELIST2 = 0xC741u, + TAG_OPCODELIST1 = 0xC740u, + TAG_PROFILELOOKTABLEDATA = 0xC726u, + TAG_PROFILELOOKTABLEDIMS = 0xC725u, + TAG_ROWINTERLEAVEFACTOR = 0xC71Fu, + TAG_SUBTILEBLOCKSIZE = 0xC71Eu, + TAG_ORIGINALRAWFILEDIGEST = 0xC71Du, + TAG_RAWIMAGEDIGEST = 0xC71Cu, + TAG_PREVIEWDATETIME = 0xC71Bu, + TAG_PREVIEWCOLORSPACE = 0xC71Au, + TAG_PREVIEWSETTINGSDIGEST = 0xC719u, + TAG_PREVIEWSETTINGSNAME = 0xC718u, + TAG_PREVIEWAPPLICATIONVERSION = 0xC717u, + TAG_PREVIEWAPPLICATIONNAME = 0xC716u, + TAG_FORWARDMATRIX2 = 0xC715u, + TAG_FORWARDMATRIX1 = 0xC714u, + TAG_PROFILECOPYRIGHT = 0xC6FEu, + TAG_PROFILEEMBEDPOLICY = 0xC6FDu, + TAG_PROFILETONECURVE = 0xC6FCu, + TAG_PROFILEHUESATMAPDATA2 = 0xC6FBu, + TAG_PROFILEHUESATMAPDATA1 = 0xC6FAu, + TAG_PROFILEHUESATMAPDIMS = 0xC6F9u, + TAG_PROFILENAME = 0xC6F8u, + TAG_NOISEREDUCTIONAPPLIED = 0xC6F7u, + TAG_ASSHOTPROFILENAME = 0xC6F6u, + TAG_EXTRACAMERAPROFILES = 0xC6F5u, + TAG_PROFILECALIBRATIONSIGNATURE = 0xC6F4u, + TAG_CAMERACALIBRATIONSIGNATURE = 0xC6F3u, + TAG_COLORIMETRICREFERENCE = 0xC6BFu, + TAG_CURRENTPREPROFILEMATRIX = 0xC692u, + TAG_CURRENTICCPROFILE = 0xC691u, + TAG_ASSHOTPREPROFILEMATRIX = 0xC690u, + TAG_ASSHOTICCPROFILE = 0xC68Fu, + TAG_MASKEDAREAS = 0xC68Eu, + TAG_ACTIVEAREA = 0xC68Du, + TAG_ORIGINALRAWFILEDATA = 0xC68Cu, + TAG_ORIGINALRAWFILENAME 
= 0xC68Bu, + TAG_RAWDATAUNIQUEID = 0xC65Du, + TAG_MAKERNOTESAFETY = 0xC635u, + TAG_DNGPRIVATEDATA = 0xC634u, + TAG_SHADOWSCALE = 0xC633u, + TAG_ANTIALIASSTRENGTH = 0xC632u, + TAG_CHROMABLURRADIUS = 0xC631u, + TAG_LENSINFO = 0xC630u, + TAG_CAMERASERIALNUMBER = 0xC62Fu, + TAG_LINEARRESPONSELIMIT = 0xC62Eu, + TAG_BAYERGREENSPLIT = 0xC62Du, + TAG_BASELINESHARPNESS = 0xC62Cu, + TAG_BASELINENOISE = 0xC62Bu, + TAG_BASELINEEXPOSURE = 0xC62Au, + TAG_ASSHOTWHITEXY = 0xC629u, + TAG_ASSHOTNEUTRAL = 0xC628u, + TAG_ANALOGBALANCE = 0xC627u, + TAG_REDUCTIONMATRIX2 = 0xC626u, + TAG_REDUCTIONMATRIX1 = 0xC625u, + TAG_CAMERACALIBRATION2 = 0xC624u, + TAG_CAMERACALIBRATION1 = 0xC623u, + TAG_COLORMATRIX2 = 0xC622u, + TAG_COLORMATRIX1 = 0xC621u, + TAG_CALIBRATIONILLUMINANT2 = 0xC65Bu, + TAG_CALIBRATIONILLUMINANT1 = 0xC65Au, + TAG_DEFAULTCROPSIZE = 0xC620u, + TAG_DEFAULTCROPORIGIN = 0xC61Fu, + TAG_BESTQUALITYSCALE = 0xC65Cu, + TAG_DEFAULTSCALE = 0xC61Eu, + TAG_WHITELEVEL = 0xC61Du, + TAG_BLACKLEVELDELTAV = 0xC61Cu, + TAG_BLACKLEVELDELTAH = 0xC61Bu, + TAG_BLACKLEVEL = 0xC61Au, + TAG_BLACKLEVELREPEATDIM = 0xC619u, + TAG_LINEARIZATIONTABLE = 0xC618u, + TAG_CFALAYOUT = 0xC617u, + TAG_CFAPLANECOLOR = 0xC616u, + TAG_LOCALIZEDCAMERAMODEL = 0xC615u, + TAG_UNIQUECAMERAMODEL = 0xC614u, + TAG_DNGBACKWARDVERSION = 0xC613u, + TAG_DNGVERSION = 0xC612u, + TAG_SUBFILETYPE = 0x00FFu, + TAG_YRESOLUTION = 0x011Bu, + TAG_XRESOLUTION = 0x011Au, + TAG_THRESHHOLDING = 0x0107u, + TAG_STRIPOFFSETS = 0x0111u, + TAG_STRIPBYTECOUNTS = 0x0117u, + TAG_SOFTWARE = 0x0131u, + TAG_SAMPLESPERPIXEL = 0x0115u, + TAG_ROWSPERSTRIP = 0x0116u, + TAG_RESOLUTIONUNIT = 0x0128u, + TAG_PLANARCONFIGURATION = 0x011Cu, + TAG_PHOTOMETRICINTERPRETATION = 0x0106u, + TAG_ORIENTATION = 0x0112u, + TAG_NEWSUBFILETYPE = 0x00FEu, + TAG_MODEL = 0x0110u, + TAG_MINSAMPLEVALUE = 0x0118u, + TAG_MAXSAMPLEVALUE = 0x0119u, + TAG_MAKE = 0x010Fu, + TAG_IMAGEWIDTH = 0x0100u, + TAG_IMAGELENGTH = 0x0101u, + TAG_IMAGEDESCRIPTION = 0x010Eu, + TAG_HOSTCOMPUTER = 
0x013Cu, + TAG_GRAYRESPONSEUNIT = 0x0122u, + TAG_GRAYRESPONSECURVE = 0x0123u, + TAG_FREEOFFSETS = 0x0120u, + TAG_FREEBYTECOUNTS = 0x0121u, + TAG_FILLORDER = 0x010Au, + TAG_EXTRASAMPLES = 0x0152u, + TAG_DATETIME = 0x0132u, + TAG_COPYRIGHT = 0x8298u, + TAG_COMPRESSION = 0x0103u, + TAG_COLORMAP = 0x0140u, + TAG_CELLWIDTH = 0x0108u, + TAG_CELLLENGTH = 0x0109u, + TAG_BITSPERSAMPLE = 0x0102u, + TAG_ARTIST = 0x013Bu, + TAG_EXIFVERSION = 0x9000u, + TAG_CFAREPEATPATTERNDIM = 0x828Du, + TAG_DATETIMEORIGINAL = 0x9003u, + TAG_CFAPATTERN = 0x828Eu, + TAG_SUBIFDS = 0x014Au, + TAG_TIFFEPSTANDARDID = 0x9216u, + TAG_EXPOSURETIME = 0x829Au, + TAG_ISOSPEEDRATINGS = 0x8827u, + TAG_FOCALLENGTH = 0x920Au, + TAG_FNUMBER = 0x829Du, + TAG_GPSINFO = 0x8825u, + TAG_GPSVERSIONID = 0x0u, + TAG_GPSLATITUDEREF = 0x1u, + TAG_GPSLATITUDE = 0x2u, + TAG_GPSLONGITUDEREF = 0x3u, + TAG_GPSLONGITUDE = 0x4u, + TAG_GPSTIMESTAMP = 0x7u, + TAG_GPSDATESTAMP = 0x001Du, +}; + +/** + * Convenience values for tags with enumerated values + */ + +enum { + TAG_ORIENTATION_NORMAL = 1, + TAG_ORIENTATION_ROTATE_180 = 3, + TAG_ORIENTATION_ROTATE_90 = 6, + TAG_ORIENTATION_ROTATE_270 = 8, + TAG_ORIENTATION_UNKNOWN = 9 +}; + +/** + * TIFF_EP_TAG_DEFINITIONS contains tags defined in the TIFF EP spec + */ +const TagDefinition_t TIFF_EP_TAG_DEFINITIONS[] = { + { // PhotometricInterpretation + "PhotometricInterpretation", + 0x0106u, + SHORT, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // SubIfds + "SubIfds", + 0x014Au, + LONG, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // CFAPattern + "CFAPattern", + 0x828Eu, + BYTE, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // CFARepeatPatternDim + "CFARepeatPatternDim", + 0x828Du, + SHORT, + IFD_0, + 2, + UNDEFINED_ENDIAN + }, + { // DateTimeOriginal + "DateTimeOriginal", + 0x9003u, + ASCII, + IFD_0, + 20, + UNDEFINED_ENDIAN + }, + { // Tiff/EPStandardID + "Tiff", + 0x9216u, + BYTE, + IFD_0, + 4, + UNDEFINED_ENDIAN + }, + { // ExposureTime + "ExposureTime", + 0x829Au, + RATIONAL, + IFD_0, + 0, 
+ UNDEFINED_ENDIAN + }, + { // ISOSpeedRatings + "ISOSpeedRatings", + 0x8827u, + SHORT, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // FocalLength + "FocalLength", + 0x920Au, + RATIONAL, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // FNumber + "FNumber", + 0x829Du, + RATIONAL, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // GPSInfo + "GPSInfo", + 0x8825u, + LONG, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // GPSVersionID + "GPSVersionID", + 0x0u, + BYTE, + IFD_0, + 4, + UNDEFINED_ENDIAN + }, + { // GPSLatitudeRef + "GPSLatitudeRef", + 0x1u, + ASCII, + IFD_0, + 2, + UNDEFINED_ENDIAN + }, + { // GPSLatitude + "GPSLatitude", + 0x2u, + RATIONAL, + IFD_0, + 3, + UNDEFINED_ENDIAN + }, + { // GPSLongitudeRef + "GPSLongitudeRef", + 0x3u, + ASCII, + IFD_0, + 2, + UNDEFINED_ENDIAN + }, + { // GPSLongitude + "GPSLongitude", + 0x4u, + RATIONAL, + IFD_0, + 3, + UNDEFINED_ENDIAN + }, + { // GPSTimeStamp + "GPSTimeStamp", + 0x7u, + RATIONAL, + IFD_0, + 3, + UNDEFINED_ENDIAN + }, + /*TODO: Remaining TIFF EP tags*/ +}; + +/** + * EXIF_2_3_TAG_DEFINITIONS contains tags defined in the Jeita EXIF 2.3 spec + */ +const TagDefinition_t EXIF_2_3_TAG_DEFINITIONS[] = { + { // ExifVersion + "ExifVersion", + 0x9000u, + UNDEFINED, + IFD_0, + 4, + UNDEFINED_ENDIAN + }, + { // GPSDateStamp + "GPSDateStamp", + 0x001Du, + ASCII, + IFD_0, + 11, + UNDEFINED_ENDIAN + }, + /*TODO: Remaining EXIF 2.3 tags*/ +}; + +/** + * TIFF_6_TAG_DEFINITIONS contains tags defined in the TIFF 6.0 spec + */ +const TagDefinition_t TIFF_6_TAG_DEFINITIONS[] = { + { // SubFileType + "SubFileType", + 0x00FFu, + SHORT, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // Artist + "Artist", + 0x013Bu, + ASCII, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // BitsPerSample + "BitsPerSample", + 0x0102u, + SHORT, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // CellLength + "CellLength", + 0x0109u, + SHORT, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // CellWidth + "CellWidth", + 0x0108u, + SHORT, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // ColorMap + 
"ColorMap", + 0x0140u, + SHORT, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // Compression + "Compression", + 0x0103u, + SHORT, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // Copyright + "Copyright", + 0x8298u, + ASCII, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // DateTime + "DateTime", + 0x0132u, + ASCII, + IFD_0, + 20, + UNDEFINED_ENDIAN + }, + { // ExtraSamples + "ExtraSamples", + 0x0152u, + SHORT, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // FillOrder + "FillOrder", + 0x010Au, + SHORT, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // FreeByteCounts + "FreeByteCounts", + 0x0121u, + LONG, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // FreeOffsets + "FreeOffsets", + 0x0120u, + LONG, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // GrayResponseCurve + "GrayResponseCurve", + 0x0123u, + SHORT, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // GrayResponseUnit + "GrayResponseUnit", + 0x0122u, + SHORT, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // HostComputer + "HostComputer", + 0x013Cu, + ASCII, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // ImageDescription + "ImageDescription", + 0x010Eu, + ASCII, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // ImageLength + "ImageLength", + 0x0101u, + LONG, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // ImageWidth + "ImageWidth", + 0x0100u, + LONG, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // Make + "Make", + 0x010Fu, + ASCII, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // MaxSampleValue + "MaxSampleValue", + 0x0119u, + SHORT, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // MinSampleValue + "MinSampleValue", + 0x0118u, + SHORT, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // Model + "Model", + 0x0110u, + ASCII, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // NewSubfileType + "NewSubfileType", + 0x00FEu, + LONG, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // Orientation + "Orientation", + 0x0112u, + SHORT, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // PhotoMetricInterpretation + "PhotoMetricInterpretation", + 0x0106u, + SHORT, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // 
PlanarConfiguration + "PlanarConfiguration", + 0x011Cu, + SHORT, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // ResolutionUnit + "ResolutionUnit", + 0x0128u, + SHORT, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // RowsPerStrip + "RowsPerStrip", + 0x0116u, + LONG, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // SamplesPerPixel + "SamplesPerPixel", + 0x0115u, + SHORT, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // Software + "Software", + 0x0131u, + ASCII, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // StripByteCounts + "StripByteCounts", + 0x0117u, + LONG, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // StripOffsets + "StripOffsets", + 0x0111u, + LONG, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // SubfileType + "SubfileType", + 0x00FFu, + SHORT, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // Threshholding + "Threshholding", + 0x0107u, + SHORT, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // XResolution + "XResolution", + 0x011Au, + RATIONAL, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // YResolution + "YResolution", + 0x011Bu, + RATIONAL, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, +}; + +/** + * DNG_TAG_DEFINITIONS contains tags defined in the DNG 1.4 spec + */ +const TagDefinition_t DNG_TAG_DEFINITIONS[] = { + { // DNGVersion + "DNGVersion", + 0xC612u, + BYTE, + IFD_0, + 4, + UNDEFINED_ENDIAN + }, + { // DNGBackwardVersion + "DNGBackwardVersion", + 0xC613u, + BYTE, + IFD_0, + 4, + UNDEFINED_ENDIAN + }, + { // UniqueCameraModel + "UniqueCameraModel", + 0xC614u, + ASCII, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // LocalizedCameraModel + "LocalizedCameraModel", + 0xC615u, + ASCII, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // CFAPlaneColor + "CFAPlaneColor", + 0xC616u, + BYTE, + RAW_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // CFALayout + "CFALayout", + 0xC617u, + SHORT, + RAW_IFD, + 1, + UNDEFINED_ENDIAN + }, + { // LinearizationTable + "LinearizationTable", + 0xC618u, + SHORT, + RAW_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // BlackLevelRepeatDim + "BlackLevelRepeatDim", + 0xC619u, + SHORT, + 
RAW_IFD, + 2, + UNDEFINED_ENDIAN + }, + { // BlackLevel + "BlackLevel", + 0xC61Au, + RATIONAL, + RAW_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // BlackLevelDeltaH + "BlackLevelDeltaH", + 0xC61Bu, + SRATIONAL, + RAW_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // BlackLevelDeltaV + "BlackLevelDeltaV", + 0xC61Cu, + SRATIONAL, + RAW_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // WhiteLevel + "WhiteLevel", + 0xC61Du, + LONG, + RAW_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // DefaultScale + "DefaultScale", + 0xC61Eu, + RATIONAL, + RAW_IFD, + 2, + UNDEFINED_ENDIAN + }, + { // BestQualityScale + "BestQualityScale", + 0xC65Cu, + RATIONAL, + RAW_IFD, + 1, + UNDEFINED_ENDIAN + }, + { // DefaultCropOrigin + "DefaultCropOrigin", + 0xC61Fu, + LONG, + RAW_IFD, + 2, + UNDEFINED_ENDIAN + }, + { // DefaultCropSize + "DefaultCropSize", + 0xC620u, + LONG, + RAW_IFD, + 2, + UNDEFINED_ENDIAN + }, + { // CalibrationIlluminant1 + "CalibrationIlluminant1", + 0xC65Au, + SHORT, + PROFILE_IFD, + 1, + UNDEFINED_ENDIAN + }, + { // CalibrationIlluminant2 + "CalibrationIlluminant2", + 0xC65Bu, + SHORT, + PROFILE_IFD, + 1, + UNDEFINED_ENDIAN + }, + { // ColorMatrix1 + "ColorMatrix1", + 0xC621u, + SRATIONAL, + PROFILE_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // ColorMatrix2 + "ColorMatrix2", + 0xC622u, + SRATIONAL, + PROFILE_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // CameraCalibration1 + "CameraCalibration1", + 0xC623u, + SRATIONAL, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // CameraCalibration2 + "CameraCalibration2", + 0xC624u, + SRATIONAL, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // ReductionMatrix1 + "ReductionMatrix1", + 0xC625u, + SRATIONAL, + PROFILE_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // ReductionMatrix2 + "ReductionMatrix2", + 0xC626u, + SRATIONAL, + PROFILE_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // AnalogBalance + "AnalogBalance", + 0xC627u, + RATIONAL, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // AsShotNeutral + "AsShotNeutral", + 0xC628u, + RATIONAL, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // AsShotWhiteXY 
+ "AsShotWhiteXY", + 0xC629u, + RATIONAL, + IFD_0, + 2, + UNDEFINED_ENDIAN + }, + { // BaselineExposure + "BaselineExposure", + 0xC62Au, + SRATIONAL, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // BaselineNoise + "BaselineNoise", + 0xC62Bu, + RATIONAL, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // BaselineSharpness + "BaselineSharpness", + 0xC62Cu, + RATIONAL, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // BayerGreenSplit + "BayerGreenSplit", + 0xC62Du, + LONG, + RAW_IFD, + 1, + UNDEFINED_ENDIAN + }, + { // LinearResponseLimit + "LinearResponseLimit", + 0xC62Eu, + RATIONAL, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // CameraSerialNumber + "CameraSerialNumber", + 0xC62Fu, + ASCII, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // LensInfo + "LensInfo", + 0xC630u, + RATIONAL, + IFD_0, + 4, + UNDEFINED_ENDIAN + }, + { // ChromaBlurRadius + "ChromaBlurRadius", + 0xC631u, + RATIONAL, + RAW_IFD, + 1, + UNDEFINED_ENDIAN + }, + { // AntiAliasStrength + "AntiAliasStrength", + 0xC632u, + RATIONAL, + RAW_IFD, + 1, + UNDEFINED_ENDIAN + }, + { // ShadowScale + "ShadowScale", + 0xC633u, + RATIONAL, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // DNGPrivateData + "DNGPrivateData", + 0xC634u, + BYTE, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // MakerNoteSafety + "MakerNoteSafety", + 0xC635u, + SHORT, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // RawDataUniqueID + "RawDataUniqueID", + 0xC65Du, + BYTE, + IFD_0, + 16, + UNDEFINED_ENDIAN + }, + { // OriginalRawFileName + "OriginalRawFileName", + 0xC68Bu, + ASCII, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // OriginalRawFileData + "OriginalRawFileData", + 0xC68Cu, + UNDEFINED, + IFD_0, + 0, + BIG + }, + { // ActiveArea + "ActiveArea", + 0xC68Du, + LONG, + RAW_IFD, + 4, + UNDEFINED_ENDIAN + }, + { // MaskedAreas + "MaskedAreas", + 0xC68Eu, + LONG, + RAW_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // AsShotICCProfile + "AsShotICCProfile", + 0xC68Fu, + UNDEFINED, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // AsShotPreProfileMatrix + "AsShotPreProfileMatrix", + 
0xC690u, + SRATIONAL, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // CurrentICCProfile + "CurrentICCProfile", + 0xC691u, + UNDEFINED, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // CurrentICCProfile + "CurrentICCProfile", + 0xC691u, + UNDEFINED, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // CurrentPreProfileMatrix + "CurrentPreProfileMatrix", + 0xC692u, + SRATIONAL, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // ColorimetricReference + "ColorimetricReference", + 0xC6BFu, + SHORT, + IFD_0, + 1, + UNDEFINED_ENDIAN + }, + { // CameraCalibrationSignature + "CameraCalibrationSignature", + 0xC6F3u, + ASCII, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // ProfileCalibrationSignature + "ProfileCalibrationSignature", + 0xC6F4u, + ASCII, + PROFILE_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // ExtraCameraProfiles + "ExtraCameraProfiles", + 0xC6F5u, + LONG, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // AsShotProfileName + "AsShotProfileName", + 0xC6F6u, + ASCII, + IFD_0, + 0, + UNDEFINED_ENDIAN + }, + { // NoiseReductionApplied + "NoiseReductionApplied", + 0xC6F7u, + RATIONAL, + RAW_IFD, + 1, + UNDEFINED_ENDIAN + }, + { // ProfileName + "ProfileName", + 0xC6F8u, + ASCII, + PROFILE_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // ProfileHueSatMapDims + "ProfileHueSatMapDims", + 0xC6F9u, + LONG, + PROFILE_IFD, + 3, + UNDEFINED_ENDIAN + }, + { // ProfileHueSatMapData1 + "ProfileHueSatMapData1", + 0xC6FAu, + FLOAT, + PROFILE_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // ProfileHueSatMapData2 + "ProfileHueSatMapData2", + 0xC6FBu, + FLOAT, + PROFILE_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // ProfileToneCurve + "ProfileToneCurve", + 0xC6FCu, + FLOAT, + PROFILE_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // ProfileEmbedPolicy + "ProfileEmbedPolicy", + 0xC6FDu, + LONG, + PROFILE_IFD, + 1, + UNDEFINED_ENDIAN + }, + { // ProfileCopyright + "ProfileCopyright", + 0xC6FEu, + ASCII, + PROFILE_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // ForwardMatrix1 + "ForwardMatrix1", + 0xC714u, + SRATIONAL, + PROFILE_IFD, + 0, + 
UNDEFINED_ENDIAN + }, + { // ForwardMatrix2 + "ForwardMatrix2", + 0xC715u, + SRATIONAL, + PROFILE_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // PreviewApplicationName + "PreviewApplicationName", + 0xC716u, + ASCII, + PREVIEW_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // PreviewApplicationVersion + "PreviewApplicationVersion", + 0xC717u, + ASCII, + PREVIEW_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // PreviewSettingsName + "PreviewSettingsName", + 0xC718u, + ASCII, + PREVIEW_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // PreviewSettingsDigest + "PreviewSettingsDigest", + 0xC719u, + BYTE, + PREVIEW_IFD, + 16, + UNDEFINED_ENDIAN + }, + { // PreviewColorSpace + "PreviewColorSpace", + 0xC71Au, + LONG, + PREVIEW_IFD, + 1, + UNDEFINED_ENDIAN + }, + { // PreviewDateTime + "PreviewDateTime", + 0xC71Bu, + ASCII, + PREVIEW_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // RawImageDigest + "RawImageDigest", + 0xC71Cu, + BYTE, + IFD_0, + 16, + UNDEFINED_ENDIAN + }, + { // OriginalRawFileDigest + "OriginalRawFileDigest", + 0xC71Du, + BYTE, + IFD_0, + 16, + UNDEFINED_ENDIAN + }, + { // SubTileBlockSize + "SubTileBlockSize", + 0xC71Eu, + LONG, + RAW_IFD, + 2, + UNDEFINED_ENDIAN + }, + { // RowInterleaveFactor + "RowInterleaveFactor", + 0xC71Fu, + LONG, + RAW_IFD, + 1, + UNDEFINED_ENDIAN + }, + { // ProfileLookTableDims + "ProfileLookTableDims", + 0xC725u, + LONG, + PROFILE_IFD, + 3, + UNDEFINED_ENDIAN + }, + { // ProfileLookTableData + "ProfileLookTableData", + 0xC726u, + FLOAT, + PROFILE_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // OpcodeList1 + "OpcodeList1", + 0xC740u, + UNDEFINED, + RAW_IFD, + 0, + BIG + }, + { // OpcodeList2 + "OpcodeList2", + 0xC741u, + UNDEFINED, + RAW_IFD, + 0, + BIG + }, + { // OpcodeList3 + "OpcodeList3", + 0xC74Eu, + UNDEFINED, + RAW_IFD, + 0, + BIG + }, + { // NoiseProfile + "NoiseProfile", + 0xC761u, + DOUBLE, + RAW_IFD, + 0, + UNDEFINED_ENDIAN + }, + { // DefaultUserCrop + "DefaultUserCrop", + 0xC7B5u, + RATIONAL, + RAW_IFD, + 4, + UNDEFINED_ENDIAN + }, + { // DefaultBlackRender + 
"DefaultBlackRender", + 0xC7A6u, + LONG, + PROFILE_IFD, + 1, + UNDEFINED_ENDIAN + }, + { // BaselineExposureOffset + "BaselineExposureOffset", + 0xC7A5u, + RATIONAL, + PROFILE_IFD, + 1, + UNDEFINED_ENDIAN + }, + { // ProfileLookTableEncoding + "ProfileLookTableEncoding", + 0xC7A4u, + LONG, + PROFILE_IFD, + 1, + UNDEFINED_ENDIAN + }, + { // ProfileHueSatMapEncoding + "ProfileHueSatMapEncoding", + 0xC7A3u, + LONG, + PROFILE_IFD, + 1, + UNDEFINED_ENDIAN + }, + { // OriginalDefaultFinalSize + "OriginalDefaultFinalSize", + 0xC791u, + LONG, + IFD_0, + 2, + UNDEFINED_ENDIAN + }, + { // OriginalBestQualityFinalSize + "OriginalBestQualityFinalSize", + 0xC792u, + LONG, + IFD_0, + 2, + UNDEFINED_ENDIAN + }, + { // OriginalDefaultCropSize + "OriginalDefaultCropSize", + 0xC793u, + LONG, + IFD_0, + 2, + UNDEFINED_ENDIAN + }, + { // NewRawImageDigest + "NewRawImageDigest", + 0xC7A7u, + BYTE, + IFD_0, + 16, + UNDEFINED_ENDIAN + }, + { // RawToPreviewGain + "RawToPreviewGain", + 0xC7A8u, + DOUBLE, + PREVIEW_IFD, + 1, + UNDEFINED_ENDIAN + }, +}; + +} /*namespace img_utils*/ +} /*namespace android*/ + +#endif /*IMG_UTILS_TIFF_TAG_DEFINITION_H*/ diff --git a/app/src/main/cpp/img_utils/include/img_utils/TiffEntry.h b/app/src/main/cpp/img_utils/include/img_utils/TiffEntry.h new file mode 100644 index 00000000..09f86a55 --- /dev/null +++ b/app/src/main/cpp/img_utils/include/img_utils/TiffEntry.h @@ -0,0 +1,130 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef IMG_UTILS_TIFF_ENTRY +#define IMG_UTILS_TIFF_ENTRY + +#include +#include +#include + +#include +// #include +#include +#include + +namespace android { +namespace img_utils { + +#define COMPARE_DEF(op) \ +inline bool operator op (const TiffEntry& entry) const; + +/** + * This class holds a single TIFF IFD entry. + * + * Subclasses are expected to support assignment and copying operations. + */ +class ANDROID_API TiffEntry : public TiffWritable { + public: + virtual ~TiffEntry(); + + /** + * Write the 12-byte IFD entry to the output. The given offset will be + * set as the tag value if the size of the tag value exceeds the max + * size for the TIFF Value field (4 bytes), and should be word aligned. + * + * Returns OK on success, or a negative error code on failure. + */ + virtual status_t writeTagInfo(uint32_t offset, /*out*/EndianOutput* out) const = 0; + + /** + * Get the count set for this entry. This corresponds to the TIFF Count + * field. + */ + virtual uint32_t getCount() const = 0; + + /** + * Get the tag id set for this entry. This corresponds to the TIFF Tag + * field. + */ + virtual uint16_t getTag() const = 0; + + /** + * Get the type set for this entry. This corresponds to the TIFF Type + * field. + */ + virtual TagType getType() const = 0; + + /** + * Get the defined endianness for this entry. If this is defined, + * the tag value will be written with the given byte order. + */ + virtual Endianness getEndianness() const = 0; + + /** + * Get the value for this entry. This corresponds to the TIFF Value + * field. + * + * Returns NULL if the value is NULL, or if the type used does not + * match the type of this tag. + */ + template + const T* getData() const; + + virtual std::string toString() const; + + /** + * Force the type used here to be a valid TIFF type. 
+ * + * Returns NULL if the given value is NULL, or if the type given does + * not match the type of the value given. + */ + template + static const T* forceValidType(TagType type, const T* value); + + virtual const void* getDataHelper() const = 0; + + COMPARE_DEF(>) + COMPARE_DEF(<) + + protected: + enum { + MAX_PRINT_STRING_LENGTH = 256 + }; +}; + +#define COMPARE(op) \ +bool TiffEntry::operator op (const TiffEntry& entry) const { \ + return getComparableValue() op entry.getComparableValue(); \ +} + +COMPARE(>) +COMPARE(<) + + +template +const T* TiffEntry::getData() const { + const T* value = reinterpret_cast(getDataHelper()); + return forceValidType(getType(), value); +} + +#undef COMPARE +#undef COMPARE_DEF + +} /*namespace img_utils*/ +} /*namespace android*/ + +#endif /*IMG_UTILS_TIFF_ENTRY*/ diff --git a/app/src/main/cpp/img_utils/include/img_utils/TiffEntryImpl.h b/app/src/main/cpp/img_utils/include/img_utils/TiffEntryImpl.h new file mode 100644 index 00000000..ffdd3274 --- /dev/null +++ b/app/src/main/cpp/img_utils/include/img_utils/TiffEntryImpl.h @@ -0,0 +1,219 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef IMG_UTILS_TIFF_ENTRY_IMPL +#define IMG_UTILS_TIFF_ENTRY_IMPL + +#include +#include +#include +#include +#include + +#include +#include +// #include +#include +#include +#include + +namespace android { +namespace img_utils { + +template +class TiffEntryImpl : public TiffEntry { + public: + TiffEntryImpl(uint16_t tag, TagType type, uint32_t count, Endianness end, const T* data); + virtual ~TiffEntryImpl(); + + status_t writeData(uint32_t offset, /*out*/EndianOutput* out) const; + status_t writeTagInfo(uint32_t offset, /*out*/EndianOutput* out) const; + + uint32_t getCount() const; + uint16_t getTag() const; + TagType getType() const; + Endianness getEndianness() const; + size_t getSize() const; + uint32_t getComparableValue() const; + + protected: + const void* getDataHelper() const; + uint32_t getActualSize() const; + + uint16_t mTag; + uint16_t mType; + uint32_t mCount; + Endianness mEnd; + std::vector mData; + +}; + +template +TiffEntryImpl::TiffEntryImpl(uint16_t tag, TagType type, uint32_t count, Endianness end, + const T* data) + : mTag(tag), mType(static_cast(type)), mCount(count), mEnd(end) { + count = (type == RATIONAL || type == SRATIONAL) ? count * 2 : count; + auto it = mData.insert(mData.end(), data, data + count); + // LOG_ALWAYS_FATAL_IF(index < 0, "%s: Could not allocate vector for data.", __FUNCTION__); +} + +template +TiffEntryImpl::~TiffEntryImpl() {} + +template +uint32_t TiffEntryImpl::getCount() const { + return mCount; +} + +template +uint16_t TiffEntryImpl::getTag() const { + return mTag; +} + +template +TagType TiffEntryImpl::getType() const { + return static_cast(mType); +} + +template +const void* TiffEntryImpl::getDataHelper() const { + return reinterpret_cast(&mData[0]); +} + +template +size_t TiffEntryImpl::getSize() const { + uint32_t total = getActualSize(); + WORD_ALIGN(total) + return (total <= OFFSET_SIZE) ? 
0 : total; +} + +template +uint32_t TiffEntryImpl::getActualSize() const { + uint32_t total = sizeof(T) * mCount; + if (getType() == RATIONAL || getType() == SRATIONAL) { + // 2 ints stored for each rational, multiply by 2 + total <<= 1; + } + return total; +} + +template +Endianness TiffEntryImpl::getEndianness() const { + return mEnd; +} + +template +uint32_t TiffEntryImpl::getComparableValue() const { + return mTag; +} + +template +status_t TiffEntryImpl::writeTagInfo(uint32_t offset, /*out*/EndianOutput* out) const { + assert((offset % TIFF_WORD_SIZE) == 0); + status_t ret = OK; + BAIL_ON_FAIL(out->write(&mTag, 0, 1), ret); + BAIL_ON_FAIL(out->write(&mType, 0, 1), ret); + BAIL_ON_FAIL(out->write(&mCount, 0, 1), ret); + + uint32_t dataSize = getActualSize(); + if (dataSize > OFFSET_SIZE) { + BAIL_ON_FAIL(out->write(&offset, 0, 1), ret); + } else { + uint32_t count = mCount; + if (getType() == RATIONAL || getType() == SRATIONAL) { + /** + * Rationals are stored as an array of ints. Each + * rational is represented by 2 ints. To recover the + * size of the array here, multiply the count by 2. + */ + count <<= 1; + } + BAIL_ON_FAIL(out->write(&mData[0], 0, count), ret); + ZERO_TILL_WORD(out, dataSize, ret); + } + return ret; +} + +template +status_t TiffEntryImpl::writeData(uint32_t /*offset*/, EndianOutput* out) const { + status_t ret = OK; + + // Some tags have fixed-endian value output + Endianness tmp = UNDEFINED_ENDIAN; + if (mEnd != UNDEFINED_ENDIAN) { + tmp = out->getEndianness(); + out->setEndianness(mEnd); + } + + uint32_t count = mCount; + if (getType() == RATIONAL || getType() == SRATIONAL) { + /** + * Rationals are stored as an array of ints. Each + * rational is represented by 2 ints. To recover the + * size of the array here, multiply the count by 2. 
+ */ + count <<= 1; + } + + BAIL_ON_FAIL(out->write(&mData[0], 0, count), ret); + + if (mEnd != UNDEFINED_ENDIAN) { + out->setEndianness(tmp); + } + + // Write to next word alignment + ZERO_TILL_WORD(out, sizeof(T) * count, ret); + return ret; +} + +template<> +inline status_t TiffEntryImpl >::writeTagInfo(uint32_t offset, + /*out*/EndianOutput* out) const { + assert((offset % TIFF_WORD_SIZE) == 0); + status_t ret = OK; + BAIL_ON_FAIL(out->write(&mTag, 0, 1), ret); + BAIL_ON_FAIL(out->write(&mType, 0, 1), ret); + BAIL_ON_FAIL(out->write(&mCount, 0, 1), ret); + + BAIL_ON_FAIL(out->write(&offset, 0, 1), ret); + return ret; +} + +template<> +inline uint32_t TiffEntryImpl >::getActualSize() const { + uint32_t total = 0; + for (size_t i = 0; i < mData.size(); ++i) { + total += mData[i]->getSize(); + } + return total; +} + +template<> +inline status_t TiffEntryImpl >::writeData(uint32_t offset, EndianOutput* out) const { + status_t ret = OK; + for (uint32_t i = 0; i < mCount; ++i) { + BAIL_ON_FAIL(mData[i]->writeData(offset, out), ret); + offset += mData[i]->getSize(); + } + return ret; +} + +} /*namespace img_utils*/ +} /*namespace android*/ + +#endif /*IMG_UTILS_TIFF_ENTRY_IMPL*/ + + diff --git a/app/src/main/cpp/img_utils/include/img_utils/TiffHelpers.h b/app/src/main/cpp/img_utils/include/img_utils/TiffHelpers.h new file mode 100644 index 00000000..3e5f8630 --- /dev/null +++ b/app/src/main/cpp/img_utils/include/img_utils/TiffHelpers.h @@ -0,0 +1,132 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef IMG_UTILS_TIFF_HELPERS_H +#define IMG_UTILS_TIFF_HELPERS_H + +#include + +namespace android { +namespace img_utils { + +const uint8_t ZERO_WORD[] = {0, 0, 0, 0}; + +#define BAIL_ON_FAIL(x, flag) \ + if (((flag) = (x)) != OK) return flag; + +#define BYTES_TILL_WORD(index) \ + ((TIFF_WORD_SIZE - ((index) % TIFF_WORD_SIZE)) % TIFF_WORD_SIZE) + +#define WORD_ALIGN(count) \ + count += BYTES_TILL_WORD(count); + +#define ZERO_TILL_WORD(output, index, ret) \ + { \ + size_t remaining = BYTES_TILL_WORD(index); \ + if (remaining > 0) { \ + BAIL_ON_FAIL((output)->write(ZERO_WORD, 0, remaining), ret); \ + } \ + } + +/** + * Basic TIFF header constants. + */ +enum { + BAD_OFFSET = 0, + TIFF_WORD_SIZE = 4, // Size in bytes + IFD_HEADER_SIZE = 2, // Size in bytes + IFD_FOOTER_SIZE = 4, // Size in bytes + TIFF_ENTRY_SIZE = 12, // Size in bytes + MAX_IFD_ENTRIES = UINT16_MAX, + FILE_HEADER_SIZE = 8, // Size in bytes + ENDIAN_MARKER_SIZE = 2, // Size in bytes + TIFF_MARKER_SIZE = 2, // Size in bytes + OFFSET_MARKER_SIZE = 4, // Size in bytes + TIFF_FILE_MARKER = 42, + BIG_ENDIAN_MARKER = 0x4D4Du, + LITTLE_ENDIAN_MARKER = 0x4949u +}; + +/** + * Constants for the TIFF tag types. + */ +enum TagType { + UNKNOWN_TAGTYPE = 0, + BYTE=1, + ASCII, + SHORT, + LONG, + RATIONAL, + SBYTE, + UNDEFINED, + SSHORT, + SLONG, + SRATIONAL, + FLOAT, + DOUBLE +}; + +/** + * Sizes of the TIFF entry fields (in bytes). + */ +enum { + TAG_SIZE = 2, + TYPE_SIZE = 2, + COUNT_SIZE = 4, + OFFSET_SIZE = 4 +}; + +/** + * Convenience IFD id constants. 
+ */ +enum { + IFD_0 = 0, + RAW_IFD, + PROFILE_IFD, + PREVIEW_IFD +}; + +inline size_t getTypeSize(TagType type) { + switch(type) { + case UNDEFINED: + case ASCII: + case BYTE: + case SBYTE: + return 1; + case SHORT: + case SSHORT: + return 2; + case LONG: + case SLONG: + case FLOAT: + return 4; + case RATIONAL: + case SRATIONAL: + case DOUBLE: + return 8; + default: + return 0; + } +} + +inline uint32_t calculateIfdSize(size_t numberOfEntries) { + return IFD_HEADER_SIZE + IFD_FOOTER_SIZE + TIFF_ENTRY_SIZE * numberOfEntries; +} + +} /*namespace img_utils*/ +} /*namespace android*/ + +#endif /*IMG_UTILS_TIFF_HELPERS_H*/ diff --git a/app/src/main/cpp/img_utils/include/img_utils/TiffIfd.h b/app/src/main/cpp/img_utils/include/img_utils/TiffIfd.h new file mode 100644 index 00000000..0021e8ce --- /dev/null +++ b/app/src/main/cpp/img_utils/include/img_utils/TiffIfd.h @@ -0,0 +1,164 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef IMG_UTILS_TIFF_IFD_H +#define IMG_UTILS_TIFF_IFD_H + +#include +#include +#include + +#include +#include +#include +#include +#include + +namespace android { +namespace img_utils { + +/** + * This class holds a single TIFF Image File Directory (IFD) structure. + * + * This maps to the TIFF IFD structure that is logically composed of: + * - A 2-byte field listing the number of entries. + * - A list of 12-byte TIFF entries. + * - A 4-byte offset to the next IFD. 
+ */ +class ANDROID_API TiffIfd : public TiffWritable { + public: + explicit TiffIfd(uint32_t ifdId); + virtual ~TiffIfd(); + + /** + * Add a TiffEntry to this IFD or replace an existing entry with the + * same tag ID. No validation is done. + * + * Returns OK on success, or a negative error code on failure. + */ + virtual status_t addEntry(const sp& entry); + + /** + * Set the pointer to the next IFD. This is used to create a linked + * list of IFDs as defined by the TIFF 6.0 spec., and is not included + * when calculating the size of IFD and entries for the getSize() + * method (unlike SubIFDs). + */ + virtual void setNextIfd(const sp& ifd); + + /** + * Get the pointer to the next IFD, or NULL if none exists. + */ + virtual sp getNextIfd() const; + + /** + * Write the IFD data. This includes the IFD header, entries, footer, + * and the corresponding values for each entry (recursively including + * sub-IFDs). The written amount should end on a word boundary, and + * the given offset should be word aligned. + * + * Returns OK on success, or a negative error code on failure. + */ + virtual status_t writeData(uint32_t offset, /*out*/EndianOutput* out) const; + + /** + * Get the size of the IFD. This includes the IFD header, entries, footer, + * and the corresponding values for each entry (recursively including + * any sub-IFDs). + */ + virtual size_t getSize() const; + + /** + * Get the id of this IFD. + */ + virtual uint32_t getId() const; + + /** + * Get an entry with the given tag ID. + * + * Returns a strong pointer to the entry if it exists, or an empty strong + * pointer. + */ + virtual sp getEntry(uint16_t tag) const; + + /** + * Remove the entry with the given tag ID if it exists. + */ + virtual void removeEntry(uint16_t tag); + + /** + * Convenience method to validate and set strip-related image tags. + * + * This sets all strip related tags, but leaves offset values unitialized. + * setStripOffsets must be called with the desired offset before writing. 
+ * The strip tag values are calculated from the existing tags for image + * dimensions and pixel type set in the IFD. + * + * Does not handle planar image configurations (PlanarConfiguration != 1). + * + * Returns OK on success, or a negative error code. + */ + virtual status_t validateAndSetStripTags(); + + /** + * Returns true if validateAndSetStripTags has been called, but not setStripOffsets. + */ + virtual bool uninitializedOffsets() const; + + /** + * Convenience method to set beginning offset for strips. + * + * Call this to update the strip offsets before calling writeData. + * + * Returns OK on success, or a negative error code. + */ + virtual status_t setStripOffset(uint32_t offset); + + /** + * Get the total size of the strips in bytes. + * + * This sums the byte count at each strip offset, and returns + * the total count of bytes stored in strips for this IFD. + */ + virtual uint32_t getStripSize() const; + + /** + * Get a formatted string representing this IFD. + */ + virtual std::string toString() const; + + /** + * Print a formatted string representing this IFD to logcat. + */ + void log() const; + + /** + * Get value used to determine sort order. + */ + virtual uint32_t getComparableValue() const; + + protected: + virtual uint32_t checkAndGetOffset(uint32_t offset) const; + std::map > mEntries; + sp mNextIfd; + uint32_t mIfdId; + bool mStripOffsetsInitialized; +}; + +} /*namespace img_utils*/ +} /*namespace android*/ + +#endif /*IMG_UTILS_TIFF_IFD_H*/ diff --git a/app/src/main/cpp/img_utils/include/img_utils/TiffWritable.h b/app/src/main/cpp/img_utils/include/img_utils/TiffWritable.h new file mode 100644 index 00000000..a72cecca --- /dev/null +++ b/app/src/main/cpp/img_utils/include/img_utils/TiffWritable.h @@ -0,0 +1,60 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef IMG_UTILS_TIFF_WRITABLE +#define IMG_UTILS_TIFF_WRITABLE + +#include +#include +#include + +#include +#include +#include +#include + +namespace android { +namespace img_utils { + +/** + * TiffWritable subclasses represent TIFF metadata objects that can be written + * to an EndianOutput object. This is used for TIFF entries and IFDs. + */ +class ANDROID_API TiffWritable : public Orderable, public LightRefBase { + public: + TiffWritable(); + virtual ~TiffWritable(); + + /** + * Write the data to the output. The given offset is used to calculate + * the header offset for values written. The offset is defined + * relative to the beginning of the TIFF header, and is word aligned. + * + * Returns OK on success, or a negative error code on failure. + */ + virtual status_t writeData(uint32_t offset, /*out*/EndianOutput* out) const = 0; + + /** + * Get the size of the data to write. + */ + virtual size_t getSize() const = 0; + +}; + +} /*namespace img_utils*/ +} /*namespace android*/ + +#endif /*IMG_UTILS_TIFF_WRITABLE*/ diff --git a/app/src/main/cpp/img_utils/include/img_utils/TiffWriter.h b/app/src/main/cpp/img_utils/include/img_utils/TiffWriter.h new file mode 100644 index 00000000..3cb7bbff --- /dev/null +++ b/app/src/main/cpp/img_utils/include/img_utils/TiffWriter.h @@ -0,0 +1,328 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef IMG_UTILS_TIFF_WRITER_H +#define IMG_UTILS_TIFF_WRITER_H + +#include +#include +#include +#include +#include + +#include +#include +#include + +#include +#include + +#include +#include + +namespace android { +namespace img_utils { + +class TiffEntry; +class TiffIfd; +class Output; + +/** + * This class holds a collection of TIFF IFDs that can be written as a + * complete DNG file header. + * + * This maps to the TIFF header structure that is logically composed of: + * - An 8-byte file header containing an endianness indicator, the TIFF + * file marker, and the offset to the first IFD. + * - A list of TIFF IFD structures. + */ +class ANDROID_API TiffWriter : public LightRefBase { + public: + enum SubIfdType { + SUBIFD = 0, + GPSINFO + }; + + /** + * Constructs a TiffWriter with the default tag mappings. This enables + * all of the tags defined in TagDefinitions.h, and uses the following + * mapping precedence to resolve collisions: + * (highest precedence) TIFF/EP > DNG > EXIF 2.3 > TIFF 6.0 + */ + TiffWriter(); + + /** + * Constructs a TiffWriter with the given tag mappings. The mapping + * precedence will be in the order that the definition maps are given, + * where the lower index map gets precedence. + * + * This can be used with user-defined definitions, or definitions form + * TagDefinitions.h + * + * The enabledDefinitions mapping object is owned by the caller, and must + * stay alive for the lifespan of the constructed TiffWriter object. 
+ */ + TiffWriter(std::map* enabledDefinitions, + size_t length); + + virtual ~TiffWriter(); + + /** + * Write a TIFF header containing each IFD set. This will recursively + * write all SubIFDs and tags. + * + * Any StripSources passed in will be written to the output as image strips + * at the appropriate offests. The StripByteCounts, RowsPerStrip, and + * StripOffsets tags must be set to use this. To set these tags in a + * given IFD, use the addStrip method. + * + * Returns OK on success, or a negative error code on failure. + */ + virtual status_t write(Output* out, StripSource** sources, size_t sourcesCount, + Endianness end = LITTLE); + + /** + * Write a TIFF header containing each IFD set. This will recursively + * write all SubIFDs and tags. + * + * Image data for strips or tiles must be written separately at the + * appropriate offsets. These offsets must not fall within the file + * header written this way. The size of the header written is given + * by the getTotalSize() method. + * + * Returns OK on success, or a negative error code on failure. + */ + virtual status_t write(Output* out, Endianness end = LITTLE); + + /** + * Get the total size in bytes of the TIFF header. This includes all + * IFDs, tags, and values set for this TiffWriter. + */ + virtual uint32_t getTotalSize() const; + + /** + * Add an entry to the IFD with the given ID. + * + * Returns OK on success, or a negative error code on failure. Valid + * error codes for this method are: + * - BAD_INDEX - The given tag doesn't exist. + * - BAD_VALUE - The given count doesn't match the required count for + * this tag. + * - BAD_TYPE - The type of the given data isn't compatible with the + * type required for this tag. + * - NAME_NOT_FOUND - No ifd exists with the given ID. + */ + virtual status_t addEntry(const sp& entry, uint32_t ifd); + + /** + * Build an entry for a known tag and add it to the IFD with the given ID. 
+ * This tag must be defined in one of the definition vectors this TIFF writer + * was constructed with. The count and type are validated. + * + * Returns OK on success, or a negative error code on failure. Valid + * error codes for this method are: + * - BAD_INDEX - The given tag doesn't exist. + * - BAD_VALUE - The given count doesn't match the required count for + * this tag. + * - BAD_TYPE - The type of the given data isn't compatible with the + * type required for this tag. + * - NAME_NOT_FOUND - No ifd exists with the given ID. + */ + template + status_t addEntry(uint16_t tag, uint32_t count, const T* data, uint32_t ifd); + + /** + * Build an entry for a known tag. This tag must be one of the tags + * defined in one of the definition vectors this TIFF writer was constructed + * with. The count and type are validated. If this succeeds, the resulting + * entry will be placed in the outEntry pointer. + * + * Returns OK on success, or a negative error code on failure. Valid + * error codes for this method are: + * - BAD_INDEX - The given tag doesn't exist. + * - BAD_VALUE - The given count doesn't match the required count for + * this tag. + * - BAD_TYPE - The type of the given data isn't compatible with the + * type required for this tag. + */ + template + status_t buildEntry(uint16_t tag, uint32_t count, const T* data, + /*out*/sp* outEntry) const; + + /** + * Convenience function to set the strip related tags for a given IFD. + * + * Call this before using a StripSource as an input to write. + * The following tags must be set before calling this method: + * - ImageWidth + * - ImageLength + * - SamplesPerPixel + * - BitsPerSample + * + * Returns OK on success, or a negative error code. + */ + virtual status_t addStrip(uint32_t ifd); + + /** + * Return the TIFF entry with the given tag ID in the IFD with the given ID, + * or an empty pointer if none exists. 
+ */ + virtual sp getEntry(uint16_t tag, uint32_t ifd) const; + + /** + * Remove the TIFF entry with the given tag ID in the given IFD if it exists. + */ + virtual void removeEntry(uint16_t tag, uint32_t ifd); + + /** + * Create an empty IFD with the given ID and add it to the end of the + * list of IFDs. + */ + virtual status_t addIfd(uint32_t ifd); + + /** + * Create an empty IFD with the given ID and add it as a SubIfd of the + * parent IFD. + */ + virtual status_t addSubIfd(uint32_t parentIfd, uint32_t ifd, SubIfdType type = SUBIFD); + + /** + * Returns the default type for the given tag ID. + */ + virtual TagType getDefaultType(uint16_t tag) const; + + /** + * Returns the default count for a given tag ID, or 0 if this + * tag normally has a variable count. + */ + virtual uint32_t getDefaultCount(uint16_t tag) const; + + /** + * Returns true if an IFD with the given ID exists. + */ + virtual bool hasIfd(uint32_t ifd) const; + + /** + * Returns true if a definition exist for the given tag ID. + */ + virtual bool checkIfDefined(uint16_t tag) const; + + /** + * Returns the name of the tag if a definition exists for the given tag + * ID, or null if no definition exists. + */ + virtual const char* getTagName(uint16_t tag) const; + + /** + * Print the currently configured IFDs and entries to logcat. + */ + virtual void log() const; + + /** + * Build an entry. No validation is done. + * + * WARNING: Using this method can result in creating poorly formatted + * TIFF files. + * + * Returns a TiffEntry with the given tag, type, count, endianness, + * and data. + */ + template + static sp uncheckedBuildEntry(uint16_t tag, TagType type, + uint32_t count, Endianness end, const T* data); + + /** + * Utility function to build atag-to-definition mapping from a given + * array of tag definitions. 
+ */ +#if 0 + static KeyedVector buildTagMap( + const TagDefinition_t* definitions, size_t length); +#endif + + protected: + enum { + DEFAULT_NUM_TAG_MAPS = 4, + }; + + sp findLastIfd(); + status_t writeFileHeader(EndianOutput& out); + const TagDefinition_t* lookupDefinition(uint16_t tag) const; + status_t calculateOffsets(); + + sp mIfd; + std::map > mNamedIfds; + std::vector > mTagMaps; + size_t mNumTagMaps; +#if 0 + static KeyedVector sTagMaps[]; +#endif +}; + +template +status_t TiffWriter::buildEntry(uint16_t tag, uint32_t count, const T* data, + /*out*/sp* outEntry) const { + const TagDefinition_t* definition = lookupDefinition(tag); + + if (definition == NULL) { + ALOGE("%s: No such tag exists for id %x.", __FUNCTION__, tag); + return BAD_INDEX; + } + + uint32_t fixedCount = definition->fixedCount; + if (fixedCount > 0 && fixedCount != count) { + ALOGE("%s: Invalid count %d for tag %x (expects %d).", __FUNCTION__, count, tag, + fixedCount); + return BAD_VALUE; + } + + TagType fixedType = definition->defaultType; + if (TiffEntry::forceValidType(fixedType, data) == NULL) { + ALOGE("%s: Invalid type used for tag value for tag %x.", __FUNCTION__, tag); + return BAD_TYPE; + } + + *outEntry = new TiffEntryImpl(tag, fixedType, count, + definition->fixedEndian, data); + + return OK; +} + +template +status_t TiffWriter::addEntry(uint16_t tag, uint32_t count, const T* data, uint32_t ifd) { + sp outEntry; + + status_t ret = buildEntry(tag, count, data, &outEntry); + if (ret != OK) { + ALOGE("%s: Could not build entry for tag %x.", __FUNCTION__, tag); + return ret; + } + + return addEntry(outEntry, ifd); +} + +template +sp TiffWriter::uncheckedBuildEntry(uint16_t tag, TagType type, uint32_t count, + Endianness end, const T* data) { + TiffEntryImpl* entry = new TiffEntryImpl(tag, type, count, end, data); + return sp(entry); +} + +} /*namespace img_utils*/ +} /*namespace android*/ + + +#endif /*IMG_UTILS_TIFF_WRITER_H*/ diff --git 
a/app/src/main/cpp/img_utils/src/ByteArrayOutput.cpp b/app/src/main/cpp/img_utils/src/ByteArrayOutput.cpp new file mode 100644 index 00000000..c4f0ea93 --- /dev/null +++ b/app/src/main/cpp/img_utils/src/ByteArrayOutput.cpp @@ -0,0 +1,54 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include + +namespace android { +namespace img_utils { + +ByteArrayOutput::ByteArrayOutput() {} + +ByteArrayOutput::~ByteArrayOutput() {} + +status_t ByteArrayOutput::open() { + return OK; +} + +status_t ByteArrayOutput::write(const uint8_t* buf, size_t offset, size_t count) { + if (mByteArray.insert(mByteArray.end(), buf + offset, buf + offset + count) == mByteArray.end()) { + ALOGE("%s: Failed to write to ByteArrayOutput.", __FUNCTION__); + return BAD_VALUE; + } + return OK; +} + +status_t ByteArrayOutput::close() { + mByteArray.clear(); + return OK; +} + +size_t ByteArrayOutput::getSize() const { + return mByteArray.size(); +} + +const uint8_t* ByteArrayOutput::getArray() const { + return &mByteArray[0]; +} + +} /*namespace img_utils*/ +} /*namespace android*/ diff --git a/app/src/main/cpp/img_utils/src/DngUtils.cpp b/app/src/main/cpp/img_utils/src/DngUtils.cpp new file mode 100644 index 00000000..79140302 --- /dev/null +++ b/app/src/main/cpp/img_utils/src/DngUtils.cpp @@ -0,0 +1,496 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 
(the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include + +#include +#include +#include + +namespace android { +namespace img_utils { + +OpcodeListBuilder::OpcodeListBuilder() : mCount(0), mOpList(), mEndianOut(&mOpList, BIG) { + if(mEndianOut.open() != OK) { + ALOGE("%s: Open failed.", __FUNCTION__); + } +} + +OpcodeListBuilder::~OpcodeListBuilder() { + if(mEndianOut.close() != OK) { + ALOGE("%s: Close failed.", __FUNCTION__); + } +} + +size_t OpcodeListBuilder::getSize() const { + return mOpList.getSize() + sizeof(mCount); +} + +uint32_t OpcodeListBuilder::getCount() const { + return mCount; +} + +status_t OpcodeListBuilder::buildOpList(uint8_t* buf) const { + uint32_t count = convertToBigEndian(mCount); + memcpy(buf, &count, sizeof(count)); + memcpy(buf + sizeof(count), mOpList.getArray(), mOpList.getSize()); + return OK; +} + +status_t OpcodeListBuilder::addGainMapsForMetadata(uint32_t lsmWidth, + uint32_t lsmHeight, + uint32_t activeAreaTop, + uint32_t activeAreaLeft, + uint32_t activeAreaBottom, + uint32_t activeAreaRight, + CfaLayout cfa, + const float* lensShadingMap) { + status_t err = OK; + uint32_t activeAreaWidth = activeAreaRight - activeAreaLeft; + uint32_t activeAreaHeight = activeAreaBottom - activeAreaTop; + + switch (cfa) { + case CFA_RGGB: + case CFA_GRBG: + case CFA_GBRG: + case CFA_BGGR: + err = addBayerGainMapsForMetadata(lsmWidth, lsmHeight, activeAreaWidth, + activeAreaHeight, cfa, lensShadingMap); + break; + case CFA_NONE: + err = 
addMonochromeGainMapsForMetadata(lsmWidth, lsmHeight, activeAreaWidth, + activeAreaHeight, lensShadingMap); + break; + default: + ALOGE("%s: Unknown CFA layout %d", __FUNCTION__, cfa); + err = BAD_VALUE; + break; + } + return err; +} + +status_t OpcodeListBuilder::addBayerGainMapsForMetadata(uint32_t lsmWidth, + uint32_t lsmHeight, + uint32_t activeAreaWidth, + uint32_t activeAreaHeight, + CfaLayout cfa, + const float* lensShadingMap) { + uint32_t redTop = 0; + uint32_t redLeft = 0; + uint32_t greenEvenTop = 0; + uint32_t greenEvenLeft = 1; + uint32_t greenOddTop = 1; + uint32_t greenOddLeft = 0; + uint32_t blueTop = 1; + uint32_t blueLeft = 1; + + switch(cfa) { + case CFA_RGGB: + redTop = 0; + redLeft = 0; + greenEvenTop = 0; + greenEvenLeft = 1; + greenOddTop = 1; + greenOddLeft = 0; + blueTop = 1; + blueLeft = 1; + break; + case CFA_GRBG: + redTop = 0; + redLeft = 1; + greenEvenTop = 0; + greenEvenLeft = 0; + greenOddTop = 1; + greenOddLeft = 1; + blueTop = 1; + blueLeft = 0; + break; + case CFA_GBRG: + redTop = 1; + redLeft = 0; + greenEvenTop = 0; + greenEvenLeft = 0; + greenOddTop = 1; + greenOddLeft = 1; + blueTop = 0; + blueLeft = 1; + break; + case CFA_BGGR: + redTop = 1; + redLeft = 1; + greenEvenTop = 0; + greenEvenLeft = 1; + greenOddTop = 1; + greenOddLeft = 0; + blueTop = 0; + blueLeft = 0; + break; + default: + ALOGE("%s: Unknown CFA layout %d", __FUNCTION__, cfa); + return BAD_VALUE; + } + + std::vector redMapVector(lsmWidth * lsmHeight); + float *redMap = redMapVector.data(); + + std::vector greenEvenMapVector(lsmWidth * lsmHeight); + float *greenEvenMap = greenEvenMapVector.data(); + + std::vector greenOddMapVector(lsmWidth * lsmHeight); + float *greenOddMap = greenOddMapVector.data(); + + std::vector blueMapVector(lsmWidth * lsmHeight); + float *blueMap = blueMapVector.data(); + + double spacingV = 1.0 / std::max(1u, lsmHeight - 1); + double spacingH = 1.0 / std::max(1u, lsmWidth - 1); + + size_t lsmMapSize = lsmWidth * lsmHeight * 4; + + // 
Split lens shading map channels into separate arrays + size_t j = 0; + for (size_t i = 0; i < lsmMapSize; i += 4, ++j) { + redMap[j] = lensShadingMap[i + LSM_R_IND]; + greenEvenMap[j] = lensShadingMap[i + LSM_GE_IND]; + greenOddMap[j] = lensShadingMap[i + LSM_GO_IND]; + blueMap[j] = lensShadingMap[i + LSM_B_IND]; + } + + status_t err = addGainMap(/*top*/redTop, + /*left*/redLeft, + /*bottom*/activeAreaHeight, + /*right*/activeAreaWidth, + /*plane*/0, + /*planes*/1, + /*rowPitch*/2, + /*colPitch*/2, + /*mapPointsV*/lsmHeight, + /*mapPointsH*/lsmWidth, + /*mapSpacingV*/spacingV, + /*mapSpacingH*/spacingH, + /*mapOriginV*/0, + /*mapOriginH*/0, + /*mapPlanes*/1, + /*mapGains*/redMap); + if (err != OK) return err; + + err = addGainMap(/*top*/greenEvenTop, + /*left*/greenEvenLeft, + /*bottom*/activeAreaHeight, + /*right*/activeAreaWidth, + /*plane*/0, + /*planes*/1, + /*rowPitch*/2, + /*colPitch*/2, + /*mapPointsV*/lsmHeight, + /*mapPointsH*/lsmWidth, + /*mapSpacingV*/spacingV, + /*mapSpacingH*/spacingH, + /*mapOriginV*/0, + /*mapOriginH*/0, + /*mapPlanes*/1, + /*mapGains*/greenEvenMap); + if (err != OK) return err; + + err = addGainMap(/*top*/greenOddTop, + /*left*/greenOddLeft, + /*bottom*/activeAreaHeight, + /*right*/activeAreaWidth, + /*plane*/0, + /*planes*/1, + /*rowPitch*/2, + /*colPitch*/2, + /*mapPointsV*/lsmHeight, + /*mapPointsH*/lsmWidth, + /*mapSpacingV*/spacingV, + /*mapSpacingH*/spacingH, + /*mapOriginV*/0, + /*mapOriginH*/0, + /*mapPlanes*/1, + /*mapGains*/greenOddMap); + if (err != OK) return err; + + err = addGainMap(/*top*/blueTop, + /*left*/blueLeft, + /*bottom*/activeAreaHeight, + /*right*/activeAreaWidth, + /*plane*/0, + /*planes*/1, + /*rowPitch*/2, + /*colPitch*/2, + /*mapPointsV*/lsmHeight, + /*mapPointsH*/lsmWidth, + /*mapSpacingV*/spacingV, + /*mapSpacingH*/spacingH, + /*mapOriginV*/0, + /*mapOriginH*/0, + /*mapPlanes*/1, + /*mapGains*/blueMap); + return err; +} + +status_t OpcodeListBuilder::addMonochromeGainMapsForMetadata(uint32_t lsmWidth, 
+ uint32_t lsmHeight, + uint32_t activeAreaWidth, + uint32_t activeAreaHeight, + const float* lensShadingMap) { + std::vector mapVector(lsmWidth * lsmHeight); + float *map = mapVector.data(); + + double spacingV = 1.0 / std::max(1u, lsmHeight - 1); + double spacingH = 1.0 / std::max(1u, lsmWidth - 1); + + size_t lsmMapSize = lsmWidth * lsmHeight * 4; + + // Split lens shading map channels into separate arrays + size_t j = 0; + for (size_t i = 0; i < lsmMapSize; i += 4, ++j) { + map[j] = lensShadingMap[i]; + } + + status_t err = addGainMap(/*top*/0, + /*left*/0, + /*bottom*/activeAreaHeight, + /*right*/activeAreaWidth, + /*plane*/0, + /*planes*/1, + /*rowPitch*/1, + /*colPitch*/1, + /*mapPointsV*/lsmHeight, + /*mapPointsH*/lsmWidth, + /*mapSpacingV*/spacingV, + /*mapSpacingH*/spacingH, + /*mapOriginV*/0, + /*mapOriginH*/0, + /*mapPlanes*/1, + /*mapGains*/map); + if (err != OK) return err; + + return err; +} + +status_t OpcodeListBuilder::addGainMap(uint32_t top, + uint32_t left, + uint32_t bottom, + uint32_t right, + uint32_t plane, + uint32_t planes, + uint32_t rowPitch, + uint32_t colPitch, + uint32_t mapPointsV, + uint32_t mapPointsH, + double mapSpacingV, + double mapSpacingH, + double mapOriginV, + double mapOriginH, + uint32_t mapPlanes, + const float* mapGains) { + + status_t err = addOpcodePreamble(GAIN_MAP_ID); + if (err != OK) return err; + + // Allow this opcode to be skipped if not supported + uint32_t flags = FLAG_OPTIONAL; + + err = mEndianOut.write(&flags, 0, 1); + if (err != OK) return err; + + const uint32_t NUMBER_INT_ARGS = 11; + const uint32_t NUMBER_DOUBLE_ARGS = 4; + + uint32_t totalSize = NUMBER_INT_ARGS * sizeof(uint32_t) + NUMBER_DOUBLE_ARGS * sizeof(double) + + mapPointsV * mapPointsH * mapPlanes * sizeof(float); + + err = mEndianOut.write(&totalSize, 0, 1); + if (err != OK) return err; + + // Batch writes as much as possible + uint32_t settings1[] = { top, + left, + bottom, + right, + plane, + planes, + rowPitch, + colPitch, + mapPointsV, 
+ mapPointsH }; + + err = mEndianOut.write(settings1, 0, NELEMS(settings1)); + if (err != OK) return err; + + double settings2[] = { mapSpacingV, + mapSpacingH, + mapOriginV, + mapOriginH }; + + err = mEndianOut.write(settings2, 0, NELEMS(settings2)); + if (err != OK) return err; + + err = mEndianOut.write(&mapPlanes, 0, 1); + if (err != OK) return err; + + err = mEndianOut.write(mapGains, 0, mapPointsV * mapPointsH * mapPlanes); + if (err != OK) return err; + + mCount++; + + return OK; +} + +status_t OpcodeListBuilder::addWarpRectilinearForMetadata(const float* kCoeffs, + uint32_t activeArrayWidth, + uint32_t activeArrayHeight, + float opticalCenterX, + float opticalCenterY) { + if (activeArrayWidth <= 1 || activeArrayHeight <= 1) { + ALOGE("%s: Cannot add opcode for active array with dimensions w=%" PRIu32 ", h=%" PRIu32, + __FUNCTION__, activeArrayWidth, activeArrayHeight); + return BAD_VALUE; + } + + double normalizedOCX = opticalCenterX / static_cast(activeArrayWidth); + double normalizedOCY = opticalCenterY / static_cast(activeArrayHeight); + + normalizedOCX = CLAMP(normalizedOCX, 0, 1); + normalizedOCY = CLAMP(normalizedOCY, 0, 1); + + double coeffs[6] = { + kCoeffs[0], + kCoeffs[1], + kCoeffs[2], + kCoeffs[3], + kCoeffs[4], + kCoeffs[5] + }; + + return addWarpRectilinear(/*numPlanes*/1, + /*opticalCenterX*/normalizedOCX, + /*opticalCenterY*/normalizedOCY, + coeffs); +} + +status_t OpcodeListBuilder::addWarpRectilinear(uint32_t numPlanes, + double opticalCenterX, + double opticalCenterY, + const double* kCoeffs) { + + status_t err = addOpcodePreamble(WARP_RECTILINEAR_ID); + if (err != OK) return err; + + // Allow this opcode to be skipped if not supported + uint32_t flags = FLAG_OPTIONAL; + + err = mEndianOut.write(&flags, 0, 1); + if (err != OK) return err; + + const uint32_t NUMBER_CENTER_ARGS = 2; + const uint32_t NUMBER_COEFFS = numPlanes * 6; + uint32_t totalSize = (NUMBER_CENTER_ARGS + NUMBER_COEFFS) * sizeof(double) + sizeof(uint32_t); + + err = 
mEndianOut.write(&totalSize, 0, 1); + if (err != OK) return err; + + err = mEndianOut.write(&numPlanes, 0, 1); + if (err != OK) return err; + + err = mEndianOut.write(kCoeffs, 0, NUMBER_COEFFS); + if (err != OK) return err; + + err = mEndianOut.write(&opticalCenterX, 0, 1); + if (err != OK) return err; + + err = mEndianOut.write(&opticalCenterY, 0, 1); + if (err != OK) return err; + + mCount++; + + return OK; +} + +status_t OpcodeListBuilder::addBadPixelListForMetadata(const uint32_t* hotPixels, + uint32_t xyPairCount, + uint32_t colorFilterArrangement) { + if (colorFilterArrangement > 3) { + ALOGE("%s: Unknown color filter arrangement %" PRIu32, __FUNCTION__, + colorFilterArrangement); + return BAD_VALUE; + } + + return addBadPixelList(colorFilterArrangement, xyPairCount, 0, hotPixels, nullptr); +} + +status_t OpcodeListBuilder::addBadPixelList(uint32_t bayerPhase, + uint32_t badPointCount, + uint32_t badRectCount, + const uint32_t* badPointRowColPairs, + const uint32_t* badRectTopLeftBottomRightTuples) { + + status_t err = addOpcodePreamble(FIX_BAD_PIXELS_LIST); + if (err != OK) return err; + + // Allow this opcode to be skipped if not supported + uint32_t flags = FLAG_OPTIONAL; + + err = mEndianOut.write(&flags, 0, 1); + if (err != OK) return err; + + const uint32_t NUM_NON_VARLEN_FIELDS = 3; + const uint32_t SIZE_OF_POINT = 2; + const uint32_t SIZE_OF_RECT = 4; + + uint32_t totalSize = (NUM_NON_VARLEN_FIELDS + badPointCount * SIZE_OF_POINT + + badRectCount * SIZE_OF_RECT) * sizeof(uint32_t); + err = mEndianOut.write(&totalSize, 0, 1); + if (err != OK) return err; + + err = mEndianOut.write(&bayerPhase, 0, 1); + if (err != OK) return err; + + err = mEndianOut.write(&badPointCount, 0, 1); + if (err != OK) return err; + + err = mEndianOut.write(&badRectCount, 0, 1); + if (err != OK) return err; + + if (badPointCount > 0) { + err = mEndianOut.write(badPointRowColPairs, 0, SIZE_OF_POINT * badPointCount); + if (err != OK) return err; + } + + if (badRectCount > 0) { + 
err = mEndianOut.write(badRectTopLeftBottomRightTuples, 0, SIZE_OF_RECT * badRectCount); + if (err != OK) return err; + } + + mCount++; + return OK; +} + +status_t OpcodeListBuilder::addOpcodePreamble(uint32_t opcodeId) { + status_t err = mEndianOut.write(&opcodeId, 0, 1); + if (err != OK) return err; + + uint8_t version[] = {1, 3, 0, 0}; + err = mEndianOut.write(version, 0, NELEMS(version)); + if (err != OK) return err; + return OK; +} + +} /*namespace img_utils*/ +} /*namespace android*/ diff --git a/app/src/main/cpp/img_utils/src/EndianUtils.cpp b/app/src/main/cpp/img_utils/src/EndianUtils.cpp new file mode 100644 index 00000000..8681cbe2 --- /dev/null +++ b/app/src/main/cpp/img_utils/src/EndianUtils.cpp @@ -0,0 +1,83 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +namespace android { +namespace img_utils { + +EndianOutput::EndianOutput(Output* out, Endianness end) + : mOffset(0), mOutput(out), mEndian(end) {} + +EndianOutput::~EndianOutput() {} + +status_t EndianOutput::open() { + mOffset = 0; + return mOutput->open(); +} + +status_t EndianOutput::close() { + return mOutput->close(); +} + +void EndianOutput::setEndianness(Endianness end) { + mEndian = end; +} + +uint32_t EndianOutput::getCurrentOffset() const { + return mOffset; +} + +Endianness EndianOutput::getEndianness() const { + return mEndian; +} + +status_t EndianOutput::write(const uint8_t* buf, size_t offset, size_t count) { + status_t res = OK; + if((res = mOutput->write(buf, offset, count)) == OK) { + mOffset += count; + } + return res; +} + +status_t EndianOutput::write(const int8_t* buf, size_t offset, size_t count) { + return write(reinterpret_cast(buf), offset, count); +} + +#define DEFINE_WRITE(_type_) \ +status_t EndianOutput::write(const _type_* buf, size_t offset, size_t count) { \ + return writeHelper<_type_>(buf, offset, count); \ +} + +DEFINE_WRITE(uint16_t) +DEFINE_WRITE(int16_t) +DEFINE_WRITE(uint32_t) +DEFINE_WRITE(int32_t) +DEFINE_WRITE(uint64_t) +DEFINE_WRITE(int64_t) + +status_t EndianOutput::write(const float* buf, size_t offset, size_t count) { + assert(sizeof(float) == sizeof(uint32_t)); + return writeHelper(reinterpret_cast(buf), offset, count); +} + +status_t EndianOutput::write(const double* buf, size_t offset, size_t count) { + assert(sizeof(double) == sizeof(uint64_t)); + return writeHelper(reinterpret_cast(buf), offset, count); +} + +} /*namespace img_utils*/ +} /*namespace android*/ diff --git a/app/src/main/cpp/img_utils/src/FileInput.cpp b/app/src/main/cpp/img_utils/src/FileInput.cpp new file mode 100644 index 00000000..4c85a518 --- /dev/null +++ b/app/src/main/cpp/img_utils/src/FileInput.cpp @@ -0,0 +1,85 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 
2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include + +namespace android { +namespace img_utils { + +FileInput::FileInput(String8 path) : mFp(NULL), mPath(path), mOpen(false) {} + +FileInput::~FileInput() { + if (mOpen) { + ALOGE("%s: FileInput destroyed without calling close!", __FUNCTION__); + close(); + } + +} + +status_t FileInput::open() { + if (mOpen) { + ALOGW("%s: Open called when file %s already open.", __FUNCTION__, mPath.string()); + return OK; + } + mFp = ::fopen(mPath, "rb"); + if (!mFp) { + ALOGE("%s: Could not open file %s", __FUNCTION__, mPath.string()); + return BAD_VALUE; + } + mOpen = true; + return OK; +} + +ssize_t FileInput::read(uint8_t* buf, size_t offset, size_t count) { + if (!mOpen) { + ALOGE("%s: Could not read file %s, file not open.", __FUNCTION__, mPath.string()); + return BAD_VALUE; + } + + size_t bytesRead = ::fread(buf + offset, sizeof(uint8_t), count, mFp); + int error = ::ferror(mFp); + if (error != 0) { + ALOGE("%s: Error %d occurred while reading file %s.", __FUNCTION__, error, mPath.string()); + return BAD_VALUE; + } + + // End of file reached + if (::feof(mFp) != 0 && bytesRead == 0) { + return NOT_ENOUGH_DATA; + } + + return bytesRead; +} + +status_t FileInput::close() { + if(!mOpen) { + ALOGW("%s: Close called when file %s already close.", __FUNCTION__, mPath.string()); + return OK; + } + + status_t ret = OK; + if(::fclose(mFp) != 0) { + ALOGE("%s: Failed to close file %s.", __FUNCTION__, mPath.string()); + ret = BAD_VALUE; + } + mOpen = 
false; + return ret; +} + +} /*namespace img_utils*/ +} /*namespace android*/ diff --git a/app/src/main/cpp/img_utils/src/FileOutput.cpp b/app/src/main/cpp/img_utils/src/FileOutput.cpp new file mode 100644 index 00000000..0346762b --- /dev/null +++ b/app/src/main/cpp/img_utils/src/FileOutput.cpp @@ -0,0 +1,79 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include + +namespace android { +namespace img_utils { + +FileOutput::FileOutput(String8 path) : mFp(NULL), mPath(path), mOpen(false) {} + +FileOutput::~FileOutput() { + if (mOpen) { + ALOGW("%s: Destructor called with %s still open.", __FUNCTION__, mPath.string()); + close(); + } +} + +status_t FileOutput::open() { + if (mOpen) { + ALOGW("%s: Open called when file %s already open.", __FUNCTION__, mPath.string()); + return OK; + } + mFp = ::fopen(mPath, "wb"); + if (!mFp) { + ALOGE("%s: Could not open file %s", __FUNCTION__, mPath.string()); + return BAD_VALUE; + } + mOpen = true; + return OK; +} + +status_t FileOutput::write(const uint8_t* buf, size_t offset, size_t count) { + if (!mOpen) { + ALOGE("%s: Could not write file %s, file not open.", __FUNCTION__, mPath.string()); + return BAD_VALUE; + } + + ::fwrite(buf + offset, sizeof(uint8_t), count, mFp); + + int error = ::ferror(mFp); + if (error != 0) { + ALOGE("%s: Error %d occurred while writing file %s.", __FUNCTION__, error, mPath.string()); + return BAD_VALUE; + } + 
return OK; +} + +status_t FileOutput::close() { + if(!mOpen) { + ALOGW("%s: Close called when file %s already close.", __FUNCTION__, mPath.string()); + return OK; + } + + status_t ret = OK; + if(::fclose(mFp) != 0) { + ALOGE("%s: Failed to close file %s.", __FUNCTION__, mPath.string()); + ret = BAD_VALUE; + } + mOpen = false; + return ret; +} + +} /*namespace img_utils*/ +} /*namespace android*/ diff --git a/app/src/main/cpp/img_utils/src/Input.cpp b/app/src/main/cpp/img_utils/src/Input.cpp new file mode 100644 index 00000000..3782014f --- /dev/null +++ b/app/src/main/cpp/img_utils/src/Input.cpp @@ -0,0 +1,57 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +namespace android { +namespace img_utils { + +Input::~Input() {} + +status_t Input::open() { return OK; } + +status_t Input::close() { return OK; } + +ssize_t Input::skip(size_t count) { + const size_t SKIP_BUF_SIZE = 1024; + uint8_t skipBuf[SKIP_BUF_SIZE]; + + size_t remaining = count; + while (remaining > 0) { + size_t amt = (SKIP_BUF_SIZE > remaining) ? remaining : SKIP_BUF_SIZE; + ssize_t ret = read(skipBuf, 0, amt); + if (ret < 0) { + if(ret == NOT_ENOUGH_DATA) { + // End of file encountered + if (remaining == count) { + // Read no bytes, return EOF + return NOT_ENOUGH_DATA; + } else { + // Return num bytes read + return count - remaining; + } + } + // Return error code. 
+ return ret; + } + remaining -= ret; + } + return count; +} + +} /*namespace img_utils*/ +} /*namespace android*/ + diff --git a/app/src/main/cpp/img_utils/src/Orderable.cpp b/app/src/main/cpp/img_utils/src/Orderable.cpp new file mode 100644 index 00000000..300f122a --- /dev/null +++ b/app/src/main/cpp/img_utils/src/Orderable.cpp @@ -0,0 +1,39 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include + +namespace android { +namespace img_utils { + +#define COMPARE(op) \ +bool Orderable::operator op (const Orderable& orderable) const { \ + return getComparableValue() op orderable.getComparableValue(); \ +} + +COMPARE(>) +COMPARE(<) +COMPARE(>=) +COMPARE(<=) +COMPARE(==) +COMPARE(!=) + +Orderable::~Orderable() {} + +} /*namespace img_utils*/ +} /*namespace android*/ diff --git a/app/src/main/cpp/img_utils/src/Output.cpp b/app/src/main/cpp/img_utils/src/Output.cpp new file mode 100644 index 00000000..0e395b95 --- /dev/null +++ b/app/src/main/cpp/img_utils/src/Output.cpp @@ -0,0 +1,28 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +#include + +namespace android { +namespace img_utils { + +Output::~Output() {} +status_t Output::open() { return OK; } +status_t Output::close() { return OK; } + +} /*namespace img_utils*/ +} /*namespace android*/ diff --git a/app/src/main/cpp/img_utils/src/SortedEntryVector.cpp b/app/src/main/cpp/img_utils/src/SortedEntryVector.cpp new file mode 100644 index 00000000..f0e1fa1e --- /dev/null +++ b/app/src/main/cpp/img_utils/src/SortedEntryVector.cpp @@ -0,0 +1,44 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include +#include + +namespace android { +namespace img_utils { + +SortedEntryVector::~SortedEntryVector() {} + +ssize_t SortedEntryVector::indexOfTag(uint16_t tag) const { + // TODO: Use binary search here. 
+ for (size_t i = 0; i < size(); ++i) { + if (itemAt(i)->getTag() == tag) { + return i; + } + } + return -1; +} + +int SortedEntryVector::do_compare(const void* lhs, const void* rhs) const { + const sp* lEntry = reinterpret_cast*>(lhs); + const sp* rEntry = reinterpret_cast*>(rhs); + return compare_type(**lEntry, **rEntry); +} + +} /*namespace img_utils*/ +} /*namespace android*/ diff --git a/app/src/main/cpp/img_utils/src/StripSource.cpp b/app/src/main/cpp/img_utils/src/StripSource.cpp new file mode 100644 index 00000000..57b60824 --- /dev/null +++ b/app/src/main/cpp/img_utils/src/StripSource.cpp @@ -0,0 +1,25 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +namespace android { +namespace img_utils { + +StripSource::~StripSource() {} + +} /*namespace img_utils*/ +} /*namespace android*/ diff --git a/app/src/main/cpp/img_utils/src/TiffEntry.cpp b/app/src/main/cpp/img_utils/src/TiffEntry.cpp new file mode 100644 index 00000000..ba5e8126 --- /dev/null +++ b/app/src/main/cpp/img_utils/src/TiffEntry.cpp @@ -0,0 +1,251 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include + +#include +#include + +#include + +namespace android { +namespace img_utils { + +TiffEntry::~TiffEntry() {} + +/** + * Specialize for each valid type, including sub-IFDs. + * + * Values with types other than the ones given here should not compile. + */ + +template<> +const sp* TiffEntry::forceValidType >(TagType type, const sp* value) { + if (type == LONG) { + return value; + } + ALOGE("%s: Value of type 'ifd' is not valid for tag with TIFF type %d.", + __FUNCTION__, type); + return NULL; +} + +template<> +const uint8_t* TiffEntry::forceValidType(TagType type, const uint8_t* value) { + if (type == BYTE || type == ASCII || type == UNDEFINED) { + return value; + } + ALOGE("%s: Value of type 'uint8_t' is not valid for tag with TIFF type %d.", + __FUNCTION__, type); + return NULL; +} + +template<> +const int8_t* TiffEntry::forceValidType(TagType type, const int8_t* value) { + if (type == SBYTE || type == ASCII || type == UNDEFINED) { + return value; + } + ALOGE("%s: Value of type 'int8_t' is not valid for tag with TIFF type %d.", + __FUNCTION__, type); + return NULL; +} + +template<> +const uint16_t* TiffEntry::forceValidType(TagType type, const uint16_t* value) { + if (type == SHORT) { + return value; + } + ALOGE("%s: Value of type 'uint16_t' is not valid for tag with TIFF type %d.", + __FUNCTION__, type); + return NULL; +} + +template<> +const int16_t* TiffEntry::forceValidType(TagType type, const int16_t* value) { + if (type == SSHORT) { + return value; + } + ALOGE("%s: Value of type 'int16_t' is not valid for 
tag with TIFF type %d.", + __FUNCTION__, type); + return NULL; +} + +template<> +const uint32_t* TiffEntry::forceValidType(TagType type, const uint32_t* value) { + if (type == LONG || type == RATIONAL) { + return value; + } + ALOGE("%s: Value of type 'uint32_t' is not valid for tag with TIFF type %d.", + __FUNCTION__, type); + return NULL; +} + +template<> +const int32_t* TiffEntry::forceValidType(TagType type, const int32_t* value) { + if (type == SLONG || type == SRATIONAL) { + return value; + } + ALOGE("%s: Value of type 'int32_t' is not valid for tag with TIFF type %d.", + __FUNCTION__, type); + return NULL; +} + +template<> +const double* TiffEntry::forceValidType(TagType type, const double* value) { + if (type == DOUBLE) { + return value; + } + ALOGE("%s: Value of type 'double' is not valid for tag with TIFF type %d.", + __FUNCTION__, type); + return NULL; +} + +template<> +const float* TiffEntry::forceValidType(TagType type, const float* value) { + if (type == FLOAT) { + return value; + } + ALOGE("%s: Value of type 'float' is not valid for tag with TIFF type %d.", + __FUNCTION__, type); + return NULL; +} + +std::string TiffEntry::toString() const { + std::string output; + uint32_t count = getCount(); + char buf[256] = { 0 }; + snprintf(buf, sizeof(buf), "[id: %x, type: %d, count: %u, value: '", getTag(), getType(), count); + output.append(buf); + + size_t cappedCount = count; + if (count > MAX_PRINT_STRING_LENGTH) { + cappedCount = MAX_PRINT_STRING_LENGTH; + } + + TagType type = getType(); + switch (type) { + case UNDEFINED: + case BYTE: { + const uint8_t* typed_data = getData(); + for (size_t i = 0; i < cappedCount; ++i) { + output.append(std::to_string(typed_data[i])); + output.append(" "); + } + break; + } + case ASCII: { + const char* typed_data = reinterpret_cast(getData()); + size_t len = count; + if (count > MAX_PRINT_STRING_LENGTH) { + len = MAX_PRINT_STRING_LENGTH; + } + output.append(typed_data, len); + break; + } + case SHORT: { + const uint16_t* 
typed_data = getData(); + for (size_t i = 0; i < cappedCount; ++i) { + output.append(std::to_string(typed_data[i])); + output.append(" "); + } + break; + } + case LONG: { + const uint32_t* typed_data = getData(); + for (size_t i = 0; i < cappedCount; ++i) { + output.append(std::to_string(typed_data[i])); + output.append(" "); + } + break; + } + case RATIONAL: { + const uint32_t* typed_data = getData(); + cappedCount <<= 1; + for (size_t i = 0; i < cappedCount; i+=2) { + output.append(std::to_string(typed_data[i])); + output.append("/"); + output.append(std::to_string(typed_data[i + 1])); + output.append(" "); + } + break; + } + case SBYTE: { + const int8_t* typed_data = getData(); + for (size_t i = 0; i < cappedCount; ++i) { + output.append(std::to_string(typed_data[i])); + output.append(" "); + } + break; + } + case SSHORT: { + const int16_t* typed_data = getData(); + for (size_t i = 0; i < cappedCount; ++i) { + output.append(std::to_string(typed_data[i])); + output.append(" "); + } + break; + } + case SLONG: { + const int32_t* typed_data = getData(); + for (size_t i = 0; i < cappedCount; ++i) { + output.append(std::to_string(typed_data[i])); + output.append(" "); + } + break; + } + case SRATIONAL: { + const int32_t* typed_data = getData(); + cappedCount <<= 1; + for (size_t i = 0; i < cappedCount; i+=2) { + output.append(std::to_string(typed_data[i])); + output.append("/"); + output.append(std::to_string(typed_data[i + 1])); + output.append(" "); + } + break; + } + case FLOAT: { + const float* typed_data = getData(); + for (size_t i = 0; i < cappedCount; ++i) { + output.append(std::to_string(typed_data[i])); + output.append(" "); + } + break; + } + case DOUBLE: { + const double* typed_data = getData(); + for (size_t i = 0; i < cappedCount; ++i) { + output.append(std::to_string(typed_data[i])); + output.append(" "); + } + break; + } + default: { + output.append("unknown type "); + break; + } + } + + if (count > MAX_PRINT_STRING_LENGTH) { + output.append("..."); + 
} + output.append("']"); + return output; +} + +} /*namespace img_utils*/ +} /*namespace android*/ diff --git a/app/src/main/cpp/img_utils/src/TiffEntryImpl.cpp b/app/src/main/cpp/img_utils/src/TiffEntryImpl.cpp new file mode 100644 index 00000000..416d66be --- /dev/null +++ b/app/src/main/cpp/img_utils/src/TiffEntryImpl.cpp @@ -0,0 +1,25 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +// #include + +namespace android { +namespace img_utils { + +} /*namespace img_utils*/ +} /*namespace android*/ diff --git a/app/src/main/cpp/img_utils/src/TiffIfd.cpp b/app/src/main/cpp/img_utils/src/TiffIfd.cpp new file mode 100644 index 00000000..ff0a525c --- /dev/null +++ b/app/src/main/cpp/img_utils/src/TiffIfd.cpp @@ -0,0 +1,386 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "TiffIfd" + +#include +#include +#include +#include + +#include + +namespace android { +namespace img_utils { + +TiffIfd::TiffIfd(uint32_t ifdId) + : mNextIfd(), mIfdId(ifdId), mStripOffsetsInitialized(false) {} + +TiffIfd::~TiffIfd() {} + +status_t TiffIfd::addEntry(const sp& entry) { + size_t size = mEntries.size(); + if (size >= MAX_IFD_ENTRIES) { + ALOGW("%s: Failed to add entry for tag 0x%x to IFD %u, too many entries in IFD!", + __FUNCTION__, entry->getTag(), mIfdId); + return BAD_INDEX; + } + + mEntries[entry->getTag()] = entry; + return OK; +} + +sp TiffIfd::getEntry(uint16_t tag) const { + auto it = mEntries.find(tag); + if (it == mEntries.cend()) { + ALOGW("%s: No entry for tag 0x%x in ifd %u.", __FUNCTION__, tag, mIfdId); + return NULL; + } + return it->second; +} + +void TiffIfd::removeEntry(uint16_t tag) { + std::map >::iterator it = mEntries.find(tag); + if (it != mEntries.end()) { + mEntries.erase(it); + } +} + + +void TiffIfd::setNextIfd(const sp& ifd) { + mNextIfd = ifd; +} + +sp TiffIfd::getNextIfd() const { + return mNextIfd; +} + +uint32_t TiffIfd::checkAndGetOffset(uint32_t offset) const { + size_t size = mEntries.size(); + + if (size > MAX_IFD_ENTRIES) { + ALOGW("%s: Could not calculate IFD offsets, IFD %u contains too many entries.", + __FUNCTION__, mIfdId); + return BAD_OFFSET; + } + + if (size <= 0) { + ALOGW("%s: Could not calculate IFD offsets, IFD %u contains no entries.", __FUNCTION__, + mIfdId); + return BAD_OFFSET; + } + + if (offset == BAD_OFFSET) { + ALOGW("%s: Could not calculate IFD offsets, IFD %u had a bad initial offset.", + __FUNCTION__, mIfdId); + return BAD_OFFSET; + } + + uint32_t ifdSize = calculateIfdSize(size); + WORD_ALIGN(ifdSize); + return offset + ifdSize; +} + +status_t TiffIfd::writeData(uint32_t offset, /*out*/EndianOutput* out) const { + assert((offset % TIFF_WORD_SIZE) == 0); + status_t ret = OK; + + ALOGV("%s: IFD %u written to offset %u", __FUNCTION__, mIfdId, offset ); + uint32_t 
valueOffset = checkAndGetOffset(offset); + if (valueOffset == 0) { + return BAD_VALUE; + } + + size_t size = mEntries.size(); + + // Writer IFD header (2 bytes, number of entries). + uint16_t header = static_cast(size); + BAIL_ON_FAIL(out->write(&header, 0, 1), ret); + + // Write tag entries + for (auto it = mEntries.cbegin(); it != mEntries.cend(); ++it) { + BAIL_ON_FAIL(it->second->writeTagInfo(valueOffset, out), ret); + valueOffset += it->second->getSize(); + } + + // Writer IFD footer (4 bytes, offset to next IFD). + uint32_t footer = (mNextIfd != NULL) ? offset + getSize() : 0; + BAIL_ON_FAIL(out->write(&footer, 0, 1), ret); + + assert(out->getCurrentOffset() == offset + calculateIfdSize(size)); + + // Write zeroes till word aligned + ZERO_TILL_WORD(out, calculateIfdSize(size), ret); + + // Write values for each tag entry + for (auto it = mEntries.cbegin(); it != mEntries.cend(); ++it) { + size_t last = out->getCurrentOffset(); + // Only write values that are too large to fit in the 12-byte TIFF entry + if (it->second->getSize() > OFFSET_SIZE) { + BAIL_ON_FAIL(it->second->writeData(out->getCurrentOffset(), out), ret); + } + size_t next = out->getCurrentOffset(); + size_t diff = (next - last); + size_t actual = it->second->getSize(); + if (diff != actual) { + ALOGW("Sizes do not match for tag %x. 
Expected %zu, received %zu", + it->first, actual, diff); + } + } + + assert(out->getCurrentOffset() == offset + getSize()); + + return ret; +} + +size_t TiffIfd::getSize() const { + size_t size = mEntries.size(); + uint32_t total = calculateIfdSize(size); + WORD_ALIGN(total); + for (auto it = mEntries.cbegin(); it != mEntries.cend(); ++it) { + total += it->second->getSize(); + } + return total; +} + +uint32_t TiffIfd::getId() const { + return mIfdId; +} + +uint32_t TiffIfd::getComparableValue() const { + return mIfdId; +} + +status_t TiffIfd::validateAndSetStripTags() { + sp widthEntry = getEntry(TAG_IMAGEWIDTH); + if (widthEntry == NULL) { + ALOGE("%s: IFD %u doesn't have a ImageWidth tag set", __FUNCTION__, mIfdId); + return BAD_VALUE; + } + + sp heightEntry = getEntry(TAG_IMAGELENGTH); + if (heightEntry == NULL) { + ALOGE("%s: IFD %u doesn't have a ImageLength tag set", __FUNCTION__, mIfdId); + return BAD_VALUE; + } + + sp samplesEntry = getEntry(TAG_SAMPLESPERPIXEL); + if (samplesEntry == NULL) { + ALOGE("%s: IFD %u doesn't have a SamplesPerPixel tag set", __FUNCTION__, mIfdId); + return BAD_VALUE; + } + + sp bitsEntry = getEntry(TAG_BITSPERSAMPLE); + if (bitsEntry == NULL) { + ALOGE("%s: IFD %u doesn't have a BitsPerSample tag set", __FUNCTION__, mIfdId); + return BAD_VALUE; + } + + uint32_t width = *(widthEntry->getData()); + uint32_t height = *(heightEntry->getData()); + uint16_t bitsPerSample = *(bitsEntry->getData()); + uint16_t samplesPerPixel = *(samplesEntry->getData()); + + if ((bitsPerSample % 8) != 0) { + ALOGE("%s: BitsPerSample %d in IFD %u is not byte-aligned.", __FUNCTION__, + bitsPerSample, mIfdId); + return BAD_VALUE; + } + + uint32_t bytesPerSample = bitsPerSample / 8; + + // Choose strip size as close to 8kb as possible without splitting rows. + // If the row length is >8kb, each strip will only contain a single row. 
+ const uint32_t rowLengthBytes = bytesPerSample * samplesPerPixel * width; + const uint32_t idealChunkSize = (1 << 13); // 8kb + uint32_t rowsPerChunk = idealChunkSize / rowLengthBytes; + rowsPerChunk = (rowsPerChunk == 0) ? 1 : rowsPerChunk; + const uint32_t actualChunkSize = rowLengthBytes * rowsPerChunk; + + const uint32_t lastChunkRows = height % rowsPerChunk; + const uint32_t lastChunkSize = lastChunkRows * rowLengthBytes; + + if (actualChunkSize > /*max strip size for TIFF/EP*/65536) { + ALOGE("%s: Strip length too long.", __FUNCTION__); + return BAD_VALUE; + } + + size_t numStrips = height / rowsPerChunk; + + // Add another strip for the incomplete chunk. + if (lastChunkRows > 0) { + numStrips += 1; + } + + // Put each row in it's own strip + uint32_t rowsPerStripVal = rowsPerChunk; + sp rowsPerStrip = TiffWriter::uncheckedBuildEntry(TAG_ROWSPERSTRIP, LONG, 1, + UNDEFINED_ENDIAN, &rowsPerStripVal); + + if (rowsPerStrip == NULL) { + ALOGE("%s: Could not build entry for RowsPerStrip tag.", __FUNCTION__); + return BAD_VALUE; + } + + std::vector byteCounts; + byteCounts.reserve(numStrips); + + for (size_t i = 0; i < numStrips; ++i) { + if (lastChunkRows > 0 && i == (numStrips - 1)) { + byteCounts.push_back(lastChunkSize); + } else { + byteCounts.push_back(actualChunkSize); + } + } + + // Set byte counts for each strip + sp stripByteCounts = TiffWriter::uncheckedBuildEntry(TAG_STRIPBYTECOUNTS, LONG, + static_cast(numStrips), UNDEFINED_ENDIAN, &byteCounts[0]); + + if (stripByteCounts == NULL) { + ALOGE("%s: Could not build entry for StripByteCounts tag.", __FUNCTION__); + return BAD_VALUE; + } + + std::vector stripOffsetsVector; + stripOffsetsVector.resize(numStrips); + + // Set uninitialized offsets + sp stripOffsets = TiffWriter::uncheckedBuildEntry(TAG_STRIPOFFSETS, LONG, + static_cast(numStrips), UNDEFINED_ENDIAN, &stripOffsetsVector[0]); + + if (stripOffsets == NULL) { + ALOGE("%s: Could not build entry for StripOffsets tag.", __FUNCTION__); + return 
BAD_VALUE; + } + + if(addEntry(stripByteCounts) != OK) { + ALOGE("%s: Could not add entry for StripByteCounts to IFD %u", __FUNCTION__, mIfdId); + return BAD_VALUE; + } + + if(addEntry(rowsPerStrip) != OK) { + ALOGE("%s: Could not add entry for StripByteCounts to IFD %u", __FUNCTION__, mIfdId); + return BAD_VALUE; + } + + if(addEntry(stripOffsets) != OK) { + ALOGE("%s: Could not add entry for StripByteCounts to IFD %u", __FUNCTION__, mIfdId); + return BAD_VALUE; + } + + mStripOffsetsInitialized = true; + return OK; +} + +bool TiffIfd::uninitializedOffsets() const { + return mStripOffsetsInitialized; +} + +status_t TiffIfd::setStripOffset(uint32_t offset) { + + // Get old offsets and bytecounts + sp oldOffsets = getEntry(TAG_STRIPOFFSETS); + if (oldOffsets == NULL) { + ALOGE("%s: IFD %u does not contain StripOffsets entry.", __FUNCTION__, mIfdId); + return BAD_VALUE; + } + + sp stripByteCounts = getEntry(TAG_STRIPBYTECOUNTS); + if (stripByteCounts == NULL) { + ALOGE("%s: IFD %u does not contain StripByteCounts entry.", __FUNCTION__, mIfdId); + return BAD_VALUE; + } + + uint32_t offsetsCount = oldOffsets->getCount(); + uint32_t byteCount = stripByteCounts->getCount(); + if (offsetsCount != byteCount) { + ALOGE("%s: StripOffsets count (%u) doesn't match StripByteCounts count (%u) in IFD %u", + __FUNCTION__, offsetsCount, byteCount, mIfdId); + return BAD_VALUE; + } + + const uint32_t* stripByteCountsArray = stripByteCounts->getData(); + + size_t numStrips = offsetsCount; + + std::vector stripOffsets; + stripOffsets.reserve(numStrips); + + // Calculate updated byte offsets + for (size_t i = 0; i < numStrips; ++i) { + stripOffsets.push_back(offset); + offset += stripByteCountsArray[i]; + } + + sp newOffsets = TiffWriter::uncheckedBuildEntry(TAG_STRIPOFFSETS, LONG, + static_cast(numStrips), UNDEFINED_ENDIAN, &stripOffsets[0]); + + if (newOffsets == NULL) { + ALOGE("%s: Coult not build updated offsets entry in IFD %u", __FUNCTION__, mIfdId); + return BAD_VALUE; + } + + if 
(addEntry(newOffsets) != OK) { + ALOGE("%s: Failed to add updated offsets entry in IFD %u", __FUNCTION__, mIfdId); + return BAD_VALUE; + } + return OK; +} + +uint32_t TiffIfd::getStripSize() const { + sp stripByteCounts = getEntry(TAG_STRIPBYTECOUNTS); + if (stripByteCounts == NULL) { + ALOGE("%s: IFD %u does not contain StripByteCounts entry.", __FUNCTION__, mIfdId); + return BAD_VALUE; + } + + uint32_t count = stripByteCounts->getCount(); + const uint32_t* byteCounts = stripByteCounts->getData(); + + uint32_t total = 0; + for (size_t i = 0; i < static_cast(count); ++i) { + total += byteCounts[i]; + } + return total; +} + +std::string TiffIfd::toString() const { + size_t s = mEntries.size(); + std::string output; + char buf[1024] = { 0 }; + snprintf(buf, sizeof(buf), "[ifd: %x, num_entries: %zu, entries:\n", getId(), s); + output.append(buf); + for(auto it = mEntries.cbegin(); it != mEntries.cend(); ++it) { + output.append("\t"); + output.append(it->second->toString()); + output.append("\n"); + } + output.append(", next_ifd: %x]", ((mNextIfd != NULL) ? mNextIfd->getId() : 0)); + return output; +} + +void TiffIfd::log() const { + size_t s = mEntries.size(); + ALOGI("[ifd: %x, num_entries: %zu, entries:\n", getId(), s); + for(auto it = mEntries.cbegin(); it != mEntries.cend(); ++it) { + ALOGI("\t%s", it->second->toString().c_str()); + } + ALOGI(", next_ifd: %x]", ((mNextIfd != NULL) ? mNextIfd->getId() : 0)); +} + +} /*namespace img_utils*/ +} /*namespace android*/ diff --git a/app/src/main/cpp/img_utils/src/TiffWritable.cpp b/app/src/main/cpp/img_utils/src/TiffWritable.cpp new file mode 100644 index 00000000..f8d7de76 --- /dev/null +++ b/app/src/main/cpp/img_utils/src/TiffWritable.cpp @@ -0,0 +1,31 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +#include +#include + +#include + +namespace android { +namespace img_utils { + +TiffWritable::TiffWritable() {} + +TiffWritable::~TiffWritable() {} + +} /*namespace img_utils*/ +} /*namespace android*/ diff --git a/app/src/main/cpp/img_utils/src/TiffWriter.cpp b/app/src/main/cpp/img_utils/src/TiffWriter.cpp new file mode 100644 index 00000000..33f8790e --- /dev/null +++ b/app/src/main/cpp/img_utils/src/TiffWriter.cpp @@ -0,0 +1,425 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "TiffWriter" + +#include +#include +#include + +#include + +namespace android { +namespace img_utils { + +#if 0 +KeyedVector TiffWriter::buildTagMap( + const TagDefinition_t* definitions, size_t length) { + KeyedVector map; + for(size_t i = 0; i < length; ++i) { + map.add(definitions[i].tagId, definitions + i); + } + return map; +} +#endif + +#define COMPARE(op) \ +bool Orderable::operator op (const Orderable& orderable) const { \ + return getComparableValue() op orderable.getComparableValue(); \ +} + +#define ARRAY_SIZE(array) \ + (sizeof(array) / sizeof((array)[0])) + +#if 0 +KeyedVector TiffWriter::sTagMaps[] = { + buildTagMap(TIFF_EP_TAG_DEFINITIONS, ARRAY_SIZE(TIFF_EP_TAG_DEFINITIONS)), + buildTagMap(DNG_TAG_DEFINITIONS, ARRAY_SIZE(DNG_TAG_DEFINITIONS)), + buildTagMap(EXIF_2_3_TAG_DEFINITIONS, ARRAY_SIZE(EXIF_2_3_TAG_DEFINITIONS)), + buildTagMap(TIFF_6_TAG_DEFINITIONS, ARRAY_SIZE(TIFF_6_TAG_DEFINITIONS)) +}; +#endif + +TiffWriter::TiffWriter() : mNumTagMaps(DEFAULT_NUM_TAG_MAPS) +{ + mTagMaps.reserve(DEFAULT_NUM_TAG_MAPS); + // = new KeyedVector[DEFAULT_NUM_TAG_MAPS]; + + std::vector >::iterator it = mTagMaps.insert(mTagMaps.end(), std::map()); + for(size_t i = 0; i < ARRAY_SIZE(TIFF_EP_TAG_DEFINITIONS); ++i) { + (*it)[TIFF_EP_TAG_DEFINITIONS[i].tagId] = TIFF_EP_TAG_DEFINITIONS + i; + } + it = mTagMaps.insert(mTagMaps.end(), std::map()); + for(size_t i = 0; i < ARRAY_SIZE(DNG_TAG_DEFINITIONS); ++i) { + (*it)[DNG_TAG_DEFINITIONS[i].tagId] = DNG_TAG_DEFINITIONS + i; + } + it = mTagMaps.insert(mTagMaps.end(), std::map()); + for(size_t i = 0; i < ARRAY_SIZE(EXIF_2_3_TAG_DEFINITIONS); ++i) { + (*it)[EXIF_2_3_TAG_DEFINITIONS[i].tagId] = EXIF_2_3_TAG_DEFINITIONS + i; + } + it = mTagMaps.insert(mTagMaps.end(), std::map()); + for(size_t i = 0; i < ARRAY_SIZE(TIFF_6_TAG_DEFINITIONS); ++i) { + (*it)[TIFF_6_TAG_DEFINITIONS[i].tagId] = TIFF_6_TAG_DEFINITIONS + i; + } +} + +TiffWriter::TiffWriter(std::map* enabledDefinitions, + size_t length) : 
mNumTagMaps(length) +{ + mTagMaps.reserve(length); + for (int i = 0; i < length; ++i) + { + auto it = mTagMaps.insert(mTagMaps.end(), std::map()); + for(auto it2 = enabledDefinitions[i].cbegin(); it2 != enabledDefinitions[i].cend(); ++it2) { + (*it)[it2->first] = it2->second; + } + } +} + +TiffWriter::~TiffWriter() {} + +status_t TiffWriter::write(Output* out, StripSource** sources, size_t sourcesCount, + Endianness end) { + status_t ret = OK; + EndianOutput endOut(out, end); + + if (mIfd == NULL) { + ALOGE("%s: Tiff header is empty.", __FUNCTION__); + return BAD_VALUE; + } + + uint32_t totalSize = getTotalSize(); + + std::map offsetVector; + + for (std::map >::iterator it = mNamedIfds.begin(); it != mNamedIfds.end(); ++it) { + if (it->second->uninitializedOffsets()) { + uint32_t stripSize = it->second->getStripSize(); + if (it->second->setStripOffset(totalSize) != OK) { + ALOGE("%s: Could not set strip offsets.", __FUNCTION__); + return BAD_VALUE; + } + totalSize += stripSize; + WORD_ALIGN(totalSize); + offsetVector[it->first] = totalSize; + } + } + + size_t offVecSize = offsetVector.size(); + if (offVecSize != sourcesCount) { + ALOGE("%s: Mismatch between number of IFDs with uninitialized strips (%zu) and" + " sources (%zu).", __FUNCTION__, offVecSize, sourcesCount); + return BAD_VALUE; + } + + BAIL_ON_FAIL(writeFileHeader(endOut), ret); + + uint32_t offset = FILE_HEADER_SIZE; + sp ifd = mIfd; + while(ifd != NULL) { + BAIL_ON_FAIL(ifd->writeData(offset, &endOut), ret); + offset += ifd->getSize(); + ifd = ifd->getNextIfd(); + } + +#ifndef NDEBUG + log(); +#endif + + for (auto it = offsetVector.begin(); it != offsetVector.end(); ++it) { + uint32_t ifdKey = it->first; + uint32_t sizeToWrite = mNamedIfds[ifdKey]->getStripSize(); + bool found = false; + for (size_t j = 0; j < sourcesCount; ++j) { + if (sources[j]->getIfd() == ifdKey) { + int i = std::distance(offsetVector.begin(), it); + if ((ret = sources[i]->writeToStream(endOut, sizeToWrite)) != OK) { + ALOGE("%s: 
Could not write to stream, received %d.", __FUNCTION__, ret); + return ret; + } + ZERO_TILL_WORD(&endOut, sizeToWrite, ret); + found = true; + break; + } + } + if (!found) { + ALOGE("%s: No stream for byte strips for IFD %u", __FUNCTION__, ifdKey); + return BAD_VALUE; + } + assert(it->second == endOut.getCurrentOffset()); + } + + return ret; +} + +status_t TiffWriter::write(Output* out, Endianness end) { + status_t ret = OK; + EndianOutput endOut(out, end); + + if (mIfd == NULL) { + ALOGE("%s: Tiff header is empty.", __FUNCTION__); + return BAD_VALUE; + } + BAIL_ON_FAIL(writeFileHeader(endOut), ret); + + uint32_t offset = FILE_HEADER_SIZE; + sp ifd = mIfd; + while(ifd != NULL) { + BAIL_ON_FAIL(ifd->writeData(offset, &endOut), ret); + offset += ifd->getSize(); + ifd = ifd->getNextIfd(); + } + return ret; +} + + +const TagDefinition_t* TiffWriter::lookupDefinition(uint16_t tag) const { + const TagDefinition_t* definition = NULL; + for (size_t i = 0; i < mNumTagMaps; ++i) { + auto it = mTagMaps[i].find(tag); + if (it != mTagMaps[i].cend()) { + definition = it->second; + break; + } + } + + if (definition == NULL) { + ALOGE("%s: No definition exists for tag with id %x.", __FUNCTION__, tag); + } + return definition; +} + +sp TiffWriter::getEntry(uint16_t tag, uint32_t ifd) const { + auto it = mNamedIfds.find(ifd); + if (it == mNamedIfds.cend()) { + ALOGE("%s: No IFD %d set for this writer.", __FUNCTION__, ifd); + return NULL; + } + return it->second->getEntry(tag); +} + +void TiffWriter::removeEntry(uint16_t tag, uint32_t ifd) { + auto it = mNamedIfds.find(ifd); + if (it != mNamedIfds.end()) { + it->second->removeEntry(tag); + } +} + +status_t TiffWriter::addEntry(const sp& entry, uint32_t ifd) { + uint16_t tag = entry->getTag(); + + const TagDefinition_t* definition = lookupDefinition(tag); + + if (definition == NULL) { + ALOGE("%s: No definition exists for tag 0x%x.", __FUNCTION__, tag); + return BAD_INDEX; + } + + std::map >::iterator it = mNamedIfds.find(ifd); + + // 
Add a new IFD if necessary + if (it == mNamedIfds.end()) { + ALOGE("%s: No IFD %u exists.", __FUNCTION__, ifd); + return NAME_NOT_FOUND; + } + + sp selectedIfd = it->second; + return selectedIfd->addEntry(entry); +} + +status_t TiffWriter::addStrip(uint32_t ifd) { + std::map >::iterator it = mNamedIfds.find(ifd); + if (it == mNamedIfds.end()) { + ALOGE("%s: Ifd %u doesn't exist, cannot add strip entries.", __FUNCTION__, ifd); + return BAD_VALUE; + } + sp selected = it->second; + return selected->validateAndSetStripTags(); +} + +status_t TiffWriter::addIfd(uint32_t ifd) { + std::map >::iterator it = mNamedIfds.find(ifd); + if (it != mNamedIfds.end()) { + ALOGE("%s: Ifd with ID 0x%x already exists.", __FUNCTION__, ifd); + return BAD_VALUE; + } + + sp newIfd = new TiffIfd(ifd); + if (mIfd == NULL) { + mIfd = newIfd; + } else { + sp last = findLastIfd(); + last->setNextIfd(newIfd); + } + + mNamedIfds[ifd] = newIfd; + + return OK; +} + +status_t TiffWriter::addSubIfd(uint32_t parentIfd, uint32_t ifd, SubIfdType type) { + + std::map >::iterator it = mNamedIfds.find(ifd); + if (it != mNamedIfds.end()) { + ALOGE("%s: Ifd with ID 0x%x already exists.", __FUNCTION__, ifd); + return BAD_VALUE; + } + + std::map >::iterator parentIt = mNamedIfds.find(parentIfd); + if (parentIt == mNamedIfds.end()) { + ALOGE("%s: Parent IFD with ID 0x%x does not exist.", __FUNCTION__, parentIfd); + return BAD_VALUE; + } + + sp parent = parentIt->second; + sp newIfd = new TiffIfd(ifd); + + uint16_t subIfdTag; + if (type == SUBIFD) { + subIfdTag = TAG_SUBIFDS; + } else if (type == GPSINFO) { + subIfdTag = TAG_GPSINFO; + } else { + ALOGE("%s: Unknown SubIFD type %d.", __FUNCTION__, type); + return BAD_VALUE; + } + + sp subIfds = parent->getEntry(subIfdTag); + if (subIfds == NULL) { + if (buildEntry(subIfdTag, 1, &newIfd, &subIfds) < 0) { + ALOGE("%s: Failed to build SubIfd entry in IFD 0x%x.", __FUNCTION__, parentIfd); + return BAD_VALUE; + } + } else { + if (type == GPSINFO) { + ALOGE("%s: Cannot 
add GPSInfo SubIFD to IFD %u, one already exists.", __FUNCTION__, + ifd); + return BAD_VALUE; + } + + std::vector > subIfdList; + const sp* oldIfdArray = subIfds->getData >(); + subIfdList.insert(subIfdList.end(), oldIfdArray, oldIfdArray + subIfds->getCount()); +#if 0 + ALOGE("%s: Failed to build SubIfd entry in IFD 0x%x.", __FUNCTION__, parentIfd); + return BAD_VALUE; + } +#endif + + subIfdList.push_back(newIfd); // < 0) { +#if 0 + ALOGE("%s: Failed to build SubIfd entry in IFD 0x%x.", __FUNCTION__, parentIfd); + return BAD_VALUE; + } +#endif + + uint32_t count = subIfdList.size(); + if (buildEntry(subIfdTag, count, &subIfdList[0], &subIfds) < 0) { + ALOGE("%s: Failed to build SubIfd entry in IFD 0x%x.", __FUNCTION__, parentIfd); + return BAD_VALUE; + } + } + + if (parent->addEntry(subIfds) < 0) { + ALOGE("%s: Failed to add SubIfd entry in IFD 0x%x.", __FUNCTION__, parentIfd); + return BAD_VALUE; + } + + mNamedIfds[ifd] = newIfd; + + return OK; +} + +TagType TiffWriter::getDefaultType(uint16_t tag) const { + const TagDefinition_t* definition = lookupDefinition(tag); + if (definition == NULL) { + ALOGE("%s: Could not find definition for tag %x", __FUNCTION__, tag); + return UNKNOWN_TAGTYPE; + } + return definition->defaultType; +} + +uint32_t TiffWriter::getDefaultCount(uint16_t tag) const { + const TagDefinition_t* definition = lookupDefinition(tag); + if (definition == NULL) { + ALOGE("%s: Could not find definition for tag %x", __FUNCTION__, tag); + return 0; + } + return definition->fixedCount; +} + +bool TiffWriter::hasIfd(uint32_t ifd) const { + auto it = mNamedIfds.find(ifd); + return it != mNamedIfds.cend(); +} + +bool TiffWriter::checkIfDefined(uint16_t tag) const { + return lookupDefinition(tag) != NULL; +} + +const char* TiffWriter::getTagName(uint16_t tag) const { + const TagDefinition_t* definition = lookupDefinition(tag); + if (definition == NULL) { + return NULL; + } + return definition->tagName; +} + +sp TiffWriter::findLastIfd() { + sp ifd = mIfd; 
+ while(ifd != NULL) { + sp nextIfd = ifd->getNextIfd(); + if (nextIfd == NULL) { + break; + } + ifd = std::move(nextIfd); + } + return ifd; +} + +status_t TiffWriter::writeFileHeader(EndianOutput& out) { + status_t ret = OK; + uint16_t endMarker = (out.getEndianness() == BIG) ? BIG_ENDIAN_MARKER : LITTLE_ENDIAN_MARKER; + BAIL_ON_FAIL(out.write(&endMarker, 0, 1), ret); + + uint16_t tiffMarker = TIFF_FILE_MARKER; + BAIL_ON_FAIL(out.write(&tiffMarker, 0, 1), ret); + + uint32_t offsetMarker = FILE_HEADER_SIZE; + BAIL_ON_FAIL(out.write(&offsetMarker, 0, 1), ret); + return ret; +} + +uint32_t TiffWriter::getTotalSize() const { + uint32_t totalSize = FILE_HEADER_SIZE; + sp ifd = mIfd; + while(ifd != NULL) { + totalSize += ifd->getSize(); + ifd = ifd->getNextIfd(); + } + return totalSize; +} + +void TiffWriter::log() const { + ALOGI("%s: TiffWriter:", __FUNCTION__); + size_t length = mNamedIfds.size(); + for (auto it = mNamedIfds.begin(); it != mNamedIfds.end(); ++it) { + it->second->log(); + } +} + +} /*namespace img_utils*/ +} /*namespace android*/ diff --git a/app/src/main/cpp/libcutils/android_reboot.cpp b/app/src/main/cpp/libcutils/android_reboot.cpp new file mode 100644 index 00000000..e0def711 --- /dev/null +++ b/app/src/main/cpp/libcutils/android_reboot.cpp @@ -0,0 +1,53 @@ +/* + * Copyright 2011, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include +#include + +#include + +#define TAG "android_reboot" + +int android_reboot(unsigned cmd, int /*flags*/, const char* arg) { + int ret; + const char* restart_cmd = NULL; + char* prop_value; + + switch (cmd) { + case ANDROID_RB_RESTART: // deprecated + case ANDROID_RB_RESTART2: + restart_cmd = "reboot"; + break; + case ANDROID_RB_POWEROFF: + restart_cmd = "shutdown"; + break; + case ANDROID_RB_THERMOFF: + restart_cmd = "shutdown,thermal"; + break; + } + if (!restart_cmd) return -1; + if (arg && arg[0]) { + ret = asprintf(&prop_value, "%s,%s", restart_cmd, arg); + } else { + ret = asprintf(&prop_value, "%s", restart_cmd); + } + if (ret < 0) return -1; + ret = property_set(ANDROID_RB_PROPERTY, prop_value); + free(prop_value); + return ret; +} diff --git a/app/src/main/cpp/libcutils/arch-x86/cache.h b/app/src/main/cpp/libcutils/arch-x86/cache.h new file mode 100644 index 00000000..1c22feaa --- /dev/null +++ b/app/src/main/cpp/libcutils/arch-x86/cache.h @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#if defined(__slm__) +/* Values are optimized for Silvermont */ +#define SHARED_CACHE_SIZE (1024*1024) /* Silvermont L2 Cache */ +#define DATA_CACHE_SIZE (24*1024) /* Silvermont L1 Data Cache */ +#else +/* Values are optimized for Atom */ +#define SHARED_CACHE_SIZE (512*1024) /* Atom L2 Cache */ +#define DATA_CACHE_SIZE (24*1024) /* Atom L1 Data Cache */ +#endif + +#define SHARED_CACHE_SIZE_HALF (SHARED_CACHE_SIZE / 2) +#define DATA_CACHE_SIZE_HALF (DATA_CACHE_SIZE / 2) diff --git a/app/src/main/cpp/libcutils/arch-x86_64/cache.h b/app/src/main/cpp/libcutils/arch-x86_64/cache.h new file mode 100644 index 00000000..f1443093 --- /dev/null +++ b/app/src/main/cpp/libcutils/arch-x86_64/cache.h @@ -0,0 +1,22 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* Values are optimized for Silvermont */ +#define SHARED_CACHE_SIZE (1024*1024) /* Silvermont L2 Cache */ +#define DATA_CACHE_SIZE (24*1024) /* Silvermont L1 Data Cache */ + +#define SHARED_CACHE_SIZE_HALF (SHARED_CACHE_SIZE / 2) +#define DATA_CACHE_SIZE_HALF (DATA_CACHE_SIZE / 2) diff --git a/app/src/main/cpp/libcutils/ashmem-dev.cpp b/app/src/main/cpp/libcutils/ashmem-dev.cpp new file mode 100644 index 00000000..8c232f0c --- /dev/null +++ b/app/src/main/cpp/libcutils/ashmem-dev.cpp @@ -0,0 +1,468 @@ +/* + * Copyright (C) 2008 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +/* + * Implementation of the user-space ashmem API for devices, which have our + * ashmem-enabled kernel. See ashmem-sim.c for the "fake" tmp-based version, + * used by the simulator. + */ +#define LOG_TAG "ashmem" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +/* Will be added to UAPI once upstream change is merged */ +#define F_SEAL_FUTURE_WRITE 0x0010 + +/* + * The minimum vendor API level at and after which it is safe to use memfd. + * This is to facilitate deprecation of ashmem. 
+ */ +#define MIN_MEMFD_VENDOR_API_LEVEL 29 +#define MIN_MEMFD_VENDOR_API_LEVEL_CHAR 'Q' + +/* ashmem identity */ +static dev_t __ashmem_rdev; +/* + * If we trigger a signal handler in the middle of locked activity and the + * signal handler calls ashmem, we could get into a deadlock state. + */ +static pthread_mutex_t __ashmem_lock = PTHREAD_MUTEX_INITIALIZER; + +/* + * has_memfd_support() determines if the device can use memfd. memfd support + * has been there for long time, but certain things in it may be missing. We + * check for needed support in it. Also we check if the VNDK version of + * libcutils being used is new enough, if its not, then we cannot use memfd + * since the older copies may be using ashmem so we just use ashmem. Once all + * Android devices that are getting updates are new enough (ex, they were + * originally shipped with Android release > P), then we can just use memfd and + * delete all ashmem code from libcutils (while preserving the interface). + * + * NOTE: + * The sys.use_memfd property is set by default to false in Android + * to temporarily disable memfd, till vendor and apps are ready for it. + * The main issue: either apps or vendor processes can directly make ashmem + * IOCTLs on FDs they receive by assuming they are ashmem, without going + * through libcutils. Such fds could have very well be originally created with + * libcutils hence they could be memfd. Thus the IOCTLs will break. + * + * Set default value of sys.use_memfd property to true once the issue is + * resolved, so that the code can then self-detect if kernel support is present + * on the device. The property can also set to true from adb shell, for + * debugging. 
+ */ + +static bool debug_log = false; /* set to true for verbose logging and other debug */ +static bool pin_deprecation_warn = true; /* Log the pin deprecation warning only once */ + +/* Determine if vendor processes would be ok with memfd in the system: + * + * If VNDK is using older libcutils, don't use memfd. This is so that the + * same shared memory mechanism is used across binder transactions between + * vendor partition processes and system partition processes. + */ +static bool check_vendor_memfd_allowed() { + std::string vndk_version = android::base::GetProperty("ro.vndk.version", ""); + + if (vndk_version == "") { + ALOGE("memfd: ro.vndk.version not defined or invalid (%s), this is mandated since P.\n", + vndk_version.c_str()); + return false; + } + + /* No issues if vendor is targetting current Dessert */ + if (vndk_version == "current") { + return false; + } + + /* Check if VNDK version is a number and act on it */ + char* p; + long int vers = strtol(vndk_version.c_str(), &p, 10); + if (*p == 0) { + if (vers < MIN_MEMFD_VENDOR_API_LEVEL) { + ALOGI("memfd: device VNDK version (%s) is < Q so using ashmem.\n", + vndk_version.c_str()); + return false; + } + + return true; + } + + /* If its not a number, assume string, but check if its a sane string */ + if (tolower(vndk_version[0]) < 'a' || tolower(vndk_version[0]) > 'z') { + ALOGE("memfd: ro.vndk.version not defined or invalid (%s), this is mandated since P.\n", + vndk_version.c_str()); + return false; + } + + if (tolower(vndk_version[0]) < tolower(MIN_MEMFD_VENDOR_API_LEVEL_CHAR)) { + ALOGI("memfd: device is using VNDK version (%s) which is less than Q. Use ashmem only.\n", + vndk_version.c_str()); + return false; + } + + return true; +} + + +/* Determine if memfd can be supported. This is just one-time hardwork + * which will be cached by the caller. 
+ */ +static bool __has_memfd_support() { + if (check_vendor_memfd_allowed() == false) { + return false; + } + + /* Used to turn on/off the detection at runtime, in the future this + * property will be removed once we switch everything over to ashmem. + * Currently it is used only for debugging to switch the system over. + */ + if (!android::base::GetBoolProperty("sys.use_memfd", false)) { + if (debug_log) { + ALOGD("sys.use_memfd=false so memfd disabled\n"); + } + return false; + } + + /* Check if kernel support exists, otherwise fall back to ashmem */ + android::base::unique_fd fd( + syscall(__NR_memfd_create, "test_android_memfd", MFD_ALLOW_SEALING)); + if (fd == -1) { + ALOGE("memfd_create failed: %s, no memfd support.\n", strerror(errno)); + return false; + } + + if (fcntl(fd, F_ADD_SEALS, F_SEAL_FUTURE_WRITE) == -1) { + ALOGE("fcntl(F_ADD_SEALS) failed: %s, no memfd support.\n", strerror(errno)); + return false; + } + + if (debug_log) { + ALOGD("memfd: device has memfd support, using it\n"); + } + return true; +} + +static bool has_memfd_support() { + /* memfd_supported is the initial global per-process state of what is known + * about memfd. 
+ */ + static bool memfd_supported = __has_memfd_support(); + + return memfd_supported; +} + +static std::string get_ashmem_device_path() { + static const std::string boot_id_path = "/proc/sys/kernel/random/boot_id"; + std::string boot_id; + if (!android::base::ReadFileToString(boot_id_path, &boot_id)) { + ALOGE("Failed to read %s: %s.\n", boot_id_path.c_str(), strerror(errno)); + return ""; + }; + boot_id = android::base::Trim(boot_id); + + return "/dev/ashmem" + boot_id; +} + +/* logistics of getting file descriptor for ashmem */ +static int __ashmem_open_locked() +{ + static const std::string ashmem_device_path = get_ashmem_device_path(); + + if (ashmem_device_path.empty()) { + return -1; + } + + int fd = TEMP_FAILURE_RETRY(open(ashmem_device_path.c_str(), O_RDWR | O_CLOEXEC)); + + // fallback for APEX w/ use_vendor on Q, which would have still used /dev/ashmem + if (fd < 0) { + fd = TEMP_FAILURE_RETRY(open("/dev/ashmem", O_RDWR | O_CLOEXEC)); + } + + if (fd < 0) { + return fd; + } + + struct stat st; + int ret = TEMP_FAILURE_RETRY(fstat(fd, &st)); + if (ret < 0) { + int save_errno = errno; + close(fd); + errno = save_errno; + return ret; + } + if (!S_ISCHR(st.st_mode) || !st.st_rdev) { + close(fd); + errno = ENOTTY; + return -1; + } + + __ashmem_rdev = st.st_rdev; + return fd; +} + +static int __ashmem_open() +{ + int fd; + + pthread_mutex_lock(&__ashmem_lock); + fd = __ashmem_open_locked(); + pthread_mutex_unlock(&__ashmem_lock); + + return fd; +} + +/* Make sure file descriptor references ashmem, negative number means false */ +static int __ashmem_is_ashmem(int fd, int fatal) +{ + dev_t rdev; + struct stat st; + + if (fstat(fd, &st) < 0) { + return -1; + } + + rdev = 0; /* Too much complexity to sniff __ashmem_rdev */ + if (S_ISCHR(st.st_mode) && st.st_rdev) { + pthread_mutex_lock(&__ashmem_lock); + rdev = __ashmem_rdev; + if (rdev) { + pthread_mutex_unlock(&__ashmem_lock); + } else { + int fd = __ashmem_open_locked(); + if (fd < 0) { + 
pthread_mutex_unlock(&__ashmem_lock); + return -1; + } + rdev = __ashmem_rdev; + pthread_mutex_unlock(&__ashmem_lock); + + close(fd); + } + + if (st.st_rdev == rdev) { + return 0; + } + } + + if (fatal) { + if (rdev) { + LOG_ALWAYS_FATAL("illegal fd=%d mode=0%o rdev=%d:%d expected 0%o %d:%d", + fd, st.st_mode, major(st.st_rdev), minor(st.st_rdev), + S_IFCHR | S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP | S_IROTH | S_IRGRP, + major(rdev), minor(rdev)); + } else { + LOG_ALWAYS_FATAL("illegal fd=%d mode=0%o rdev=%d:%d expected 0%o", + fd, st.st_mode, major(st.st_rdev), minor(st.st_rdev), + S_IFCHR | S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP | S_IROTH | S_IRGRP); + } + /* NOTREACHED */ + } + + errno = ENOTTY; + return -1; +} + +static int __ashmem_check_failure(int fd, int result) +{ + if (result == -1 && errno == ENOTTY) __ashmem_is_ashmem(fd, 1); + return result; +} + +static bool memfd_is_ashmem(int fd) { + static bool fd_check_error_once = false; + + if (__ashmem_is_ashmem(fd, 0) == 0) { + if (!fd_check_error_once) { + ALOGE("memfd: memfd expected but ashmem fd used - please use libcutils.\n"); + fd_check_error_once = true; + } + + return true; + } + + return false; +} + +int ashmem_valid(int fd) +{ + if (has_memfd_support() && !memfd_is_ashmem(fd)) { + return 1; + } + + return __ashmem_is_ashmem(fd, 0) >= 0; +} + +static int memfd_create_region(const char* name, size_t size) { + android::base::unique_fd fd(syscall(__NR_memfd_create, name, MFD_ALLOW_SEALING)); + + if (fd == -1) { + ALOGE("memfd_create(%s, %zd) failed: %s\n", name, size, strerror(errno)); + return -1; + } + + if (ftruncate(fd, size) == -1) { + ALOGE("ftruncate(%s, %zd) failed for memfd creation: %s\n", name, size, strerror(errno)); + return -1; + } + + if (debug_log) { + ALOGE("memfd_create(%s, %zd) success. 
fd=%d\n", name, size, fd.get()); + } + return fd.release(); +} + +/* + * ashmem_create_region - creates a new ashmem region and returns the file + * descriptor, or <0 on error + * + * `name' is an optional label to give the region (visible in /proc/pid/maps) + * `size' is the size of the region, in page-aligned bytes + */ +int ashmem_create_region(const char *name, size_t size) +{ + int ret, save_errno; + + if (has_memfd_support()) { + return memfd_create_region(name ? name : "none", size); + } + + int fd = __ashmem_open(); + if (fd < 0) { + return fd; + } + + if (name) { + char buf[ASHMEM_NAME_LEN] = {0}; + + strlcpy(buf, name, sizeof(buf)); + ret = TEMP_FAILURE_RETRY(ioctl(fd, ASHMEM_SET_NAME, buf)); + if (ret < 0) { + goto error; + } + } + + ret = TEMP_FAILURE_RETRY(ioctl(fd, ASHMEM_SET_SIZE, size)); + if (ret < 0) { + goto error; + } + + return fd; + +error: + save_errno = errno; + close(fd); + errno = save_errno; + return ret; +} + +static int memfd_set_prot_region(int fd, int prot) { + /* Only proceed if an fd needs to be write-protected */ + if (prot & PROT_WRITE) { + return 0; + } + + if (fcntl(fd, F_ADD_SEALS, F_SEAL_FUTURE_WRITE) == -1) { + ALOGE("memfd_set_prot_region(%d, %d): F_SEAL_FUTURE_WRITE seal failed: %s\n", fd, prot, + strerror(errno)); + return -1; + } + + return 0; +} + +int ashmem_set_prot_region(int fd, int prot) +{ + if (has_memfd_support() && !memfd_is_ashmem(fd)) { + return memfd_set_prot_region(fd, prot); + } + + return __ashmem_check_failure(fd, TEMP_FAILURE_RETRY(ioctl(fd, ASHMEM_SET_PROT_MASK, prot))); +} + +int ashmem_pin_region(int fd, size_t offset, size_t len) +{ + if (!pin_deprecation_warn || debug_log) { + ALOGE("Pinning is deprecated since Android Q. Please use trim or other methods.\n"); + pin_deprecation_warn = true; + } + + if (has_memfd_support() && !memfd_is_ashmem(fd)) { + return 0; + } + + // TODO: should LP64 reject too-large offset/len? 
+ ashmem_pin pin = { static_cast(offset), static_cast(len) }; + return __ashmem_check_failure(fd, TEMP_FAILURE_RETRY(ioctl(fd, ASHMEM_PIN, &pin))); +} + +int ashmem_unpin_region(int fd, size_t offset, size_t len) +{ + if (!pin_deprecation_warn || debug_log) { + ALOGE("Pinning is deprecated since Android Q. Please use trim or other methods.\n"); + pin_deprecation_warn = true; + } + + if (has_memfd_support() && !memfd_is_ashmem(fd)) { + return 0; + } + + // TODO: should LP64 reject too-large offset/len? + ashmem_pin pin = { static_cast(offset), static_cast(len) }; + return __ashmem_check_failure(fd, TEMP_FAILURE_RETRY(ioctl(fd, ASHMEM_UNPIN, &pin))); +} + +int ashmem_get_size_region(int fd) +{ + if (has_memfd_support() && !memfd_is_ashmem(fd)) { + struct stat sb; + + if (fstat(fd, &sb) == -1) { + ALOGE("ashmem_get_size_region(%d): fstat failed: %s\n", fd, strerror(errno)); + return -1; + } + + if (debug_log) { + ALOGD("ashmem_get_size_region(%d): %d\n", fd, static_cast(sb.st_size)); + } + + return sb.st_size; + } + + return __ashmem_check_failure(fd, TEMP_FAILURE_RETRY(ioctl(fd, ASHMEM_GET_SIZE, NULL))); +} diff --git a/app/src/main/cpp/libcutils/ashmem-host.cpp b/app/src/main/cpp/libcutils/ashmem-host.cpp new file mode 100644 index 00000000..2ba1eb0c --- /dev/null +++ b/app/src/main/cpp/libcutils/ashmem-host.cpp @@ -0,0 +1,96 @@ +/* + * Copyright (C) 2008 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +/* + * Implementation of the user-space ashmem API for the simulator, which lacks + * an ashmem-enabled kernel. See ashmem-dev.c for the real ashmem-based version. + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +static bool ashmem_validate_stat(int fd, struct stat* buf) { + int result = fstat(fd, buf); + if (result == -1) { + return false; + } + + /* + * Check if this is an "ashmem" region. + * TODO: This is very hacky, and can easily break. + * We need some reliable indicator. + */ + if (!(buf->st_nlink == 0 && S_ISREG(buf->st_mode))) { + errno = ENOTTY; + return false; + } + return true; +} + +int ashmem_valid(int fd) { + struct stat buf; + return ashmem_validate_stat(fd, &buf); +} + +int ashmem_create_region(const char* /*ignored*/, size_t size) { + char pattern[PATH_MAX]; + snprintf(pattern, sizeof(pattern), "/tmp/android-ashmem-%d-XXXXXXXXX", getpid()); + int fd = mkstemp(pattern); + if (fd == -1) return -1; + + unlink(pattern); + + if (TEMP_FAILURE_RETRY(ftruncate(fd, size)) == -1) { + close(fd); + return -1; + } + + return fd; +} + +int ashmem_set_prot_region(int /*fd*/, int /*prot*/) { + return 0; +} + +int ashmem_pin_region(int /*fd*/, size_t /*offset*/, size_t /*len*/) { + return 0 /*ASHMEM_NOT_PURGED*/; +} + +int ashmem_unpin_region(int /*fd*/, size_t /*offset*/, size_t /*len*/) { + return 0 /*ASHMEM_IS_UNPINNED*/; +} + +int ashmem_get_size_region(int fd) +{ + struct stat buf; + if (!ashmem_validate_stat(fd, &buf)) { + return -1; + } + + return buf.st_size; +} diff --git a/app/src/main/cpp/libcutils/canned_fs_config.cpp b/app/src/main/cpp/libcutils/canned_fs_config.cpp new file mode 100644 index 00000000..2772ef0e --- /dev/null +++ b/app/src/main/cpp/libcutils/canned_fs_config.cpp @@ -0,0 +1,128 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file 
except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +typedef struct { + const char* path; + unsigned uid; + unsigned gid; + unsigned mode; + uint64_t capabilities; +} Path; + +static Path* canned_data = NULL; +static int canned_alloc = 0; +static int canned_used = 0; + +static int path_compare(const void* a, const void* b) { + return strcmp(((Path*)a)->path, ((Path*)b)->path); +} + +int load_canned_fs_config(const char* fn) { + char buf[PATH_MAX + 200]; + FILE* f; + + f = fopen(fn, "r"); + if (f == NULL) { + fprintf(stderr, "failed to open %s: %s\n", fn, strerror(errno)); + return -1; + } + + while (fgets(buf, sizeof(buf), f)) { + Path* p; + char* token; + char* line = buf; + bool rootdir; + + while (canned_used >= canned_alloc) { + canned_alloc = (canned_alloc+1) * 2; + canned_data = (Path*) realloc(canned_data, canned_alloc * sizeof(Path)); + } + p = canned_data + canned_used; + if (line[0] == '/') line++; + rootdir = line[0] == ' '; + p->path = strdup(rootdir ? "" : strtok(line, " ")); + p->uid = atoi(strtok(rootdir ? 
line : NULL, " ")); + p->gid = atoi(strtok(NULL, " ")); + p->mode = strtol(strtok(NULL, " "), NULL, 8); // mode is in octal + p->capabilities = 0; + + do { + token = strtok(NULL, " "); + if (token && strncmp(token, "capabilities=", 13) == 0) { + p->capabilities = strtoll(token+13, NULL, 0); + break; + } + } while (token); + + canned_used++; + } + + fclose(f); + + qsort(canned_data, canned_used, sizeof(Path), path_compare); + printf("loaded %d fs_config entries\n", canned_used); + + return 0; +} + +static const int kDebugCannedFsConfig = 0; + +void canned_fs_config(const char* path, int dir, const char* target_out_path, + unsigned* uid, unsigned* gid, unsigned* mode, uint64_t* capabilities) { + Path key, *p; + + key.path = path; + if (path[0] == '/') key.path++; // canned paths lack the leading '/' + p = (Path*) bsearch(&key, canned_data, canned_used, sizeof(Path), path_compare); + if (p == NULL) { + fprintf(stderr, "failed to find [%s] in canned fs_config\n", path); + exit(1); + } + *uid = p->uid; + *gid = p->gid; + *mode = p->mode; + *capabilities = p->capabilities; + + if (kDebugCannedFsConfig) { + // for debugging, run the built-in fs_config and compare the results. 
+ + unsigned c_uid, c_gid, c_mode; + uint64_t c_capabilities; + + fs_config(path, dir, target_out_path, &c_uid, &c_gid, &c_mode, &c_capabilities); + + if (c_uid != *uid) printf("%s uid %d %d\n", path, *uid, c_uid); + if (c_gid != *gid) printf("%s gid %d %d\n", path, *gid, c_gid); + if (c_mode != *mode) printf("%s mode 0%o 0%o\n", path, *mode, c_mode); + if (c_capabilities != *capabilities) { + printf("%s capabilities %" PRIx64 " %" PRIx64 "\n", + path, + *capabilities, + c_capabilities); + } + } +} diff --git a/app/src/main/cpp/libcutils/config_utils.cpp b/app/src/main/cpp/libcutils/config_utils.cpp new file mode 100644 index 00000000..a3af01a5 --- /dev/null +++ b/app/src/main/cpp/libcutils/config_utils.cpp @@ -0,0 +1,328 @@ +/* + * Copyright (C) 2007 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include +#include +#include +#include +#include + +#include + +cnode* config_node(const char *name, const char *value) +{ + cnode* node = static_cast(calloc(sizeof(cnode), 1)); + if(node) { + node->name = name ? name : ""; + node->value = value ? 
value : ""; + } + + return node; +} + +cnode* config_find(cnode *root, const char *name) +{ + cnode *node, *match = NULL; + + /* we walk the whole list, as we need to return the last (newest) entry */ + for(node = root->first_child; node; node = node->next) + if(!strcmp(node->name, name)) + match = node; + + return match; +} + +static cnode* _config_create(cnode *root, const char *name) +{ + cnode *node; + + node = config_node(name, NULL); + + if(root->last_child) + root->last_child->next = node; + else + root->first_child = node; + + root->last_child = node; + + return node; +} + +int config_bool(cnode *root, const char *name, int _default) +{ + cnode *node; + + node = config_find(root, name); + if(!node) + return _default; + + switch(node->value[0]) { + case 'y': + case 'Y': + case '1': + return 1; + default: + return 0; + } +} + +const char* config_str(cnode *root, const char *name, const char *_default) +{ + cnode *node; + + node = config_find(root, name); + if(!node) + return _default; + return node->value; +} + +void config_set(cnode *root, const char *name, const char *value) +{ + cnode *node; + + node = config_find(root, name); + if(node) + node->value = value; + else { + node = _config_create(root, name); + node->value = value; + } +} + +#define T_EOF 0 +#define T_TEXT 1 +#define T_DOT 2 +#define T_OBRACE 3 +#define T_CBRACE 4 + +typedef struct +{ + char *data; + char *text; + int len; + char next; +} cstate; + +static int _lex(cstate *cs, int value) +{ + char c; + char *s; + char *data; + + data = cs->data; + + if(cs->next != 0) { + c = cs->next; + cs->next = 0; + goto got_c; + } + +restart: + for(;;) { + c = *data++; + got_c: + if(isspace(c)) + continue; + + switch(c) { + case 0: + return T_EOF; + + case '#': + for(;;) { + switch(*data) { + case 0: + cs->data = data; + return T_EOF; + case '\n': + cs->data = data + 1; + goto restart; + default: + data++; + } + } + break; + + case '.': + cs->data = data; + return T_DOT; + + case '{': + cs->data = data; + 
return T_OBRACE; + + case '}': + cs->data = data; + return T_CBRACE; + + default: + s = data - 1; + + if(value) { + for(;;) { + if(*data == 0) { + cs->data = data; + break; + } + if(*data == '\n') { + cs->data = data + 1; + *data-- = 0; + break; + } + data++; + } + + /* strip trailing whitespace */ + while(data > s){ + if(!isspace(*data)) break; + *data-- = 0; + } + + goto got_text; + } else { + for(;;) { + if(isspace(*data)) { + *data = 0; + cs->data = data + 1; + goto got_text; + } + switch(*data) { + case 0: + cs->data = data; + goto got_text; + case '.': + case '{': + case '}': + cs->next = *data; + *data = 0; + cs->data = data + 1; + goto got_text; + default: + data++; + } + } + } + } + } + +got_text: + cs->text = s; + return T_TEXT; +} + +#if 0 +char *TOKENNAMES[] = { "EOF", "TEXT", "DOT", "OBRACE", "CBRACE" }; + +static int lex(cstate *cs, int value) +{ + int tok = _lex(cs, value); + printf("TOKEN(%d) %s %s\n", value, TOKENNAMES[tok], + tok == T_TEXT ? cs->text : ""); + return tok; +} +#else +#define lex(cs,v) _lex(cs,v) +#endif + +static int parse_expr(cstate *cs, cnode *node); + +static int parse_block(cstate *cs, cnode *node) +{ + for(;;){ + switch(lex(cs, 0)){ + case T_TEXT: + if(parse_expr(cs, node)) return -1; + continue; + + case T_CBRACE: + return 0; + + default: + return -1; + } + } +} + +static int parse_expr(cstate *cs, cnode *root) +{ + cnode *node; + + /* last token was T_TEXT */ + node = config_find(root, cs->text); + if(!node || *node->value) + node = _config_create(root, cs->text); + + for(;;) { + switch(lex(cs, 1)) { + case T_DOT: + if(lex(cs, 0) != T_TEXT) + return -1; + node = _config_create(node, cs->text); + continue; + + case T_TEXT: + node->value = cs->text; + return 0; + + case T_OBRACE: + return parse_block(cs, node); + + default: + return -1; + } + } +} + +void config_load(cnode *root, char *data) +{ + if(data != 0) { + cstate cs; + cs.data = data; + cs.next = 0; + + for(;;) { + switch(lex(&cs, 0)) { + case T_TEXT: + 
if(parse_expr(&cs, root)) + return; + break; + default: + return; + } + } + } +} + +void config_load_file(cnode *root, const char *fn) +{ + char* data = static_cast(load_file(fn, nullptr)); + config_load(root, data); + // TODO: deliberate leak :-/ +} + +void config_free(cnode *root) +{ + cnode *cur = root->first_child; + + while (cur) { + cnode *prev = cur; + config_free(cur); + cur = cur->next; + free(prev); + } +} diff --git a/app/src/main/cpp/libcutils/include/cutils/compiler.h b/app/src/main/cpp/libcutils/include/cutils/compiler.h new file mode 100644 index 00000000..70f884a1 --- /dev/null +++ b/app/src/main/cpp/libcutils/include/cutils/compiler.h @@ -0,0 +1,44 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_CUTILS_COMPILER_H +#define ANDROID_CUTILS_COMPILER_H + +/* + * helps the compiler's optimizer predicting branches + */ + +#ifdef __cplusplus +# define CC_LIKELY( exp ) (__builtin_expect( !!(exp), true )) +# define CC_UNLIKELY( exp ) (__builtin_expect( !!(exp), false )) +#else +# define CC_LIKELY( exp ) (__builtin_expect( !!(exp), 1 )) +# define CC_UNLIKELY( exp ) (__builtin_expect( !!(exp), 0 )) +#endif + +/** + * exports marked symbols + * + * if used on a C++ class declaration, this macro must be inserted + * after the "class" keyword. 
For instance: + * + * template + * class ANDROID_API Singleton { } + */ + +#define ANDROID_API __attribute__((visibility("default"))) + +#endif // ANDROID_CUTILS_COMPILER_H diff --git a/app/src/main/cpp/libcutils/include/log/log.h b/app/src/main/cpp/libcutils/include/log/log.h new file mode 100644 index 00000000..6bf1f5af --- /dev/null +++ b/app/src/main/cpp/libcutils/include/log/log.h @@ -0,0 +1,65 @@ +// DO NOT INCLUDE ANYTHING NEW IN THIS FILE. + +// has replaced this file and all changes should go there instead. +// This path remains strictly to include that header as there are thousands of +// references to in the tree. + +// #include +#include +#include + + +#ifdef NDEBUG +#define LOG_NDEBUG 1 +#else +#define LOG_NDEBUG 0 +#endif + +#define LOG_TAG "MPLOG" +#define ALOGV(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG,__VA_ARGS__) +#define ALOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG,__VA_ARGS__) +#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__) +#define ALOGW(...) __android_log_print(ANDROID_LOG_WARN, LOG_TAG, __VA_ARGS__) +#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG,__VA_ARGS__) + + +#define android_printAssert(cond, tag, ...) \ + __android_log_assert(cond, tag, \ + __android_second(0, ##__VA_ARGS__, NULL) \ + __android_rest(__VA_ARGS__)) + + +#define __FAKE_USE_VA_ARGS(...) ((void)(0)) + +#ifndef LOG_ALWAYS_FATAL_IF +#define LOG_ALWAYS_FATAL_IF(cond, ...) \ + ((__predict_false(cond)) ? (__FAKE_USE_VA_ARGS(__VA_ARGS__), \ + ((void)android_printAssert(#cond, LOG_TAG, ##__VA_ARGS__))) \ + : ((void)0)) +#endif + +#ifndef LOG_ALWAYS_FATAL +#define LOG_ALWAYS_FATAL(...) \ + (((void)android_printAssert(NULL, LOG_TAG, ##__VA_ARGS__))) +#endif + +#if NDEBUG + +#ifndef LOG_FATAL_IF +#define LOG_FATAL_IF(cond, ...) __FAKE_USE_VA_ARGS(__VA_ARGS__) +#endif +#ifndef LOG_FATAL +#define LOG_FATAL(...) __FAKE_USE_VA_ARGS(__VA_ARGS__) +#endif + +#else + +#ifndef LOG_FATAL_IF +#define LOG_FATAL_IF(cond, ...) 
LOG_ALWAYS_FATAL_IF(cond, ##__VA_ARGS__) +#endif +#ifndef LOG_FATAL +#define LOG_FATAL(...) LOG_ALWAYS_FATAL(__VA_ARGS__) +#endif + +#endif + diff --git a/app/src/main/cpp/libutils/SharedBuffer.cpp b/app/src/main/cpp/libutils/SharedBuffer.cpp new file mode 100644 index 00000000..394492bb --- /dev/null +++ b/app/src/main/cpp/libutils/SharedBuffer.cpp @@ -0,0 +1,144 @@ +/* + * Copyright (C) 2005 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "sharedbuffer" + +#include "SharedBuffer.h" + +#include +#include + +#include + +// --------------------------------------------------------------------------- + +namespace android { + +SharedBuffer* SharedBuffer::alloc(size_t size) +{ + // Don't overflow if the combined size of the buffer / header is larger than + // size_max. +#if 0 + LOG_ALWAYS_FATAL_IF((size >= (SIZE_MAX - sizeof(SharedBuffer))), + "Invalid buffer size %zu", size); +#endif + + SharedBuffer* sb = static_cast(malloc(sizeof(SharedBuffer) + size)); + if (sb) { + // Should be std::atomic_init(&sb->mRefs, 1); + // But that generates a warning with some compilers. + // The following is OK on Android-supported platforms. 
+ sb->mRefs.store(1, std::memory_order_relaxed); + sb->mSize = size; + sb->mClientMetadata = 0; + } + return sb; +} + + +void SharedBuffer::dealloc(const SharedBuffer* released) +{ + free(const_cast(released)); +} + +SharedBuffer* SharedBuffer::edit() const +{ + if (onlyOwner()) { + return const_cast(this); + } + SharedBuffer* sb = alloc(mSize); + if (sb) { + memcpy(sb->data(), data(), size()); + release(); + } + return sb; +} + +SharedBuffer* SharedBuffer::editResize(size_t newSize) const +{ + if (onlyOwner()) { + SharedBuffer* buf = const_cast(this); + if (buf->mSize == newSize) return buf; + // Don't overflow if the combined size of the new buffer / header is larger than + // size_max. +#if 0 + LOG_ALWAYS_FATAL_IF((newSize >= (SIZE_MAX - sizeof(SharedBuffer))), + "Invalid buffer size %zu", newSize); +#endif + + buf = (SharedBuffer*)realloc(buf, sizeof(SharedBuffer) + newSize); + if (buf != nullptr) { + buf->mSize = newSize; + return buf; + } + } + SharedBuffer* sb = alloc(newSize); + if (sb) { + const size_t mySize = mSize; + memcpy(sb->data(), data(), newSize < mySize ? newSize : mySize); + release(); + } + return sb; +} + +SharedBuffer* SharedBuffer::attemptEdit() const +{ + if (onlyOwner()) { + return const_cast(this); + } + return nullptr; +} + +SharedBuffer* SharedBuffer::reset(size_t new_size) const +{ + // cheap-o-reset. + SharedBuffer* sb = alloc(new_size); + if (sb) { + release(); + } + return sb; +} + +void SharedBuffer::acquire() const { + mRefs.fetch_add(1, std::memory_order_relaxed); +} + +int32_t SharedBuffer::release(uint32_t flags) const +{ + const bool useDealloc = ((flags & eKeepStorage) == 0); + if (onlyOwner()) { + // Since we're the only owner, our reference count goes to zero. + mRefs.store(0, std::memory_order_relaxed); + if (useDealloc) { + dealloc(this); + } + // As the only owner, our previous reference count was 1. + return 1; + } + // There's multiple owners, we need to use an atomic decrement. 
+ int32_t prevRefCount = mRefs.fetch_sub(1, std::memory_order_release); + if (prevRefCount == 1) { + // We're the last reference, we need the acquire fence. + atomic_thread_fence(std::memory_order_acquire); + if (useDealloc) { + dealloc(this); + } + } + return prevRefCount; +} + + +}; // namespace android diff --git a/app/src/main/cpp/libutils/SharedBuffer.h b/app/src/main/cpp/libutils/SharedBuffer.h new file mode 100644 index 00000000..476c842f --- /dev/null +++ b/app/src/main/cpp/libutils/SharedBuffer.h @@ -0,0 +1,151 @@ +/* + * Copyright (C) 2005 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * DEPRECATED. DO NOT USE FOR NEW CODE. + */ + +#ifndef ANDROID_SHARED_BUFFER_H +#define ANDROID_SHARED_BUFFER_H + +#include +#include +#include + +// --------------------------------------------------------------------------- + +namespace android { + +class SharedBuffer +{ +public: + + /* flags to use with release() */ + enum { + eKeepStorage = 0x00000001 + }; + + /*! allocate a buffer of size 'size' and acquire() it. + * call release() to free it. + */ + static SharedBuffer* alloc(size_t size); + + /*! free the memory associated with the SharedBuffer. + * Fails if there are any users associated with this SharedBuffer. + * In other words, the buffer must have been release by all its + * users. + */ + static void dealloc(const SharedBuffer* released); + + //! 
access the data for read + inline const void* data() const; + + //! access the data for read/write + inline void* data(); + + //! get size of the buffer + inline size_t size() const; + + //! get back a SharedBuffer object from its data + static inline SharedBuffer* bufferFromData(void* data); + + //! get back a SharedBuffer object from its data + static inline const SharedBuffer* bufferFromData(const void* data); + + //! get the size of a SharedBuffer object from its data + static inline size_t sizeFromData(const void* data); + + //! edit the buffer (get a writtable, or non-const, version of it) + SharedBuffer* edit() const; + + //! edit the buffer, resizing if needed + SharedBuffer* editResize(size_t size) const; + + //! like edit() but fails if a copy is required + SharedBuffer* attemptEdit() const; + + //! resize and edit the buffer, loose it's content. + SharedBuffer* reset(size_t size) const; + + //! acquire/release a reference on this buffer + void acquire() const; + + /*! release a reference on this buffer, with the option of not + * freeing the memory associated with it if it was the last reference + * returns the previous reference count + */ + int32_t release(uint32_t flags = 0) const; + + //! returns wether or not we're the only owner + inline bool onlyOwner() const; + + +private: + inline SharedBuffer() { } + inline ~SharedBuffer() { } + SharedBuffer(const SharedBuffer&); + SharedBuffer& operator = (const SharedBuffer&); + + // Must be sized to preserve correct alignment. + mutable std::atomic mRefs; + size_t mSize; + uint32_t mReserved; +public: + // mClientMetadata is reserved for client use. It is initialized to 0 + // and the clients can do whatever they want with it. Note that this is + // placed last so that it is adjcent to the buffer allocated. 
+ uint32_t mClientMetadata; +}; + +static_assert(sizeof(SharedBuffer) % 8 == 0 + && (sizeof(size_t) > 4 || sizeof(SharedBuffer) == 16), + "SharedBuffer has unexpected size"); + +// --------------------------------------------------------------------------- + +const void* SharedBuffer::data() const { + return this + 1; +} + +void* SharedBuffer::data() { + return this + 1; +} + +size_t SharedBuffer::size() const { + return mSize; +} + +SharedBuffer* SharedBuffer::bufferFromData(void* data) { + return data ? static_cast(data)-1 : nullptr; +} + +const SharedBuffer* SharedBuffer::bufferFromData(const void* data) { + return data ? static_cast(data)-1 : nullptr; +} + +size_t SharedBuffer::sizeFromData(const void* data) { + return data ? bufferFromData(data)->mSize : 0; +} + +bool SharedBuffer::onlyOwner() const { + return (mRefs.load(std::memory_order_acquire) == 1); +} + +} // namespace android + +// --------------------------------------------------------------------------- + +#endif // ANDROID_VECTOR_H diff --git a/app/src/main/cpp/libutils/StrongPointer.cpp b/app/src/main/cpp/libutils/StrongPointer.cpp new file mode 100644 index 00000000..f8c67919 --- /dev/null +++ b/app/src/main/cpp/libutils/StrongPointer.cpp @@ -0,0 +1,37 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "sp" + +#include + +namespace android { + +void sp_report_race() +{ +#if 0 + LOG_ALWAYS_FATAL("sp<> assignment detected data race"); +#endif +} + +void sp_report_stack_pointer() +{ +#if 0 + LOG_ALWAYS_FATAL("sp<> constructed with stack pointer argument"); +#endif +} + +} diff --git a/app/src/main/cpp/libutils/include/utils/Errors.h b/app/src/main/cpp/libutils/include/utils/Errors.h new file mode 100644 index 00000000..d14d2231 --- /dev/null +++ b/app/src/main/cpp/libutils/include/utils/Errors.h @@ -0,0 +1,85 @@ +/* + * Copyright (C) 2007 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include +#include +#include +#include + +namespace android { + +/** + * The type used to return success/failure from frameworks APIs. + * See the anonymous enum below for valid values. + */ +typedef int32_t status_t; + +/* + * Error codes. + * All error codes are negative values. + */ + +// Win32 #defines NO_ERROR as well. It has the same value, so there's no +// real conflict, though it's a bit awkward. +#ifdef _WIN32 +# undef NO_ERROR +#endif + +enum { + OK = 0, // Preferred constant for checking success. + NO_ERROR = OK, // Deprecated synonym for `OK`. Prefer `OK` because it doesn't conflict with Windows. 
+ + UNKNOWN_ERROR = (-2147483647-1), // INT32_MIN value + + NO_MEMORY = -ENOMEM, + INVALID_OPERATION = -ENOSYS, + BAD_VALUE = -EINVAL, + BAD_TYPE = (UNKNOWN_ERROR + 1), + NAME_NOT_FOUND = -ENOENT, + PERMISSION_DENIED = -EPERM, + NO_INIT = -ENODEV, + ALREADY_EXISTS = -EEXIST, + DEAD_OBJECT = -EPIPE, + FAILED_TRANSACTION = (UNKNOWN_ERROR + 2), +#if !defined(_WIN32) + BAD_INDEX = -EOVERFLOW, + NOT_ENOUGH_DATA = -ENODATA, + WOULD_BLOCK = -EWOULDBLOCK, + TIMED_OUT = -ETIMEDOUT, + UNKNOWN_TRANSACTION = -EBADMSG, +#else + BAD_INDEX = -E2BIG, + NOT_ENOUGH_DATA = (UNKNOWN_ERROR + 3), + WOULD_BLOCK = (UNKNOWN_ERROR + 4), + TIMED_OUT = (UNKNOWN_ERROR + 5), + UNKNOWN_TRANSACTION = (UNKNOWN_ERROR + 6), +#endif + FDS_NOT_ALLOWED = (UNKNOWN_ERROR + 7), + UNEXPECTED_NULL = (UNKNOWN_ERROR + 8), +}; + +// Human readable name of error +std::string statusToString(status_t status); + +// Restore define; enumeration is in "android" namespace, so the value defined +// there won't work for Win32 code in a different namespace. +#ifdef _WIN32 +# define NO_ERROR 0L +#endif + +} // namespace android diff --git a/app/src/main/cpp/libutils/include/utils/LightRefBase.h b/app/src/main/cpp/libutils/include/utils/LightRefBase.h new file mode 100644 index 00000000..b04e5c15 --- /dev/null +++ b/app/src/main/cpp/libutils/include/utils/LightRefBase.h @@ -0,0 +1,70 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +/* + * See documentation in RefBase.h + */ + +#include + +#include + +namespace android { + +class ReferenceRenamer; + +template +class LightRefBase +{ +public: + inline LightRefBase() : mCount(0) { } + inline void incStrong(__attribute__((unused)) const void* id) const { + mCount.fetch_add(1, std::memory_order_relaxed); + } + inline void decStrong(__attribute__((unused)) const void* id) const { + if (mCount.fetch_sub(1, std::memory_order_release) == 1) { + std::atomic_thread_fence(std::memory_order_acquire); + delete static_cast(this); + } + } + //! DEBUGGING ONLY: Get current strong ref count. + inline int32_t getStrongCount() const { + return mCount.load(std::memory_order_relaxed); + } + +protected: + inline ~LightRefBase() { } + +private: + friend class ReferenceMover; + inline static void renameRefs(size_t /*n*/, const ReferenceRenamer& /*renamer*/) { } + inline static void renameRefId(T* /*ref*/, const void* /*old_id*/ , const void* /*new_id*/) { } + +private: + mutable std::atomic mCount; +}; + + +// This is a wrapper around LightRefBase that simply enforces a virtual +// destructor to eliminate the template requirement of LightRefBase +class VirtualLightRefBase : public LightRefBase { +public: + virtual ~VirtualLightRefBase() = default; +}; + +} // namespace android diff --git a/app/src/main/cpp/libutils/include/utils/Log.h b/app/src/main/cpp/libutils/include/utils/Log.h new file mode 100644 index 00000000..408d717f --- /dev/null +++ b/app/src/main/cpp/libutils/include/utils/Log.h @@ -0,0 +1,9 @@ +// DO NOT INCLUDE ANYTHING NEW IN THIS FILE. + +// has replaced this file and all changes should go there instead. +// This path remains strictly to include that header as there are thousands of +// references to in the tree. 
+ +// #include +#include +#include \ No newline at end of file diff --git a/app/src/main/cpp/libutils/include/utils/RefBase.h b/app/src/main/cpp/libutils/include/utils/RefBase.h new file mode 100644 index 00000000..89f048db --- /dev/null +++ b/app/src/main/cpp/libutils/include/utils/RefBase.h @@ -0,0 +1,713 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +// SOME COMMENTS ABOUT USAGE: + +// This provides primarily wp<> weak pointer types and RefBase, which work +// together with sp<> from . + +// sp<> (and wp<>) are a type of smart pointer that use a well defined protocol +// to operate. As long as the object they are templated with implements that +// protocol, these smart pointers work. In several places the platform +// instantiates sp<> with non-RefBase objects; the two are not tied to each +// other. + +// RefBase is such an implementation and it supports strong pointers, weak +// pointers and some magic features for the binder. + +// So, when using RefBase objects, you have the ability to use strong and weak +// pointers through sp<> and wp<>. + +// Normally, when the last strong pointer goes away, the object is destroyed, +// i.e. it's destructor is called. HOWEVER, parts of its associated memory is not +// freed until the last weak pointer is released. + +// Weak pointers are essentially "safe" pointers. They are always safe to +// access through promote(). 
They may return nullptr if the object was +// destroyed because it ran out of strong pointers. This makes them good candidates +// for keys in a cache for instance. + +// Weak pointers remain valid for comparison purposes even after the underlying +// object has been destroyed. Even if object A is destroyed and its memory reused +// for B, A remaining weak pointer to A will not compare equal to one to B. +// This again makes them attractive for use as keys. + +// How is this supposed / intended to be used? + +// Our recommendation is to use strong references (sp<>) when there is an +// ownership relation. e.g. when an object "owns" another one, use a strong +// ref. And of course use strong refs as arguments of functions (it's extremely +// rare that a function will take a wp<>). + +// Typically a newly allocated object will immediately be used to initialize +// a strong pointer, which may then be used to construct or assign to other +// strong and weak pointers. + +// Use weak references when there are no ownership relation. e.g. the keys in a +// cache (you cannot use plain pointers because there is no safe way to acquire +// a strong reference from a vanilla pointer). + +// This implies that two objects should never (or very rarely) have sp<> on +// each other, because they can't both own each other. + + +// Caveats with reference counting + +// Obviously, circular strong references are a big problem; this creates leaks +// and it's hard to debug -- except it's in fact really easy because RefBase has +// tons of debugging code for that. It can basically tell you exactly where the +// leak is. + +// Another problem has to do with destructors with side effects. You must +// assume that the destructor of reference counted objects can be called AT ANY +// TIME. For instance code as simple as this: + +// void setStuff(const sp& stuff) { +// std::lock_guard lock(mMutex); +// mStuff = stuff; +// } + +// is very dangerous. This code WILL deadlock one day or another. 
+ +// What isn't obvious is that ~Stuff() can be called as a result of the +// assignment. And it gets called with the lock held. First of all, the lock is +// protecting mStuff, not ~Stuff(). Secondly, if ~Stuff() uses its own internal +// mutex, now you have mutex ordering issues. Even worse, if ~Stuff() is +// virtual, now you're calling into "user" code (potentially), by that, I mean, +// code you didn't even write. + +// A correct way to write this code is something like: + +// void setStuff(const sp& stuff) { +// std::unique_lock lock(mMutex); +// sp hold = mStuff; +// mStuff = stuff; +// lock.unlock(); +// } + +// More importantly, reference counted objects should do as little work as +// possible in their destructor, or at least be mindful that their destructor +// could be called from very weird and unintended places. + +// Other more specific restrictions for wp<> and sp<>: + +// Do not construct a strong pointer to "this" in an object's constructor. +// The onFirstRef() callback would be made on an incompletely constructed +// object. +// Construction of a weak pointer to "this" in an object's constructor is also +// discouraged. But the implementation was recently changed so that, in the +// absence of extendObjectLifetime() calls, weak pointers no longer impact +// object lifetime, and hence this no longer risks premature deallocation, +// and hence usually works correctly. + +// Such strong or weak pointers can be safely created in the RefBase onFirstRef() +// callback. + +// Use of wp::unsafe_get() for any purpose other than debugging is almost +// always wrong. Unless you somehow know that there is a longer-lived sp<> to +// the same object, it may well return a pointer to a deallocated object that +// has since been reallocated for a different purpose. (And if you know there +// is a longer-lived sp<>, why not use an sp<> directly?) A wp<> should only be +// dereferenced by using promote(). 
+ +// Any object inheriting from RefBase should always be destroyed as the result +// of a reference count decrement, not via any other means. Such objects +// should never be stack allocated, or appear directly as data members in other +// objects. Objects inheriting from RefBase should have their strong reference +// count incremented as soon as possible after construction. Usually this +// will be done via construction of an sp<> to the object, but may instead +// involve other means of calling RefBase::incStrong(). +// Explicitly deleting or otherwise destroying a RefBase object with outstanding +// wp<> or sp<> pointers to it will result in an abort or heap corruption. + +// It is particularly important not to mix sp<> and direct storage management +// since the sp from raw pointer constructor is implicit. Thus if a RefBase- +// -derived object of type T is managed without ever incrementing its strong +// count, and accidentally passed to f(sp), a strong pointer to the object +// will be temporarily constructed and destroyed, prematurely deallocating the +// object, and resulting in heap corruption. None of this would be easily +// visible in the source. + +// Extra Features: + +// RefBase::extendObjectLifetime() can be used to prevent destruction of the +// object while there are still weak references. This is really special purpose +// functionality to support Binder. + +// Wp::promote(), implemented via the attemptIncStrong() member function, is +// used to try to convert a weak pointer back to a strong pointer. It's the +// normal way to try to access the fields of an object referenced only through +// a wp<>. Binder code also sometimes uses attemptIncStrong() directly. + +// RefBase provides a number of additional callbacks for certain reference count +// events, as well as some debugging facilities. + +// Debugging support can be enabled by turning on DEBUG_REFS in RefBase.cpp. +// Otherwise little checking is provided. 
+ +// Thread safety: + +// Like std::shared_ptr, sp<> and wp<> allow concurrent accesses to DIFFERENT +// sp<> and wp<> instances that happen to refer to the same underlying object. +// They do NOT support concurrent access (where at least one access is a write) +// to THE SAME sp<> or wp<>. In effect, their thread-safety properties are +// exactly like those of T*, NOT atomic. + +#ifndef ANDROID_REF_BASE_H +#define ANDROID_REF_BASE_H + +#include +#include +#include // for common_type. + +#include +#include +#include +#include + +// LightRefBase used to be declared in this header, so we have to include it +#include + +#include +#include + +// --------------------------------------------------------------------------- +namespace android { + +// --------------------------------------------------------------------------- + +#define COMPARE_WEAK(_op_) \ +template \ +inline bool operator _op_ (const U* o) const { \ + return m_ptr _op_ o; \ +} \ +/* Needed to handle type inference for nullptr: */ \ +inline bool operator _op_ (const T* o) const { \ + return m_ptr _op_ o; \ +} + +template class comparator, typename T, typename U> +static inline bool _wp_compare_(T* a, U* b) { + return comparator::type>()(a, b); +} + +// Use std::less and friends to avoid undefined behavior when ordering pointers +// to different objects. +#define COMPARE_WEAK_FUNCTIONAL(_op_, _compare_) \ +template \ +inline bool operator _op_ (const U* o) const { \ + return _wp_compare_<_compare_>(m_ptr, o); \ +} + +// --------------------------------------------------------------------------- + +// RefererenceRenamer is pure abstract, there is no virtual method +// implementation to put in a translation unit in order to silence the +// weak vtables warning. 
+#if defined(__clang__) +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wweak-vtables" +#endif + +class ReferenceRenamer { +protected: + // destructor is purposely not virtual so we avoid code overhead from + // subclasses; we have to make it protected to guarantee that it + // cannot be called from this base class (and to make strict compilers + // happy). + ~ReferenceRenamer() { } +public: + virtual void operator()(size_t i) const = 0; +}; + +#if defined(__clang__) +#pragma clang diagnostic pop +#endif + +// --------------------------------------------------------------------------- + +class RefBase +{ +public: + void incStrong(const void* id) const; + void decStrong(const void* id) const; + + void forceIncStrong(const void* id) const; + + //! DEBUGGING ONLY: Get current strong ref count. + int32_t getStrongCount() const; + + class weakref_type + { + public: + RefBase* refBase() const; + + void incWeak(const void* id); + void decWeak(const void* id); + + // acquires a strong reference if there is already one. + bool attemptIncStrong(const void* id); + + // acquires a weak reference if there is already one. + // This is not always safe. see ProcessState.cpp and BpBinder.cpp + // for proper use. + bool attemptIncWeak(const void* id); + + //! DEBUGGING ONLY: Get current weak ref count. + int32_t getWeakCount() const; + + //! DEBUGGING ONLY: Print references held on object. + void printRefs() const; + + //! DEBUGGING ONLY: Enable tracking for this object. + // enable -- enable/disable tracking + // retain -- when tracking is enable, if true, then we save a stack trace + // for each reference and dereference; when retain == false, we + // match up references and dereferences and keep only the + // outstanding ones. + + void trackMe(bool enable, bool retain); + }; + + weakref_type* createWeak(const void* id) const; + + weakref_type* getWeakRefs() const; + + //! DEBUGGING ONLY: Print references held on object. 
+ inline void printRefs() const { getWeakRefs()->printRefs(); } + + //! DEBUGGING ONLY: Enable tracking of object. + inline void trackMe(bool enable, bool retain) + { + getWeakRefs()->trackMe(enable, retain); + } + +protected: + RefBase(); + virtual ~RefBase(); + + //! Flags for extendObjectLifetime() + enum { + OBJECT_LIFETIME_STRONG = 0x0000, + OBJECT_LIFETIME_WEAK = 0x0001, + OBJECT_LIFETIME_MASK = 0x0001 + }; + + void extendObjectLifetime(int32_t mode); + + //! Flags for onIncStrongAttempted() + enum { + FIRST_INC_STRONG = 0x0001 + }; + + // Invoked after creation of initial strong pointer/reference. + virtual void onFirstRef(); + // Invoked when either the last strong reference goes away, or we need to undo + // the effect of an unnecessary onIncStrongAttempted. + virtual void onLastStrongRef(const void* id); + // Only called in OBJECT_LIFETIME_WEAK case. Returns true if OK to promote to + // strong reference. May have side effects if it returns true. + // The first flags argument is always FIRST_INC_STRONG. + // TODO: Remove initial flag argument. + virtual bool onIncStrongAttempted(uint32_t flags, const void* id); + // Invoked in the OBJECT_LIFETIME_WEAK case when the last reference of either + // kind goes away. Unused. + // TODO: Remove. 
+ virtual void onLastWeakRef(const void* id); + +private: + friend class weakref_type; + class weakref_impl; + + RefBase(const RefBase& o); + RefBase& operator=(const RefBase& o); + +private: + friend class ReferenceMover; + + static void renameRefs(size_t n, const ReferenceRenamer& renamer); + + static void renameRefId(weakref_type* ref, + const void* old_id, const void* new_id); + + static void renameRefId(RefBase* ref, + const void* old_id, const void* new_id); + + weakref_impl* const mRefs; +}; + +// --------------------------------------------------------------------------- + +template +class wp +{ +public: + typedef typename RefBase::weakref_type weakref_type; + + inline wp() : m_ptr(nullptr), m_refs(nullptr) { } + + wp(T* other); // NOLINT(implicit) + wp(const wp& other); + explicit wp(const sp& other); + template wp(U* other); // NOLINT(implicit) + template wp(const sp& other); // NOLINT(implicit) + template wp(const wp& other); // NOLINT(implicit) + + ~wp(); + + // Assignment + + wp& operator = (T* other); + wp& operator = (const wp& other); + wp& operator = (const sp& other); + + template wp& operator = (U* other); + template wp& operator = (const wp& other); + template wp& operator = (const sp& other); + + void set_object_and_refs(T* other, weakref_type* refs); + + // promotion to sp + + sp promote() const; + + // Reset + + void clear(); + + // Accessors + + inline weakref_type* get_refs() const { return m_refs; } + + inline T* unsafe_get() const { return m_ptr; } + + // Operators + + COMPARE_WEAK(==) + COMPARE_WEAK(!=) + COMPARE_WEAK_FUNCTIONAL(>, std::greater) + COMPARE_WEAK_FUNCTIONAL(<, std::less) + COMPARE_WEAK_FUNCTIONAL(<=, std::less_equal) + COMPARE_WEAK_FUNCTIONAL(>=, std::greater_equal) + + template + inline bool operator == (const wp& o) const { + return m_refs == o.m_refs; // Implies m_ptr == o.mptr; see invariants below. 
+ } + + template + inline bool operator == (const sp& o) const { + // Just comparing m_ptr fields is often dangerous, since wp<> may refer to an older + // object at the same address. + if (o == nullptr) { + return m_ptr == nullptr; + } else { + return m_refs == o->getWeakRefs(); // Implies m_ptr == o.mptr. + } + } + + template + inline bool operator != (const sp& o) const { + return !(*this == o); + } + + template + inline bool operator > (const wp& o) const { + if (m_ptr == o.m_ptr) { + return _wp_compare_(m_refs, o.m_refs); + } else { + return _wp_compare_(m_ptr, o.m_ptr); + } + } + + template + inline bool operator < (const wp& o) const { + if (m_ptr == o.m_ptr) { + return _wp_compare_(m_refs, o.m_refs); + } else { + return _wp_compare_(m_ptr, o.m_ptr); + } + } + template inline bool operator != (const wp& o) const { return !operator == (o); } + template inline bool operator <= (const wp& o) const { return !operator > (o); } + template inline bool operator >= (const wp& o) const { return !operator < (o); } + +private: + template friend class sp; + template friend class wp; + + T* m_ptr; + weakref_type* m_refs; +}; + +#undef COMPARE_WEAK +#undef COMPARE_WEAK_FUNCTIONAL + +// --------------------------------------------------------------------------- +// No user serviceable parts below here. + +// Implementation invariants: +// Either +// 1) m_ptr and m_refs are both null, or +// 2) m_refs == m_ptr->mRefs, or +// 3) *m_ptr is no longer live, and m_refs points to the weakref_type object that corresponded +// to m_ptr while it was live. *m_refs remains live while a wp<> refers to it. +// +// The m_refs field in a RefBase object is allocated on construction, unique to that RefBase +// object, and never changes. Thus if two wp's have identical m_refs fields, they are either both +// null or point to the same object. 
If two wp's have identical m_ptr fields, they either both +// point to the same live object and thus have the same m_ref fields, or at least one of the +// objects is no longer live. +// +// Note that the above comparison operations go out of their way to provide an ordering consistent +// with ordinary pointer comparison; otherwise they could ignore m_ptr, and just compare m_refs. + +template +wp::wp(T* other) + : m_ptr(other) +{ + m_refs = other ? m_refs = other->createWeak(this) : nullptr; +} + +template +wp::wp(const wp& other) + : m_ptr(other.m_ptr), m_refs(other.m_refs) +{ + if (m_ptr) m_refs->incWeak(this); +} + +template +wp::wp(const sp& other) + : m_ptr(other.m_ptr) +{ + m_refs = m_ptr ? m_ptr->createWeak(this) : nullptr; +} + +template template +wp::wp(U* other) + : m_ptr(other) +{ + m_refs = other ? other->createWeak(this) : nullptr; +} + +template template +wp::wp(const wp& other) + : m_ptr(other.m_ptr) +{ + if (m_ptr) { + m_refs = other.m_refs; + m_refs->incWeak(this); + } else { + m_refs = nullptr; + } +} + +template template +wp::wp(const sp& other) + : m_ptr(other.m_ptr) +{ + m_refs = m_ptr ? m_ptr->createWeak(this) : nullptr; +} + +template +wp::~wp() +{ + if (m_ptr) m_refs->decWeak(this); +} + +template +wp& wp::operator = (T* other) +{ + weakref_type* newRefs = + other ? other->createWeak(this) : nullptr; + if (m_ptr) m_refs->decWeak(this); + m_ptr = other; + m_refs = newRefs; + return *this; +} + +template +wp& wp::operator = (const wp& other) +{ + weakref_type* otherRefs(other.m_refs); + T* otherPtr(other.m_ptr); + if (otherPtr) otherRefs->incWeak(this); + if (m_ptr) m_refs->decWeak(this); + m_ptr = otherPtr; + m_refs = otherRefs; + return *this; +} + +template +wp& wp::operator = (const sp& other) +{ + weakref_type* newRefs = + other != nullptr ? 
other->createWeak(this) : nullptr; + T* otherPtr(other.m_ptr); + if (m_ptr) m_refs->decWeak(this); + m_ptr = otherPtr; + m_refs = newRefs; + return *this; +} + +template template +wp& wp::operator = (U* other) +{ + weakref_type* newRefs = + other ? other->createWeak(this) : 0; + if (m_ptr) m_refs->decWeak(this); + m_ptr = other; + m_refs = newRefs; + return *this; +} + +template template +wp& wp::operator = (const wp& other) +{ + weakref_type* otherRefs(other.m_refs); + U* otherPtr(other.m_ptr); + if (otherPtr) otherRefs->incWeak(this); + if (m_ptr) m_refs->decWeak(this); + m_ptr = otherPtr; + m_refs = otherRefs; + return *this; +} + +template template +wp& wp::operator = (const sp& other) +{ + weakref_type* newRefs = + other != nullptr ? other->createWeak(this) : 0; + U* otherPtr(other.m_ptr); + if (m_ptr) m_refs->decWeak(this); + m_ptr = otherPtr; + m_refs = newRefs; + return *this; +} + +template +void wp::set_object_and_refs(T* other, weakref_type* refs) +{ + if (other) refs->incWeak(this); + if (m_ptr) m_refs->decWeak(this); + m_ptr = other; + m_refs = refs; +} + +template +sp wp::promote() const +{ + sp result; + if (m_ptr && m_refs->attemptIncStrong(&result)) { + result.set_pointer(m_ptr); + } + return result; +} + +template +void wp::clear() +{ + if (m_ptr) { + m_refs->decWeak(this); + m_refs = 0; + m_ptr = 0; + } +} + +// --------------------------------------------------------------------------- + +// this class just serves as a namespace so TYPE::moveReferences can stay +// private. +class ReferenceMover { +public: + // it would be nice if we could make sure no extra code is generated + // for sp or wp when TYPE is a descendant of RefBase: + // Using a sp override doesn't work; it's a bit like we wanted + // a template template... 
+ + template static inline + void move_references(sp* dest, sp const* src, size_t n) { + + class Renamer : public ReferenceRenamer { + sp* d_; + sp const* s_; + virtual void operator()(size_t i) const { + // The id are known to be the sp<>'s this pointer + TYPE::renameRefId(d_[i].get(), &s_[i], &d_[i]); + } + public: + Renamer(sp* d, sp const* s) : d_(d), s_(s) { } + virtual ~Renamer() { } + }; + + memmove(dest, src, n*sizeof(sp)); + TYPE::renameRefs(n, Renamer(dest, src)); + } + + + template static inline + void move_references(wp* dest, wp const* src, size_t n) { + + class Renamer : public ReferenceRenamer { + wp* d_; + wp const* s_; + virtual void operator()(size_t i) const { + // The id are known to be the wp<>'s this pointer + TYPE::renameRefId(d_[i].get_refs(), &s_[i], &d_[i]); + } + public: + Renamer(wp* rd, wp const* rs) : d_(rd), s_(rs) { } + virtual ~Renamer() { } + }; + + memmove(dest, src, n*sizeof(wp)); + TYPE::renameRefs(n, Renamer(dest, src)); + } +}; + +// specialization for moving sp<> and wp<> types. +// these are used by the [Sorted|Keyed]Vector<> implementations +// sp<> and wp<> need to be handled specially, because they do not +// have trivial copy operation in the general case (see RefBase.cpp +// when DEBUG ops are enabled), but can be implemented very +// efficiently in most cases. 
+ +template inline +void move_forward_type(sp* d, sp const* s, size_t n) { + ReferenceMover::move_references(d, s, n); +} + +template inline +void move_backward_type(sp* d, sp const* s, size_t n) { + ReferenceMover::move_references(d, s, n); +} + +template inline +void move_forward_type(wp* d, wp const* s, size_t n) { + ReferenceMover::move_references(d, s, n); +} + +template inline +void move_backward_type(wp* d, wp const* s, size_t n) { + ReferenceMover::move_references(d, s, n); +} + +} // namespace android + +// --------------------------------------------------------------------------- + +#endif // ANDROID_REF_BASE_H diff --git a/app/src/main/cpp/libutils/include/utils/StrongPointer.h b/app/src/main/cpp/libutils/include/utils/StrongPointer.h new file mode 100644 index 00000000..6f4fb478 --- /dev/null +++ b/app/src/main/cpp/libutils/include/utils/StrongPointer.h @@ -0,0 +1,317 @@ +/* + * Copyright (C) 2005 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_STRONG_POINTER_H +#define ANDROID_STRONG_POINTER_H + +#include +#include // for common_type. 
+ +// --------------------------------------------------------------------------- +namespace android { + +template class wp; + +// --------------------------------------------------------------------------- + +template +class sp { +public: + inline sp() : m_ptr(nullptr) { } + + sp(T* other); // NOLINT(implicit) + sp(const sp& other); + sp(sp&& other) noexcept; + template sp(U* other); // NOLINT(implicit) + template sp(const sp& other); // NOLINT(implicit) + template sp(sp&& other); // NOLINT(implicit) + + ~sp(); + + // Assignment + + sp& operator = (T* other); + sp& operator = (const sp& other); + sp& operator=(sp&& other) noexcept; + + template sp& operator = (const sp& other); + template sp& operator = (sp&& other); + template sp& operator = (U* other); + + //! Special optimization for use by ProcessState (and nobody else). + void force_set(T* other); + + // Reset + + void clear(); + + // Accessors + + inline T& operator* () const { return *m_ptr; } + inline T* operator-> () const { return m_ptr; } + inline T* get() const { return m_ptr; } + inline explicit operator bool () const { return m_ptr != nullptr; } + + // Punt these to the wp<> implementation. 
+ template + inline bool operator == (const wp& o) const { + return o == *this; + } + + template + inline bool operator != (const wp& o) const { + return o != *this; + } + +private: + template friend class sp; + template friend class wp; + void set_pointer(T* ptr); + static inline void check_not_on_stack(const void* ptr); + T* m_ptr; +}; + +#define COMPARE_STRONG(_op_) \ + template \ + static inline bool operator _op_(const sp& t, const sp& u) { \ + return t.get() _op_ u.get(); \ + } \ + template \ + static inline bool operator _op_(const T* t, const sp& u) { \ + return t _op_ u.get(); \ + } \ + template \ + static inline bool operator _op_(const sp& t, const U* u) { \ + return t.get() _op_ u; \ + } \ + template \ + static inline bool operator _op_(const sp& t, std::nullptr_t) { \ + return t.get() _op_ nullptr; \ + } \ + template \ + static inline bool operator _op_(std::nullptr_t, const sp& t) { \ + return nullptr _op_ t.get(); \ + } + +template