Android 11.0 (R) HAL: Integrating a Watermark Algorithm and a Multi-Frame Algorithm into the Camera
1. Foreword
I often run into good articles online, and besides bookmarking them I like to follow along and try things myself. This time it was two posts by qiuxintai:
MTK HAL算法集成之单帧算法 (MTK HAL algorithm integration: single-frame algorithms)
MTK HAL算法集成之多帧算法 (MTK HAL algorithm integration: multi-frame algorithms)
They have real substance and are written in great detail, well worth working through end to end. The originals target MTK Android 9.0, but the flow is largely the same, so I did it on the latest Android 11.
2. Preparation
qiuxintai's articles do not provide the key pieces libwatermark.so, watermark.rgba and libmultiframe.so, so we first have to collect them before we can summon the dragon. After some digging it turns out qiuxintai did leave a few clues: the pieces can be extracted from his GitHub repositories.
https://github.com/qiuxintai/Watermark/
https://github.com/qiuxintai/YUV420Converter/
Even with the raw materials in hand, the show is only getting started: turning them into a form that builds inside AOSP without errors takes some effort. I was stuck on this for several days, which at least filled in a lot of gaps in my knowledge of building .so libraries. Let's get to it!
3. Extracting the Essentials
Create a new Android Studio project, call it jnidemo, then:
copy all files under the cpp folder of YUV420Converter into jnidemo;
take mf_processor_impl.cpp from qiuxintai's multi-frame article;
take watermark.cpp from Watermark and adapt it.
Link to the merged jnidemo project
A few notes on what the CMakeLists.txt means:
cmake_minimum_required(VERSION 3.4.1)
# Add the header include path
include_directories(${CMAKE_SOURCE_DIR}/include)
find_library(log-lib log)
# Build libNativeUtils.so
add_library(
NativeUtils
SHARED
native.cpp
native_utils.cpp
)
target_link_libraries(NativeUtils ${log-lib})
# Import the prebuilt third-party libopencv_java3.so as target opencv3
add_library(opencv3 SHARED IMPORTED)
set_target_properties(
opencv3
PROPERTIES
IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/../jniLibs/${ANDROID_ABI}/libopencv_java3.so
)
# Build libOpencvUtils.so and link it against opencv3
add_library(
OpencvUtils
SHARED
opencv.cpp
opencv_utils.cpp
)
target_link_libraries(OpencvUtils opencv3 ${log-lib})
# Import the prebuilt third-party libyuv.so as target libyuv
add_library(libyuv SHARED IMPORTED)
set_target_properties(
libyuv
PROPERTIES IMPORTED_LOCATION
${CMAKE_SOURCE_DIR}/../jniLibs/${ANDROID_ABI}/libyuv.so
)
# Build libLibyuvUtils.so and link it against libyuv
add_library(
LibyuvUtils
SHARED
libyuv.cpp
libyuv_utils.cpp
)
target_link_libraries(LibyuvUtils libyuv ${log-lib})
# Build libmultiframe.so and link it against libyuv
add_library(
multiframe
SHARED
mf_processor_impl.cpp
)
target_link_libraries(multiframe libyuv ${log-lib})
# Build libwatermark.so
add_library(
watermark
SHARED
watermark.cpp
)
target_link_libraries(watermark ${log-lib})
Configure build.gradle to build the .so files with CMake, and specify the target ABIs:
defaultConfig {
....
/*externalNativeBuild {
ndkBuild {
path "src/main/jni/Android.mk"
}
}*/
externalNativeBuild {
cmake {
cppFlags ""
}
}
ndk {
abiFilters 'arm64-v8a', 'armeabi-v7a'
}
}
externalNativeBuild {
cmake {
path "src/main/cpp/CMakeLists.txt"
version "3.10.2"
}
}
Reference articles:
使用CMake引入第三方so库及头文件 (importing third-party .so libraries and headers with CMake)
AS使用ndkbuild创建cpp工程记录 (notes on creating a cpp project in Android Studio with ndk-build)
ubuntu16.04 安装交叉编译工具aarch64-linux-gnu-gcc/g++ (installing the aarch64-linux-gnu-gcc/g++ cross toolchain on Ubuntu 16.04)
After the project builds successfully, the .so files are located under jnidemo/build/intermediates/cmake/release/obj/arm64-v8a.
Issue 1
vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/customer/tp_watermark/lib/arm64-v8a/libwatermark.so: error: DT_NEEDED "libstdc++.so" is not specified in shared_libs.
vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/customer/tp_watermark/lib/arm64-v8a/libwatermark.so: note:
vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/customer/tp_watermark/lib/arm64-v8a/libwatermark.so: note: Fix suggestions:
vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/customer/tp_watermark/lib/arm64-v8a/libwatermark.so: note: Android.bp: shared_libs: ["libc", "libdl", "libm", "libstdc++"],
vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/customer/tp_watermark/lib/arm64-v8a/libwatermark.so: note: Android.mk: LOCAL_SHARED_LIBRARIES := libc libdl libm libstdc++
vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/customer/tp_watermark/lib/arm64-v8a/libwatermark.so: note:
vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/customer/tp_watermark/lib/arm64-v8a/libwatermark.so: note: If the fix above doesn't work, bypass this check with:
vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/customer/tp_watermark/lib/arm64-v8a/libwatermark.so: note: Android.bp: check_elf_files: false,
vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/customer/tp_watermark/lib/arm64-v8a/libwatermark.so: note: Android.mk: LOCAL_CHECK_ELF_FILES := false
[ 5% 661/11941] target Strip: vendor.mediatek.hardware.camera.bgservice@1.1-impl
The problem was libwatermark.so. I originally built the .so with ndk-build, and dropping it into the source tree produced the odd error above: the build's ELF check found that the prebuilt lists libstdc++.so as a DT_NEEDED dependency that is not declared in shared_libs. At first it didn't occur to me to build with CMake; instead I rebuilt it with the NDK on Ubuntu, with exactly the same result. I eventually went with the CMake-built .so, and the Android.mk below additionally sets LOCAL_CHECK_ELF_FILES := false, the bypass that the error message itself suggests.
4. Injecting the Soul (Watermark Algorithm + Multi-Frame Algorithm)
From here on, follow qiuxintai's steps.
4.1 Add a custom feature
To add a new feature, we define a custom one: TP_FEATURE_WATERMARK.
vendor/mediatek/proprietary/hardware/mtkcam3/include/mtkcam3/3rdparty/customer/customer_feature_type.h
@@ -61,6 +61,7 @@ enum eFeatureIndexCustomer {
TP_FEATURE_RELIGHTING = 1ULL << 45,
TP_FEATURE_ASYNC_ASD = 1ULL << 46,
TP_FEATURE_ZOOM_FUSION = 1ULL << 47, // used by zoom scenario
+ TP_FEATURE_WATERMARK = 1ULL << 48, //cczheng
// TODO: reserve for customer feature index (bit 32-63)
};
vendor/mediatek/proprietary/hardware/mtkcam3/feature/core/featurePipe/capture/CaptureFeature_Common.cpp
@@ -592,6 +592,7 @@ const char* FeatID2Name(FeatureID_T fid)
case FID_RELIGHTING_3RD_PARTY: return "relighting_3rd_party";
case FID_AINR_YHDR: return "ainr_yhdr";
case FID_AIHDR: return "aihdr";
+ case FID_WATERMARK_3RD_PARTY: return "watermark_3rd_party";//cczheng
default: return "unknown";
};
vendor/mediatek/proprietary/hardware/mtkcam3/feature/core/featurePipe/capture/nodes/YUVNode.cpp
@@ -619,6 +619,9 @@ MBOOL YUVNode::onInit()
featId = FID_RELIGHTING_3RD_PARTY;
else if(rProperty.mFeatures & MTK_FEATURE_YHDR_FOR_AINR)
featId = FID_AINR_YHDR;
+ else if (rProperty.mFeatures & TP_FEATURE_WATERMARK)
+ featId = FID_WATERMARK_3RD_PARTY;//cczheng
+
if (featId != NULL_FEATURE) {
MY_LOGD_IF(mLogLevel, "%s finds plugin:%s, priority:%d",
vendor/mediatek/proprietary/hardware/mtkcam3/include/mtkcam3/feature/featurePipe/ICaptureFeaturePipe.h
@@ -191,6 +191,7 @@ enum CaptureFeatureFeatureID {
FID_RELIGHTING_3RD_PARTY,
FID_AINR_YHDR,
FID_AIHDR,
+ FID_WATERMARK_3RD_PARTY,
NUM_OF_FEATURE,
NULL_FEATURE = 0xFF,
};
vendor/mediatek/proprietary/hardware/mtkcam3/pipeline/hwnode/p2/P2_CaptureProcessor.cpp
@@ -487,6 +487,8 @@ MBOOL CaptureProcessor::onEnque(const sp<P2FrameRequest> &pP2FrameRequest)
pCapRequest->addFeature(FID_HFG);
if (feature & MTK_FEATURE_DCE)
pCapRequest->addFeature(FID_DCE);
+ if (feature & TP_FEATURE_WATERMARK)
+ pCapRequest->addFeature(FID_WATERMARK_3RD_PARTY);//cczheng
if (feature & MTK_FEATURE_AINR_YUV)
pCapRequest->addFeature(FID_AINR_YUV);
if (feature & MTK_FEATURE_AINR_YHDR)
4.2 Add the algorithm's feature to the scenario configuration table
When the camera is opened for preview or capture, MTK HAL3 runs vendor/mediatek/proprietary/hardware/mtkcam3/pipeline/policy/FeatureSettingPolicy.cpp, which calls the get_streaming_scenario() and get_capture_scenario() functions in vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/scenario_mgr.cpp. These functions read a scenario feature configuration table and walk through all the features to decide which ones will actually run. The table contains many scenarios, and a single scenario may map to several features, so after adding a custom feature we must also register it in the table. The table for MTK features is gMtkScenarioFeaturesMaps; the one for customer features is gCustomerScenarioFeaturesMaps.
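Conceptually, the lookup that scenario_mgr performs can be pictured with the following simplified sketch (an illustration only, not the actual MTK implementation; the struct and function names are mine and merely mirror the table entries shown below):
// Each scenario maps to an ordered list of (key feature, feature combination) entries;
// the first entry whose key feature is available wins, and NO_FEATURE_NORMAL is the fallback.
#include <cstdint>
#include <vector>
#include <unordered_map>
struct FeatureSet {
    uint64_t keyFeature;          // e.g. MTK_FEATURE_MFNR, or NO_FEATURE_NORMAL (0)
    uint64_t featureCombination;  // e.g. MTK_FEATURE_COMBINATION_MFNR = key | post-processing bits
};
using ScenarioFeatureTable = std::unordered_map<int32_t, std::vector<FeatureSet>>;
// Pick the feature combination for a scenario, given the feature bits the pipeline can run.
static bool getScenarioFeatures(const ScenarioFeatureTable& table, int32_t scenario,
                                uint64_t supportedFeatures, uint64_t* outCombination)
{
    auto it = table.find(scenario);
    if (it == table.end()) return false;
    for (const auto& set : it->second) {
        if (set.keyFeature == 0 || (set.keyFeature & supportedFeatures)) {
            *outCombination = set.featureCombination;  // bits such as TP_FEATURE_WATERMARK ride along here
            return true;
        }
    }
    return false;
}
Appending TP_FEATURE_WATERMARK to a combination in the table below is therefore what allows the watermark plugin to be offered to the capture/streaming pipeline whenever that scenario is selected.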
vendor/mediatek/proprietary/custom/mt6765/hal/camera/camera_custom_feature_table.cpp
#include "camera_custom_feature_table.h"
using namespace NSCam::NSPipelinePlugin;
// ======================================================================================================
// For Camera HAL Server
// ======================================================================================================
// TODO: Feature Combinations for MTK Camera HAL server
// #define <feature combination> (key feature | post-processing features | ...)
//
// logical stream
// single cam capture feature combination
#define MTK_FEATURE_COMBINATION_SINGLE (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| TP_FEATURE_WATERMARK)
#define MTK_FEATURE_COMBINATION_HDR (TP_FEATURE_HDR | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| TP_FEATURE_WATERMARK)
#define MTK_FEATURE_COMBINATION_AINR (MTK_FEATURE_AINR | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| TP_FEATURE_WATERMARK)
#define MTK_FEATURE_COMBINATION_AIHDR (MTK_FEATURE_AIHDR | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB)
#define MTK_FEATURE_COMBINATION_REMOSAIC_MFNR (MTK_FEATURE_REMOSAIC_MFNR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB)
#define MTK_FEATURE_COMBINATION_MFNR (MTK_FEATURE_MFNR | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| TP_FEATURE_WATERMARK)
#define MTK_FEATURE_COMBINATION_REMOSAIC (MTK_FEATURE_REMOSAIC| MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| TP_FEATURE_WATERMARK)
#define MTK_FEATURE_COMBINATION_CSHOT (NO_FEATURE_NORMAL | MTK_FEATURE_CZ| MTK_FEATURE_HFG)
#define MTK_FEATURE_COMBINATION_MULTICAM (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB)
#define MTK_FEATURE_COMBINATION_YUV_REPROCESS (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_FB| TP_FEATURE_WATERMARK)
#define MTK_FEATURE_COMBINATION_RAW_REPROCESS (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE| TP_FEATURE_FB| TP_FEATURE_WATERMARK)
#define MTK_FEATURE_COMBINATION_SUPER_NIGHT_RAW_REPROCESS (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE| TP_FEATURE_FB| TP_FEATURE_WATERMARK)
// request source frame from camerahalserver.
// no feature pure image quality for processing, the full image processing will be process in isp hidl.
#define MTK_FEATURE_COMBINATION_ISP_HIDL_SOURCE_COMMON (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE)
#define MTK_FEATURE_COMBINATION_ISP_HIDL_SOURCE_MFNR (MTK_FEATURE_MFNR | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE)
#define MTK_FEATURE_COMBINATION_ISP_HIDL_SOURCE_AINR (MTK_FEATURE_AINR | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE)
// dual cam capture feature combination
// the VSDOF means the combination of Bokeh feature and Depth feature
#define MTK_FEATURE_COMBINATION_MTK_VSDOF (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| MTK_FEATURE_VSDOF| TP_FEATURE_WATERMARK)
#define MTK_FEATURE_COMBINATION_MTK_VSDOF_HDR (TP_FEATURE_HDR_DC | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| MTK_FEATURE_VSDOF| TP_FEATURE_WATERMARK)
#define MTK_FEATURE_COMBINATION_MTK_VSDOF_MFNR (MTK_FEATURE_MFNR | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| MTK_FEATURE_VSDOF| TP_FEATURE_WATERMARK)
#define MTK_FEATURE_COMBINATION_MTK_VSDOF_AINR (MTK_FEATURE_AINR | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| MTK_FEATURE_AINR_YUV| MTK_FEATURE_VSDOF)
// zoom fusion combination, need TP_FEATURE_ZOOM_FUSION when sensor nums >= 2 && has tp fusion plugin
#define MTK_FEATURE_COMBINATION_MTK_ZOOM (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| TP_FEATURE_ZOOM_FUSION)
#define MTK_FEATURE_COMBINATION_MTK_ZOOM_HDR (TP_FEATURE_HDR | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| TP_FEATURE_ZOOM_FUSION)
#define MTK_FEATURE_COMBINATION_MTK_ZOOM_AINR (MTK_FEATURE_AINR | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| TP_FEATURE_ZOOM_FUSION)
#define MTK_FEATURE_COMBINATION_MTK_ZOOM_AIHDR (MTK_FEATURE_AIHDR | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| TP_FEATURE_ZOOM_FUSION)
#define MTK_FEATURE_COMBINATION_MTK_ZOOM_MFNR (MTK_FEATURE_MFNR | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| TP_FEATURE_ZOOM_FUSION)
#define MTK_FEATURE_COMBINATION_MTK_ZOOM_REMOSAIC (MTK_FEATURE_REMOSAIC| MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| TP_FEATURE_ZOOM_FUSION)
// physical stream
// single cam capture feature combination
#define MTK_FEATURE_COMBINATION_MASTER (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB)
#define MTK_FEATURE_COMBINATION_SLAVE (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB)
#define MTK_FEATURE_COMBINATION_REMOSAIC_MASTER (MTK_FEATURE_REMOSAIC| MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB)
#define MTK_FEATURE_COMBINATION_REMOSAIC_SLAVE (MTK_FEATURE_REMOSAIC| MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB)
#define MTK_FEATURE_COMBINATION_MULTICAM_MASTER (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB)
#define MTK_FEATURE_COMBINATION_MULTICAM_SLAVE (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB)
//
#define MTK_FEATURE_COMBINATION_VSDOF_MASTER (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB)
#define MTK_FEATURE_COMBINATION_VSDOF_SLAVE (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB)
#define MTK_FEATURE_COMBINATION_ZOOM_MASTER (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB)
#define MTK_FEATURE_COMBINATION_ZOOM_SLAVE (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB)
#define MTK_FEATURE_COMBINATION_TP_MFNR (TP_FEATURE_MFNR | MTK_FEATURE_NR| MTK_FEATURE_ABF| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| MTK_FEATURE_MFNR)
// request source frame from camerahalserver.
// no feature pure image quality for processing, the full image processing will be process in isp hidl.
#define MTK_FEATURE_COMBINATION_YUV_REPROCESS_MASTER (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_FB)
#define MTK_FEATURE_COMBINATION_RAW_REPROCESS_MASTER (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE| TP_FEATURE_FB)
#define MTK_FEATURE_COMBINATION_YUV_REPROCESS_SLAVE (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_FB)
#define MTK_FEATURE_COMBINATION_RAW_REPROCESS_SLAVE (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE| TP_FEATURE_FB)
// streaming feature combination (TODO: it should be refined by streaming scenario feature)
#define MTK_FEATURE_COMBINATION_VIDEO_NORMAL (MTK_FEATURE_FB|MTK_FEATURE_ASYNC_ASD|MTK_FEATURE_EIS|TP_FEATURE_FB|TP_FEATURE_ASYNC_ASD|TP_FEATURE_EIS|MTK_FEATURE_FOVA|TP_FEATURE_WATERMARK)
#define MTK_FEATURE_COMBINATION_VIDEO_NORMAL_VIDEO (MTK_FEATURE_FB|MTK_FEATURE_ASYNC_ASD|MTK_FEATURE_EIS|TP_FEATURE_FB|TP_FEATURE_ASYNC_ASD|TP_FEATURE_EIS|MTK_FEATURE_FOVA)
#define MTK_FEATURE_COMBINATION_VIDEO_DUAL_YUV (MTK_FEATURE_FB|MTK_FEATURE_ASYNC_ASD|MTK_FEATURE_EIS|MTK_FEATURE_DUAL_YUV|TP_FEATURE_FB|TP_FEATURE_ASYNC_ASD|TP_FEATURE_EIS|TP_FEATURE_DUAL_YUV|TP_FEATURE_WATERMARK)
#define MTK_FEATURE_COMBINATION_VIDEO_DUAL_HWDEPTH (MTK_FEATURE_FB|MTK_FEATURE_ASYNC_ASD|MTK_FEATURE_EIS|MTK_FEATURE_DUAL_HWDEPTH|TP_FEATURE_FB|TP_FEATURE_ASYNC_ASD|TP_FEATURE_EIS|TP_FEATURE_DUAL_HWDEPTH|TP_FEATURE_WATERMARK)
#define MTK_FEATURE_COMBINATION_VIDEO_DUAL_HWVSDOF (MTK_FEATURE_FB|TP_FEATURE_FB|TP_FEATURE_WATERMARK)
// ======================================================================================================
//
// add scenario/feature set for logical camera scenario
const std::vector<std::unordered_map<int32_t, ScenarioFeatures>> gCustomerScenarioFeaturesMaps =
{
{
// capture
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_CAPTURE_NORMAL)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_AIHDR, MTK_FEATURE_COMBINATION_AIHDR)
ADD_CAMERA_FEATURE_SET(TP_FEATURE_HDR, MTK_FEATURE_COMBINATION_HDR)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_AINR, MTK_FEATURE_COMBINATION_AINR)
//ADD_CAMERA_FEATURE_SET(MTK_FEATURE_REMOSAIC_MFNR, MTK_FEATURE_COMBINATION_REMOSAIC_MFNR)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_MFNR, MTK_FEATURE_COMBINATION_MFNR)
ADD_CAMERA_FEATURE_SET(TP_FEATURE_MFNR, MTK_FEATURE_COMBINATION_TP_MFNR)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_REMOSAIC, MTK_FEATURE_COMBINATION_REMOSAIC)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_SINGLE)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_CAPTURE_VSDOF)
ADD_CAMERA_FEATURE_SET(TP_FEATURE_HDR_DC, MTK_FEATURE_COMBINATION_MTK_VSDOF_HDR)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_MFNR, MTK_FEATURE_COMBINATION_MTK_VSDOF_MFNR)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_MTK_VSDOF)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_CAPTURE_MULTICAM)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_AIHDR, MTK_FEATURE_COMBINATION_AIHDR)
ADD_CAMERA_FEATURE_SET(TP_FEATURE_HDR, MTK_FEATURE_COMBINATION_HDR)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_AINR, MTK_FEATURE_COMBINATION_AINR)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_MFNR, MTK_FEATURE_COMBINATION_MFNR)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_REMOSAIC, MTK_FEATURE_COMBINATION_REMOSAIC)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_MULTICAM)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_CAPTURE_ZOOM)
ADD_CAMERA_FEATURE_SET(TP_FEATURE_HDR, MTK_FEATURE_COMBINATION_MTK_ZOOM_HDR)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_AINR, MTK_FEATURE_COMBINATION_MTK_ZOOM_AINR)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_MFNR, MTK_FEATURE_COMBINATION_MTK_ZOOM_MFNR)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_REMOSAIC, MTK_FEATURE_COMBINATION_MTK_ZOOM_REMOSAIC)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_MTK_ZOOM)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_CAPTURE_CSHOT)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_CSHOT)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_CAPTURE_DNG_OPAQUE_RAW)
/* not support multiframe features with dng capture yet */
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_SINGLE)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_CAPTURE_YUV_REPROCESS)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_YUV_REPROCESS)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_CAPTURE_RAW_REPROCESS)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_RAW_REPROCESS)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(CUSTOMER_CAMERA_SCENARIO_CAPTURE_SUPER_NIGHT)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_AINR, MTK_FEATURE_COMBINATION_AINR)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_MFNR, MTK_FEATURE_COMBINATION_MFNR)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_SINGLE)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(CUSTOMER_CAMERA_SCENARIO_CAPTURE_SUPER_NIGHT_RAW_REPROCESS)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_SUPER_NIGHT_RAW_REPROCESS)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_CAPTURE_REQUEST_FRAME_FOR_ISP_HIDL_YUV)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_ISP_HIDL_SOURCE_COMMON)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_CAPTURE_REQUEST_FRAME_FOR_ISP_HIDL_RAW)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_AINR, MTK_FEATURE_COMBINATION_ISP_HIDL_SOURCE_AINR)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_MFNR, MTK_FEATURE_COMBINATION_ISP_HIDL_SOURCE_MFNR)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_ISP_HIDL_SOURCE_COMMON)
CAMERA_SCENARIO_END
//
// streaming
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_STREAMING_NORMAL)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_VIDEO_NORMAL)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_STREAMING_NORMAL_VIDEO)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_VIDEO_NORMAL_VIDEO)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_STREAMING_DUAL_YUV)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_VIDEO_DUAL_YUV)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_STREAMING_DUAL_HWDEPTH)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_VIDEO_DUAL_HWDEPTH)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_STREAMING_DUAL_HWVSDOF)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_VIDEO_DUAL_HWVSDOF)
CAMERA_SCENARIO_END
},
};
// add scenario/feature set for master physical camera scenario
const std::vector<std::unordered_map<int32_t, ScenarioFeatures>> gCustomerScenarioFeaturesMapsPhyMaster =
{
{
// capture physical master
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_CAPTURE_NORMAL)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_REMOSAIC, MTK_FEATURE_COMBINATION_REMOSAIC_MASTER)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_MASTER)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_CAPTURE_VSDOF)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_REMOSAIC, MTK_FEATURE_COMBINATION_REMOSAIC_MASTER)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_VSDOF_MASTER)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_CAPTURE_MULTICAM)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_REMOSAIC, MTK_FEATURE_COMBINATION_REMOSAIC_MASTER)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_MULTICAM_MASTER)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_CAPTURE_ZOOM)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_REMOSAIC, MTK_FEATURE_COMBINATION_REMOSAIC_MASTER)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_ZOOM_MASTER)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_CAPTURE_YUV_REPROCESS)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_YUV_REPROCESS_MASTER)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_CAPTURE_RAW_REPROCESS)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_RAW_REPROCESS_MASTER)
CAMERA_SCENARIO_END
//
// streaming
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_STREAMING_NORMAL)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_VIDEO_NORMAL)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_STREAMING_NORMAL_VIDEO)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_VIDEO_NORMAL_VIDEO)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_STREAMING_DUAL_YUV)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_VIDEO_DUAL_YUV)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_STREAMING_DUAL_HWDEPTH)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_VIDEO_DUAL_HWDEPTH)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_STREAMING_DUAL_HWVSDOF)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_VIDEO_DUAL_HWVSDOF)
CAMERA_SCENARIO_END
},
};
// add scenario/feature set for slave physical camera scenario
const std::vector<std::unordered_map<int32_t, ScenarioFeatures>> gCustomerScenarioFeaturesMapsPhySlave =
{
{
// capture physical slave
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_CAPTURE_NORMAL)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_REMOSAIC, MTK_FEATURE_COMBINATION_REMOSAIC_SLAVE)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_SLAVE)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_CAPTURE_VSDOF)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_REMOSAIC, MTK_FEATURE_COMBINATION_REMOSAIC_SLAVE)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_VSDOF_SLAVE)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_CAPTURE_MULTICAM)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_REMOSAIC, MTK_FEATURE_COMBINATION_REMOSAIC_SLAVE)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_MULTICAM_SLAVE)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_CAPTURE_ZOOM)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_REMOSAIC, MTK_FEATURE_COMBINATION_REMOSAIC_SLAVE)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_ZOOM_SLAVE)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_CAPTURE_YUV_REPROCESS)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_YUV_REPROCESS_SLAVE)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_CAPTURE_RAW_REPROCESS)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_RAW_REPROCESS_SLAVE)
CAMERA_SCENARIO_END
//
// streaming
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_STREAMING_NORMAL)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_VIDEO_NORMAL)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_STREAMING_NORMAL_VIDEO)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_VIDEO_NORMAL_VIDEO)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_STREAMING_DUAL_YUV)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_VIDEO_DUAL_YUV)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_STREAMING_DUAL_HWDEPTH)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_VIDEO_DUAL_HWDEPTH)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_CAMERA_SCENARIO_STREAMING_DUAL_HWVSDOF)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_VIDEO_DUAL_HWVSDOF)
CAMERA_SCENARIO_END
},
};
// ======================================================================================================
// For ISP HIDL only
// ======================================================================================================
// TODO: Feature Combinations for MTK ISP Device HIDL
// #define <feature combination> (key feature | post-processing features | ...)
//
// isp hidl feature combination
#define MTK_FEATURE_COMBINATION_SINGLE_FULL (NO_FEATURE_NORMAL | MTK_FEATURE_DCE| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_NR| MTK_FEATURE_CZ)
#define MTK_FEATURE_COMBINATION_AINR_FULL (MTK_FEATURE_AINR | MTK_FEATURE_DCE| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_NR| MTK_FEATURE_CZ)
#define MTK_FEATURE_COMBINATION_MFNR_FULL (MTK_FEATURE_MFNR | MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_NR| MTK_FEATURE_CZ)
// single frame in
#define MTK_FEATURE_COMBINATION_SINGLE_RAW_TO_YUV (NO_FEATURE_NORMAL | MTK_FEATURE_DCE| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_NR| MTK_FEATURE_CZ)
#define MTK_FEATURE_COMBINATION_SINGLE_RAW_TO_JPEG (NO_FEATURE_NORMAL | MTK_FEATURE_DCE| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_NR| MTK_FEATURE_CZ)
#define MTK_FEATURE_COMBINATION_SINGLE_YUV_TO_YUV (NO_FEATURE_NORMAL) // image transform and crop/resize only
#define MTK_FEATURE_COMBINATION_SINGLE_YUV_TO_JPEG (NO_FEATURE_NORMAL) // encode yuv to jpeg only.
// multiple frame in
#define MTK_FEATURE_COMBINATION_AINR_RAW_TO_RAW (MTK_FEATURE_AINR) // output AINR RAW only
#define MTK_FEATURE_COMBINATION_AINR_RAW_TO_YUV (MTK_FEATURE_AINR | MTK_FEATURE_DCE| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_NR| MTK_FEATURE_CZ)
#define MTK_FEATURE_COMBINATION_AINR_RAW_TO_JPEG (MTK_FEATURE_AINR | MTK_FEATURE_DCE| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_NR| MTK_FEATURE_CZ)
#define MTK_FEATURE_COMBINATION_MFNR_RAW_TO_YUV (MTK_FEATURE_MFNR | MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_NR| MTK_FEATURE_CZ)
#define MTK_FEATURE_COMBINATION_MFNR_RAW_TO_JPEG (MTK_FEATURE_MFNR | MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_NR| MTK_FEATURE_CZ)
// ======================================================================================================
//
const std::vector<std::unordered_map<int32_t, ScenarioFeatures>> gCustomerIspHidlScenarioFeaturesMaps =
{
{
// capture
CAMERA_SCENARIO_START(MTK_ISP_HIDL_SCENARIO_CAPTURE_ALL)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_AINR, MTK_FEATURE_COMBINATION_AINR_FULL)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_MFNR, MTK_FEATURE_COMBINATION_MFNR_FULL)
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_SINGLE_FULL)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_ISP_HIDL_SCENARIO_CAPTURE_SINGLE_RAW_TO_YUV) // encode single raw to yuv
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_SINGLE_RAW_TO_YUV)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_ISP_HIDL_SCENARIO_CAPTURE_SINGLE_RAW_TO_JPEG) // encode single raw to yuv and encode to jepg
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_SINGLE_RAW_TO_JPEG)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_ISP_HIDL_SCENARIO_CAPTURE_SINGLE_YUV_TO_YUV) // single yuv reprocessing
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_SINGLE_YUV_TO_YUV)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_ISP_HIDL_SCENARIO_CAPTURE_SINGLE_YUV_TO_JPEG) // encode single yuv to jpeg
ADD_CAMERA_FEATURE_SET(NO_FEATURE_NORMAL, MTK_FEATURE_COMBINATION_SINGLE_YUV_TO_JPEG)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_ISP_HIDL_SCENARIO_CAPTURE_MULTIPLE_RAW_TO_RAW) // multiframe raw to single raw
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_AINR, MTK_FEATURE_COMBINATION_AINR_RAW_TO_RAW)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_ISP_HIDL_SCENARIO_CAPTURE_MULTIPLE_RAW_TO_YUV) // multiframe raw to single yuv
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_AINR, MTK_FEATURE_COMBINATION_AINR_RAW_TO_YUV)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_MFNR, MTK_FEATURE_COMBINATION_MFNR_RAW_TO_YUV)
CAMERA_SCENARIO_END
//
CAMERA_SCENARIO_START(MTK_ISP_HIDL_SCENARIO_CAPTURE_MULTIPLE_RAW_TO_JPEG) // multiframe raw to jpeg
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_AINR, MTK_FEATURE_COMBINATION_AINR_RAW_TO_JPEG)
ADD_CAMERA_FEATURE_SET(MTK_FEATURE_MFNR, MTK_FEATURE_COMBINATION_MFNR_RAW_TO_JPEG)
CAMERA_SCENARIO_END
},
};
vendor/mediatek/proprietary/packages/apps/Camera2/camerapostalgo/main/3rdparty/mtk/mtk_scenario_mgr.cpp
@@ -80,31 +80,31 @@ using namespace NSCam::v3::pipeline::policy::scenariomgr;
// #define <feature combination> (key feature | post-processing features | ...)
//
// single cam capture feature combination
-#define MTK_FEATURE_COMBINATION_SINGLE (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_ABF| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB)
-#define MTK_FEATURE_COMBINATION_HDR (TP_FEATURE_HDR | MTK_FEATURE_NR| MTK_FEATURE_ABF| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB)
-#define MTK_FEATURE_COMBINATION_AINR (MTK_FEATURE_AINR | MTK_FEATURE_NR| MTK_FEATURE_ABF| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB)
+#define MTK_FEATURE_COMBINATION_SINGLE (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_ABF| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| TP_FEATURE_WATERMARK)
+#define MTK_FEATURE_COMBINATION_HDR (TP_FEATURE_HDR | MTK_FEATURE_NR| MTK_FEATURE_ABF| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| TP_FEATURE_WATERMARK)
+#define MTK_FEATURE_COMBINATION_AINR (MTK_FEATURE_AINR | MTK_FEATURE_NR| MTK_FEATURE_ABF| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| TP_FEATURE_WATERMARK)
#define MTK_FEATURE_COMBINATION_AINR_HDR (MTK_FEATURE_AINR_YHDR| MTK_FEATURE_NR| MTK_FEATURE_ABF| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB)
-#define MTK_FEATURE_COMBINATION_MFNR (MTK_FEATURE_MFNR | MTK_FEATURE_NR| MTK_FEATURE_ABF| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB)
-#define MTK_FEATURE_COMBINATION_REMOSAIC (MTK_FEATURE_REMOSAIC| MTK_FEATURE_NR| MTK_FEATURE_ABF| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB)
+#define MTK_FEATURE_COMBINATION_MFNR (MTK_FEATURE_MFNR | MTK_FEATURE_NR| MTK_FEATURE_ABF| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| TP_FEATURE_WATERMARK)
+#define MTK_FEATURE_COMBINATION_REMOSAIC (MTK_FEATURE_REMOSAIC| MTK_FEATURE_NR| MTK_FEATURE_ABF| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| TP_FEATURE_WATERMARK)
#define MTK_FEATURE_COMBINATION_CSHOT (NO_FEATURE_NORMAL | MTK_FEATURE_CZ| MTK_FEATURE_HFG)
#define MTK_FEATURE_COMBINATION_MULTICAM (NO_FEATURE_NORMAL)
-#define MTK_FEATURE_COMBINATION_YUV_REPROCESS (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_FB)
-#define MTK_FEATURE_COMBINATION_RAW_REPROCESS (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE| TP_FEATURE_FB)
-#define MTK_FEATURE_COMBINATION_SUPER_NIGHT_RAW_REPROCESS (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE| TP_FEATURE_FB)
+#define MTK_FEATURE_COMBINATION_YUV_REPROCESS (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_FB| TP_FEATURE_WATERMARK)
+#define MTK_FEATURE_COMBINATION_RAW_REPROCESS (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE| TP_FEATURE_FB| TP_FEATURE_WATERMARK)
+#define MTK_FEATURE_COMBINATION_SUPER_NIGHT_RAW_REPROCESS (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE| TP_FEATURE_FB| TP_FEATURE_WATERMARK)
// dual cam capture feature combination
// the VSDOF means the combination of Bokeh feature and Depth feature
-#define MTK_FEATURE_COMBINATION_MTK_VSDOF (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_ABF| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| MTK_FEATURE_VSDOF)
-#define MTK_FEATURE_COMBINATION_MTK_VSDOF_HDR (TP_FEATURE_HDR_DC | MTK_FEATURE_NR| MTK_FEATURE_ABF| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| MTK_FEATURE_VSDOF)
-#define MTK_FEATURE_COMBINATION_MTK_VSDOF_MFNR (MTK_FEATURE_MFNR | MTK_FEATURE_NR| MTK_FEATURE_ABF| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| MTK_FEATURE_VSDOF)
+#define MTK_FEATURE_COMBINATION_MTK_VSDOF (NO_FEATURE_NORMAL | MTK_FEATURE_NR| MTK_FEATURE_ABF| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| MTK_FEATURE_VSDOF| TP_FEATURE_WATERMARK)
+#define MTK_FEATURE_COMBINATION_MTK_VSDOF_HDR (TP_FEATURE_HDR_DC | MTK_FEATURE_NR| MTK_FEATURE_ABF| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| MTK_FEATURE_VSDOF| TP_FEATURE_WATERMARK)
+#define MTK_FEATURE_COMBINATION_MTK_VSDOF_MFNR (MTK_FEATURE_MFNR | MTK_FEATURE_NR| MTK_FEATURE_ABF| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| MTK_FEATURE_VSDOF| TP_FEATURE_WATERMARK)
#define MTK_FEATURE_COMBINATION_MTK_VSDOF_AINR (MTK_FEATURE_AINR | MTK_FEATURE_NR| MTK_FEATURE_ABF| MTK_FEATURE_CZ| MTK_FEATURE_DRE| MTK_FEATURE_HFG| MTK_FEATURE_DCE | MTK_FEATURE_FB| MTK_FEATURE_AINR_YUV| MTK_FEATURE_VSDOF)
// streaming feature combination (TODO: it should be refined by streaming scenario feature)
-#define MTK_FEATURE_COMBINATION_VIDEO_NORMAL (MTK_FEATURE_FB|MTK_FEATURE_ASYNC_ASD|MTK_FEATURE_EIS|TP_FEATURE_FB|TP_FEATURE_ASYNC_ASD|TP_FEATURE_EIS|MTK_FEATURE_FOVA)
+#define MTK_FEATURE_COMBINATION_VIDEO_NORMAL (MTK_FEATURE_FB|MTK_FEATURE_ASYNC_ASD|MTK_FEATURE_EIS|TP_FEATURE_FB|TP_FEATURE_ASYNC_ASD|TP_FEATURE_EIS|MTK_FEATURE_FOVA|TP_FEATURE_WATERMARK)
#define MTK_FEATURE_COMBINATION_VIDEO_NORMAL_VIDEO (MTK_FEATURE_FB|MTK_FEATURE_ASYNC_ASD|MTK_FEATURE_EIS|TP_FEATURE_FB|TP_FEATURE_ASYNC_ASD|TP_FEATURE_EIS|MTK_FEATURE_FOVA)
-#define MTK_FEATURE_COMBINATION_VIDEO_DUAL_YUV (MTK_FEATURE_FB|MTK_FEATURE_ASYNC_ASD|MTK_FEATURE_EIS|MTK_FEATURE_DUAL_YUV|TP_FEATURE_FB|TP_FEATURE_ASYNC_ASD|TP_FEATURE_EIS|TP_FEATURE_DUAL_YUV)
-#define MTK_FEATURE_COMBINATION_VIDEO_DUAL_HWDEPTH (MTK_FEATURE_FB|MTK_FEATURE_ASYNC_ASD|MTK_FEATURE_EIS|MTK_FEATURE_DUAL_HWDEPTH|TP_FEATURE_FB|TP_FEATURE_ASYNC_ASD|TP_FEATURE_EIS|TP_FEATURE_DUAL_HWDEPTH)
-#define MTK_FEATURE_COMBINATION_VIDEO_DUAL_HWVSDOF (MTK_FEATURE_FB|TP_FEATURE_FB)
+#define MTK_FEATURE_COMBINATION_VIDEO_DUAL_YUV (MTK_FEATURE_FB|MTK_FEATURE_ASYNC_ASD|MTK_FEATURE_EIS|MTK_FEATURE_DUAL_YUV|TP_FEATURE_FB|TP_FEATURE_ASYNC_ASD|TP_FEATURE_EIS|TP_FEATURE_DUAL_YUV|TP_FEATURE_WATERMARK)
+#define MTK_FEATURE_COMBINATION_VIDEO_DUAL_HWDEPTH (MTK_FEATURE_FB|MTK_FEATURE_ASYNC_ASD|MTK_FEATURE_EIS|MTK_FEATURE_DUAL_HWDEPTH|TP_FEATURE_FB|TP_FEATURE_ASYNC_ASD|TP_FEATURE_EIS|TP_FEATURE_DUAL_HWDEPTH|TP_FEATURE_WATERMARK)
+#define MTK_FEATURE_COMBINATION_VIDEO_DUAL_HWVSDOF (MTK_FEATURE_FB|TP_FEATURE_FB|TP_FEATURE_WATERMARK)
4.3 Choose a plugin for the algorithm
MTK HAL3 groups the mount points for third-party algorithms in vendor/mediatek/proprietary/hardware/mtkcam3/include/mtkcam3/3rdparty/plugin/PipelinePluginType.h into roughly the following categories:
BokehPlugin: mount point for bokeh algorithms, i.e. the blur part of a dual-camera depth-of-field algorithm.
DepthPlugin: mount point for depth algorithms, i.e. the depth-computation part of a dual-camera depth-of-field algorithm.
FusionPlugin: mount point for a combined dual-camera depth-of-field algorithm, with depth and bokeh in a single algorithm.
JoinPlugin: mount point for streaming-related algorithms; preview algorithms all hook in here.
MultiFramePlugin: mount point for multi-frame algorithms, YUV as well as RAW, e.g. MFNR/HDR.
RawPlugin: mount point for RAW algorithms, e.g. remosaic.
YuvPlugin: mount point for single-frame YUV algorithms, e.g. beautification or wide-angle lens distortion correction.
Match the algorithm you are integrating to the appropriate plugin. The watermark is a single-frame algorithm, so we pick JoinPlugin for preview and YuvPlugin for capture. The multi-frame algorithm can only go into MultiFramePlugin, and in general multi-frame algorithms are used only for capture, not for preview. A bare-bones provider sketch follows below; the full implementations come in section 4.5.
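For orientation, hooking into one of these mount points boils down to implementing the plugin's IProvider interface and registering the provider. The following is a stripped-down outline modeled on the WatermarkCapture code in section 4.5 (a sketch only, not a complete plugin):
// Minimal shape of a YuvPlugin provider (capture path).
#include <utils/Errors.h>
#include <vector>
#include <mtkcam3/3rdparty/plugin/PipelinePlugin.h>
#include <mtkcam3/3rdparty/plugin/PipelinePluginType.h>
using namespace NSCam;
using namespace android;
using namespace NSCam::NSPipelinePlugin;
class MyYuvProvider : public YuvPlugin::IProvider {
public:
    typedef YuvPlugin::Property Property;
    typedef YuvPlugin::Selection Selection;
    typedef YuvPlugin::Request::Ptr RequestPtr;
    typedef YuvPlugin::RequestCallback::Ptr RequestCallbackPtr;
    const Property &property() {
        static Property prop;
        prop.mName = "TP_WATERMARK";            // shown in HAL logs
        prop.mFeatures = TP_FEATURE_WATERMARK;  // the custom feature bit added in 4.1
        return prop;
    }
    MERROR negotiate(Selection &sel) {
        sel.mIBufferFull.setRequired(MTRUE);    // declare the buffers/metadata the algorithm needs
        return OK;
    }
    MERROR process(RequestPtr pRequest, RequestCallbackPtr pCallback) {
        // run the algorithm on the request's buffers here, then complete the request
        if (pCallback != nullptr) pCallback->onCompleted(pRequest, 0);
        return OK;
    }
    void init() {}
    void uninit() {}
    void set(MINT32, MINT32) {}
    void abort(std::vector<RequestPtr>&) {}
};
REGISTER_PLUGIN_PROVIDER(Yuv, MyYuvProvider);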
4.4 Write the algorithm integration files
Watermark algorithm directory structure
Following FBImpl.cpp and sample_streaming_fb.cpp, implement the capture and preview paths respectively. The directory structure is as follows:
vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/customer/tp_watermark/
├── Android.mk
├── include
│ └── watermark.h
├── lib
│ ├── arm64-v8a
│ │ └── libwatermark.so
│ └── armeabi-v7a
│ └── libwatermark.so
├── res
│ └── watermark.rgba
├── WatermarkCapture.cpp
└── WatermarkPreview.cpp
File descriptions:
Android.mk configures the algorithm library, headers and the integration .cpp sources, and builds them into the library libmtkcam.plugin.tp_watermark for libmtkcam_3rdparty.customer to depend on (typically pulled in via LOCAL_WHOLE_STATIC_LIBRARIES in that module's Android.mk).
WatermarkCapture.cpp and WatermarkPreview.cpp are the integration sources, for capture and preview respectively.
libwatermark.so implements the watermark overlay and stands in for the third-party algorithm library you actually want to integrate; watermark.h is its header.
watermark.rgba is the watermark image itself.
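Since libwatermark.so is treated as a black box here, it may help to see what a function with the signature declared in watermark.h (shown later in 4.5) could do internally. The following naive per-pixel alpha blend is an illustration under that assumption, not the prebuilt library's actual source:
// Overlay an RGBA watermark onto an RGBA source image at offset (x, y).
namespace Watermark {
void add(unsigned char* src, int srcWidth, int srcHeight,
         unsigned char* watermark, int watermarkWidth, int watermarkHeight,
         int x, int y)
{
    for (int row = 0; row < watermarkHeight; ++row) {
        int sy = y + row;
        if (sy < 0 || sy >= srcHeight) continue;
        for (int col = 0; col < watermarkWidth; ++col) {
            int sx = x + col;
            if (sx < 0 || sx >= srcWidth) continue;
            unsigned char* d = src + (sy * srcWidth + sx) * 4;            // destination RGBA pixel
            unsigned char* w = watermark + (row * watermarkWidth + col) * 4;
            int a = w[3];                                                 // watermark alpha
            for (int c = 0; c < 3; ++c)                                   // blend R, G, B
                d[c] = (unsigned char)((w[c] * a + d[c] * (255 - a)) / 255);
        }
    }
}
} // namespace Watermark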
Multi-frame algorithm directory structure
Following mfnr/MFNRImpl.cpp, implement MFNR capture. The directory structure is as follows:
vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/customer/cp_tp_mfnr/
├── Android.mk
├── include
│ └── mf_processor.h
├── lib
│ ├── arm64-v8a
│ │ └── libmultiframe.so
│ └── armeabi-v7a
│ └── libmultiframe.so
└── MFNRImpl.cpp
File descriptions:
Android.mk configures the algorithm library, headers and the integration source MFNRImpl.cpp, and builds them into the library libmtkcam.plugin.tp_mfnr for libmtkcam_3rdparty.customer to depend on.
libmultiframe.so shrinks four consecutive frames and tiles them into a single image; it stands in for the third-party multi-frame algorithm library to be integrated. mf_processor.h is its header.
MFNRImpl.cpp is the integration source file.
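The mock multi-frame behavior described above (shrink four consecutive frames and tile them into one image) can be pictured with libyuv, which the project already links against. This is an illustration only, assuming four equally sized I420 inputs; the function name is mine and the prebuilt libmultiframe.so / mf_processor.h API is not reproduced here:
// Scale four W x H I420 frames to W/2 x H/2 each and place them into the
// four quadrants of a single W x H I420 output buffer.
#include <stdint.h>
#include <libyuv.h>
static void tileFourFrames(uint8_t* const srcY[4], uint8_t* const srcU[4], uint8_t* const srcV[4],
                           int W, int H,
                           uint8_t* dstY, uint8_t* dstU, uint8_t* dstV)
{
    const int qw = W / 2, qh = H / 2;               // quadrant size (luma)
    for (int i = 0; i < 4; ++i) {
        const int qx = i % 2, qy = i / 2;           // quadrant column/row
        // Offsets of this quadrant inside the full-size destination planes.
        uint8_t* dY = dstY + qy * qh * W + qx * qw;
        uint8_t* dU = dstU + qy * (qh / 2) * (W / 2) + qx * (qw / 2);
        uint8_t* dV = dstV + qy * (qh / 2) * (W / 2) + qx * (qw / 2);
        libyuv::I420Scale(srcY[i], W, srcU[i], W / 2, srcV[i], W / 2, W, H,
                          dY, W, dU, W / 2, dV, W / 2, qw, qh,
                          libyuv::kFilterBilinear);
    }
}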
4.5 Write the code
Add global build switches controlling whether the third-party algorithms are compiled:
device/mediateksample/[platform]/ProjectConfig.mk
@@ -5,6 +5,8 @@ AUTO_ADD_GLOBAL_DEFINE_BY_VALUE = BOOT_LOGO MTK_PLATFORM CUSTOM_KERNEL_MAIN_IMGS
BUILD_KERNEL = yes
+QXT_MFNR_SUPPORT = yes
+QXT_WATERMARK_SUPPORT = yes
BUILD_LK = yes
Create the new directory vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/customer/tp_watermark/
vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/customer/tp_watermark/Android.mk
ifeq ($(QXT_WATERMARK_SUPPORT),yes)
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := libwatermark
LOCAL_SRC_FILES_32 := lib/armeabi-v7a/libwatermark.so
LOCAL_SRC_FILES_64 := lib/arm64-v8a/libwatermark.so
LOCAL_MODULE_TAGS := optional
LOCAL_MODULE_CLASS := SHARED_LIBRARIES
LOCAL_MODULE_SUFFIX := .so
LOCAL_PROPRIETARY_MODULE := true
LOCAL_CHECK_ELF_FILES := false
LOCAL_MULTILIB := both
include $(BUILD_PREBUILT)
################################################################################
################################################################################
include $(CLEAR_VARS)
#-----------------------------------------------------------
include $(TOP)/$(MTK_PATH_SOURCE)/hardware/mtkcam/mtkcam.mk
#-----------------------------------------------------------
LOCAL_SRC_FILES += WatermarkCapture.cpp
LOCAL_SRC_FILES += WatermarkPreview.cpp
#-----------------------------------------------------------
LOCAL_C_INCLUDES += $(MTKCAM_C_INCLUDES)
LOCAL_C_INCLUDES += $(TOP)/$(MTK_PATH_SOURCE)/hardware/mtkcam3/include
LOCAL_C_INCLUDES += $(TOP)/$(MTK_PATH_SOURCE)/hardware/mtkcam/include
#
LOCAL_C_INCLUDES += system/media/camera/include
LOCAL_C_INCLUDES += $(TOP)/external/libyuv/files/include/
LOCAL_C_INCLUDES += $(TOP)/$(MTK_PATH_SOURCE)/hardware/mtkcam3/3rdparty/customer/tp_watermark/include
#-----------------------------------------------------------
LOCAL_CFLAGS += $(MTKCAM_CFLAGS)
#
#-----------------------------------------------------------
LOCAL_STATIC_LIBRARIES +=
#
LOCAL_WHOLE_STATIC_LIBRARIES +=
#-----------------------------------------------------------
LOCAL_SHARED_LIBRARIES += liblog
LOCAL_SHARED_LIBRARIES += libutils
LOCAL_SHARED_LIBRARIES += libcutils
LOCAL_SHARED_LIBRARIES += libmtkcam_modulehelper
LOCAL_SHARED_LIBRARIES += libmtkcam_stdutils
LOCAL_SHARED_LIBRARIES += libmtkcam_pipeline
LOCAL_SHARED_LIBRARIES += libmtkcam_metadata
LOCAL_SHARED_LIBRARIES += libmtkcam_metastore
LOCAL_SHARED_LIBRARIES += libmtkcam_streamutils
LOCAL_SHARED_LIBRARIES += libmtkcam_imgbuf
LOCAL_SHARED_LIBRARIES += libyuv.vendor
#-----------------------------------------------------------
LOCAL_HEADER_LIBRARIES := libutils_headers liblog_headers libhardware_headers
#-----------------------------------------------------------
LOCAL_MODULE := libmtkcam.plugin.tp_watermark
LOCAL_PROPRIETARY_MODULE := true
LOCAL_MODULE_OWNER := mtk
LOCAL_MODULE_TAGS := optional
include $(MTK_STATIC_LIBRARY)
################################################################################
include $(call all-makefiles-under,$(LOCAL_PATH))
endif
New file:
vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/customer/tp_watermark/include/watermark.h
#include <stdio.h>
#include <string.h>
// #include <android/log.h>
namespace Watermark
{
void add(unsigned char* src, int srcWidth, int srcHeight, unsigned char* watermark,
int watermarkWidth, int watermarkHeight, int x, int y);
} // namespace Watermark
New file:
vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/customer/tp_watermark/WatermarkCapture.cpp
Key functions:
In property(), set the feature type to the TP_FEATURE_WATERMARK added in 4.1, along with the name, priority and other attributes.
In negotiate(), configure the formats and sizes of the input and output images the algorithm needs.
In negotiate() or process(), read the metadata handed down from the upper layer to decide whether the algorithm should run, or to pass parameters on to it.
In process(), hook up the algorithm itself.
#define LOG_TAG "WatermarkCapture"
//
#include <mtkcam/utils/std/Log.h>
//
#include <stdlib.h>
#include <utils/Errors.h>
#include <utils/List.h>
#include <utils/RefBase.h>
#include <sstream>
//
#include <mtkcam/utils/metadata/client/mtk_metadata_tag.h>
#include <mtkcam/utils/metadata/hal/mtk_platform_metadata_tag.h>
//
//
#include <mtkcam/utils/imgbuf/IIonImageBufferHeap.h>
//
#include <mtkcam/drv/IHalSensor.h>
#include <mtkcam/utils/std/Format.h>
//
#include <mtkcam3/pipeline/hwnode/NodeId.h>
#include <mtkcam/utils/metastore/ITemplateRequest.h>
#include <mtkcam/utils/metastore/IMetadataProvider.h>
#include <mtkcam3/3rdparty/plugin/PipelinePlugin.h>
#include <mtkcam3/3rdparty/plugin/PipelinePluginType.h>
#include <stdlib.h>
#include <watermark.h>
#include <mtkcam/utils/std/Time.h>
#include <time.h>
#include <libyuv.h>
//
using namespace NSCam;
using namespace android;
using namespace std;
using namespace NSCam::NSPipelinePlugin;
/******************************************************************************
*
******************************************************************************/
#define MY_LOGV(fmt, arg...) CAM_LOGV("(%d)[%s] " fmt, ::gettid(), __FUNCTION__, ##arg)
#define MY_LOGD(fmt, arg...) CAM_LOGD("(%d)[%s] " fmt, ::gettid(), __FUNCTION__, ##arg)
#define MY_LOGI(fmt, arg...) CAM_LOGI("(%d)[%s] " fmt, ::gettid(), __FUNCTION__, ##arg)
#define MY_LOGW(fmt, arg...) CAM_LOGW("(%d)[%s] " fmt, ::gettid(), __FUNCTION__, ##arg)
#define MY_LOGE(fmt, arg...) CAM_LOGE("(%d)[%s] " fmt, ::gettid(), __FUNCTION__, ##arg)
//
#define FUNCTION_IN MY_LOGD("%s +", __FUNCTION__)
#define FUNCTION_OUT MY_LOGD("%s -", __FUNCTION__)
//systrace
#if 1
#ifndef ATRACE_TAG
#define ATRACE_TAG ATRACE_TAG_CAMERA
#endif
#include <utils/Trace.h>
#define WATERMARK_TRACE_CALL() ATRACE_CALL()
#define WATERMARK_TRACE_NAME(name) ATRACE_NAME(name)
#define WATERMARK_TRACE_BEGIN(name) ATRACE_BEGIN(name)
#define WATERMARK_TRACE_END() ATRACE_END()
#else
#define WATERMARK_TRACE_CALL()
#define WATERMARK_TRACE_NAME(name)
#define WATERMARK_TRACE_BEGIN(name)
#define WATERMARK_TRACE_END()
#endif
template <class T>
inline bool
tryGetMetadata(IMetadata const *pMetadata, MUINT32 tag, T& rVal)
{
if(pMetadata == nullptr) return MFALSE;
IMetadata::IEntry entry = pMetadata->entryFor(tag);
if(!entry.isEmpty())
{
rVal = entry.itemAt(0,Type2Type<T>());
return true;
}
else
{
#define var(v) #v
#define type(t) #t
MY_LOGW("no metadata %s in %s", var(tag), type(pMetadata));
#undef type
#undef var
}
return false;
}
/******************************************************************************
*
******************************************************************************/
class WatermarkCapture : public YuvPlugin::IProvider {
public:
typedef YuvPlugin::Property Property;
typedef YuvPlugin::Selection Selection;
typedef YuvPlugin::Request::Ptr RequestPtr;
typedef YuvPlugin::RequestCallback::Ptr RequestCallbackPtr;
private:
int mOpenid;
MBOOL mEnable = 1;
MBOOL mDump = 0;
unsigned char *mSrcRGBA = nullptr;
unsigned char *mWatermarkRGBA = nullptr;
int mWatermarkWidth = 0;
int mWatermarkHeight = 0;
public:
WatermarkCapture();
~WatermarkCapture();
void init();
void uninit();
void abort(vector <RequestPtr> &pRequests);
void set(MINT32 iOpenId, MINT32 iOpenId2);
const Property &property();
MERROR negotiate(Selection &sel);
MERROR process(RequestPtr pRequest, RequestCallbackPtr pCallback);
};
WatermarkCapture::WatermarkCapture() : mOpenid(-1) {
FUNCTION_IN;
mEnable = property_get_bool("vendor.debug.camera.watermark.capture.enable", 1);
mDump = property_get_bool("vendor.debug.camera.watermark.capture.dump", 0);
FUNCTION_OUT;
}
WatermarkCapture::~WatermarkCapture() {
FUNCTION_IN;
FUNCTION_OUT;
}
void WatermarkCapture::init() {
FUNCTION_IN;
mWatermarkWidth = 180;
mWatermarkHeight = 640;
int watermarkSize = mWatermarkWidth * mWatermarkHeight * 4;
mWatermarkRGBA = (unsigned char *) malloc(watermarkSize);
FILE *fp;
char path[256];
snprintf(path, sizeof(path), "/vendor/res/images/watermark.rgba");
if ((fp = fopen(path, "r")) == NULL) {
MY_LOGE("Failed to open /vendor/res/images/watermark.rgba");
return;
}
fread(mWatermarkRGBA, 1, watermarkSize, fp);
fclose(fp);
FUNCTION_OUT;
}
void WatermarkCapture::uninit() {
FUNCTION_IN;
free(mWatermarkRGBA);
FUNCTION_OUT;
}
void WatermarkCapture::abort(vector <RequestPtr> &pRequests) {
FUNCTION_IN;
(void)pRequests;
FUNCTION_OUT;
}
void WatermarkCapture::set(MINT32 iOpenId, MINT32 iOpenId2) {
FUNCTION_IN;
MY_LOGD("set openId:%d openId2:%d", iOpenId, iOpenId2);
mOpenid = iOpenId;
FUNCTION_OUT;
}
const WatermarkCapture::Property &WatermarkCapture::property() {
FUNCTION_IN;
static Property prop;
static bool inited;
if (!inited) {
prop.mName = "TP_WATERMARK";
prop.mFeatures = TP_FEATURE_WATERMARK;
prop.mInPlace = MTRUE;
prop.mFaceData = eFD_Current;
prop.mPosition = 0;
inited = true;
}
FUNCTION_OUT;
return prop;
}
MERROR WatermarkCapture::negotiate(Selection &sel) {
FUNCTION_IN;
if (!mEnable) {
MY_LOGD("Force off TP_WATERMARK");
FUNCTION_OUT;
return -EINVAL;
}
sel.mIBufferFull
.setRequired(MTRUE)
.addAcceptedFormat(eImgFmt_I420)
.addAcceptedSize(eImgSize_Full);
sel.mIMetadataDynamic.setRequired(MTRUE);
sel.mIMetadataApp.setRequired(MTRUE);
sel.mIMetadataHal.setRequired(MTRUE);
sel.mOMetadataApp.setRequired(MTRUE);
sel.mOMetadataHal.setRequired(MTRUE);
FUNCTION_OUT;
return OK;
}
MERROR WatermarkCapture::process(RequestPtr pRequest,
RequestCallbackPtr pCallback = nullptr) {
FUNCTION_IN;
WATERMARK_TRACE_CALL();
MBOOL needRun = MFALSE;
if (pRequest->mIBufferFull != nullptr && pRequest->mOBufferFull != nullptr) {
IImageBuffer *pIBufferFull = pRequest->mIBufferFull->acquire();
IImageBuffer *pOBufferFull = pRequest->mOBufferFull->acquire();
if (pRequest->mIMetadataDynamic != nullptr) {
IMetadata *meta = pRequest->mIMetadataDynamic->acquire();
if (meta != NULL)
MY_LOGD("[IN] Dynamic metadata count: %d", meta->count());
else
MY_LOGD("[IN] Dynamic metadata empty");
}
int frameNo = 0, requestNo = 0;
if (pRequest->mIMetadataHal != nullptr) {
IMetadata *pIMetataHAL = pRequest->mIMetadataHal->acquire();
if (pIMetataHAL != NULL) {
MY_LOGD("[IN] HAL metadata count: %d", pIMetataHAL->count());
if (!tryGetMetadata<int>(pIMetataHAL, MTK_PIPELINE_FRAME_NUMBER, frameNo)) {
frameNo = 0;
}
if (!tryGetMetadata<int>(pIMetataHAL, MTK_PIPELINE_REQUEST_NUMBER, requestNo)) {
requestNo = 0;
}
MY_LOGD("frameNo: %d, requestNo: %d", frameNo, requestNo);
} else {
MY_LOGD("[IN] HAL metadata empty");
}
}
if (pRequest->mIMetadataApp != nullptr) {
IMetadata *pIMetadataApp = pRequest->mIMetadataApp->acquire();
MINT32 mode = 0;
if (!tryGetMetadata<MINT32>(pIMetadataApp, QXT_FEATURE_WATERMARK, mode)) {
mode = 0;
}
needRun = mode == 1 ? 1 : 0;
}
MY_LOGD("needRun: %d", needRun);
int width = pIBufferFull->getImgSize().w;
int height = pIBufferFull->getImgSize().h;
MINT inFormat = pIBufferFull->getImgFormat();
if (needRun && inFormat == NSCam::eImgFmt_I420) {
uint32_t currentTime = (NSCam::Utils::TimeTool::getReadableTime()) % 1000;
time_t timep;
time (&timep);
char currentDate[20];
strftime(currentDate, sizeof(currentDate), "%Y%m%d_%H%M%S", localtime(&timep));
//dump input I420
if (mDump) {
char path[256];
snprintf(path, sizeof(path), "/data/vendor/camera_dump/capture_in_frame%d_%dx%d_%s_%d.i420",
frameNo, width, height, currentDate, currentTime);
pIBufferFull->saveToFile(path);
}
nsecs_t t1 = systemTime(CLOCK_MONOTONIC);
if (mSrcRGBA == NULL) {
mSrcRGBA = (unsigned char *) malloc(width * height * 4);
}
//convert I420 to RGBA
libyuv::I420ToABGR((unsigned char *) (pIBufferFull->getBufVA(0)), width,
(unsigned char *) (pIBufferFull->getBufVA(1)), width >> 1,
(unsigned char *) (pIBufferFull->getBufVA(2)), width >> 1,
mSrcRGBA, width * 4,
width, height);
nsecs_t t2 = systemTime(CLOCK_MONOTONIC);
MY_LOGD("Prepare src cost %02ld ms", ns2ms(t2 - t1));
Watermark::add(mSrcRGBA, width, height, mWatermarkRGBA, mWatermarkWidth, mWatermarkHeight, (width - mWatermarkWidth) / 2, (height - mWatermarkHeight) / 2);
nsecs_t t3 = systemTime(CLOCK_MONOTONIC);
MY_LOGD("Add watermark cost %02ld ms", ns2ms(t3 - t2));
//convert RGBA to I420
libyuv::ABGRToI420(mSrcRGBA, width * 4,
(unsigned char *) (pOBufferFull->getBufVA(0)), width,
(unsigned char *) (pOBufferFull->getBufVA(1)), width >> 1,
(unsigned char *) (pOBufferFull->getBufVA(2)), width >> 1,
width, height);
nsecs_t t4 = systemTime(CLOCK_MONOTONIC);
MY_LOGD("Copy in to out cost %02ld ms", ns2ms(t4 - t3));
//dump output I420
if (mDump) {
char path[256];
snprintf(path, sizeof(path), "/data/vendor/camera_dump/capture_out_frame%d_%dx%d_%s_%d.i420",
frameNo, width, height, currentDate, currentTime);
pOBufferFull->saveToFile(path);
}
free(mSrcRGBA);
mSrcRGBA = nullptr;
} else {
if (!needRun) {
MY_LOGE("No need run, skip add watermark for capture.");
} else if (inFormat != NSCam::eImgFmt_I420) {
MY_LOGE("Unsupported format, skip add watermark for capture.");
} else {
MY_LOGE("Unknown exception, skip add watermark for capture.");
}
memcpy((unsigned char *) (pOBufferFull->getBufVA(0)),
(unsigned char *) (pIBufferFull->getBufVA(0)),
pIBufferFull->getBufSizeInBytes(0));
memcpy((unsigned char *) (pOBufferFull->getBufVA(1)),
(unsigned char *) (pIBufferFull->getBufVA(1)),
pIBufferFull->getBufSizeInBytes(1));
memcpy((unsigned char *) (pOBufferFull->getBufVA(2)),
(unsigned char *) (pIBufferFull->getBufVA(2)),
pIBufferFull->getBufSizeInBytes(2));
}
pRequest->mIBufferFull->release();
pRequest->mOBufferFull->release();
if (pRequest->mIMetadataDynamic != nullptr) {
pRequest->mIMetadataDynamic->release();
}
if (pRequest->mIMetadataHal != nullptr) {
pRequest->mIMetadataHal->release();
}
if (pRequest->mIMetadataApp != nullptr) {
pRequest->mIMetadataApp->release();
}
}
if (pCallback != nullptr) {
MY_LOGD("callback request");
pCallback->onCompleted(pRequest, 0);
}
FUNCTION_OUT;
return OK;
}
REGISTER_PLUGIN_PROVIDER(Yuv, WatermarkCapture);
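A quick way to exercise this plugin on a device: the constructor reads vendor.debug.camera.watermark.capture.enable and vendor.debug.camera.watermark.capture.dump, so the capture watermark can be toggled with adb shell setprop vendor.debug.camera.watermark.capture.enable 0/1, and the input/output I420 dumps can be enabled with adb shell setprop vendor.debug.camera.watermark.capture.dump 1 (the files land in /data/vendor/camera_dump). Note that needRun only becomes true when the app sets the QXT_FEATURE_WATERMARK app-metadata tag to 1; that custom vendor tag comes from the app-side changes in the original articles and is not shown in this excerpt.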
New file:
vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/customer/tp_watermark/WatermarkPreview.cpp
#include <mtkcam3/3rdparty/plugin/PipelinePluginType.h>
#include <mtkcam/utils/metadata/hal/mtk_platform_metadata_tag.h>
#include <mtkcam/utils/metadata/client/mtk_metadata_tag.h>
#include <cutils/properties.h>
#include <watermark.h>
#include <mtkcam/utils/std/Time.h>
#include <time.h>
#include <libyuv.h>
#include <dlfcn.h>
using NSCam::NSPipelinePlugin::Interceptor;
using NSCam::NSPipelinePlugin::PipelinePlugin;
using NSCam::NSPipelinePlugin::PluginRegister;
using NSCam::NSPipelinePlugin::Join;
using NSCam::NSPipelinePlugin::JoinPlugin;
using namespace NSCam::NSPipelinePlugin;
using NSCam::MSize;
using NSCam::MERROR;
using NSCam::IImageBuffer;
using NSCam::IMetadata;
using NSCam::Type2Type;
#ifdef LOG_TAG
#undef LOG_TAG
#endif // LOG_TAG
#define LOG_TAG "WatermarkPreview"
#include <log/log.h>
#include <android/log.h>
#define MY_LOGI(fmt, arg...) ALOGI("[%s] " fmt, __FUNCTION__, ##arg)
#define MY_LOGD(fmt, arg...) ALOGD("[%s] " fmt, __FUNCTION__, ##arg)
#define MY_LOGW(fmt, arg...) ALOGW("[%s] " fmt, __FUNCTION__, ##arg)
#define MY_LOGE(fmt, arg...) ALOGE("[%s] " fmt, __FUNCTION__, ##arg)
#define FUNCTION_IN MY_LOGD("%s +", __FUNCTION__)
#define FUNCTION_OUT MY_LOGD("%s -", __FUNCTION__)
template <class T>
inline bool
tryGetMetadata(IMetadata const *pMetadata, MUINT32 tag, T& rVal)
{
if(pMetadata == nullptr) return MFALSE;
IMetadata::IEntry entry = pMetadata->entryFor(tag);
if(!entry.isEmpty())
{
rVal = entry.itemAt(0,Type2Type<T>());
return true;
}
else
{
#define var(v) #v
#define type(t) #t
MY_LOGW("no metadata %s in %s", var(tag), type(pMetadata));
#undef type
#undef var
}
return false;
}
class WatermarkPreview : public JoinPlugin::IProvider {
public:
typedef JoinPlugin::Property Property;
typedef JoinPlugin::Selection Selection;
typedef JoinPlugin::Request::Ptr RequestPtr;
typedef JoinPlugin::RequestCallback::Ptr RequestCallbackPtr;
private:
bool mDisponly = false;
bool mInplace = false;
int mOpenID1 = 0;
int mOpenID2 = 0;
MBOOL mEnable = 1;
MBOOL mDump = 0;
unsigned char *mSrcRGBA = nullptr;
unsigned char *mWatermarkRGBA = nullptr;
int mWatermarkWidth = 0;
int mWatermarkHeight = 0;
public:
WatermarkPreview();
~WatermarkPreview();
void init();
void uninit();
void abort(std::vector <RequestPtr> &pRequests);
void set(MINT32 openID1, MINT32 openID2);
const Property &property();
MERROR negotiate(Selection &sel);
MERROR process(RequestPtr pRequest, RequestCallbackPtr pCallback);
private:
MERROR getConfigSetting(Selection &sel);
MERROR getP1Setting(Selection &sel);
MERROR getP2Setting(Selection &sel);
};
WatermarkPreview::WatermarkPreview() {
FUNCTION_IN;
mEnable = property_get_bool("vendor.debug.camera.watermark.preview.enable", 1);
mDump = property_get_bool("vendor.debug.camera.watermark.preview.dump", 0);
FUNCTION_OUT;
}
WatermarkPreview::~WatermarkPreview() {
FUNCTION_IN;
FUNCTION_OUT;
}
void WatermarkPreview::init() {
FUNCTION_IN;
mWatermarkWidth = 180;
mWatermarkHeight = 640;
int watermarkSize = mWatermarkWidth * mWatermarkHeight * 4;
mWatermarkRGBA = (unsigned char *) malloc(watermarkSize);
FILE *fp;
char path[256];
snprintf(path, sizeof(path), "/vendor/res/images/watermark.rgba");
if ((fp = fopen(path, "r")) == NULL) {
MY_LOGE("Failed to open /vendor/res/images/watermark.rgba");
return;
}
fread(mWatermarkRGBA, 1, watermarkSize, fp);
fclose(fp);
FUNCTION_OUT;
}
void WatermarkPreview::uninit() {
FUNCTION_IN;
free(mSrcRGBA);
free(mWatermarkRGBA);
FUNCTION_OUT;
}
void WatermarkPreview::abort(std::vector <RequestPtr> &pRequests) {
FUNCTION_IN;
(void)pRequests;
FUNCTION_OUT;
}
void WatermarkPreview::set(MINT32 openID1, MINT32 openID2) {
FUNCTION_IN;
MY_LOGD("set openID1:%d openID2:%d", openID1, openID2);
mOpenID1 = openID1;
mOpenID2 = openID2;
FUNCTION_OUT;
}
const WatermarkPreview::Property &WatermarkPreview::property() {
FUNCTION_IN;
static Property prop;
static bool inited;
if (!inited) {
prop.mName = "TP_WATERMARK";
prop.mFeatures = TP_FEATURE_WATERMARK;
//prop.mInPlace = MTRUE;
//prop.mFaceData = eFD_Current;
//prop.mPosition = 0;
inited = true;
}
FUNCTION_OUT;
return prop;
}
MERROR WatermarkPreview::negotiate(Selection &sel) {
FUNCTION_IN;
MERROR ret = OK;
if (sel.mSelStage == eSelStage_CFG) {
ret = getConfigSetting(sel);
} else if (sel.mSelStage == eSelStage_P1) {
ret = getP1Setting(sel);
} else if (sel.mSelStage == eSelStage_P2) {
ret = getP2Setting(sel);
}
FUNCTION_OUT;
return ret;
}
MERROR WatermarkPreview::process(RequestPtr pRequest, RequestCallbackPtr pCallback) {
FUNCTION_IN;
(void) pCallback;
MERROR ret = -EINVAL;
MBOOL needRun = MFALSE;
IImageBuffer *in = NULL, *out = NULL;
if (pRequest->mIBufferMain1 != NULL && pRequest->mOBufferMain1 != NULL) {
in = pRequest->mIBufferMain1->acquire();
out = pRequest->mOBufferMain1->acquire();
int frameNo = 0, requestNo = 0;
if (pRequest->mIMetadataHal1 != nullptr) {
IMetadata *pIMetataHAL1 = pRequest->mIMetadataHal1->acquire();
if (pIMetataHAL1 != NULL) {
if (!tryGetMetadata<int>(pIMetataHAL1, MTK_PIPELINE_FRAME_NUMBER, frameNo)) {
frameNo = 0;
}
if (!tryGetMetadata<int>(pIMetataHAL1, MTK_PIPELINE_REQUEST_NUMBER, requestNo)) {
requestNo = 0;
}
pRequest->mIMetadataHal1->release();
MY_LOGD("frameNo: %d, requestNo: %d", frameNo, requestNo);
} else {
MY_LOGD("HAL metadata empty");
}
}
MY_LOGD("in[%d](%dx%d)=%p out[%d](%dx%d)=%p",
in->getPlaneCount(), in->getImgSize().w, in->getImgSize().h, in,
out->getPlaneCount(), out->getImgSize().w, out->getImgSize().h, out);
if (pRequest->mIMetadataApp != nullptr) {
IMetadata *pIMetadataApp = pRequest->mIMetadataApp->acquire();
MINT32 mode = 0;
if (!tryGetMetadata<MINT32>(pIMetadataApp, QXT_FEATURE_WATERMARK, mode)) {
mode = 0;
}
needRun = mode == 1 ? 1 : 0;
pRequest->mIMetadataApp->release();
}
MY_LOGD("needRun: %d", needRun);
int width = in->getImgSize().w;
int height = in->getImgSize().h;
MINT inFormat = in->getImgFormat();
if (needRun && inFormat == NSCam::eImgFmt_YV12) {
uint32_t currentTime = (NSCam::Utils::TimeTool::getReadableTime()) % 1000;
time_t timep;
time (&timep);
char currentDate[20];
strftime(currentDate, sizeof(currentDate), "%Y%m%d_%H%M%S", localtime(&timep));
//dump input YV12
if (mDump) {
char path[256];
snprintf(path, sizeof(path), "/data/vendor/camera_dump/preview_in_frame%d_%dx%d_%s_%d.yv12",
frameNo, width, height, currentDate, currentTime);
in->saveToFile(path);
}
nsecs_t t1 = systemTime(CLOCK_MONOTONIC);
if (mSrcRGBA == NULL) {
mSrcRGBA = (unsigned char *) malloc(width * height * 4);
}
//convert YV12 to RGBA
libyuv::I420ToABGR((unsigned char *)(in->getBufVA(0)), width,
(unsigned char *)(in->getBufVA(2)), width >> 1,
(unsigned char *)(in->getBufVA(1)), width >> 1,
mSrcRGBA, width * 4,
width, height);
nsecs_t t2 = systemTime(CLOCK_MONOTONIC);
MY_LOGD("Prepare src cost %02ld ms", ns2ms(t2 - t1));
Watermark::add(mSrcRGBA, width, height, mWatermarkRGBA, mWatermarkWidth, mWatermarkHeight, (width - mWatermarkWidth) / 2, (height - mWatermarkHeight) / 2);
nsecs_t t3 = systemTime(CLOCK_MONOTONIC);
MY_LOGD("Add watermark cost %02ld ms", ns2ms(t3 - t2));
//convert RGBA to YV12
libyuv::ABGRToI420(mSrcRGBA, width * 4,
(unsigned char *)(out->getBufVA(0)), width,
(unsigned char *)(out->getBufVA(2)), width >> 1,
(unsigned char *)(out->getBufVA(1)), width >> 1,
width, height);
nsecs_t t4 = systemTime(CLOCK_MONOTONIC);
MY_LOGD("Copy in to out cost %02ld ms", ns2ms(t4 - t3));
//dump output YV12
if (mDump) {
char path[256];
snprintf(path, sizeof(path), "/data/vendor/camera_dump/preview_out_frame%d_%dx%d_%s_%d.yv12",
frameNo, width, height, currentDate, currentTime);
out->saveToFile(path);
}
} else {
if (!needRun) {
MY_LOGE("No need run, skip add watermark for preview.");
} else if (inFormat != NSCam::eImgFmt_YV12) {
MY_LOGE("Unsupported format, skip add watermark for preview.");
} else {
MY_LOGE("Unknown exception, skip add watermark for preview.");
}
memcpy((unsigned char *) (out->getBufVA(0)),
(unsigned char *)(in->getBufVA(0)),
in->getBufSizeInBytes(0));
memcpy((unsigned char *) (out->getBufVA(1)),
(unsigned char *)(in->getBufVA(1)),
in->getBufSizeInBytes(1));
memcpy((unsigned char *) (out->getBufVA(2)),
(unsigned char *)(in->getBufVA(2)),
in->getBufSizeInBytes(2));
}
pRequest->mIBufferMain1->release();
pRequest->mOBufferMain1->release();
ret = OK;
}
FUNCTION_OUT;
return ret;
}
MERROR WatermarkPreview::getConfigSetting(Selection &sel) {
MY_LOGI("max out size(%dx%d)",
sel.mCfgInfo.mMaxOutSize.w, sel.mCfgInfo.mMaxOutSize.h);
mDisponly = property_get_bool("vendor.debug.tpi.s.fb.disponly", 0);
mInplace = mDisponly || property_get_bool("vendor.debug.tpi.s.fb.inplace", 0);
sel.mCfgOrder = 3;
sel.mCfgJoinEntry = eJoinEntry_S_YUV;
sel.mCfgInplace = mInplace;
sel.mCfgEnableFD = MTRUE;
sel.mCfgRun = mEnable;
sel.mIBufferMain1.setRequired(MTRUE);
if (!mDisponly && property_get_bool("vendor.debug.tpi.s.fb.nv21", 0)) {
sel.mIBufferMain1.addAcceptedFormat(NSCam::eImgFmt_NV21);
}
if (!mDisponly && property_get_bool("vendor.debug.tpi.s.fb.size", 0)) {
sel.mIBufferMain1.setSpecifiedSize(sel.mCfgInfo.mMaxOutSize);
}
sel.mOBufferMain1.setRequired(MTRUE);
sel.mIBufferMain1.addAcceptedFormat(NSCam::eImgFmt_YV12);
sel.mIBufferMain1.addAcceptedSize(eImgSize_Full);
IMetadata *meta = sel.mIMetadataApp.getControl().get();
MY_LOGD("sessionMeta=%p", meta);
return OK;
}
MERROR WatermarkPreview::getP1Setting(Selection &sel) {
(void) sel;
return OK;
}
MERROR WatermarkPreview::getP2Setting(Selection &sel) {
MBOOL run = MTRUE;
sel.mP2Run = run;
return OK;
}
REGISTER_PLUGIN_PROVIDER(Join, WatermarkPreview);
Copy the libwatermark.so built above into the corresponding folder.
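watermark.cpp itself was pulled out of qiuxintai's Watermark repository into the jnidemo project and is not listed again here. For reference only, a minimal sketch matching the Watermark::add() call used in process() above could be a plain per-pixel alpha blend like the one below; the namespace form and the blending formula are my assumptions, not the original implementation.
// watermark.cpp sketch -- illustrative only, not the original source.
namespace Watermark {
// Blend an RGBA watermark onto an RGBA frame at (offsetX, offsetY),
// matching the call site Watermark::add(src, w, h, mark, mw, mh, x, y).
void add(unsigned char *src, int srcWidth, int srcHeight,
         unsigned char *mark, int markWidth, int markHeight,
         int offsetX, int offsetY) {
    for (int y = 0; y < markHeight; y++) {
        int dy = y + offsetY;
        if (dy < 0 || dy >= srcHeight) continue;
        for (int x = 0; x < markWidth; x++) {
            int dx = x + offsetX;
            if (dx < 0 || dx >= srcWidth) continue;
            const unsigned char *m = mark + (y * markWidth + x) * 4;
            unsigned char *d = src + (dy * srcWidth + dx) * 4;
            unsigned int a = m[3];            // watermark alpha
            for (int c = 0; c < 3; c++) {     // blend R, G, B channels
                d[c] = (unsigned char)((m[c] * a + d[c] * (255 - a)) / 255);
            }
        }
    }
}
} // namespace Watermark
If the original code implements add() as a static member of a Watermark class instead of a free function in a namespace, the call site stays exactly the same.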
That completes the watermark-algorithm code. Next comes the source for the multi-frame algorithm.
Create the folder vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/customer/cp_tp_mfnr/
Add
vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/customer/cp_tp_mfnr/Android.mk
ifeq ($(QXT_MFNR_SUPPORT),yes)
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := libmultiframe
LOCAL_SRC_FILES_32 := lib/armeabi-v7a/libmultiframe.so
LOCAL_SRC_FILES_64 := lib/arm64-v8a/libmultiframe.so
LOCAL_MODULE_TAGS := optional
LOCAL_MODULE_CLASS := SHARED_LIBRARIES
LOCAL_MODULE_SUFFIX := .so
LOCAL_PROPRIETARY_MODULE := true
LOCAL_CHECK_ELF_FILES := false
LOCAL_MULTILIB := both
include $(BUILD_PREBUILT)
################################################################################
################################################################################
include $(CLEAR_VARS)
#-----------------------------------------------------------
include $(TOP)/$(MTK_PATH_SOURCE)/hardware/mtkcam/mtkcam.mk
#-----------------------------------------------------------
LOCAL_SRC_FILES += MFNRImpl.cpp
#-----------------------------------------------------------
LOCAL_C_INCLUDES += $(MTKCAM_C_INCLUDES)
LOCAL_C_INCLUDES += $(TOP)/$(MTK_PATH_SOURCE)/hardware/mtkcam3/include $(MTK_PATH_SOURCE)/hardware/mtkcam/include
LOCAL_C_INCLUDES += $(TOP)/$(MTK_PATH_COMMON)/hal/inc
LOCAL_C_INCLUDES += $(TOP)/$(MTK_PATH_CUSTOM_PLATFORM)/hal/inc
LOCAL_C_INCLUDES += $(TOP)/external/libyuv/files/include/
LOCAL_C_INCLUDES += $(TOP)/$(MTK_PATH_SOURCE)/hardware/mtkcam3/3rdparty/customer/cp_tp_mfnr/include
#
LOCAL_C_INCLUDES += system/media/camera/include
#-----------------------------------------------------------
LOCAL_CFLAGS += $(MTKCAM_CFLAGS)
#
#-----------------------------------------------------------
LOCAL_STATIC_LIBRARIES +=
#
LOCAL_WHOLE_STATIC_LIBRARIES +=
#-----------------------------------------------------------
LOCAL_SHARED_LIBRARIES += liblog
LOCAL_SHARED_LIBRARIES += libutils
LOCAL_SHARED_LIBRARIES += libcutils
LOCAL_SHARED_LIBRARIES += libmtkcam_modulehelper
LOCAL_SHARED_LIBRARIES += libmtkcam_stdutils
LOCAL_SHARED_LIBRARIES += libmtkcam_pipeline
LOCAL_SHARED_LIBRARIES += libmtkcam_metadata
LOCAL_SHARED_LIBRARIES += libmtkcam_metastore
LOCAL_SHARED_LIBRARIES += libmtkcam_streamutils
LOCAL_SHARED_LIBRARIES += libmtkcam_imgbuf
LOCAL_SHARED_LIBRARIES += libmtkcam_exif
#LOCAL_SHARED_LIBRARIES += libmtkcam_3rdparty
#-----------------------------------------------------------
LOCAL_HEADER_LIBRARIES := libutils_headers liblog_headers libhardware_headers
#-----------------------------------------------------------
LOCAL_MODULE := libmtkcam.plugin.tp_mfnr
LOCAL_PROPRIETARY_MODULE := true
LOCAL_MODULE_OWNER := mtk
LOCAL_MODULE_TAGS := optional
include $(MTK_STATIC_LIBRARY)
################################################################################
#
################################################################################
include $(call all-makefiles-under,$(LOCAL_PATH))
endif
Add
vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/customer/cp_tp_mfnr/include/mf_processor.h
The interface functions declared in this header:
setFrameCount: has no real effect; it simulates setting the frame count a third-party multi-frame algorithm needs, since some algorithms require different frame counts in different scenes.
setParams: likewise has no real effect; it simulates passing the parameters a third-party multi-frame algorithm needs.
addFrame: adds one frame of image data, simulating how a third-party multi-frame algorithm collects its input frames.
process: shrinks the four previously added frames and tiles them into a single image of the original size (a rough sketch of such an implementation is given right after the header below).
createInstance: creates an instance of the interface class.
#ifndef QXT_MULTI_FRAME_H
#define QXT_MULTI_FRAME_H
class MFProcessor {
public:
virtual ~MFProcessor() {}
virtual void setFrameCount(int num) = 0;
virtual void setParams() = 0;
virtual void addFrame(unsigned char *src, int srcWidth, int srcHeight) = 0;
virtual void addFrame(unsigned char *srcY, unsigned char *srcU, unsigned char *srcV,
int srcWidth, int srcHeight) = 0;
virtual void scale(unsigned char *src, int srcWidth, int srcHeight,
unsigned char *dst, int dstWidth, int dstHeight) = 0;
virtual void process(unsigned char *output, int outputWidth, int outputHeight) = 0;
virtual void process(unsigned char *outputY, unsigned char *outputU, unsigned char *outputV,
int outputWidth, int outputHeight) = 0;
static MFProcessor* createInstance(int width, int height);
};
#endif //QXT_MULTI_FRAME_H
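The real mf_processor_impl.cpp is the one extracted from qiuxintai's repository into the jnidemo project earlier and built into libmultiframe.so, so it is not reproduced here. Purely as a hedged illustration of the behavior described above (scale each of the four added frames to quarter size and tile them into one full-size I420 image), an implementation might look roughly like the sketch below. The class name MFProcessorImpl, the internal canvas buffer, and the assumption that every plane is tightly packed (stride equals width) are mine, not the original code.
// mf_processor_impl sketch -- illustrative only, not the original source.
#include <cstring>
#include <cstdlib>
#include <libyuv.h>
#include "mf_processor.h"

class MFProcessorImpl : public MFProcessor {
public:
    MFProcessorImpl(int width, int height) : mWidth(width), mHeight(height) {
        // internal I420 canvas that the scaled frames are tiled into
        mCanvas = (unsigned char *) malloc(width * height * 3 / 2);
    }
    ~MFProcessorImpl() override { free(mCanvas); }
    void setFrameCount(int num) override { mFrameCount = num; }
    void setParams() override { /* placeholder, no real parameters */ }
    void addFrame(unsigned char *src, int srcWidth, int srcHeight) override {
        // assumes a contiguous, tightly packed I420 buffer
        addFrame(src, src + srcWidth * srcHeight,
                 src + srcWidth * srcHeight * 5 / 4, srcWidth, srcHeight);
    }
    void addFrame(unsigned char *srcY, unsigned char *srcU, unsigned char *srcV,
                  int srcWidth, int srcHeight) override {
        // scale the incoming frame to a quarter and drop it into the next quadrant
        int idx = mAdded % 4;
        int qx = (idx % 2) * (mWidth / 2);
        int qy = (idx / 2) * (mHeight / 2);
        unsigned char *dstY = mCanvas + qy * mWidth + qx;
        unsigned char *dstU = mCanvas + mWidth * mHeight + (qy / 2) * (mWidth / 2) + qx / 2;
        unsigned char *dstV = mCanvas + mWidth * mHeight * 5 / 4 + (qy / 2) * (mWidth / 2) + qx / 2;
        libyuv::I420Scale(srcY, srcWidth, srcU, srcWidth / 2, srcV, srcWidth / 2,
                          srcWidth, srcHeight,
                          dstY, mWidth, dstU, mWidth / 2, dstV, mWidth / 2,
                          mWidth / 2, mHeight / 2, libyuv::kFilterBilinear);
        mAdded++;
    }
    void scale(unsigned char *src, int srcWidth, int srcHeight,
               unsigned char *dst, int dstWidth, int dstHeight) override {
        libyuv::I420Scale(src, srcWidth,
                          src + srcWidth * srcHeight, srcWidth / 2,
                          src + srcWidth * srcHeight * 5 / 4, srcWidth / 2,
                          srcWidth, srcHeight,
                          dst, dstWidth,
                          dst + dstWidth * dstHeight, dstWidth / 2,
                          dst + dstWidth * dstHeight * 5 / 4, dstWidth / 2,
                          dstWidth, dstHeight, libyuv::kFilterBilinear);
    }
    void process(unsigned char *output, int outputWidth, int outputHeight) override {
        process(output, output + outputWidth * outputHeight,
                output + outputWidth * outputHeight * 5 / 4, outputWidth, outputHeight);
    }
    void process(unsigned char *outputY, unsigned char *outputU, unsigned char *outputV,
                 int outputWidth, int outputHeight) override {
        // hand the tiled canvas back; assumes the output size equals the size passed to createInstance
        memcpy(outputY, mCanvas, outputWidth * outputHeight);
        memcpy(outputU, mCanvas + mWidth * mHeight, outputWidth * outputHeight / 4);
        memcpy(outputV, mCanvas + mWidth * mHeight * 5 / 4, outputWidth * outputHeight / 4);
        mAdded = 0;
    }
private:
    int mWidth, mHeight;
    int mFrameCount = 4;
    int mAdded = 0;
    unsigned char *mCanvas = nullptr;
};

MFProcessor *MFProcessor::createInstance(int width, int height) {
    return new MFProcessorImpl(width, height);
}
With this shape, the plugin's per-frame addFrame() calls each fill one quadrant of the canvas, and the final process() call simply copies the tiled result into the output buffer.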
Add
vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/customer/cp_tp_mfnr/MFNRImpl.cpp
Key functions:
In property(), set the feature type to TP_FEATURE_MFNR and fill in the name, priority, maximum frame count and other attributes. Pay special attention to mNeedRrzoBuffer: for a multi-frame algorithm it normally must be set to MTRUE.
In negotiate(), configure the formats and sizes of the input and output images the algorithm needs. Note that a multi-frame algorithm takes several input frames but produces only one output frame, so mOBufferFull is only required when mRequestIndex == 0; in other words, only the first frame has both input and output, the remaining frames have input only.
negotiate() also reads the metadata passed down from the app layer and uses it to decide whether the algorithm should run at all.
In process(), hook up the algorithm: create the algorithm interface object on the first frame, call addFrame() for every frame, and on the last frame call process() to run the algorithm and fetch the output.
#ifdef LOG_TAG
#undef LOG_TAG
#endif // LOG_TAG
#define LOG_TAG "MFNRProvider"
static const char *__CALLERNAME__ = LOG_TAG;
//
#include <mtkcam/utils/std/Log.h>
//
#include <stdlib.h>
#include <utils/Errors.h>
#include <utils/List.h>
#include <utils/RefBase.h>
#include <sstream>
#include <unordered_map> // std::unordered_map
//
#include <mtkcam/utils/metadata/client/mtk_metadata_tag.h>
#include <mtkcam/utils/metadata/hal/mtk_platform_metadata_tag.h>
//zHDR
#include <mtkcam/utils/hw/HwInfoHelper.h> // NSCamHw::HwInfoHelper
#include <mtkcam3/feature/utils/FeatureProfileHelper.h> //ProfileParam
#include <mtkcam/drv/IHalSensor.h>
//
#include <mtkcam/utils/imgbuf/IIonImageBufferHeap.h>
//
#include <mtkcam/utils/std/Format.h>
#include <mtkcam/utils/std/Time.h>
//
#include <mtkcam3/pipeline/hwnode/NodeId.h>
//
#include <mtkcam/utils/metastore/IMetadataProvider.h>
#include <mtkcam/utils/metastore/ITemplateRequest.h>
#include <mtkcam/utils/metastore/IMetadataProvider.h>
#include <mtkcam3/3rdparty/plugin/PipelinePlugin.h>
#include <mtkcam3/3rdparty/plugin/PipelinePluginType.h>
//
#include <isp_tuning/isp_tuning.h> //EIspProfile_T, EOperMode_*
//
#include <custom_metadata/custom_metadata_tag.h>
//
#include <libyuv.h>
#include <mf_processor.h>
using namespace NSCam;
using namespace android;
using namespace std;
using namespace NSCam::NSPipelinePlugin;
using namespace NSIspTuning;
/******************************************************************************
*
******************************************************************************/
#define MY_LOGV(fmt, arg...) CAM_LOGV("(%d)[%s] " fmt, ::gettid(), __FUNCTION__, ##arg)
#define MY_LOGD(fmt, arg...) CAM_LOGD("(%d)[%s] " fmt, ::gettid(), __FUNCTION__, ##arg)
#define MY_LOGI(fmt, arg...) CAM_LOGI("(%d)[%s] " fmt, ::gettid(), __FUNCTION__, ##arg)
#define MY_LOGW(fmt, arg...) CAM_LOGW("(%d)[%s] " fmt, ::gettid(), __FUNCTION__, ##arg)
#define MY_LOGE(fmt, arg...) CAM_LOGE("(%d)[%s] " fmt, ::gettid(), __FUNCTION__, ##arg)
//
#define MY_LOGV_IF(cond, ...) do { if ( (cond) ) { MY_LOGV(__VA_ARGS__); } }while(0)
#define MY_LOGD_IF(cond, ...) do { if ( (cond) ) { MY_LOGD(__VA_ARGS__); } }while(0)
#define MY_LOGI_IF(cond, ...) do { if ( (cond) ) { MY_LOGI(__VA_ARGS__); } }while(0)
#define MY_LOGW_IF(cond, ...) do { if ( (cond) ) { MY_LOGW(__VA_ARGS__); } }while(0)
#define MY_LOGE_IF(cond, ...) do { if ( (cond) ) { MY_LOGE(__VA_ARGS__); } }while(0)
//
#define ASSERT(cond, msg) do { if (!(cond)) { printf("Failed: %s\n", msg); return; } }while(0)
#define __DEBUG // enable debug
#ifdef __DEBUG
#include <memory>
#define FUNCTION_SCOPE \
auto __scope_logger__ = [](char const* f)->std::shared_ptr<const char>{ \
CAM_LOGD("(%d)[%s] + ", ::gettid(), f); \
return std::shared_ptr<const char>(f, [](char const* p){CAM_LOGD("(%d)[%s] -", ::gettid(), p);}); \
}(__FUNCTION__)
#else
#define FUNCTION_SCOPE
#endif
template <typename T>
inline MBOOL
tryGetMetadata(
IMetadata* pMetadata,
MUINT32 const tag,
T & rVal
)
{
if (pMetadata == NULL) {
MY_LOGW("pMetadata == NULL");
return MFALSE;
}
IMetadata::IEntry entry = pMetadata->entryFor(tag);
if (!entry.isEmpty()) {
rVal = entry.itemAt(0, Type2Type<T>());
return MTRUE;
}
return MFALSE;
}
#define MFNR_FRAME_COUNT 4
/******************************************************************************
*
******************************************************************************/
class MFNRProviderImpl : public MultiFramePlugin::IProvider {
typedef MultiFramePlugin::Property Property;
typedef MultiFramePlugin::Selection Selection;
typedef MultiFramePlugin::Request::Ptr RequestPtr;
typedef MultiFramePlugin::RequestCallback::Ptr RequestCallbackPtr;
public:
virtual void set(MINT32 iOpenId, MINT32 iOpenId2) {
MY_LOGD("set openId:%d openId2:%d", iOpenId, iOpenId2);
mOpenId = iOpenId;
}
virtual const Property& property() {
FUNCTION_SCOPE;
static Property prop;
static bool inited;
if (!inited) {
prop.mName = "TP_MFNR";
prop.mFeatures = TP_FEATURE_MFNR;
prop.mThumbnailTiming = eTiming_P2;
prop.mPriority = ePriority_Highest;
prop.mZsdBufferMaxNum = 8; // maximum frames requirement
prop.mNeedRrzoBuffer = MTRUE; // rrzo requirement for BSS
inited = MTRUE;
}
return prop;
};
virtual MERROR negotiate(Selection& sel) {
FUNCTION_SCOPE;
IMetadata* appInMeta = sel.mIMetadataApp.getControl().get();
tryGetMetadata<MINT32>(appInMeta, QXT_FEATURE_MFNR, mEnable);
MY_LOGD("mEnable: %d", mEnable);
if (!mEnable) {
MY_LOGD("Force off TP_MFNR shot");
return BAD_VALUE;
}
sel.mRequestCount = MFNR_FRAME_COUNT;
MY_LOGD("mRequestCount=%d", sel.mRequestCount);
sel.mIBufferFull
.setRequired(MTRUE)
.addAcceptedFormat(eImgFmt_I420) // I420 first
.addAcceptedFormat(eImgFmt_YV12)
.addAcceptedFormat(eImgFmt_NV21)
.addAcceptedFormat(eImgFmt_NV12)
.addAcceptedSize(eImgSize_Full);
//sel.mIBufferSpecified.setRequired(MTRUE).setAlignment(16, 16);
sel.mIMetadataDynamic.setRequired(MTRUE);
sel.mIMetadataApp.setRequired(MTRUE);
sel.mIMetadataHal.setRequired(MTRUE);
if (sel.mRequestIndex == 0) {
sel.mOBufferFull
.setRequired(MTRUE)
.addAcceptedFormat(eImgFmt_I420) // I420 first
.addAcceptedFormat(eImgFmt_YV12)
.addAcceptedFormat(eImgFmt_NV21)
.addAcceptedFormat(eImgFmt_NV12)
.addAcceptedSize(eImgSize_Full);
sel.mOMetadataApp.setRequired(MTRUE);
sel.mOMetadataHal.setRequired(MTRUE);
} else {
sel.mOBufferFull.setRequired(MFALSE);
sel.mOMetadataApp.setRequired(MFALSE);
sel.mOMetadataHal.setRequired(MFALSE);
}
return OK;
};
virtual void init() {
FUNCTION_SCOPE;
mDump = property_get_bool("vendor.debug.camera.mfnr.dump", 0);
//nothing to do for MFNR
};
virtual MERROR process(RequestPtr pRequest, RequestCallbackPtr pCallback) {
FUNCTION_SCOPE;
MERROR ret = 0;
// restore callback function for abort API
if (pCallback != nullptr) {
m_callbackprt = pCallback;
}
//maybe need to keep a copy in member<sp>
IMetadata* pAppMeta = pRequest->mIMetadataApp->acquire();
IMetadata* pHalMeta = pRequest->mIMetadataHal->acquire();
IMetadata* pHalMetaDynamic = pRequest->mIMetadataDynamic->acquire();
MINT32 processUniqueKey = 0;
IImageBuffer* pInImgBuffer = NULL;
uint32_t width = 0;
uint32_t height = 0;
if (!IMetadata::getEntry<MINT32>(pHalMeta, MTK_PIPELINE_UNIQUE_KEY, processUniqueKey)) {
MY_LOGE("cannot get unique about MFNR capture");
return BAD_VALUE;
}
if (pRequest->mIBufferFull != nullptr) {
pInImgBuffer = pRequest->mIBufferFull->acquire();
width = pInImgBuffer->getImgSize().w;
height = pInImgBuffer->getImgSize().h;
MY_LOGD("[IN] Full image VA: 0x%p, Size(%dx%d), Format: %s",
pInImgBuffer->getBufVA(0), width, height, format2String(pInImgBuffer->getImgFormat()));
if (mDump) {
char path[256];
snprintf(path, sizeof(path), "/data/vendor/camera_dump/mfnr_capture_in_%d_%dx%d.%s",
pRequest->mRequestIndex, width, height, format2String(pInImgBuffer->getImgFormat()));
pInImgBuffer->saveToFile(path);
}
}
if (pRequest->mIBufferSpecified != nullptr) {
IImageBuffer* pImgBuffer = pRequest->mIBufferSpecified->acquire();
MY_LOGD("[IN] Specified image VA: 0x%p, Size(%dx%d)", pImgBuffer->getBufVA(0), pImgBuffer->getImgSize().w, pImgBuffer->getImgSize().h);
}
if (pRequest->mOBufferFull != nullptr) {
mOutImgBuffer = pRequest->mOBufferFull->acquire();
MY_LOGD("[OUT] Full image VA: 0x%p, Size(%dx%d)", mOutImgBuffer->getBufVA(0), mOutImgBuffer->getImgSize().w, mOutImgBuffer->getImgSize().h);
}
if (pRequest->mIMetadataDynamic != nullptr) {
IMetadata *meta = pRequest->mIMetadataDynamic->acquire();
if (meta != NULL)
MY_LOGD("[IN] Dynamic metadata count: ", meta->count());
else
MY_LOGD("[IN] Dynamic metadata Empty");
}
MY_LOGD("frame:%d/%d, width:%d, height:%d", pRequest->mRequestIndex, pRequest->mRequestCount, width, height);
if (pInImgBuffer != NULL && mOutImgBuffer != NULL) {
uint32_t yLength = pInImgBuffer->getBufSizeInBytes(0);
uint32_t uLength = pInImgBuffer->getBufSizeInBytes(1);
uint32_t vLength = pInImgBuffer->getBufSizeInBytes(2);
uint32_t yuvLength = yLength + uLength + vLength;
if (pRequest->mRequestIndex == 0) {//First frame
//When width or height changed, recreate multiFrame
if (mLatestWidth != width || mLatestHeight != height) {
if (mMFProcessor != NULL) {
delete mMFProcessor;
mMFProcessor = NULL;
}
mLatestWidth = width;
mLatestHeight = height;
}
if (mMFProcessor == NULL) {
MY_LOGD("create mMFProcessor %dx%d", mLatestWidth, mLatestHeight);
mMFProcessor = MFProcessor::createInstance(mLatestWidth, mLatestHeight);
mMFProcessor->setFrameCount(pRequest->mRequestCount);
}
}
mMFProcessor->addFrame((uint8_t *)pInImgBuffer->getBufVA(0),
(uint8_t *)pInImgBuffer->getBufVA(1),
(uint8_t *)pInImgBuffer->getBufVA(2),
mLatestWidth, mLatestHeight);
if (pRequest->mRequestIndex == pRequest->mRequestCount - 1) {//Last frame
if (mMFProcessor != NULL) {
mMFProcessor->process((uint8_t *)mOutImgBuffer->getBufVA(0),
(uint8_t *)mOutImgBuffer->getBufVA(1),
(uint8_t *)mOutImgBuffer->getBufVA(2),
mLatestWidth, mLatestHeight);
if (mDump) {
char path[256];
snprintf(path, sizeof(path), "/data/vendor/camera_dump/mfnr_capture_out_%d_%dx%d.%s",
pRequest->mRequestIndex, mOutImgBuffer->getImgSize().w, mOutImgBuffer->getImgSize().h,
format2String(mOutImgBuffer->getImgFormat()));
mOutImgBuffer->saveToFile(path);
}
} else {
memcpy((uint8_t *)mOutImgBuffer->getBufVA(0),
(uint8_t *)pInImgBuffer->getBufVA(0),
pInImgBuffer->getBufSizeInBytes(0));
memcpy((uint8_t *)mOutImgBuffer->getBufVA(1),
(uint8_t *)pInImgBuffer->getBufVA(1),
pInImgBuffer->getBufSizeInBytes(1));
memcpy((uint8_t *)mOutImgBuffer->getBufVA(2),
(uint8_t *)pInImgBuffer->getBufVA(2),
pInImgBuffer->getBufSizeInBytes(2));
}
mOutImgBuffer = NULL;
}
}
if (pRequest->mIBufferFull != nullptr) {
pRequest->mIBufferFull->release();
}
if (pRequest->mIBufferSpecified != nullptr) {
pRequest->mIBufferSpecified->release();
}
if (pRequest->mOBufferFull != nullptr) {
pRequest->mOBufferFull->release();
}
if (pRequest->mIMetadataDynamic != nullptr) {
pRequest->mIMetadataDynamic->release();
}
mvRequests.push_back(pRequest);
MY_LOGD("collected request(%d/%d)", pRequest->mRequestIndex, pRequest->mRequestCount);
if (pRequest->mRequestIndex == pRequest->mRequestCount - 1) {
for (auto req : mvRequests) {
MY_LOGD("callback request(%d/%d) %p", req->mRequestIndex, req->mRequestCount, pCallback.get());
if (pCallback != nullptr) {
pCallback->onCompleted(req, 0);
}
}
mvRequests.clear();
}
return ret;
};
virtual void abort(vector<RequestPtr>& pRequests) {
FUNCTION_SCOPE;
bool bAbort = false;
IMetadata *pHalMeta;
MINT32 processUniqueKey = 0;
for (auto req:pRequests) {
bAbort = false;
pHalMeta = req->mIMetadataHal->acquire();
if (!IMetadata::getEntry<MINT32>(pHalMeta, MTK_PIPELINE_UNIQUE_KEY, processUniqueKey)) {
MY_LOGW("cannot get unique about MFNR capture");
}
if (m_callbackprt != nullptr) {
MY_LOGD("m_callbackprt is %p", m_callbackprt.get());
/*MFNR plugin callback request to MultiFrameNode */
for (Vector<RequestPtr>::iterator it = mvRequests.begin() ; it != mvRequests.end(); it++) {
if ((*it) == req) {
mvRequests.erase(it);
m_callbackprt->onAborted(req);
bAbort = true;
break;
}
}
} else {
MY_LOGW("callbackptr is null");
}
if (!bAbort) {
MY_LOGW("Desire abort request[%d] is not found", req->mRequestIndex);
}
}
};
virtual void uninit() {
FUNCTION_SCOPE;
if (mMFProcessor != NULL) {
delete mMFProcessor;
mMFProcessor = NULL;
}
mLatestWidth = 0;
mLatestHeight = 0;
};
virtual ~MFNRProviderImpl() {
FUNCTION_SCOPE;
};
const char * format2String(MINT format) {
switch(format) {
case NSCam::eImgFmt_RGBA8888: return "rgba";
case NSCam::eImgFmt_RGB888: return "rgb";
case NSCam::eImgFmt_RGB565: return "rgb565";
case NSCam::eImgFmt_STA_BYTE: return "byte";
case NSCam::eImgFmt_YVYU: return "yvyu";
case NSCam::eImgFmt_UYVY: return "uyvy";
case NSCam::eImgFmt_VYUY: return "vyuy";
case NSCam::eImgFmt_YUY2: return "yuy2";
case NSCam::eImgFmt_YV12: return "yv12";
case NSCam::eImgFmt_YV16: return "yv16";
case NSCam::eImgFmt_NV16: return "nv16";
case NSCam::eImgFmt_NV61: return "nv61";
case NSCam::eImgFmt_NV12: return "nv12";
case NSCam::eImgFmt_NV21: return "nv21";
case NSCam::eImgFmt_I420: return "i420";
case NSCam::eImgFmt_I422: return "i422";
case NSCam::eImgFmt_Y800: return "y800";
case NSCam::eImgFmt_BAYER8: return "bayer8";
case NSCam::eImgFmt_BAYER10: return "bayer10";
case NSCam::eImgFmt_BAYER12: return "bayer12";
case NSCam::eImgFmt_BAYER14: return "bayer14";
case NSCam::eImgFmt_FG_BAYER8: return "fg_bayer8";
case NSCam::eImgFmt_FG_BAYER10: return "fg_bayer10";
case NSCam::eImgFmt_FG_BAYER12: return "fg_bayer12";
case NSCam::eImgFmt_FG_BAYER14: return "fg_bayer14";
default: return "unknown";
};
};
private:
MINT32 mUniqueKey;
MINT32 mOpenId;
MINT32 mRealIso;
MINT32 mShutterTime;
MBOOL mZSDMode;
MBOOL mFlashOn;
Vector<RequestPtr> mvRequests;
RequestCallbackPtr m_callbackprt;
MFProcessor* mMFProcessor = NULL;
IImageBuffer* mOutImgBuffer = NULL;
uint32_t mLatestWidth = 0;
uint32_t mLatestHeight = 0;
MINT32 mEnable = 0;
MINT32 mDump = 0;
// add end
};
REGISTER_PLUGIN_PROVIDER(MultiFrame, MFNRProviderImpl);
Copy the libmultiframe.so built above into the corresponding folder.
That completes the multi-frame algorithm code.
Note:
From MTK's documentation:
When setting formats in negotiate(), if several plugins of the same type are mounted on one mount point, only the input-buffer settings made in the first plugin's negotiate() take effect.
When mounting a single-frame YUV plugin under YUVNode, make sure the MTK platform's SWNR plugin's negotiate() directly returns a non-OK value and does not set any accepted formats. Otherwise the third-party plugin may not get buffers in the format it expects, because the accepted formats set during negotiation by the SWNR plugin and the third-party plugin differ.
vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/mtk/swnr/SWNRImpl.cpp
@@ -391,7 +391,8 @@ negotiate(Selection& sel)
sel.mOMetadataApp.setRequired(false);
sel.mOMetadataHal.setRequired(true);
- return OK;
+ //return OK;
+ return -EINVAL;//cczheng
}
The shared library that ultimately ends up in vendor.img is libmtkcam_3rdparty.customer.so. We therefore also need to modify Android.mk so that
the module libmtkcam_3rdparty.customer depends on libmtkcam.plugin.tp_watermark, and
the module libmtkcam_3rdparty.customer depends on libmtkcam.plugin.tp_mfnr.
vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/customer/Android.mk
@@ -76,6 +76,19 @@ LOCAL_WHOLE_STATIC_LIBRARIES += libmtkcam.plugin.tp_zoomfusion
#
LOCAL_SHARED_LIBRARIES += libcam.iopipe
endif
+
+ifeq ($(QXT_WATERMARK_SUPPORT), yes)
+LOCAL_SHARED_LIBRARIES += libwatermark
+LOCAL_SHARED_LIBRARIES += libyuv.vendor
+LOCAL_WHOLE_STATIC_LIBRARIES += libmtkcam.plugin.tp_watermark
+endif
+
+ifeq ($(QXT_MFNR_SUPPORT), yes)
+LOCAL_SHARED_LIBRARIES += libmultiframe
+LOCAL_SHARED_LIBRARIES += libyuv.vendor
+LOCAL_WHOLE_STATIC_LIBRARIES += libmtkcam.plugin.tp_mfnr
+endif
+
#
LOCAL_SHARED_LIBRARIES += libfeature.stereo.provider
#
Preload the watermark file
device/mediateksample/k62v1_64_bsp/device.mk
+++ b/alps/device/mediateksample/k62v1_64_bsp/device.mk
@@ -43,6 +43,9 @@ endif
PRODUCT_COPY_FILES += frameworks/native/data/etc/android.hardware.usb.host.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.usb.host.xml
+ifeq ($(QXT_WATERMARK_SUPPORT),yes)
+PRODUCT_COPY_FILES += vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/customer/tp_watermark/res/watermark.rgba:$(TARGET_COPY_OUT_VENDOR)/res/images/watermark.rgba
+endif
The camera HAL process runs in the mtk_hal_camera SELinux domain. It needs to read /vendor/res/images/watermark.rgba, which requires vendor_file permissions, so grant them to mtk_hal_camera here:
device/mediatek/sepolicy/bsp/non_plat/mtk_hal_camera.te
@@ -93,7 +93,7 @@ set_prop(mtk_hal_camera, vendor_mtk_camera_prop)
allow mtk_hal_camera teei_client_device:chr_file rw_file_perms;
allow mtk_hal_camera mdla_device:chr_file rw_file_perms;
+allow mtk_hal_camera vendor_file:file { read getattr open };
#Date: 2019/11/11
Remove MTK's sample MFNR algorithm
Normally only one MFNR algorithm may run at a time, so MTK's sample MFNR algorithm has to be removed. This could be controlled with a build macro (for example, wrapping the line below in an ifneq ($(QXT_MFNR_SUPPORT),yes) guard); here we keep it simple and crude and just comment it out.
vendor/mediatek/proprietary/hardware/mtkcam3/3rdparty/mtk/Android.mk
@@ -146,7 +146,7 @@ LOCAL_SHARED_LIBRARIES += libfeature.stereo.provider
LOCAL_SHARED_LIBRARIES += liblpcnr
#-----------------------------------------------------------
ifneq ($(strip $(MTKCAM_HAVE_MFB_SUPPORT)),0)
-LOCAL_WHOLE_STATIC_LIBRARIES += libmtkcam.plugin.mfnr
+# LOCAL_WHOLE_STATIC_LIBRARIES += libmtkcam.plugin.mfnr
endif
#4 Cell
LOCAL_WHOLE_STATIC_LIBRARIES += libmtkcam.plugin.remosaic
Custom metadata
Metadata is added so that the app layer can pass the corresponding parameters down to the HAL layer. The app sets them via CaptureRequest.Builder.set(@NonNull Key<T> key, T value).
Since this is a custom feature, we cannot reuse the metadata MTK provides, so we need to define our own.
vendor/mediatek/proprietary/hardware/mtkcam/include/mtkcam/utils/metadata/client/mtk_metadata_tag.h
@@ -94,6 +94,7 @@ typedef enum mtk_camera_metadata_section {
MTK_SMVR_FEATURE = 15,
MTK_SINGLEHW_SETTING = 16,
MTK_ABF_FEATURE = 17,
+ QXT_FEATURE = 18,//cczheng
MTK_VENDOR_SECTION_COUNT,
} mtk_camera_metadata_section_t;
@@ -155,6 +156,7 @@ typedef enum mtk_camera_metadata_section_start {
MTK_SMVR_FEATURE_START = (MTK_SMVR_FEATURE + MTK_VENDOR_TAG_SECTION) << 16,
MTK_SINGLEHW_SETTING_START = (MTK_SINGLEHW_SETTING + MTK_VENDOR_TAG_SECTION) << 16,
MTK_ABF_FEATURE_START = (MTK_ABF_FEATURE + MTK_VENDOR_TAG_SECTION) << 16,
+ QXT_FEATURE_START = (QXT_FEATURE + MTK_VENDOR_TAG_SECTION) << 16,//cczheng
} mtk_camera_metadata_section_start_t;
@@ -754,6 +756,10 @@ typedef enum mtk_camera_metadata_tag {
MTK_ABF_FEATURE_ABF_RESULT,
MTK_ABF_FEATURE_AVAILABLE_ABF_MODES,
MTK_ABF_FEATURE_END,
+
+ QXT_FEATURE_WATERMARK = QXT_FEATURE_START,
+ QXT_FEATURE_MFNR,//cczheng
+ QXT_FEATURE_END,//cczheng
} mtk_camera_metadata_tag_t;
/**
vendor/mediatek/proprietary/hardware/mtkcam/include/mtkcam/utils/metadata/client/mtk_metadata_tag_info.inl
@@ -92,6 +92,8 @@ _IMP_SECTION_INFO_(MTK_HEIC, "mtk.heic")
_IMP_SECTION_INFO_(MTK_HEIC_INFO, "mtk.heic.ifno")
_IMP_SECTION_INFO_(MTK_IOPIPE_INFO, "mtk.iopipe.info")
_IMP_SECTION_INFO_(MTK_HAL_INFO, "mtk.hal.info")
+_IMP_SECTION_INFO_(QXT_FEATURE, "com.qxt.camera")
+//cczheng
/******************************************************************************
*
@@ -106,6 +108,12 @@ _IMP_TAG_INFO_( MTK_COLOR_CORRECTION_ABERRATION_MODE,
MUINT8, "aberrationMode")
_IMP_TAG_INFO_( MTK_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
MUINT8, "availableAberrationModes")
+
+_IMP_TAG_INFO_( QXT_FEATURE_WATERMARK,
+ MINT32, "watermark")
+_IMP_TAG_INFO_( QXT_FEATURE_MFNR,
+ MINT32, "mfnr")
+
//
_IMP_TAG_INFO_( MTK_CONTROL_AE_ANTIBANDING_MODE,
MUINT8, "aeAntibandingMode")
vendor/mediatek/proprietary/hardware/mtkcam/utils/metadata/vendortag/VendorTagTable.h
@@ -547,6 +547,19 @@ static auto& _FlashFeature_()
//
return sInst;
}
+//cczheng
+static auto& _QxtFeature_()
+{
+ static const std::map<uint32_t, VendorTag_t>
+ sInst = {
+ _TAG_(QXT_FEATURE_WATERMARK,
+ "watermark", TYPE_INT32),
+ _TAG_(QXT_FEATURE_MFNR,
+ "mfnr", TYPE_INT32),
+ };
+ //
+ return sInst;
+}
static auto& _SingleHWSetting_()
{
@@ -668,6 +681,10 @@ static auto& getGlobalSections()
MTK_ABF_FEATURE_START,
MTK_ABF_FEATURE_END,
_ABFFeature_() ),
+ _SECTION_( "com.qxt.camera",
+ QXT_FEATURE_START,
+ QXT_FEATURE_END,
+ _QxtFeature_() ),
};
// append custom vendor tags sections to mtk sections
vendor/mediatek/proprietary/hardware/mtkcam/utils/metastore/metadataprovider/constructStaticMetadata.cpp
@@ -1325,6 +1325,20 @@ updateData(IMetadata &rMetadata)
rMetadata.update(availReqEntry.tag(), availReqEntry);
}
+//cczheng
+#if 1
+ {
+ IMetadata::IEntry qxtAvailRequestEntry = rMetadata.entryFor(MTK_REQUEST_AVAILABLE_REQUEST_KEYS);
+ qxtAvailRequestEntry.push_back(QXT_FEATURE_WATERMARK , Type2Type< MINT32 >());
+ qxtAvailRequestEntry.push_back(QXT_FEATURE_MFNR , Type2Type< MINT32 >());
+ rMetadata.update(qxtAvailRequestEntry.tag(), qxtAvailRequestEntry);
+
+ IMetadata::IEntry qxtAvailSessionEntry = rMetadata.entryFor(MTK_REQUEST_AVAILABLE_SESSION_KEYS);
+ qxtAvailSessionEntry.push_back(QXT_FEATURE_WATERMARK , Type2Type< MINT32 >());
+ qxtAvailSessionEntry.push_back(QXT_FEATURE_MFNR , Type2Type< MINT32 >());
+ rMetadata.update(qxtAvailSessionEntry.tag(), qxtAvailSessionEntry);
+ }
+#endif
+
// update multi-cam feature mode to static metadata
// vendor tag
{
With everything in place we are done: run a make, flash the image, and check the result.
Finally
Since I could not find the source for watermark.rgba, I grabbed one from somewhere else; its width and height are not quite right, so the watermark does not display correctly, and I did not spend time tuning the final effect.
Also, now that the system ships with these two algorithms and any app can call them, you could add two buttons to the stock camera to load them and liven up its rather plain, single style. Interested readers can add that themselves, since the ordinary app-side code has already been provided.
Copyright notice: this is an original article by CSDN blogger "cczhengv", released under the CC 4.0 BY-SA license. Please include a link to the original article and this notice when reposting.
Original article: https://blog.csdn.net/u012932409/article/details/120263991