diff --git a/foxglove.sh b/foxglove.sh new file mode 100644 index 0000000..c2566db --- /dev/null +++ b/foxglove.sh @@ -0,0 +1,2 @@ +source install/setup.bash +ros2 launch foxglove_bridge foxglove_bridge_launch.xml port:=8765 \ No newline at end of file diff --git a/src/rm_auto_aim/.clang-format b/src/rm_auto_aim/.clang-format new file mode 100644 index 0000000..2f8d64b --- /dev/null +++ b/src/rm_auto_aim/.clang-format @@ -0,0 +1,18 @@ +--- +Language: Cpp +BasedOnStyle: Google + +AccessModifierOffset: -2 +AlignAfterOpenBracket: AlwaysBreak +BraceWrapping: + AfterClass: true + AfterFunction: true + AfterNamespace: true + AfterStruct: true +BreakBeforeBraces: Custom +ColumnLimit: 100 +ConstructorInitializerIndentWidth: 0 +ContinuationIndentWidth: 2 +DerivePointerAlignment: false +PointerAlignment: Middle +ReflowComments: false \ No newline at end of file diff --git a/src/rm_auto_aim/.clang-tidy b/src/rm_auto_aim/.clang-tidy new file mode 100644 index 0000000..bf3d848 --- /dev/null +++ b/src/rm_auto_aim/.clang-tidy @@ -0,0 +1,55 @@ +--- +Checks: '-*, + performance-*, + -performance-unnecessary-value-param, + llvm-namespace-comment, + modernize-redundant-void-arg, + modernize-use-nullptr, + modernize-use-default, + modernize-use-override, + modernize-loop-convert, + modernize-make-shared, + modernize-make-unique, + misc-unused-parameters, + readability-named-parameter, + readability-redundant-smartptr-get, + readability-redundant-string-cstr, + readability-simplify-boolean-expr, + readability-container-size-empty, + readability-identifier-naming, + ' +HeaderFilterRegex: '' +AnalyzeTemporaryDtors: false +CheckOptions: + - key: llvm-namespace-comment.ShortNamespaceLines + value: '10' + - key: llvm-namespace-comment.SpacesBeforeComments + value: '2' + - key: misc-unused-parameters.StrictMode + value: '1' + - key: readability-braces-around-statements.ShortStatementLines + value: '2' + # type names + - key: readability-identifier-naming.ClassCase + value: CamelCase + - key: 
readability-identifier-naming.EnumCase + value: CamelCase + - key: readability-identifier-naming.UnionCase + value: CamelCase + # method names + - key: readability-identifier-naming.MethodCase + value: camelBack + # variable names + - key: readability-identifier-naming.VariableCase + value: lower_case + - key: readability-identifier-naming.ClassMemberSuffix + value: '_' + # const static or global variables are UPPER_CASE + - key: readability-identifier-naming.EnumConstantCase + value: UPPER_CASE + - key: readability-identifier-naming.StaticConstantCase + value: UPPER_CASE + - key: readability-identifier-naming.ClassConstantCase + value: UPPER_CASE + - key: readability-identifier-naming.GlobalVariableCase + value: UPPER_CASE \ No newline at end of file diff --git a/src/rm_auto_aim/.gitignore b/src/rm_auto_aim/.gitignore new file mode 100644 index 0000000..3f4f07a --- /dev/null +++ b/src/rm_auto_aim/.gitignore @@ -0,0 +1,170 @@ + +# Created by https://www.gitignore.io/api/c++,linux,macos,clion +# Edit at https://www.gitignore.io/?templates=c++,linux,macos,clion + +### C++ ### +# Prerequisites +*.d + +# Compiled Object files +*.slo +*.lo +*.o +*.obj + +# Precompiled Headers +*.gch +*.pch + +# Compiled Dynamic libraries +*.so +*.dylib +*.dll + +# Fortran module files +*.mod +*.smod + +# Compiled Static libraries +*.lai +*.la +*.a +*.lib + +# Executables +*.exe +*.out +*.app + +### CLion ### +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/**/usage.statistics.xml +.idea/**/dictionaries +.idea/**/shelf + +# Generated files +.idea/**/contentModel.xml + +# Sensitive or high-churn files +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml 
+.idea/**/dbnavigator.xml + +# Gradle +.idea/**/gradle.xml +.idea/**/libraries + +# Gradle and Maven with auto-import +# When using Gradle or Maven with auto-import, you should exclude module files, +# since they will be recreated, and may cause churn. Uncomment if using +# auto-import. +# .idea/modules.xml +# .idea/*.iml +# .idea/modules +# *.iml +# *.ipr + +# CMake +cmake-build-*/ + +# Mongo Explorer plugin +.idea/**/mongoSettings.xml + +# File-based project format +*.iws + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties + +# Editor-based Rest Client +.idea/httpRequests + +# Android studio 3.1+ serialized cache file +.idea/caches/build_file_checksums.ser + +### CLion Patch ### +# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 + +# *.iml +# modules.xml +# .idea/misc.xml +# *.ipr + +# Sonarlint plugin +.idea/**/sonarlint/ + +# SonarQube Plugin +.idea/**/sonarIssues.xml + +# Markdown Navigator plugin +.idea/**/markdown-navigator.xml +.idea/**/markdown-navigator/ + +### Linux ### +*~ + +# temporary files which can be created if a process still has a handle open of a deleted file +.fuse_hidden* + +# KDE directory preferences +.directory + +# Linux trash folder which might appear on any partition or disk +.Trash-* + +# .nfs files are created when an open file is removed but is still being accessed +.nfs* + +### macOS ### +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on 
remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +# End of https://www.gitignore.io/api/c++,linux,macos,clion diff --git a/src/rm_auto_aim/LICENSE b/src/rm_auto_aim/LICENSE new file mode 100644 index 0000000..d1e647e --- /dev/null +++ b/src/rm_auto_aim/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 ChenJun + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/src/rm_auto_aim/README.md b/src/rm_auto_aim/README.md new file mode 100644 index 0000000..caa6a80 --- /dev/null +++ b/src/rm_auto_aim/README.md @@ -0,0 +1,61 @@ +# rm_auto_aim + +## Overview + +RoboMaster 装甲板自瞄算法模块 + +rm_vision + +该项目为 [rm_vision](https://github.com/chenjunnn/rm_vision) 的子模块 + +若有帮助请Star这个项目,感谢~ + +### License + +The source code is released under a [MIT license](rm_auto_aim/LICENSE). 
+ +[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT) + +Author: Chen Jun + +运行环境:Ubuntu 22.04 / ROS2 Humble (未在其他环境下测试) + +![Build Status](https://github.com/chenjunnn/rm_auto_aim/actions/workflows/ros_ci.yml/badge.svg) + +## Building from Source + +### Building + +在 Ubuntu 22.04 环境下安装 [ROS2 Humble](https://docs.ros.org/en/humble/Installation/Ubuntu-Install-Debians.html) + +创建 ROS 工作空间后 clone 项目,使用 rosdep 安装依赖后编译代码 + + cd ros_ws/src + git clone https://gitlab.com/rm_vision/rm_auto_aim.git + cd .. + rosdep install --from-paths src --ignore-src -r -y + colcon build --symlink-install --packages-up-to auto_aim_bringup + +### Testing + +Run the tests with + + colcon test --packages-up-to auto_aim_bringup + +## Packages + +- [armor_detector](armor_detector) + + 订阅相机参数及图像流进行装甲板的识别并解算三维位置,输出识别到的装甲板在输入frame下的三维位置 (一般是以相机光心为原点的相机坐标系) + +- [armor_tracker](armor_tracker) + + 订阅识别节点发布的装甲板三维位置及机器人的坐标转换信息,将装甲板三维位置变换到指定惯性系(一般是以云台中心为原点,IMU 上电时的 Yaw 朝向为 X 轴的惯性系)下,然后将装甲板目标送入跟踪器中,输出跟踪机器人在指定惯性系下的状态 + +- auto_aim_interfaces + + 定义了识别节点和处理节点的接口以及定义了用于 Debug 的信息 + +- auto_aim_bringup + + 包含启动识别节点和处理节点的默认参数文件及 launch 文件 diff --git a/src/rm_auto_aim/armor_detector/CMakeLists.txt b/src/rm_auto_aim/armor_detector/CMakeLists.txt new file mode 100644 index 0000000..f1cd143 --- /dev/null +++ b/src/rm_auto_aim/armor_detector/CMakeLists.txt @@ -0,0 +1,68 @@ +cmake_minimum_required(VERSION 3.10) +project(armor_detector) + +## Use C++14 +set(CMAKE_CXX_STANDARD 14) +set(CMAKE_CXX_STANDARD_REQUIRED ON) + +## By adding -Wall and -Werror, the compiler does not ignore warnings anymore, +## enforcing cleaner code. 
+add_definitions(-Wall -Werror) + +## Export compile commands for clangd +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) + +####################### +## Find dependencies ## +####################### + +find_package(ament_cmake_auto REQUIRED) +find_package(OpenCV REQUIRED) +ament_auto_find_build_dependencies() + +########### +## Build ## +########### + +ament_auto_add_library(${PROJECT_NAME} SHARED + DIRECTORY src +) + +target_include_directories(${PROJECT_NAME} PUBLIC ${OpenCV_INCLUDE_DIRS}) +target_link_libraries(${PROJECT_NAME} ${OpenCV_LIBS}) + +rclcpp_components_register_node(${PROJECT_NAME} + PLUGIN rm_auto_aim::ArmorDetectorNode + EXECUTABLE armor_detector_node +) + +############# +## Testing ## +############# + +if(BUILD_TESTING) + find_package(ament_lint_auto REQUIRED) + list(APPEND AMENT_LINT_AUTO_EXCLUDE + ament_cmake_copyright + ament_cmake_uncrustify + ament_cmake_cpplint + ) + ament_lint_auto_find_test_dependencies() + + find_package(ament_cmake_gtest) + ament_add_gtest(test_node_startup test/test_node_startup.cpp) + target_link_libraries(test_node_startup ${PROJECT_NAME}) + + ament_add_gtest(test_number_cls test/test_number_cls.cpp) + target_link_libraries(test_number_cls ${PROJECT_NAME}) + +endif() + +############# +## Install ## +############# + +ament_auto_package( + INSTALL_TO_SHARE + model +) diff --git a/src/rm_auto_aim/armor_detector/README.md b/src/rm_auto_aim/armor_detector/README.md new file mode 100644 index 0000000..d5d49db --- /dev/null +++ b/src/rm_auto_aim/armor_detector/README.md @@ -0,0 +1,104 @@ +# armor_detector + +- [DetectorNode](#basedetectornode) + - [Detector](#detector) + - [NumberClassifier](#numberclassifier) + - [PnPSolver](#pnpsolver) + +## 识别节点 + +订阅相机参数及图像流进行装甲板的识别并解算三维位置,输出识别到的装甲板在输入frame下的三维位置 (一般是以相机光心为原点的相机坐标系) + +### DetectorNode +装甲板识别节点 + +包含[Detector](#detector) +包含[PnPSolver](#pnpsolver) + +订阅: +- 相机参数 `/camera_info` +- 彩色图像 `/image_raw` + +发布: +- 识别目标 `/detector/armors` + +静态参数: +- 筛选灯条的参数 `light` + - 长宽比范围 
`min/max_ratio` + - 最大倾斜角度 `max_angle` +- 筛选灯条配对结果的参数 `armor` + - 两灯条的最小长度之比(短边/长边)`min_light_ratio ` + - 装甲板两灯条中心的距离范围(大装甲板)`min/max_large_center_distance` + - 装甲板两灯条中心的距离范围(小装甲板)`min/max_small_center_distance` + - 装甲板的最大倾斜角度 `max_angle` + +动态参数: +- 是否发布 debug 信息 `debug` +- 识别目标颜色 `detect_color` +- 二值化的最小阈值 `binary_thres` +- 数字分类器 `classifier` + - 置信度阈值 `threshold` + +## Detector +装甲板识别器 + +### preprocessImage +预处理 + +| ![](docs/raw.png) | ![](docs/hsv_bin.png) | ![](docs/gray_bin.png) | +| :---------------: | :-------------------: | :--------------------: | +| 原图 | 通过颜色二值化 | 通过灰度二值化 | + +由于一般工业相机的动态范围不够大,导致若要能够清晰分辨装甲板的数字,得到的相机图像中灯条中心就会过曝,灯条中心的像素点的值往往都是 R=B。根据颜色信息来进行二值化效果不佳,因此此处选择了直接通过灰度图进行二值化,将灯条的颜色判断放到后续处理中。 + +### findLights +寻找灯条 + +通过 findContours 得到轮廓,再通过 minAreaRect 获得最小外接矩形,对其进行长宽比和倾斜角度的判断,可以高效的筛除形状不满足的亮斑。 + +判断灯条颜色这里采用了对轮廓内的的R/B值求和,判断两和的的大小的方法,若 `sum_r > sum_b` 则认为是红色灯条,反之则认为是蓝色灯条。 + +| ![](docs/red.png) | ![](docs/blue.png) | +| :---------------: | :----------------: | +| 提取出的红色灯条 | 提取出的蓝色灯条 | + +### matchLights +配对灯条 + +根据 `detect_color` 选择对应颜色的灯条进行两两配对,首先筛除掉两条灯条中间包含另一个灯条的情况,然后根据两灯条的长度之比、两灯条中心的距离、配对出装甲板的倾斜角度来筛选掉条件不满足的结果,得到形状符合装甲板特征的灯条配对。 + +## NumberClassifier +数字分类器 + +### extractNumbers +提取数字 + +| ![](docs/num_raw.png) | ![](docs/num_warp.png) | ![](docs/num_roi.png) | ![](docs/num_bin.png) | +| :-------------------: | :--------------------: | :-------------------: | :-------------------: | +| 原图 | 透视变换 | 取ROI | 二值化 | + +将每条灯条上下的角点拉伸到装甲板的上下边缘作为待变换点,进行透视变换,再对变换后的图像取ROI。考虑到数字图案实质上就是黑色背景+白色图案,所以此处使用了大津法进行二值化。 + +### Classify +分类 + +由于上一步对于数字的提取效果已经非常好,数字图案的特征非常清晰明显,装甲板的远近、旋转都不会使图案产生过多畸变,且图案像素点少,所以我们使用多层感知机(MLP)进行分类。 + +网络结构中定义了两个隐藏层和一个分类层,将二值化后的数字展平成 20x28=560 维的输入,送入网络进行分类。 + +网络结构: + +![](docs/model.svg) + + + + + +## PnPSolver +PnP解算器 + +[Perspective-n-Point (PnP) pose computation](https://docs.opencv.org/4.x/d5/d1f/calib3d_solvePnP.html) + +PnP解算器将 `cv::solvePnP()` 封装,接口中传入 `Armor` 类型的数据即可得到 `geometry_msgs::msg::Point` 类型的三维坐标。 + 
+考虑到装甲板的四个点在一个平面上,在PnP解算方法上我们选择了 `cv::SOLVEPNP_IPPE` (Method is based on the paper of T. Collins and A. Bartoli. ["Infinitesimal Plane-Based Pose Estimation"](https://link.springer.com/article/10.1007/s11263-014-0725-5). This method requires coplanar object points.) diff --git a/src/rm_auto_aim/armor_detector/docs/blue.png b/src/rm_auto_aim/armor_detector/docs/blue.png new file mode 100644 index 0000000..f10db16 Binary files /dev/null and b/src/rm_auto_aim/armor_detector/docs/blue.png differ diff --git a/src/rm_auto_aim/armor_detector/docs/gray_bin.png b/src/rm_auto_aim/armor_detector/docs/gray_bin.png new file mode 100644 index 0000000..5c35e71 Binary files /dev/null and b/src/rm_auto_aim/armor_detector/docs/gray_bin.png differ diff --git a/src/rm_auto_aim/armor_detector/docs/hsv_bin.png b/src/rm_auto_aim/armor_detector/docs/hsv_bin.png new file mode 100644 index 0000000..075e1c6 Binary files /dev/null and b/src/rm_auto_aim/armor_detector/docs/hsv_bin.png differ diff --git a/src/rm_auto_aim/armor_detector/docs/model.svg b/src/rm_auto_aim/armor_detector/docs/model.svg new file mode 100644 index 0000000..48b6f52 --- /dev/null +++ b/src/rm_auto_aim/armor_detector/docs/model.svg @@ -0,0 +1 @@ +1×1×20×281×9inputfloat32[1,1,20,28]ReshapeReshape_1int64[2]shape〈2〉GemmGemm_2float32[164,560]B〈164×560〉float32[164]C〈164〉ReluRelu_3GemmGemm_4float32[84,164]B〈84×164〉float32[84]C〈84〉ReluRelu_5GemmGemm_6float32[9,84]B〈9×84〉float32[9]C〈9〉ReluRelu_7outputfloat32[1,9] \ No newline at end of file diff --git a/src/rm_auto_aim/armor_detector/docs/num_bin.png b/src/rm_auto_aim/armor_detector/docs/num_bin.png new file mode 100644 index 0000000..1d47d99 Binary files /dev/null and b/src/rm_auto_aim/armor_detector/docs/num_bin.png differ diff --git a/src/rm_auto_aim/armor_detector/docs/num_raw.png b/src/rm_auto_aim/armor_detector/docs/num_raw.png new file mode 100644 index 0000000..302c852 Binary files /dev/null and b/src/rm_auto_aim/armor_detector/docs/num_raw.png differ diff --git 
a/src/rm_auto_aim/armor_detector/docs/num_roi.png b/src/rm_auto_aim/armor_detector/docs/num_roi.png new file mode 100644 index 0000000..b542bc5 Binary files /dev/null and b/src/rm_auto_aim/armor_detector/docs/num_roi.png differ diff --git a/src/rm_auto_aim/armor_detector/docs/num_warp.png b/src/rm_auto_aim/armor_detector/docs/num_warp.png new file mode 100644 index 0000000..3fe6d74 Binary files /dev/null and b/src/rm_auto_aim/armor_detector/docs/num_warp.png differ diff --git a/src/rm_auto_aim/armor_detector/docs/raw.png b/src/rm_auto_aim/armor_detector/docs/raw.png new file mode 100644 index 0000000..af656a0 Binary files /dev/null and b/src/rm_auto_aim/armor_detector/docs/raw.png differ diff --git a/src/rm_auto_aim/armor_detector/docs/red.png b/src/rm_auto_aim/armor_detector/docs/red.png new file mode 100644 index 0000000..6e6bca0 Binary files /dev/null and b/src/rm_auto_aim/armor_detector/docs/red.png differ diff --git a/src/rm_auto_aim/armor_detector/docs/result.png b/src/rm_auto_aim/armor_detector/docs/result.png new file mode 100644 index 0000000..df8bb14 Binary files /dev/null and b/src/rm_auto_aim/armor_detector/docs/result.png differ diff --git a/src/rm_auto_aim/armor_detector/docs/train_acc.png b/src/rm_auto_aim/armor_detector/docs/train_acc.png new file mode 100644 index 0000000..5814d6b Binary files /dev/null and b/src/rm_auto_aim/armor_detector/docs/train_acc.png differ diff --git a/src/rm_auto_aim/armor_detector/docs/train_loss.png b/src/rm_auto_aim/armor_detector/docs/train_loss.png new file mode 100644 index 0000000..12a5c6b Binary files /dev/null and b/src/rm_auto_aim/armor_detector/docs/train_loss.png differ diff --git a/src/rm_auto_aim/armor_detector/docs/val_acc.png b/src/rm_auto_aim/armor_detector/docs/val_acc.png new file mode 100644 index 0000000..c77a786 Binary files /dev/null and b/src/rm_auto_aim/armor_detector/docs/val_acc.png differ diff --git a/src/rm_auto_aim/armor_detector/docs/val_loss.png 
b/src/rm_auto_aim/armor_detector/docs/val_loss.png new file mode 100644 index 0000000..28793eb Binary files /dev/null and b/src/rm_auto_aim/armor_detector/docs/val_loss.png differ diff --git a/src/rm_auto_aim/armor_detector/include/armor_detector/armor.hpp b/src/rm_auto_aim/armor_detector/include/armor_detector/armor.hpp new file mode 100644 index 0000000..fec27bd --- /dev/null +++ b/src/rm_auto_aim/armor_detector/include/armor_detector/armor.hpp @@ -0,0 +1,73 @@ +// Copyright 2022 Chen Jun +// Licensed under the MIT License. + +#ifndef ARMOR_DETECTOR__ARMOR_HPP_ +#define ARMOR_DETECTOR__ARMOR_HPP_ + +#include + +// STL +#include +#include + +namespace rm_auto_aim +{ +const int RED = 0; +const int BLUE = 1; + +enum class ArmorType { SMALL, LARGE, INVALID }; +const std::string ARMOR_TYPE_STR[3] = {"small", "large", "invalid"}; + +struct Light : public cv::RotatedRect +{ + Light() = default; + explicit Light(cv::RotatedRect box) : cv::RotatedRect(box) + { + cv::Point2f p[4]; + box.points(p); + std::sort(p, p + 4, [](const cv::Point2f & a, const cv::Point2f & b) { return a.y < b.y; }); + top = (p[0] + p[1]) / 2; + bottom = (p[2] + p[3]) / 2; + + length = cv::norm(top - bottom); + width = cv::norm(p[0] - p[1]); + + tilt_angle = std::atan2(std::abs(top.x - bottom.x), std::abs(top.y - bottom.y)); + tilt_angle = tilt_angle / CV_PI * 180; + } + + int color; + cv::Point2f top, bottom; + double length; + double width; + float tilt_angle; +}; + +struct Armor +{ + Armor() = default; + Armor(const Light & l1, const Light & l2) + { + if (l1.center.x < l2.center.x) { + left_light = l1, right_light = l2; + } else { + left_light = l2, right_light = l1; + } + center = (left_light.center + right_light.center) / 2; + } + + // Light pairs part + Light left_light, right_light; + cv::Point2f center; + ArmorType type; + + // Number part + cv::Mat number_img; + std::string number; + float confidence; + std::string classfication_result; +}; + +} // namespace rm_auto_aim + +#endif // 
ARMOR_DETECTOR__ARMOR_HPP_ diff --git a/src/rm_auto_aim/armor_detector/include/armor_detector/detector.hpp b/src/rm_auto_aim/armor_detector/include/armor_detector/detector.hpp new file mode 100644 index 0000000..5e3cd74 --- /dev/null +++ b/src/rm_auto_aim/armor_detector/include/armor_detector/detector.hpp @@ -0,0 +1,83 @@ +// Copyright 2022 Chen Jun +// Licensed under the MIT License. + +#ifndef ARMOR_DETECTOR__DETECTOR_HPP_ +#define ARMOR_DETECTOR__DETECTOR_HPP_ + +// OpenCV +#include +#include + +// STD +#include +#include +#include + +#include "armor_detector/armor.hpp" +#include "armor_detector/number_classifier.hpp" +#include "auto_aim_interfaces/msg/debug_armors.hpp" +#include "auto_aim_interfaces/msg/debug_lights.hpp" + +namespace rm_auto_aim +{ +class Detector +{ +public: + struct LightParams + { + // width / height + double min_ratio; + double max_ratio; + // vertical angle + double max_angle; + }; + + struct ArmorParams + { + double min_light_ratio; + // light pairs distance + double min_small_center_distance; + double max_small_center_distance; + double min_large_center_distance; + double max_large_center_distance; + // horizontal angle + double max_angle; + }; + + Detector(const int & bin_thres, const int & color, const LightParams & l, const ArmorParams & a); + + std::vector detect(const cv::Mat & input); + + cv::Mat preprocessImage(const cv::Mat & input); + std::vector findLights(const cv::Mat & rbg_img, const cv::Mat & binary_img); + std::vector matchLights(const std::vector & lights); + + // For debug usage + cv::Mat getAllNumbersImage(); + void drawResults(cv::Mat & img); + + int binary_thres; + int detect_color; + LightParams l; + ArmorParams a; + + std::unique_ptr classifier; + + // Debug msgs + cv::Mat binary_img; + auto_aim_interfaces::msg::DebugLights debug_lights; + auto_aim_interfaces::msg::DebugArmors debug_armors; + +private: + bool isLight(const Light & possible_light); + bool containLight( + const Light & light_1, const Light & light_2, 
const std::vector & lights); + ArmorType isArmor(const Light & light_1, const Light & light_2); + + std::vector lights_; + std::vector armors_; +}; + +} // namespace rm_auto_aim + +#endif // ARMOR_DETECTOR__DETECTOR_HPP_ diff --git a/src/rm_auto_aim/armor_detector/include/armor_detector/detector_node.hpp b/src/rm_auto_aim/armor_detector/include/armor_detector/detector_node.hpp new file mode 100644 index 0000000..0cd88f3 --- /dev/null +++ b/src/rm_auto_aim/armor_detector/include/armor_detector/detector_node.hpp @@ -0,0 +1,81 @@ +// Copyright 2022 Chen Jun +// Licensed under the MIT License. + +#ifndef ARMOR_DETECTOR__DETECTOR_NODE_HPP_ +#define ARMOR_DETECTOR__DETECTOR_NODE_HPP_ + +// ROS +#include +#include +#include +#include +#include +#include +#include +#include + +// STD +#include +#include +#include + +#include "armor_detector/detector.hpp" +#include "armor_detector/number_classifier.hpp" +#include "armor_detector/pnp_solver.hpp" +#include "auto_aim_interfaces/msg/armors.hpp" + +namespace rm_auto_aim +{ + +class ArmorDetectorNode : public rclcpp::Node +{ +public: + ArmorDetectorNode(const rclcpp::NodeOptions & options); + +private: + void imageCallback(const sensor_msgs::msg::Image::ConstSharedPtr img_msg); + + std::unique_ptr initDetector(); + std::vector detectArmors(const sensor_msgs::msg::Image::ConstSharedPtr & img_msg); + + void createDebugPublishers(); + void destroyDebugPublishers(); + + void publishMarkers(); + + // Armor Detector + std::unique_ptr detector_; + + // Detected armors publisher + auto_aim_interfaces::msg::Armors armors_msg_; + rclcpp::Publisher::SharedPtr armors_pub_; + + // Visualization marker publisher + visualization_msgs::msg::Marker armor_marker_; + visualization_msgs::msg::Marker text_marker_; + visualization_msgs::msg::MarkerArray marker_array_; + rclcpp::Publisher::SharedPtr marker_pub_; + + // Camera info part + rclcpp::Subscription::SharedPtr cam_info_sub_; + cv::Point2f cam_center_; + std::shared_ptr cam_info_; + 
std::unique_ptr pnp_solver_; + + // Image subscrpition + rclcpp::Subscription::SharedPtr img_sub_; + + // Debug information + bool debug_; + std::shared_ptr debug_param_sub_; + std::shared_ptr debug_cb_handle_; + rclcpp::Publisher::SharedPtr lights_data_pub_; + rclcpp::Publisher::SharedPtr armors_data_pub_; + image_transport::Publisher binary_img_pub_; + image_transport::Publisher number_img_pub_; + image_transport::Publisher result_img_pub_; +}; + +} // namespace rm_auto_aim + +#endif // ARMOR_DETECTOR__DETECTOR_NODE_HPP_ diff --git a/src/rm_auto_aim/armor_detector/include/armor_detector/number_classifier.hpp b/src/rm_auto_aim/armor_detector/include/armor_detector/number_classifier.hpp new file mode 100644 index 0000000..7a68799 --- /dev/null +++ b/src/rm_auto_aim/armor_detector/include/armor_detector/number_classifier.hpp @@ -0,0 +1,40 @@ +// Copyright 2022 Chen Jun + +#ifndef ARMOR_DETECTOR__NUMBER_CLASSIFIER_HPP_ +#define ARMOR_DETECTOR__NUMBER_CLASSIFIER_HPP_ + +// OpenCV +#include + +// STL +#include +#include +#include +#include +#include + +#include "armor_detector/armor.hpp" + +namespace rm_auto_aim +{ +class NumberClassifier +{ +public: + NumberClassifier( + const std::string & model_path, const std::string & label_path, const double threshold, + const std::vector & ignore_classes = {}); + + void extractNumbers(const cv::Mat & src, std::vector & armors); + + void classify(std::vector & armors); + + double threshold; + +private: + cv::dnn::Net net_; + std::vector class_names_; + std::vector ignore_classes_; +}; +} // namespace rm_auto_aim + +#endif // ARMOR_DETECTOR__NUMBER_CLASSIFIER_HPP_ diff --git a/src/rm_auto_aim/armor_detector/include/armor_detector/pnp_solver.hpp b/src/rm_auto_aim/armor_detector/include/armor_detector/pnp_solver.hpp new file mode 100644 index 0000000..e0005ae --- /dev/null +++ b/src/rm_auto_aim/armor_detector/include/armor_detector/pnp_solver.hpp @@ -0,0 +1,48 @@ +// Copyright 2022 Chen Jun +// Licensed under the MIT License. 
+ +#ifndef ARMOR_DETECTOR__PNP_SOLVER_HPP_ +#define ARMOR_DETECTOR__PNP_SOLVER_HPP_ + +#include +#include + +// STD +#include +#include + +#include "armor_detector/armor.hpp" + +namespace rm_auto_aim +{ +class PnPSolver +{ +public: + PnPSolver( + const std::array & camera_matrix, + const std::vector & distortion_coefficients); + + // Get 3d position + bool solvePnP(const Armor & armor, cv::Mat & rvec, cv::Mat & tvec); + + // Calculate the distance between armor center and image center + float calculateDistanceToCenter(const cv::Point2f & image_point); + +private: + cv::Mat camera_matrix_; + cv::Mat dist_coeffs_; + + // Unit: mm + static constexpr float SMALL_ARMOR_WIDTH = 135; + static constexpr float SMALL_ARMOR_HEIGHT = 55; + static constexpr float LARGE_ARMOR_WIDTH = 225; + static constexpr float LARGE_ARMOR_HEIGHT = 55; + + // Four vertices of armor in 3d + std::vector small_armor_points_; + std::vector large_armor_points_; +}; + +} // namespace rm_auto_aim + +#endif // ARMOR_DETECTOR__PNP_SOLVER_HPP_ diff --git a/src/rm_auto_aim/armor_detector/model/label.txt b/src/rm_auto_aim/armor_detector/model/label.txt new file mode 100644 index 0000000..64bf0fd --- /dev/null +++ b/src/rm_auto_aim/armor_detector/model/label.txt @@ -0,0 +1,9 @@ +1 +2 +3 +4 +5 +outpost +guard +base +negative \ No newline at end of file diff --git a/src/rm_auto_aim/armor_detector/model/mlp.onnx b/src/rm_auto_aim/armor_detector/model/mlp.onnx new file mode 100644 index 0000000..2089380 Binary files /dev/null and b/src/rm_auto_aim/armor_detector/model/mlp.onnx differ diff --git a/src/rm_auto_aim/armor_detector/package.xml b/src/rm_auto_aim/armor_detector/package.xml new file mode 100644 index 0000000..94f0a09 --- /dev/null +++ b/src/rm_auto_aim/armor_detector/package.xml @@ -0,0 +1,37 @@ + + + + armor_detector + 0.1.0 + A template for ROS packages. 
+ Chen Jun + BSD + https://github.com/chenjunnn/rm_auto_aim + https://github.com/chenjunnn/rm_auto_aim/issues + Chen Jun + + + ament_cmake + + + rclcpp + rclcpp_components + sensor_msgs + geometry_msgs + visualization_msgs + message_filters + cv_bridge + image_transport + image_transport_plugins + auto_aim_interfaces + vision_opencv + tf2_geometry_msgs + + ament_lint_auto + ament_lint_common + ament_cmake_clang_format + + + ament_cmake + + diff --git a/src/rm_auto_aim/armor_detector/src/detector.cpp b/src/rm_auto_aim/armor_detector/src/detector.cpp new file mode 100644 index 0000000..18887eb --- /dev/null +++ b/src/rm_auto_aim/armor_detector/src/detector.cpp @@ -0,0 +1,245 @@ +// Copyright (c) 2022 ChenJun +// Licensed under the MIT License. + +// OpenCV +#include +#include +#include +#include +#include + +// STD +#include +#include +#include + +#include "armor_detector/detector.hpp" +#include "auto_aim_interfaces/msg/debug_armor.hpp" +#include "auto_aim_interfaces/msg/debug_light.hpp" + +namespace rm_auto_aim +{ +Detector::Detector( + const int & bin_thres, const int & color, const LightParams & l, const ArmorParams & a) +: binary_thres(bin_thres), detect_color(color), l(l), a(a) +{ +} + +std::vector Detector::detect(const cv::Mat & input) +{ + binary_img = preprocessImage(input); + lights_ = findLights(input, binary_img); + armors_ = matchLights(lights_); + + if (!armors_.empty()) { + classifier->extractNumbers(input, armors_); + classifier->classify(armors_); + } + + return armors_; +} + +cv::Mat Detector::preprocessImage(const cv::Mat & rgb_img) +{ + cv::Mat gray_img; + cv::cvtColor(rgb_img, gray_img, cv::COLOR_RGB2GRAY); + + cv::Mat binary_img; + cv::threshold(gray_img, binary_img, binary_thres, 255, cv::THRESH_BINARY); + + return binary_img; +} + +std::vector Detector::findLights(const cv::Mat & rbg_img, const cv::Mat & binary_img) +{ + using std::vector; + vector> contours; + vector hierarchy; + cv::findContours(binary_img, contours, hierarchy, 
cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE); + + vector lights; + this->debug_lights.data.clear(); + + for (const auto & contour : contours) { + if (contour.size() < 5) continue; + + auto r_rect = cv::minAreaRect(contour); + auto light = Light(r_rect); + + if (isLight(light)) { + auto rect = light.boundingRect(); + if ( // Avoid assertion failed + 0 <= rect.x && 0 <= rect.width && rect.x + rect.width <= rbg_img.cols && 0 <= rect.y && + 0 <= rect.height && rect.y + rect.height <= rbg_img.rows) { + int sum_r = 0, sum_b = 0; + auto roi = rbg_img(rect); + // Iterate through the ROI + for (int i = 0; i < roi.rows; i++) { + for (int j = 0; j < roi.cols; j++) { + if (cv::pointPolygonTest(contour, cv::Point2f(j + rect.x, i + rect.y), false) >= 0) { + // if point is inside contour + sum_r += roi.at(i, j)[0]; + sum_b += roi.at(i, j)[2]; + } + } + } + // Sum of red pixels > sum of blue pixels ? + light.color = sum_r > sum_b ? RED : BLUE; + lights.emplace_back(light); + } + } + } + + return lights; +} + +bool Detector::isLight(const Light & light) +{ + // The ratio of light (short side / long side) + float ratio = light.width / light.length; + bool ratio_ok = l.min_ratio < ratio && ratio < l.max_ratio; + + bool angle_ok = light.tilt_angle < l.max_angle; + + bool is_light = ratio_ok && angle_ok; + + // Fill in debug information + auto_aim_interfaces::msg::DebugLight light_data; + light_data.center_x = light.center.x; + light_data.ratio = ratio; + light_data.angle = light.tilt_angle; + light_data.is_light = is_light; + this->debug_lights.data.emplace_back(light_data); + + return is_light; +} + +std::vector Detector::matchLights(const std::vector & lights) +{ + std::vector armors; + this->debug_armors.data.clear(); + + // Loop all the pairing of lights + for (auto light_1 = lights.begin(); light_1 != lights.end(); light_1++) { + for (auto light_2 = light_1 + 1; light_2 != lights.end(); light_2++) { + if (light_1->color != detect_color || light_2->color != detect_color) continue; 
+ + if (containLight(*light_1, *light_2, lights)) { + continue; + } + + auto type = isArmor(*light_1, *light_2); + if (type != ArmorType::INVALID) { + auto armor = Armor(*light_1, *light_2); + armor.type = type; + armors.emplace_back(armor); + } + } + } + + return armors; +} + +// Check if there is another light in the boundingRect formed by the 2 lights +bool Detector::containLight( + const Light & light_1, const Light & light_2, const std::vector & lights) +{ + auto points = std::vector{light_1.top, light_1.bottom, light_2.top, light_2.bottom}; + auto bounding_rect = cv::boundingRect(points); + + for (const auto & test_light : lights) { + if (test_light.center == light_1.center || test_light.center == light_2.center) continue; + + if ( + bounding_rect.contains(test_light.top) || bounding_rect.contains(test_light.bottom) || + bounding_rect.contains(test_light.center)) { + return true; + } + } + + return false; +} + +ArmorType Detector::isArmor(const Light & light_1, const Light & light_2) +{ + // Ratio of the length of 2 lights (short side / long side) + float light_length_ratio = light_1.length < light_2.length ? 
light_1.length / light_2.length + : light_2.length / light_1.length; + bool light_ratio_ok = light_length_ratio > a.min_light_ratio; + + // Distance between the center of 2 lights (unit : light length) + float avg_light_length = (light_1.length + light_2.length) / 2; + float center_distance = cv::norm(light_1.center - light_2.center) / avg_light_length; + bool center_distance_ok = (a.min_small_center_distance <= center_distance && + center_distance < a.max_small_center_distance) || + (a.min_large_center_distance <= center_distance && + center_distance < a.max_large_center_distance); + + // Angle of light center connection + cv::Point2f diff = light_1.center - light_2.center; + float angle = std::abs(std::atan(diff.y / diff.x)) / CV_PI * 180; + bool angle_ok = angle < a.max_angle; + + bool is_armor = light_ratio_ok && center_distance_ok && angle_ok; + + // Judge armor type + ArmorType type; + if (is_armor) { + type = center_distance > a.min_large_center_distance ? ArmorType::LARGE : ArmorType::SMALL; + } else { + type = ArmorType::INVALID; + } + + // Fill in debug information + auto_aim_interfaces::msg::DebugArmor armor_data; + armor_data.type = ARMOR_TYPE_STR[static_cast(type)]; + armor_data.center_x = (light_1.center.x + light_2.center.x) / 2; + armor_data.light_ratio = light_length_ratio; + armor_data.center_distance = center_distance; + armor_data.angle = angle; + this->debug_armors.data.emplace_back(armor_data); + + return type; +} + +cv::Mat Detector::getAllNumbersImage() +{ + if (armors_.empty()) { + return cv::Mat(cv::Size(20, 28), CV_8UC1); + } else { + std::vector number_imgs; + number_imgs.reserve(armors_.size()); + for (auto & armor : armors_) { + number_imgs.emplace_back(armor.number_img); + } + cv::Mat all_num_img; + cv::vconcat(number_imgs, all_num_img); + return all_num_img; + } +} + +void Detector::drawResults(cv::Mat & img) +{ + // Draw Lights + for (const auto & light : lights_) { + cv::circle(img, light.top, 3, cv::Scalar(255, 255, 255), 1); + 
cv::circle(img, light.bottom, 3, cv::Scalar(255, 255, 255), 1); + auto line_color = light.color == RED ? cv::Scalar(255, 255, 0) : cv::Scalar(255, 0, 255); + cv::line(img, light.top, light.bottom, line_color, 1); + } + + // Draw armors + for (const auto & armor : armors_) { + cv::line(img, armor.left_light.top, armor.right_light.bottom, cv::Scalar(0, 255, 0), 2); + cv::line(img, armor.left_light.bottom, armor.right_light.top, cv::Scalar(0, 255, 0), 2); + } + + // Show numbers and confidence + for (const auto & armor : armors_) { + cv::putText( + img, armor.classfication_result, armor.left_light.top, cv::FONT_HERSHEY_SIMPLEX, 0.8, + cv::Scalar(0, 255, 255), 2); + } +} + +} // namespace rm_auto_aim diff --git a/src/rm_auto_aim/armor_detector/src/detector_node.cpp b/src/rm_auto_aim/armor_detector/src/detector_node.cpp new file mode 100644 index 0000000..4549411 --- /dev/null +++ b/src/rm_auto_aim/armor_detector/src/detector_node.cpp @@ -0,0 +1,292 @@ +// Copyright 2022 Chen Jun +// Licensed under the MIT License. 
+ +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include + +// STD +#include +#include +#include +#include +#include + +#include "armor_detector/armor.hpp" +#include "armor_detector/detector_node.hpp" + +namespace rm_auto_aim +{ +ArmorDetectorNode::ArmorDetectorNode(const rclcpp::NodeOptions & options) +: Node("armor_detector", options) +{ + RCLCPP_INFO(this->get_logger(), "Starting DetectorNode!"); + + // Detector + detector_ = initDetector(); + + // Armors Publisher + armors_pub_ = this->create_publisher( + "/detector/armors", rclcpp::SensorDataQoS()); + + // Visualization Marker Publisher + // See http://wiki.ros.org/rviz/DisplayTypes/Marker + armor_marker_.ns = "armors"; + armor_marker_.action = visualization_msgs::msg::Marker::ADD; + armor_marker_.type = visualization_msgs::msg::Marker::CUBE; + armor_marker_.scale.x = 0.05; + armor_marker_.scale.z = 0.125; + armor_marker_.color.a = 1.0; + armor_marker_.color.g = 0.5; + armor_marker_.color.b = 1.0; + armor_marker_.lifetime = rclcpp::Duration::from_seconds(0.1); + + text_marker_.ns = "classification"; + text_marker_.action = visualization_msgs::msg::Marker::ADD; + text_marker_.type = visualization_msgs::msg::Marker::TEXT_VIEW_FACING; + text_marker_.scale.z = 0.1; + text_marker_.color.a = 1.0; + text_marker_.color.r = 1.0; + text_marker_.color.g = 1.0; + text_marker_.color.b = 1.0; + text_marker_.lifetime = rclcpp::Duration::from_seconds(0.1); + + marker_pub_ = + this->create_publisher("/detector/marker", 10); + + // Debug Publishers + debug_ = this->declare_parameter("debug", false); + if (debug_) { + createDebugPublishers(); + } + + // Debug param change moniter + debug_param_sub_ = std::make_shared(this); + debug_cb_handle_ = + debug_param_sub_->add_parameter_callback("debug", [this](const rclcpp::Parameter & p) { + debug_ = p.as_bool(); + debug_ ? 
createDebugPublishers() : destroyDebugPublishers(); + }); + + cam_info_sub_ = this->create_subscription( + "/camera_info", rclcpp::SensorDataQoS(), + [this](sensor_msgs::msg::CameraInfo::ConstSharedPtr camera_info) { + cam_center_ = cv::Point2f(camera_info->k[2], camera_info->k[5]); + cam_info_ = std::make_shared(*camera_info); + pnp_solver_ = std::make_unique(camera_info->k, camera_info->d); + cam_info_sub_.reset(); + }); + + img_sub_ = this->create_subscription( + "/image_raw", rclcpp::SensorDataQoS(), + std::bind(&ArmorDetectorNode::imageCallback, this, std::placeholders::_1)); +} + +void ArmorDetectorNode::imageCallback(const sensor_msgs::msg::Image::ConstSharedPtr img_msg) +{ + auto armors = detectArmors(img_msg); + + if (pnp_solver_ != nullptr) { + armors_msg_.header = armor_marker_.header = text_marker_.header = img_msg->header; + armors_msg_.armors.clear(); + marker_array_.markers.clear(); + armor_marker_.id = 0; + text_marker_.id = 0; + + auto_aim_interfaces::msg::Armor armor_msg; + for (const auto & armor : armors) { + cv::Mat rvec, tvec; + bool success = pnp_solver_->solvePnP(armor, rvec, tvec); + if (success) { + // Fill basic info + armor_msg.type = ARMOR_TYPE_STR[static_cast(armor.type)]; + armor_msg.number = armor.number; + + // Fill pose + armor_msg.pose.position.x = tvec.at(0); + armor_msg.pose.position.y = tvec.at(1); + armor_msg.pose.position.z = tvec.at(2); + // rvec to 3x3 rotation matrix + cv::Mat rotation_matrix; + cv::Rodrigues(rvec, rotation_matrix); + // rotation matrix to quaternion + tf2::Matrix3x3 tf2_rotation_matrix( + rotation_matrix.at(0, 0), rotation_matrix.at(0, 1), + rotation_matrix.at(0, 2), rotation_matrix.at(1, 0), + rotation_matrix.at(1, 1), rotation_matrix.at(1, 2), + rotation_matrix.at(2, 0), rotation_matrix.at(2, 1), + rotation_matrix.at(2, 2)); + tf2::Quaternion tf2_q; + tf2_rotation_matrix.getRotation(tf2_q); + armor_msg.pose.orientation = tf2::toMsg(tf2_q); + + // Fill the distance to image center + 
armor_msg.distance_to_image_center = pnp_solver_->calculateDistanceToCenter(armor.center); + + // Fill the markers + armor_marker_.id++; + armor_marker_.scale.y = armor.type == ArmorType::SMALL ? 0.135 : 0.23; + armor_marker_.pose = armor_msg.pose; + text_marker_.id++; + text_marker_.pose.position = armor_msg.pose.position; + text_marker_.pose.position.y -= 0.1; + text_marker_.text = armor.classfication_result; + armors_msg_.armors.emplace_back(armor_msg); + marker_array_.markers.emplace_back(armor_marker_); + marker_array_.markers.emplace_back(text_marker_); + } else { + RCLCPP_WARN(this->get_logger(), "PnP failed!"); + } + } + + // Publishing detected armors + armors_pub_->publish(armors_msg_); + + // Publishing marker + publishMarkers(); + } +} + +std::unique_ptr ArmorDetectorNode::initDetector() +{ + rcl_interfaces::msg::ParameterDescriptor param_desc; + param_desc.integer_range.resize(1); + param_desc.integer_range[0].step = 1; + param_desc.integer_range[0].from_value = 0; + param_desc.integer_range[0].to_value = 255; + int binary_thres = declare_parameter("binary_thres", 160, param_desc); + + param_desc.description = "0-RED, 1-BLUE"; + param_desc.integer_range[0].from_value = 0; + param_desc.integer_range[0].to_value = 1; + auto detect_color = declare_parameter("detect_color", RED, param_desc); + + Detector::LightParams l_params = { + .min_ratio = declare_parameter("light.min_ratio", 0.1), + .max_ratio = declare_parameter("light.max_ratio", 0.4), + .max_angle = declare_parameter("light.max_angle", 40.0)}; + + Detector::ArmorParams a_params = { + .min_light_ratio = declare_parameter("armor.min_light_ratio", 0.7), + .min_small_center_distance = declare_parameter("armor.min_small_center_distance", 0.8), + .max_small_center_distance = declare_parameter("armor.max_small_center_distance", 3.2), + .min_large_center_distance = declare_parameter("armor.min_large_center_distance", 3.2), + .max_large_center_distance = declare_parameter("armor.max_large_center_distance", 
5.5), + .max_angle = declare_parameter("armor.max_angle", 35.0)}; + + auto detector = std::make_unique(binary_thres, detect_color, l_params, a_params); + + // Init classifier + auto pkg_path = ament_index_cpp::get_package_share_directory("armor_detector"); + auto model_path = pkg_path + "/model/mlp.onnx"; + auto label_path = pkg_path + "/model/label.txt"; + double threshold = this->declare_parameter("classifier_threshold", 0.7); + std::vector ignore_classes = + this->declare_parameter("ignore_classes", std::vector{"negative"}); + detector->classifier = + std::make_unique(model_path, label_path, threshold, ignore_classes); + + return detector; +} + +std::vector ArmorDetectorNode::detectArmors( + const sensor_msgs::msg::Image::ConstSharedPtr & img_msg) +{ + // Convert ROS img to cv::Mat + auto img = cv_bridge::toCvShare(img_msg, "rgb8")->image; + + // Update params + detector_->binary_thres = get_parameter("binary_thres").as_int(); + detector_->detect_color = get_parameter("detect_color").as_int(); + detector_->classifier->threshold = get_parameter("classifier_threshold").as_double(); + + auto armors = detector_->detect(img); + + auto final_time = this->now(); + auto latency = (final_time - img_msg->header.stamp).seconds() * 1000; + RCLCPP_DEBUG_STREAM(this->get_logger(), "Latency: " << latency << "ms"); + + // Publish debug info + if (debug_) { + binary_img_pub_.publish( + cv_bridge::CvImage(img_msg->header, "mono8", detector_->binary_img).toImageMsg()); + + // Sort lights and armors data by x coordinate + std::sort( + detector_->debug_lights.data.begin(), detector_->debug_lights.data.end(), + [](const auto & l1, const auto & l2) { return l1.center_x < l2.center_x; }); + std::sort( + detector_->debug_armors.data.begin(), detector_->debug_armors.data.end(), + [](const auto & a1, const auto & a2) { return a1.center_x < a2.center_x; }); + + lights_data_pub_->publish(detector_->debug_lights); + armors_data_pub_->publish(detector_->debug_armors); + + if (!armors.empty()) 
{ + auto all_num_img = detector_->getAllNumbersImage(); + number_img_pub_.publish( + *cv_bridge::CvImage(img_msg->header, "mono8", all_num_img).toImageMsg()); + } + + detector_->drawResults(img); + // Draw camera center + cv::circle(img, cam_center_, 5, cv::Scalar(255, 0, 0), 2); + // Draw latency + std::stringstream latency_ss; + latency_ss << "Latency: " << std::fixed << std::setprecision(2) << latency << "ms"; + auto latency_s = latency_ss.str(); + cv::putText( + img, latency_s, cv::Point(10, 30), cv::FONT_HERSHEY_SIMPLEX, 1.0, cv::Scalar(0, 255, 0), 2); + result_img_pub_.publish(cv_bridge::CvImage(img_msg->header, "rgb8", img).toImageMsg()); + } + + return armors; +} + +void ArmorDetectorNode::createDebugPublishers() +{ + lights_data_pub_ = + this->create_publisher("/detector/debug_lights", 10); + armors_data_pub_ = + this->create_publisher("/detector/debug_armors", 10); + + binary_img_pub_ = image_transport::create_publisher(this, "/detector/binary_img"); + number_img_pub_ = image_transport::create_publisher(this, "/detector/number_img"); + result_img_pub_ = image_transport::create_publisher(this, "/detector/result_img"); +} + +void ArmorDetectorNode::destroyDebugPublishers() +{ + lights_data_pub_.reset(); + armors_data_pub_.reset(); + + binary_img_pub_.shutdown(); + number_img_pub_.shutdown(); + result_img_pub_.shutdown(); +} + +void ArmorDetectorNode::publishMarkers() +{ + using Marker = visualization_msgs::msg::Marker; + armor_marker_.action = armors_msg_.armors.empty() ? Marker::DELETE : Marker::ADD; + marker_array_.markers.emplace_back(armor_marker_); + marker_pub_->publish(marker_array_); +} + +} // namespace rm_auto_aim + +#include "rclcpp_components/register_node_macro.hpp" + +// Register the component with class_loader. +// This acts as a sort of entry point, allowing the component to be discoverable when its library +// is being loaded into a running process. 
+RCLCPP_COMPONENTS_REGISTER_NODE(rm_auto_aim::ArmorDetectorNode) diff --git a/src/rm_auto_aim/armor_detector/src/number_classifier.cpp b/src/rm_auto_aim/armor_detector/src/number_classifier.cpp new file mode 100644 index 0000000..dacce60 --- /dev/null +++ b/src/rm_auto_aim/armor_detector/src/number_classifier.cpp @@ -0,0 +1,147 @@ +// Copyright 2022 Chen Jun +// Licensed under the MIT License. + +// OpenCV +#include +#include +#include +#include +#include +#include +#include +#include + +// STL +#include +#include +#include +#include +#include +#include + +#include "armor_detector/armor.hpp" +#include "armor_detector/number_classifier.hpp" + +namespace rm_auto_aim +{ +NumberClassifier::NumberClassifier( + const std::string & model_path, const std::string & label_path, const double thre, + const std::vector & ignore_classes) +: threshold(thre), ignore_classes_(ignore_classes) +{ + net_ = cv::dnn::readNetFromONNX(model_path); + + std::ifstream label_file(label_path); + std::string line; + while (std::getline(label_file, line)) { + class_names_.push_back(line); + } +} + +void NumberClassifier::extractNumbers(const cv::Mat & src, std::vector & armors) +{ + // Light length in image + const int light_length = 12; + // Image size after warp + const int warp_height = 28; + const int small_armor_width = 32; + const int large_armor_width = 54; + // Number ROI size + const cv::Size roi_size(20, 28); + + for (auto & armor : armors) { + // Warp perspective transform + cv::Point2f lights_vertices[4] = { + armor.left_light.bottom, armor.left_light.top, armor.right_light.top, + armor.right_light.bottom}; + + const int top_light_y = (warp_height - light_length) / 2 - 1; + const int bottom_light_y = top_light_y + light_length; + const int warp_width = armor.type == ArmorType::SMALL ? 
small_armor_width : large_armor_width; + cv::Point2f target_vertices[4] = { + cv::Point(0, bottom_light_y), + cv::Point(0, top_light_y), + cv::Point(warp_width - 1, top_light_y), + cv::Point(warp_width - 1, bottom_light_y), + }; + cv::Mat number_image; + auto rotation_matrix = cv::getPerspectiveTransform(lights_vertices, target_vertices); + cv::warpPerspective(src, number_image, rotation_matrix, cv::Size(warp_width, warp_height)); + + // Get ROI + number_image = + number_image(cv::Rect(cv::Point((warp_width - roi_size.width) / 2, 0), roi_size)); + + // Binarize + cv::cvtColor(number_image, number_image, cv::COLOR_RGB2GRAY); + cv::threshold(number_image, number_image, 0, 255, cv::THRESH_BINARY | cv::THRESH_OTSU); + + armor.number_img = number_image; + } +} + +void NumberClassifier::classify(std::vector & armors) +{ + for (auto & armor : armors) { + cv::Mat image = armor.number_img.clone(); + + // Normalize + image = image / 255.0; + + // Create blob from image + cv::Mat blob; + cv::dnn::blobFromImage(image, blob); + + // Set the input blob for the neural network + net_.setInput(blob); + // Forward pass the image blob through the model + cv::Mat outputs = net_.forward(); + + // Do softmax + float max_prob = *std::max_element(outputs.begin(), outputs.end()); + cv::Mat softmax_prob; + cv::exp(outputs - max_prob, softmax_prob); + float sum = static_cast(cv::sum(softmax_prob)[0]); + softmax_prob /= sum; + + double confidence; + cv::Point class_id_point; + minMaxLoc(softmax_prob.reshape(1, 1), nullptr, &confidence, nullptr, &class_id_point); + int label_id = class_id_point.x; + + armor.confidence = confidence; + armor.number = class_names_[label_id]; + + std::stringstream result_ss; + result_ss << armor.number << ": " << std::fixed << std::setprecision(1) + << armor.confidence * 100.0 << "%"; + armor.classfication_result = result_ss.str(); + } + + armors.erase( + std::remove_if( + armors.begin(), armors.end(), + [this](const Armor & armor) { + if (armor.confidence < 
threshold) { + return true; + } + + for (const auto & ignore_class : ignore_classes_) { + if (armor.number == ignore_class) { + return true; + } + } + + bool mismatch_armor_type = false; + if (armor.type == ArmorType::LARGE) { + mismatch_armor_type = + armor.number == "outpost" || armor.number == "2" || armor.number == "guard"; + } else if (armor.type == ArmorType::SMALL) { + mismatch_armor_type = armor.number == "1" || armor.number == "base"; + } + return mismatch_armor_type; + }), + armors.end()); +} + +} // namespace rm_auto_aim diff --git a/src/rm_auto_aim/armor_detector/src/pnp_solver.cpp b/src/rm_auto_aim/armor_detector/src/pnp_solver.cpp new file mode 100644 index 0000000..d4b4f72 --- /dev/null +++ b/src/rm_auto_aim/armor_detector/src/pnp_solver.cpp @@ -0,0 +1,58 @@ +// Copyright 2022 Chen Jun + +#include "armor_detector/pnp_solver.hpp" + +#include +#include + +namespace rm_auto_aim +{ +PnPSolver::PnPSolver( + const std::array & camera_matrix, const std::vector & dist_coeffs) +: camera_matrix_(cv::Mat(3, 3, CV_64F, const_cast(camera_matrix.data())).clone()), + dist_coeffs_(cv::Mat(1, 5, CV_64F, const_cast(dist_coeffs.data())).clone()) +{ + // Unit: m + constexpr double small_half_y = SMALL_ARMOR_WIDTH / 2.0 / 1000.0; + constexpr double small_half_z = SMALL_ARMOR_HEIGHT / 2.0 / 1000.0; + constexpr double large_half_y = LARGE_ARMOR_WIDTH / 2.0 / 1000.0; + constexpr double large_half_z = LARGE_ARMOR_HEIGHT / 2.0 / 1000.0; + + // Start from bottom left in clockwise order + // Model coordinate: x forward, y left, z up + small_armor_points_.emplace_back(cv::Point3f(0, small_half_y, -small_half_z)); + small_armor_points_.emplace_back(cv::Point3f(0, small_half_y, small_half_z)); + small_armor_points_.emplace_back(cv::Point3f(0, -small_half_y, small_half_z)); + small_armor_points_.emplace_back(cv::Point3f(0, -small_half_y, -small_half_z)); + + large_armor_points_.emplace_back(cv::Point3f(0, large_half_y, -large_half_z)); + 
large_armor_points_.emplace_back(cv::Point3f(0, large_half_y, large_half_z)); + large_armor_points_.emplace_back(cv::Point3f(0, -large_half_y, large_half_z)); + large_armor_points_.emplace_back(cv::Point3f(0, -large_half_y, -large_half_z)); +} + +bool PnPSolver::solvePnP(const Armor & armor, cv::Mat & rvec, cv::Mat & tvec) +{ + std::vector image_armor_points; + + // Fill in image points + image_armor_points.emplace_back(armor.left_light.bottom); + image_armor_points.emplace_back(armor.left_light.top); + image_armor_points.emplace_back(armor.right_light.top); + image_armor_points.emplace_back(armor.right_light.bottom); + + // Solve pnp + auto object_points = armor.type == ArmorType::SMALL ? small_armor_points_ : large_armor_points_; + return cv::solvePnP( + object_points, image_armor_points, camera_matrix_, dist_coeffs_, rvec, tvec, false, + cv::SOLVEPNP_IPPE); +} + +float PnPSolver::calculateDistanceToCenter(const cv::Point2f & image_point) +{ + float cx = camera_matrix_.at(0, 2); + float cy = camera_matrix_.at(1, 2); + return cv::norm(image_point - cv::Point2f(cx, cy)); +} + +} // namespace rm_auto_aim diff --git a/src/rm_auto_aim/armor_detector/test/test_node_startup.cpp b/src/rm_auto_aim/armor_detector/test/test_node_startup.cpp new file mode 100644 index 0000000..d584cef --- /dev/null +++ b/src/rm_auto_aim/armor_detector/test/test_node_startup.cpp @@ -0,0 +1,28 @@ +// Copyright 2022 Chen Jun + +#include + +#include +#include +#include + +// STD +#include + +#include "armor_detector/detector_node.hpp" + +TEST(ArmorDetectorNodeTest, NodeStartupTest) +{ + rclcpp::NodeOptions options; + auto node = std::make_shared(options); + node.reset(); +} + +int main(int argc, char ** argv) +{ + testing::InitGoogleTest(&argc, argv); + rclcpp::init(argc, argv); + auto result = RUN_ALL_TESTS(); + rclcpp::shutdown(); + return result; +} diff --git a/src/rm_auto_aim/armor_detector/test/test_number_cls.cpp b/src/rm_auto_aim/armor_detector/test/test_number_cls.cpp new file mode 
100644 index 0000000..2f216df --- /dev/null +++ b/src/rm_auto_aim/armor_detector/test/test_number_cls.cpp @@ -0,0 +1,53 @@ +// Copyright 2022 Chen Jun + +#include + +#include +#include + +// STL +#include +#include +#include +#include +#include + +#include "armor_detector/number_classifier.hpp" + +using hrc = std::chrono::high_resolution_clock; + +TEST(test_nc, benchmark) +{ + auto pkg_path = ament_index_cpp::get_package_share_directory("armor_detector"); + auto model_path = pkg_path + "/model/mlp.onnx"; + auto label_path = pkg_path + "/model/label.txt"; + rm_auto_aim::NumberClassifier nc(model_path, label_path, 0.5); + + auto dummy_armors = std::vector(1); + auto test_mat = cv::Mat(20, 28, CV_8UC1); + dummy_armors[0].number_img = test_mat; + + int loop_num = 200; + int warm_up = 30; + + double time_min = DBL_MAX; + double time_max = -DBL_MAX; + double time_avg = 0; + + for (int i = 0; i < warm_up + loop_num; i++) { + auto start = hrc::now(); + nc.classify(dummy_armors); + auto end = hrc::now(); + double time = std::chrono::duration(end - start).count(); + if (i >= warm_up) { + time_min = std::min(time_min, time); + time_max = std::max(time_max, time); + time_avg += time; + } + } + time_avg /= loop_num; + + std::cout << "time_min: " << time_min << "ms" << std::endl; + std::cout << "time_max: " << time_max << "ms" << std::endl; + std::cout << "time_avg: " << time_avg << "ms" << std::endl; +} diff --git a/src/rm_auto_aim/armor_tracker/CMakeLists.txt b/src/rm_auto_aim/armor_tracker/CMakeLists.txt new file mode 100644 index 0000000..e87210d --- /dev/null +++ b/src/rm_auto_aim/armor_tracker/CMakeLists.txt @@ -0,0 +1,58 @@ +cmake_minimum_required(VERSION 3.10) +project(armor_tracker) + +## Use C++14 +set(CMAKE_CXX_STANDARD 14) +set(CMAKE_CXX_STANDARD_REQUIRED ON) + +## By adding -Wall and -Werror, the compiler does not ignore warnings anymore, +## enforcing cleaner code. 
+add_definitions(-Wall -Werror) + +## Export compile commands for clangd +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) + +####################### +## Find dependencies ## +####################### + +find_package(ament_cmake_auto REQUIRED) +ament_auto_find_build_dependencies() + +########### +## Build ## +########### + +ament_auto_add_library(${PROJECT_NAME} SHARED + DIRECTORY src +) + +rclcpp_components_register_node(${PROJECT_NAME} + PLUGIN rm_auto_aim::ArmorTrackerNode + EXECUTABLE ${PROJECT_NAME}_node +) + +############# +## Testing ## +############# + +if(BUILD_TESTING) + find_package(ament_lint_auto REQUIRED) + list(APPEND AMENT_LINT_AUTO_EXCLUDE + ament_cmake_copyright + ament_cmake_uncrustify + ament_cmake_cpplint + ) + ament_lint_auto_find_test_dependencies() + + # find_package(ament_cmake_gtest) + # set(TEST_NAME test_kalman_filter) + # ament_add_gtest(${TEST_NAME} test/${TEST_NAME}.cpp) + # target_link_libraries(${TEST_NAME} ${PROJECT_NAME}) +endif() + +############# +## Install ## +############# + +ament_auto_package() \ No newline at end of file diff --git a/src/rm_auto_aim/armor_tracker/README.md b/src/rm_auto_aim/armor_tracker/README.md new file mode 100644 index 0000000..bed3067 --- /dev/null +++ b/src/rm_auto_aim/armor_tracker/README.md @@ -0,0 +1,84 @@ +# armor_tracker + +- [ArmorTrackerNode](#armortrackernode) + - [Tracker](#tracker) + - [KalmanFilter](#kalmanfilter) + +## ArmorTrackerNode +装甲板处理节点 + +订阅识别节点发布的装甲板三维位置及机器人的坐标转换信息,将装甲板三维位置变换到指定惯性系(一般是以云台中心为原点,IMU 上电时的 Yaw 朝向为 X 轴的惯性系)下,然后将装甲板目标送入跟踪器中,输出跟踪机器人在指定惯性系下的状态 + +订阅: +- 已识别到的装甲板 `/detector/armors` +- 机器人的坐标转换信息 `/tf` `/tf_static` + +发布: +- 最终锁定的目标 `/tracker/target` + +参数: +- 跟踪器参数 tracker + - 两帧间目标可匹配的最大距离 max_match_distance + - `DETECTING` 状态进入 `TRACKING` 状态的阈值 tracking_threshold + - `TRACKING` 状态进入 `LOST` 状态的阈值 lost_threshold + +## ExtendedKalmanFilter + +$$ x_c = x_a + r * cos (\theta) $$ +$$ y_c = y_a + r * sin (\theta) $$ + +$$ x = [x_c, y_c,z, yaw, v_{xc}, v_{yc},v_z, v_{yaw}, r]^T $$ + +参考 
OpenCV 中的卡尔曼滤波器使用 Eigen 进行了实现 + +[卡尔曼滤波器](https://zh.wikipedia.org/wiki/%E5%8D%A1%E5%B0%94%E6%9B%BC%E6%BB%A4%E6%B3%A2) + +![](docs/Kalman_filter_model.png) + +考虑到自瞄任务中对于目标只有观测没有控制,所以输入-控制模型 $B$ 和控制器向量 $u$ 可忽略。 + +预测及更新的公式如下: + +预测: + +$$ x_{k|k-1} = F * x_{k-1|k-1} $$ + +$$ P_{k|k-1} = F * P_{k-1|k-1}* F^T + Q $$ + +更新: + +$$ K = P_{k|k-1} * H^T * (H * P_{k|k-1} * H^T + R)^{-1} $$ + +$$ x_{k|k} = x_{k|k-1} + K * (z_k - H * x_{k|k-1}) $$ + +$$ P_{k|k} = (I - K * H) * P_{k|k-1} $$ + +## Tracker + +参考 [SORT(Simple online and realtime tracking)](https://ieeexplore.ieee.org/abstract/document/7533003/) 中对于目标匹配的方法,使用卡尔曼滤波器对单目标在三维空间中进行跟踪 + +在此跟踪器中,卡尔曼滤波器观测量为目标在指定惯性系中的位置(xyz),状态量为目标位置及速度(xyz+vx vy vz) + +在对目标的运动模型建模为在指定惯性系中的匀速线性运动,即状态转移为 $x_{pre} = x_{post} + v_{post} * dt$ + +目标关联的判断依据为三维位置的 L2 欧式距离 + +跟踪器共有四个状态: +- `DETECTING` :短暂识别到目标,需要更多帧识别信息才能进入跟踪状态 +- `TRACKING` :跟踪器正常跟踪目标中 +- `TEMP_LOST` :跟踪器短暂丢失目标,通过卡尔曼滤波器预测目标 +- `LOST` :跟踪器完全丢失目标 + +工作流程: + +- init: + + 跟踪器默认选择离相机光心最近的目标作为跟踪对象,选择目标后初始化卡尔曼滤波器,初始状态设为当前目标位置,速度设为 0 + +- update: + + 首先由卡尔曼滤波器得到目标在当前帧的预测位置,然后遍历当前帧中的目标位置与预测位置进行匹配,若当前帧不存在目标或所有目标位置与预测位置的偏差都过大则认为目标丢失,重置卡尔曼滤波器。 + + 最后选取位置相差最小的目标作为最佳匹配项,更新卡尔曼滤波器,将更新后的状态作为跟踪器的结果输出 + + diff --git a/src/rm_auto_aim/armor_tracker/docs/Kalman_filter_model.png b/src/rm_auto_aim/armor_tracker/docs/Kalman_filter_model.png new file mode 100644 index 0000000..f603ff2 Binary files /dev/null and b/src/rm_auto_aim/armor_tracker/docs/Kalman_filter_model.png differ diff --git a/src/rm_auto_aim/armor_tracker/include/armor_tracker/extended_kalman_filter.hpp b/src/rm_auto_aim/armor_tracker/include/armor_tracker/extended_kalman_filter.hpp new file mode 100644 index 0000000..1e7f118 --- /dev/null +++ b/src/rm_auto_aim/armor_tracker/include/armor_tracker/extended_kalman_filter.hpp @@ -0,0 +1,74 @@ +// Copyright 2022 Chen Jun + +#ifndef ARMOR_PROCESSOR__KALMAN_FILTER_HPP_ +#define ARMOR_PROCESSOR__KALMAN_FILTER_HPP_ + +#include +#include + +namespace rm_auto_aim +{ + +class ExtendedKalmanFilter +{ 
+public: + ExtendedKalmanFilter() = default; + + using VecVecFunc = std::function; + using VecMatFunc = std::function; + using VoidMatFunc = std::function; + + explicit ExtendedKalmanFilter( + const VecVecFunc & f, const VecVecFunc & h, const VecMatFunc & j_f, const VecMatFunc & j_h, + const VoidMatFunc & u_q, const VecMatFunc & u_r, const Eigen::MatrixXd & P0); + + // Set the initial state + void setState(const Eigen::VectorXd & x0); + + // Compute a predicted state + Eigen::MatrixXd predict(); + + // Update the estimated state based on measurement + Eigen::MatrixXd update(const Eigen::VectorXd & z); + +private: + // Process nonlinear vector function + VecVecFunc f; + // Observation nonlinear vector function + VecVecFunc h; + // Jacobian of f() + VecMatFunc jacobian_f; + Eigen::MatrixXd F; + // Jacobian of h() + VecMatFunc jacobian_h; + Eigen::MatrixXd H; + // Process noise covariance matrix + VoidMatFunc update_Q; + Eigen::MatrixXd Q; + // Measurement noise covariance matrix + VecMatFunc update_R; + Eigen::MatrixXd R; + + // Priori error estimate covariance matrix + Eigen::MatrixXd P_pri; + // Posteriori error estimate covariance matrix + Eigen::MatrixXd P_post; + + // Kalman gain + Eigen::MatrixXd K; + + // System dimensions + int n; + + // N-size identity + Eigen::MatrixXd I; + + // Priori state + Eigen::VectorXd x_pri; + // Posteriori state + Eigen::VectorXd x_post; +}; + +} // namespace rm_auto_aim + +#endif // ARMOR_PROCESSOR__KALMAN_FILTER_HPP_ diff --git a/src/rm_auto_aim/armor_tracker/include/armor_tracker/tracker.hpp b/src/rm_auto_aim/armor_tracker/include/armor_tracker/tracker.hpp new file mode 100644 index 0000000..b094ce2 --- /dev/null +++ b/src/rm_auto_aim/armor_tracker/include/armor_tracker/tracker.hpp @@ -0,0 +1,87 @@ +// Copyright 2022 Chen Jun + +#ifndef ARMOR_PROCESSOR__TRACKER_HPP_ +#define ARMOR_PROCESSOR__TRACKER_HPP_ + +// Eigen +#include + +// ROS +#include +#include +#include + +// STD +#include +#include + +#include 
"armor_tracker/extended_kalman_filter.hpp" +#include "auto_aim_interfaces/msg/armors.hpp" +#include "auto_aim_interfaces/msg/target.hpp" + +namespace rm_auto_aim +{ + +enum class ArmorsNum { NORMAL_4 = 4, BALANCE_2 = 2, OUTPOST_3 = 3 }; + +class Tracker +{ +public: + Tracker(double max_match_distance, double max_match_yaw_diff); + + using Armors = auto_aim_interfaces::msg::Armors; + using Armor = auto_aim_interfaces::msg::Armor; + + void init(const Armors::SharedPtr & armors_msg); + + void update(const Armors::SharedPtr & armors_msg); + + ExtendedKalmanFilter ekf; + + int tracking_thres; + int lost_thres; + + enum State { + LOST, + DETECTING, + TRACKING, + TEMP_LOST, + } tracker_state; + + std::string tracked_id; + Armor tracked_armor; + ArmorsNum tracked_armors_num; + + double info_position_diff; + double info_yaw_diff; + + Eigen::VectorXd measurement; + + Eigen::VectorXd target_state; + + // To store another pair of armors message + double dz, another_r; + +private: + void initEKF(const Armor & a); + + void updateArmorsNum(const Armor & a); + + void handleArmorJump(const Armor & a); + + double orientationToYaw(const geometry_msgs::msg::Quaternion & q); + + Eigen::Vector3d getArmorPositionFromState(const Eigen::VectorXd & x); + + double max_match_distance_; + double max_match_yaw_diff_; + + int detect_count_; + int lost_count_; + + double last_yaw_; +}; + +} // namespace rm_auto_aim + +#endif // ARMOR_PROCESSOR__TRACKER_HPP_ diff --git a/src/rm_auto_aim/armor_tracker/include/armor_tracker/tracker_node.hpp b/src/rm_auto_aim/armor_tracker/include/armor_tracker/tracker_node.hpp new file mode 100644 index 0000000..064fd78 --- /dev/null +++ b/src/rm_auto_aim/armor_tracker/include/armor_tracker/tracker_node.hpp @@ -0,0 +1,80 @@ +// Copyright 2022 Chen Jun + +#ifndef ARMOR_PROCESSOR__PROCESSOR_NODE_HPP_ +#define ARMOR_PROCESSOR__PROCESSOR_NODE_HPP_ + +// ROS +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +// STD +#include 
+#include +#include + +#include "armor_tracker/tracker.hpp" +#include "auto_aim_interfaces/msg/armors.hpp" +#include "auto_aim_interfaces/msg/target.hpp" +#include "auto_aim_interfaces/msg/tracker_info.hpp" + +namespace rm_auto_aim +{ +using tf2_filter = tf2_ros::MessageFilter; +class ArmorTrackerNode : public rclcpp::Node +{ +public: + explicit ArmorTrackerNode(const rclcpp::NodeOptions & options); + +private: + void armorsCallback(const auto_aim_interfaces::msg::Armors::SharedPtr armors_ptr); + + void publishMarkers(const auto_aim_interfaces::msg::Target & target_msg); + + // Maximum allowable armor distance in the XOY plane + double max_armor_distance_; + + // The time when the last message was received + rclcpp::Time last_time_; + double dt_; + + // Armor tracker + double s2qxyz_, s2qyaw_, s2qr_; + double r_xyz_factor, r_yaw; + double lost_time_thres_; + std::unique_ptr tracker_; + + // Reset tracker service + rclcpp::Service::SharedPtr reset_tracker_srv_; + + // Subscriber with tf2 message_filter + std::string target_frame_; + std::shared_ptr tf2_buffer_; + std::shared_ptr tf2_listener_; + message_filters::Subscriber armors_sub_; + std::shared_ptr tf2_filter_; + + // Tracker info publisher + rclcpp::Publisher::SharedPtr info_pub_; + + // Publisher + rclcpp::Publisher::SharedPtr target_pub_; + + // Visualization marker publisher + visualization_msgs::msg::Marker position_marker_; + visualization_msgs::msg::Marker linear_v_marker_; + visualization_msgs::msg::Marker angular_v_marker_; + visualization_msgs::msg::Marker armor_marker_; + rclcpp::Publisher::SharedPtr marker_pub_; +}; + +} // namespace rm_auto_aim + +#endif // ARMOR_PROCESSOR__PROCESSOR_NODE_HPP_ diff --git a/src/rm_auto_aim/armor_tracker/package.xml b/src/rm_auto_aim/armor_tracker/package.xml new file mode 100644 index 0000000..a585a34 --- /dev/null +++ b/src/rm_auto_aim/armor_tracker/package.xml @@ -0,0 +1,35 @@ + + + + armor_tracker + 0.1.0 + A template for ROS packages. 
+ Chen Jun + BSD + https://github.com/chenjunnn/rm_auto_aim + https://github.com/chenjunnn/rm_auto_aim/issues + Chen Jun + + + ament_cmake + + + rclcpp + rclcpp_components + eigen + angles + std_srvs + geometry_msgs + visualization_msgs + message_filters + tf2_geometry_msgs + auto_aim_interfaces + + ament_lint_auto + ament_lint_common + ament_cmake_clang_format + + + ament_cmake + + diff --git a/src/rm_auto_aim/armor_tracker/src/extended_kalman_filter.cpp b/src/rm_auto_aim/armor_tracker/src/extended_kalman_filter.cpp new file mode 100644 index 0000000..fe1a56c --- /dev/null +++ b/src/rm_auto_aim/armor_tracker/src/extended_kalman_filter.cpp @@ -0,0 +1,51 @@ +// Copyright 2022 Chen Jun + +#include "armor_tracker/extended_kalman_filter.hpp" + +namespace rm_auto_aim +{ +ExtendedKalmanFilter::ExtendedKalmanFilter( + const VecVecFunc & f, const VecVecFunc & h, const VecMatFunc & j_f, const VecMatFunc & j_h, + const VoidMatFunc & u_q, const VecMatFunc & u_r, const Eigen::MatrixXd & P0) +: f(f), + h(h), + jacobian_f(j_f), + jacobian_h(j_h), + update_Q(u_q), + update_R(u_r), + P_post(P0), + n(P0.rows()), + I(Eigen::MatrixXd::Identity(n, n)), + x_pri(n), + x_post(n) +{ +} + +void ExtendedKalmanFilter::setState(const Eigen::VectorXd & x0) { x_post = x0; } + +Eigen::MatrixXd ExtendedKalmanFilter::predict() +{ + F = jacobian_f(x_post), Q = update_Q(); + + x_pri = f(x_post); + P_pri = F * P_post * F.transpose() + Q; + + // handle the case when there will be no measurement before the next predict + x_post = x_pri; + P_post = P_pri; + + return x_pri; +} + +Eigen::MatrixXd ExtendedKalmanFilter::update(const Eigen::VectorXd & z) +{ + H = jacobian_h(x_pri), R = update_R(z); + + K = P_pri * H.transpose() * (H * P_pri * H.transpose() + R).inverse(); + x_post = x_pri + K * (z - h(x_pri)); + P_post = (I - K * H) * P_pri; + + return x_post; +} + +} // namespace rm_auto_aim diff --git a/src/rm_auto_aim/armor_tracker/src/tracker.cpp b/src/rm_auto_aim/armor_tracker/src/tracker.cpp new file 
mode 100644 index 0000000..2304640 --- /dev/null +++ b/src/rm_auto_aim/armor_tracker/src/tracker.cpp @@ -0,0 +1,239 @@ +// Copyright 2022 Chen Jun + +#include "armor_tracker/tracker.hpp" + +#include +#include +#include +#include + +#include +#include + +// STD +#include +#include +#include + +namespace rm_auto_aim +{ +Tracker::Tracker(double max_match_distance, double max_match_yaw_diff) +: tracker_state(LOST), + tracked_id(std::string("")), + measurement(Eigen::VectorXd::Zero(4)), + target_state(Eigen::VectorXd::Zero(9)), + max_match_distance_(max_match_distance), + max_match_yaw_diff_(max_match_yaw_diff) +{ +} + +void Tracker::init(const Armors::SharedPtr & armors_msg) +{ + if (armors_msg->armors.empty()) { + return; + } + + // Simply choose the armor that is closest to image center + double min_distance = DBL_MAX; + tracked_armor = armors_msg->armors[0]; + for (const auto & armor : armors_msg->armors) { + if (armor.distance_to_image_center < min_distance) { + min_distance = armor.distance_to_image_center; + tracked_armor = armor; + } + } + + initEKF(tracked_armor); + RCLCPP_DEBUG(rclcpp::get_logger("armor_tracker"), "Init EKF!"); + + tracked_id = tracked_armor.number; + tracker_state = DETECTING; + + updateArmorsNum(tracked_armor); +} + +void Tracker::update(const Armors::SharedPtr & armors_msg) +{ + // KF predict + Eigen::VectorXd ekf_prediction = ekf.predict(); + RCLCPP_DEBUG(rclcpp::get_logger("armor_tracker"), "EKF predict"); + + bool matched = false; + // Use KF prediction as default target state if no matched armor is found + target_state = ekf_prediction; + + if (!armors_msg->armors.empty()) { + // Find the closest armor with the same id + Armor same_id_armor; + int same_id_armors_count = 0; + auto predicted_position = getArmorPositionFromState(ekf_prediction); + double min_position_diff = DBL_MAX; + double yaw_diff = DBL_MAX; + for (const auto & armor : armors_msg->armors) { + // Only consider armors with the same id + if (armor.number == tracked_id) { + 
same_id_armor = armor; + same_id_armors_count++; + // Calculate the difference between the predicted position and the current armor position + auto p = armor.pose.position; + Eigen::Vector3d position_vec(p.x, p.y, p.z); + double position_diff = (predicted_position - position_vec).norm(); + if (position_diff < min_position_diff) { + // Find the closest armor + min_position_diff = position_diff; + yaw_diff = abs(orientationToYaw(armor.pose.orientation) - ekf_prediction(6)); + tracked_armor = armor; + } + } + } + + // Store tracker info + info_position_diff = min_position_diff; + info_yaw_diff = yaw_diff; + + // Check if the distance and yaw difference of closest armor are within the threshold + if (min_position_diff < max_match_distance_ && yaw_diff < max_match_yaw_diff_) { + // Matched armor found + matched = true; + auto p = tracked_armor.pose.position; + // Update EKF + double measured_yaw = orientationToYaw(tracked_armor.pose.orientation); + measurement = Eigen::Vector4d(p.x, p.y, p.z, measured_yaw); + target_state = ekf.update(measurement); + RCLCPP_DEBUG(rclcpp::get_logger("armor_tracker"), "EKF update"); + } else if (same_id_armors_count == 1 && yaw_diff > max_match_yaw_diff_) { + // Matched armor not found, but there is only one armor with the same id + // and yaw has jumped, take this case as the target is spinning and armor jumped + handleArmorJump(same_id_armor); + } else { + // No matched armor found + RCLCPP_WARN(rclcpp::get_logger("armor_tracker"), "No matched armor found!"); + } + } + + // Prevent radius from spreading + if (target_state(8) < 0.12) { + target_state(8) = 0.12; + ekf.setState(target_state); + } else if (target_state(8) > 0.4) { + target_state(8) = 0.4; + ekf.setState(target_state); + } + + // Tracking state machine + if (tracker_state == DETECTING) { + if (matched) { + detect_count_++; + if (detect_count_ > tracking_thres) { + detect_count_ = 0; + tracker_state = TRACKING; + } + } else { + detect_count_ = 0; + tracker_state = LOST; + } + 
} else if (tracker_state == TRACKING) { + if (!matched) { + tracker_state = TEMP_LOST; + lost_count_++; + } + } else if (tracker_state == TEMP_LOST) { + if (!matched) { + lost_count_++; + if (lost_count_ > lost_thres) { + lost_count_ = 0; + tracker_state = LOST; + } + } else { + tracker_state = TRACKING; + lost_count_ = 0; + } + } +} + +void Tracker::initEKF(const Armor & a) +{ + double xa = a.pose.position.x; + double ya = a.pose.position.y; + double za = a.pose.position.z; + last_yaw_ = 0; + double yaw = orientationToYaw(a.pose.orientation); + + // Set initial position at 0.2m behind the target + target_state = Eigen::VectorXd::Zero(9); + double r = 0.26; + double xc = xa + r * cos(yaw); + double yc = ya + r * sin(yaw); + dz = 0, another_r = r; + target_state << xc, 0, yc, 0, za, 0, yaw, 0, r; + + ekf.setState(target_state); +} + +void Tracker::updateArmorsNum(const Armor & armor) +{ + if (armor.type == "large" && (tracked_id == "3" || tracked_id == "4" || tracked_id == "5")) { + tracked_armors_num = ArmorsNum::BALANCE_2; + } else if (tracked_id == "outpost") { + tracked_armors_num = ArmorsNum::OUTPOST_3; + } else { + tracked_armors_num = ArmorsNum::NORMAL_4; + } +} + +void Tracker::handleArmorJump(const Armor & current_armor) +{ + double yaw = orientationToYaw(current_armor.pose.orientation); + target_state(6) = yaw; + updateArmorsNum(current_armor); + // Only 4 armors has 2 radius and height + if (tracked_armors_num == ArmorsNum::NORMAL_4) { + dz = target_state(4) - current_armor.pose.position.z; + target_state(4) = current_armor.pose.position.z; + std::swap(target_state(8), another_r); + } + RCLCPP_WARN(rclcpp::get_logger("armor_tracker"), "Armor jump!"); + + // If position difference is larger than max_match_distance_, + // take this case as the ekf diverged, reset the state + auto p = current_armor.pose.position; + Eigen::Vector3d current_p(p.x, p.y, p.z); + Eigen::Vector3d infer_p = getArmorPositionFromState(target_state); + if ((current_p - infer_p).norm() 
> max_match_distance_) { + double r = target_state(8); + target_state(0) = p.x + r * cos(yaw); // xc + target_state(1) = 0; // vxc + target_state(2) = p.y + r * sin(yaw); // yc + target_state(3) = 0; // vyc + target_state(4) = p.z; // za + target_state(5) = 0; // vza + RCLCPP_ERROR(rclcpp::get_logger("armor_tracker"), "Reset State!"); + } + + ekf.setState(target_state); +} + +double Tracker::orientationToYaw(const geometry_msgs::msg::Quaternion & q) +{ + // Get armor yaw + tf2::Quaternion tf_q; + tf2::fromMsg(q, tf_q); + double roll, pitch, yaw; + tf2::Matrix3x3(tf_q).getRPY(roll, pitch, yaw); + // Make yaw change continuous (-pi~pi to -inf~inf) + yaw = last_yaw_ + angles::shortest_angular_distance(last_yaw_, yaw); + last_yaw_ = yaw; + return yaw; +} + +Eigen::Vector3d Tracker::getArmorPositionFromState(const Eigen::VectorXd & x) +{ + // Calculate predicted position of the current armor + double xc = x(0), yc = x(2), za = x(4); + double yaw = x(6), r = x(8); + double xa = xc - r * cos(yaw); + double ya = yc - r * sin(yaw); + return Eigen::Vector3d(xa, ya, za); +} + +} // namespace rm_auto_aim diff --git a/src/rm_auto_aim/armor_tracker/src/tracker_node.cpp b/src/rm_auto_aim/armor_tracker/src/tracker_node.cpp new file mode 100644 index 0000000..492bda8 --- /dev/null +++ b/src/rm_auto_aim/armor_tracker/src/tracker_node.cpp @@ -0,0 +1,350 @@ +// Copyright 2022 Chen Jun +#include "armor_tracker/tracker_node.hpp" + +// STD +#include +#include + +namespace rm_auto_aim +{ +ArmorTrackerNode::ArmorTrackerNode(const rclcpp::NodeOptions & options) +: Node("armor_tracker", options) +{ + RCLCPP_INFO(this->get_logger(), "Starting TrackerNode!"); + + // Maximum allowable armor distance in the XOY plane + max_armor_distance_ = this->declare_parameter("max_armor_distance", 10.0); + + // Tracker + double max_match_distance = this->declare_parameter("tracker.max_match_distance", 0.15); + double max_match_yaw_diff = this->declare_parameter("tracker.max_match_yaw_diff", 1.0); + tracker_ 
= std::make_unique(max_match_distance, max_match_yaw_diff); + tracker_->tracking_thres = this->declare_parameter("tracker.tracking_thres", 5); + lost_time_thres_ = this->declare_parameter("tracker.lost_time_thres", 0.3); + + // EKF + // xa = x_armor, xc = x_robot_center + // state: xc, v_xc, yc, v_yc, za, v_za, yaw, v_yaw, r + // measurement: xa, ya, za, yaw + // f - Process function + auto f = [this](const Eigen::VectorXd & x) { + Eigen::VectorXd x_new = x; + x_new(0) += x(1) * dt_; + x_new(2) += x(3) * dt_; + x_new(4) += x(5) * dt_; + x_new(6) += x(7) * dt_; + return x_new; + }; + // J_f - Jacobian of process function + auto j_f = [this](const Eigen::VectorXd &) { + Eigen::MatrixXd f(9, 9); + // clang-format off + f << 1, dt_, 0, 0, 0, 0, 0, 0, 0, + 0, 1, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 1, dt_, 0, 0, 0, 0, 0, + 0, 0, 0, 1, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 1, dt_, 0, 0, 0, + 0, 0, 0, 0, 0, 1, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 1, dt_, 0, + 0, 0, 0, 0, 0, 0, 0, 1, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 1; + // clang-format on + return f; + }; + // h - Observation function + auto h = [](const Eigen::VectorXd & x) { + Eigen::VectorXd z(4); + double xc = x(0), yc = x(2), yaw = x(6), r = x(8); + z(0) = xc - r * cos(yaw); // xa + z(1) = yc - r * sin(yaw); // ya + z(2) = x(4); // za + z(3) = x(6); // yaw + return z; + }; + // J_h - Jacobian of observation function + auto j_h = [](const Eigen::VectorXd & x) { + Eigen::MatrixXd h(4, 9); + double yaw = x(6), r = x(8); + // clang-format off + // xc v_xc yc v_yc za v_za yaw v_yaw r + h << 1, 0, 0, 0, 0, 0, r*sin(yaw), 0, -cos(yaw), + 0, 0, 1, 0, 0, 0, -r*cos(yaw),0, -sin(yaw), + 0, 0, 0, 0, 1, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 1, 0, 0; + // clang-format on + return h; + }; + // update_Q - process noise covariance matrix + s2qxyz_ = declare_parameter("ekf.sigma2_q_xyz", 20.0); + s2qyaw_ = declare_parameter("ekf.sigma2_q_yaw", 100.0); + s2qr_ = declare_parameter("ekf.sigma2_q_r", 800.0); + auto u_q = [this]() { + Eigen::MatrixXd q(9, 9); + double t = 
dt_, x = s2qxyz_, y = s2qyaw_, r = s2qr_; + double q_x_x = pow(t, 4) / 4 * x, q_x_vx = pow(t, 3) / 2 * x, q_vx_vx = pow(t, 2) * x; + double q_y_y = pow(t, 4) / 4 * y, q_y_vy = pow(t, 3) / 2 * x, q_vy_vy = pow(t, 2) * y; + double q_r = pow(t, 4) / 4 * r; + // clang-format off + // xc v_xc yc v_yc za v_za yaw v_yaw r + q << q_x_x, q_x_vx, 0, 0, 0, 0, 0, 0, 0, + q_x_vx, q_vx_vx,0, 0, 0, 0, 0, 0, 0, + 0, 0, q_x_x, q_x_vx, 0, 0, 0, 0, 0, + 0, 0, q_x_vx, q_vx_vx,0, 0, 0, 0, 0, + 0, 0, 0, 0, q_x_x, q_x_vx, 0, 0, 0, + 0, 0, 0, 0, q_x_vx, q_vx_vx,0, 0, 0, + 0, 0, 0, 0, 0, 0, q_y_y, q_y_vy, 0, + 0, 0, 0, 0, 0, 0, q_y_vy, q_vy_vy,0, + 0, 0, 0, 0, 0, 0, 0, 0, q_r; + // clang-format on + return q; + }; + // update_R - measurement noise covariance matrix + r_xyz_factor = declare_parameter("ekf.r_xyz_factor", 0.05); + r_yaw = declare_parameter("ekf.r_yaw", 0.02); + auto u_r = [this](const Eigen::VectorXd & z) { + Eigen::DiagonalMatrix r; + double x = r_xyz_factor; + r.diagonal() << abs(x * z[0]), abs(x * z[1]), abs(x * z[2]), r_yaw; + return r; + }; + // P - error estimate covariance matrix + Eigen::DiagonalMatrix p0; + p0.setIdentity(); + tracker_->ekf = ExtendedKalmanFilter{f, h, j_f, j_h, u_q, u_r, p0}; + + // Reset tracker service + using std::placeholders::_1; + using std::placeholders::_2; + using std::placeholders::_3; + reset_tracker_srv_ = this->create_service( + "/tracker/reset", [this]( + const std_srvs::srv::Trigger::Request::SharedPtr, + std_srvs::srv::Trigger::Response::SharedPtr response) { + tracker_->tracker_state = Tracker::LOST; + response->success = true; + RCLCPP_INFO(this->get_logger(), "Tracker reset!"); + return; + }); + + // Subscriber with tf2 message_filter + // tf2 relevant + tf2_buffer_ = std::make_shared(this->get_clock()); + // Create the timer interface before call to waitForTransform, + // to avoid a tf2_ros::CreateTimerInterfaceException exception + auto timer_interface = std::make_shared( + this->get_node_base_interface(), 
this->get_node_timers_interface()); + tf2_buffer_->setCreateTimerInterface(timer_interface); + tf2_listener_ = std::make_shared(*tf2_buffer_); + // subscriber and filter + armors_sub_.subscribe(this, "/detector/armors", rmw_qos_profile_sensor_data); + target_frame_ = this->declare_parameter("target_frame", "odom"); + tf2_filter_ = std::make_shared( + armors_sub_, *tf2_buffer_, target_frame_, 10, this->get_node_logging_interface(), + this->get_node_clock_interface(), std::chrono::duration(1)); + // Register a callback with tf2_ros::MessageFilter to be called when transforms are available + tf2_filter_->registerCallback(&ArmorTrackerNode::armorsCallback, this); + + // Measurement publisher (for debug usage) + info_pub_ = this->create_publisher("/tracker/info", 10); + + // Publisher + target_pub_ = this->create_publisher( + "/tracker/target", rclcpp::SensorDataQoS()); + + // Visualization Marker Publisher + // See http://wiki.ros.org/rviz/DisplayTypes/Marker + position_marker_.ns = "position"; + position_marker_.type = visualization_msgs::msg::Marker::SPHERE; + position_marker_.scale.x = position_marker_.scale.y = position_marker_.scale.z = 0.1; + position_marker_.color.a = 1.0; + position_marker_.color.g = 1.0; + linear_v_marker_.type = visualization_msgs::msg::Marker::ARROW; + linear_v_marker_.ns = "linear_v"; + linear_v_marker_.scale.x = 0.03; + linear_v_marker_.scale.y = 0.05; + linear_v_marker_.color.a = 1.0; + linear_v_marker_.color.r = 1.0; + linear_v_marker_.color.g = 1.0; + angular_v_marker_.type = visualization_msgs::msg::Marker::ARROW; + angular_v_marker_.ns = "angular_v"; + angular_v_marker_.scale.x = 0.03; + angular_v_marker_.scale.y = 0.05; + angular_v_marker_.color.a = 1.0; + angular_v_marker_.color.b = 1.0; + angular_v_marker_.color.g = 1.0; + armor_marker_.ns = "armors"; + armor_marker_.type = visualization_msgs::msg::Marker::CUBE; + armor_marker_.scale.x = 0.03; + armor_marker_.scale.z = 0.125; + armor_marker_.color.a = 1.0; + armor_marker_.color.r = 
1.0; + marker_pub_ = this->create_publisher("/tracker/marker", 10); +} + +void ArmorTrackerNode::armorsCallback(const auto_aim_interfaces::msg::Armors::SharedPtr armors_msg) +{ + // Tranform armor position from image frame to world coordinate + for (auto & armor : armors_msg->armors) { + geometry_msgs::msg::PoseStamped ps; + ps.header = armors_msg->header; + ps.pose = armor.pose; + try { + armor.pose = tf2_buffer_->transform(ps, target_frame_).pose; + } catch (const tf2::ExtrapolationException & ex) { + RCLCPP_ERROR(get_logger(), "Error while transforming %s", ex.what()); + return; + } + } + + // Filter abnormal armors + armors_msg->armors.erase( + std::remove_if( + armors_msg->armors.begin(), armors_msg->armors.end(), + [this](const auto_aim_interfaces::msg::Armor & armor) { + return abs(armor.pose.position.z) > 1.2 || + Eigen::Vector2d(armor.pose.position.x, armor.pose.position.y).norm() > + max_armor_distance_; + }), + armors_msg->armors.end()); + + // Init message + auto_aim_interfaces::msg::TrackerInfo info_msg; + auto_aim_interfaces::msg::Target target_msg; + rclcpp::Time time = armors_msg->header.stamp; + target_msg.header.stamp = time; + target_msg.header.frame_id = target_frame_; + + // Update tracker + if (tracker_->tracker_state == Tracker::LOST) { + tracker_->init(armors_msg); + target_msg.tracking = false; + } else { + dt_ = (time - last_time_).seconds(); + tracker_->lost_thres = static_cast(lost_time_thres_ / dt_); + tracker_->update(armors_msg); + + // Publish Info + info_msg.position_diff = tracker_->info_position_diff; + info_msg.yaw_diff = tracker_->info_yaw_diff; + info_msg.position.x = tracker_->measurement(0); + info_msg.position.y = tracker_->measurement(1); + info_msg.position.z = tracker_->measurement(2); + info_msg.yaw = tracker_->measurement(3); + info_pub_->publish(info_msg); + + if (tracker_->tracker_state == Tracker::DETECTING) { + target_msg.tracking = false; + } else if ( + tracker_->tracker_state == Tracker::TRACKING || + 
tracker_->tracker_state == Tracker::TEMP_LOST) { + target_msg.tracking = true; + // Fill target message + const auto & state = tracker_->target_state; + target_msg.id = tracker_->tracked_id; + target_msg.armors_num = static_cast(tracker_->tracked_armors_num); + target_msg.position.x = state(0); + target_msg.velocity.x = state(1); + target_msg.position.y = state(2); + target_msg.velocity.y = state(3); + target_msg.position.z = state(4); + target_msg.velocity.z = state(5); + target_msg.yaw = state(6); + target_msg.v_yaw = state(7); + target_msg.radius_1 = state(8); + target_msg.radius_2 = tracker_->another_r; + target_msg.dz = tracker_->dz; + } + } + + last_time_ = time; + + target_pub_->publish(target_msg); + + publishMarkers(target_msg); +} + +void ArmorTrackerNode::publishMarkers(const auto_aim_interfaces::msg::Target & target_msg) +{ + position_marker_.header = target_msg.header; + linear_v_marker_.header = target_msg.header; + angular_v_marker_.header = target_msg.header; + armor_marker_.header = target_msg.header; + + visualization_msgs::msg::MarkerArray marker_array; + if (target_msg.tracking) { + double yaw = target_msg.yaw, r1 = target_msg.radius_1, r2 = target_msg.radius_2; + double xc = target_msg.position.x, yc = target_msg.position.y, za = target_msg.position.z; + double vx = target_msg.velocity.x, vy = target_msg.velocity.y, vz = target_msg.velocity.z; + double dz = target_msg.dz; + + position_marker_.action = visualization_msgs::msg::Marker::ADD; + position_marker_.pose.position.x = xc; + position_marker_.pose.position.y = yc; + position_marker_.pose.position.z = za + dz / 2; + + linear_v_marker_.action = visualization_msgs::msg::Marker::ADD; + linear_v_marker_.points.clear(); + linear_v_marker_.points.emplace_back(position_marker_.pose.position); + geometry_msgs::msg::Point arrow_end = position_marker_.pose.position; + arrow_end.x += vx; + arrow_end.y += vy; + arrow_end.z += vz; + linear_v_marker_.points.emplace_back(arrow_end); + + 
angular_v_marker_.action = visualization_msgs::msg::Marker::ADD; + angular_v_marker_.points.clear(); + angular_v_marker_.points.emplace_back(position_marker_.pose.position); + arrow_end = position_marker_.pose.position; + arrow_end.z += target_msg.v_yaw / M_PI; + angular_v_marker_.points.emplace_back(arrow_end); + + armor_marker_.action = visualization_msgs::msg::Marker::ADD; + armor_marker_.scale.y = tracker_->tracked_armor.type == "small" ? 0.135 : 0.23; + bool is_current_pair = true; + size_t a_n = target_msg.armors_num; + geometry_msgs::msg::Point p_a; + double r = 0; + for (size_t i = 0; i < a_n; i++) { + double tmp_yaw = yaw + i * (2 * M_PI / a_n); + // Only 4 armors has 2 radius and height + if (a_n == 4) { + r = is_current_pair ? r1 : r2; + p_a.z = za + (is_current_pair ? 0 : dz); + is_current_pair = !is_current_pair; + } else { + r = r1; + p_a.z = za; + } + p_a.x = xc - r * cos(tmp_yaw); + p_a.y = yc - r * sin(tmp_yaw); + + armor_marker_.id = i; + armor_marker_.pose.position = p_a; + tf2::Quaternion q; + q.setRPY(0, target_msg.id == "outpost" ? -0.26 : 0.26, tmp_yaw); + armor_marker_.pose.orientation = tf2::toMsg(q); + marker_array.markers.emplace_back(armor_marker_); + } + } else { + position_marker_.action = visualization_msgs::msg::Marker::DELETE; + linear_v_marker_.action = visualization_msgs::msg::Marker::DELETE; + angular_v_marker_.action = visualization_msgs::msg::Marker::DELETE; + + armor_marker_.action = visualization_msgs::msg::Marker::DELETE; + marker_array.markers.emplace_back(armor_marker_); + } + + marker_array.markers.emplace_back(position_marker_); + marker_array.markers.emplace_back(linear_v_marker_); + marker_array.markers.emplace_back(angular_v_marker_); + marker_pub_->publish(marker_array); +} + +} // namespace rm_auto_aim + +#include "rclcpp_components/register_node_macro.hpp" + +// Register the component with class_loader. 
+// This acts as a sort of entry point, allowing the component to be discoverable when its library +// is being loaded into a running process. +RCLCPP_COMPONENTS_REGISTER_NODE(rm_auto_aim::ArmorTrackerNode) diff --git a/src/rm_auto_aim/auto_aim_interfaces/CMakeLists.txt b/src/rm_auto_aim/auto_aim_interfaces/CMakeLists.txt new file mode 100644 index 0000000..0545379 --- /dev/null +++ b/src/rm_auto_aim/auto_aim_interfaces/CMakeLists.txt @@ -0,0 +1,30 @@ +cmake_minimum_required(VERSION 3.8) +project(auto_aim_interfaces) + +if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") + add_compile_options(-Wall -Wextra -Wpedantic) +endif() + +# find dependencies +find_package(ament_cmake REQUIRED) +find_package(rosidl_default_generators REQUIRED) +find_package(geometry_msgs REQUIRED) + +rosidl_generate_interfaces(${PROJECT_NAME} + "msg/Armor.msg" + "msg/Armors.msg" + "msg/Target.msg" + + "msg/DebugLight.msg" + "msg/DebugLights.msg" + "msg/DebugArmor.msg" + "msg/DebugArmors.msg" + "msg/TrackerInfo.msg" + + DEPENDENCIES + std_msgs + geometry_msgs +) + + +ament_package() diff --git a/src/rm_auto_aim/auto_aim_interfaces/msg/Armor.msg b/src/rm_auto_aim/auto_aim_interfaces/msg/Armor.msg new file mode 100644 index 0000000..723f523 --- /dev/null +++ b/src/rm_auto_aim/auto_aim_interfaces/msg/Armor.msg @@ -0,0 +1,4 @@ +string number +string type +float32 distance_to_image_center +geometry_msgs/Pose pose \ No newline at end of file diff --git a/src/rm_auto_aim/auto_aim_interfaces/msg/Armors.msg b/src/rm_auto_aim/auto_aim_interfaces/msg/Armors.msg new file mode 100644 index 0000000..0a70d30 --- /dev/null +++ b/src/rm_auto_aim/auto_aim_interfaces/msg/Armors.msg @@ -0,0 +1,2 @@ +std_msgs/Header header +Armor[] armors \ No newline at end of file diff --git a/src/rm_auto_aim/auto_aim_interfaces/msg/DebugArmor.msg b/src/rm_auto_aim/auto_aim_interfaces/msg/DebugArmor.msg new file mode 100644 index 0000000..cf96eec --- /dev/null +++ 
b/src/rm_auto_aim/auto_aim_interfaces/msg/DebugArmor.msg @@ -0,0 +1,5 @@ +int32 center_x +string type +float32 light_ratio +float32 center_distance +float32 angle diff --git a/src/rm_auto_aim/auto_aim_interfaces/msg/DebugArmors.msg b/src/rm_auto_aim/auto_aim_interfaces/msg/DebugArmors.msg new file mode 100644 index 0000000..e12e300 --- /dev/null +++ b/src/rm_auto_aim/auto_aim_interfaces/msg/DebugArmors.msg @@ -0,0 +1 @@ +DebugArmor[] data \ No newline at end of file diff --git a/src/rm_auto_aim/auto_aim_interfaces/msg/DebugLight.msg b/src/rm_auto_aim/auto_aim_interfaces/msg/DebugLight.msg new file mode 100644 index 0000000..c876dbc --- /dev/null +++ b/src/rm_auto_aim/auto_aim_interfaces/msg/DebugLight.msg @@ -0,0 +1,4 @@ +int32 center_x +bool is_light +float32 ratio +float32 angle \ No newline at end of file diff --git a/src/rm_auto_aim/auto_aim_interfaces/msg/DebugLights.msg b/src/rm_auto_aim/auto_aim_interfaces/msg/DebugLights.msg new file mode 100644 index 0000000..14e31ce --- /dev/null +++ b/src/rm_auto_aim/auto_aim_interfaces/msg/DebugLights.msg @@ -0,0 +1 @@ +DebugLight[] data \ No newline at end of file diff --git a/src/rm_auto_aim/auto_aim_interfaces/msg/Target.msg b/src/rm_auto_aim/auto_aim_interfaces/msg/Target.msg new file mode 100644 index 0000000..fc9926b --- /dev/null +++ b/src/rm_auto_aim/auto_aim_interfaces/msg/Target.msg @@ -0,0 +1,11 @@ +std_msgs/Header header +bool tracking +string id +int32 armors_num +geometry_msgs/Point position +geometry_msgs/Vector3 velocity +float64 yaw +float64 v_yaw +float64 radius_1 +float64 radius_2 +float64 dz diff --git a/src/rm_auto_aim/auto_aim_interfaces/msg/TrackerInfo.msg b/src/rm_auto_aim/auto_aim_interfaces/msg/TrackerInfo.msg new file mode 100644 index 0000000..5d6bc35 --- /dev/null +++ b/src/rm_auto_aim/auto_aim_interfaces/msg/TrackerInfo.msg @@ -0,0 +1,6 @@ +# Difference between the current measurement and prediction +float64 position_diff +float64 yaw_diff +# Unfiltered position and yaw +geometry_msgs/Point 
position +float64 yaw diff --git a/src/rm_auto_aim/auto_aim_interfaces/package.xml b/src/rm_auto_aim/auto_aim_interfaces/package.xml new file mode 100644 index 0000000..11a6ed3 --- /dev/null +++ b/src/rm_auto_aim/auto_aim_interfaces/package.xml @@ -0,0 +1,21 @@ + + + + auto_aim_interfaces + 0.0.0 + TODO: Package description + chenjun + TODO: License declaration + + ament_cmake + + rosidl_default_generators + rosidl_default_runtime + rosidl_interface_packages + + geometry_msgs + + + ament_cmake + + diff --git a/src/rm_auto_aim/docs/rm_vision.svg b/src/rm_auto_aim/docs/rm_vision.svg new file mode 100644 index 0000000..a9d40c6 --- /dev/null +++ b/src/rm_auto_aim/docs/rm_vision.svg @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + diff --git a/src/rm_auto_aim/rm_auto_aim/CMakeLists.txt b/src/rm_auto_aim/rm_auto_aim/CMakeLists.txt new file mode 100644 index 0000000..1ec501e --- /dev/null +++ b/src/rm_auto_aim/rm_auto_aim/CMakeLists.txt @@ -0,0 +1,12 @@ +cmake_minimum_required(VERSION 3.8) +project(rm_auto_aim) + +find_package(ament_cmake_auto REQUIRED) + +if(BUILD_TESTING) + find_package(ament_lint_auto REQUIRED) + set(ament_cmake_copyright_FOUND TRUE) + ament_lint_auto_find_test_dependencies() +endif() + +ament_auto_package() diff --git a/src/rm_auto_aim/rm_auto_aim/package.xml b/src/rm_auto_aim/rm_auto_aim/package.xml new file mode 100644 index 0000000..0a6b3d0 --- /dev/null +++ b/src/rm_auto_aim/rm_auto_aim/package.xml @@ -0,0 +1,22 @@ + + + + rm_auto_aim + 0.0.0 + TODO: Package description + chenjun + TODO: License declaration + + ament_cmake + + auto_aim_interfaces + armor_detector + armor_tracker + + ament_lint_auto + ament_lint_common + + + ament_cmake + + diff --git a/src/rm_gimbal_description/.gitignore b/src/rm_gimbal_description/.gitignore new file mode 100644 index 0000000..35d74bb --- /dev/null +++ b/src/rm_gimbal_description/.gitignore @@ -0,0 +1,51 @@ +devel/ +logs/ +build/ +bin/ +lib/ +msg_gen/ +srv_gen/ +msg/*Action.msg +msg/*ActionFeedback.msg 
+msg/*ActionGoal.msg +msg/*ActionResult.msg +msg/*Feedback.msg +msg/*Goal.msg +msg/*Result.msg +msg/_*.py +build_isolated/ +devel_isolated/ + +# Generated by dynamic reconfigure +*.cfgc +/cfg/cpp/ +/cfg/*.py + +# Ignore generated docs +*.dox +*.wikidoc + +# eclipse stuff +.project +.cproject + +# qcreator stuff +CMakeLists.txt.user + +srv/_*.py +*.pcd +*.pyc +qtcreator-* +*.user + +/planning/cfg +/planning/docs +/planning/src + +*~ + +# Emacs +.#* + +# Catkin custom files +CATKIN_IGNORE diff --git a/src/rm_gimbal_description/CMakeLists.txt b/src/rm_gimbal_description/CMakeLists.txt new file mode 100644 index 0000000..335305a --- /dev/null +++ b/src/rm_gimbal_description/CMakeLists.txt @@ -0,0 +1,18 @@ +cmake_minimum_required(VERSION 3.8) +project(rm_gimbal_description) + +# find dependencies +find_package(ament_cmake REQUIRED) + +# Install files +install(DIRECTORY + urdf + DESTINATION share/${PROJECT_NAME}) + +if(BUILD_TESTING) + find_package(ament_lint_auto REQUIRED) + set(ament_cmake_copyright_FOUND TRUE) + ament_lint_auto_find_test_dependencies() +endif() + +ament_package() diff --git a/src/rm_gimbal_description/LICENSE b/src/rm_gimbal_description/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/src/rm_gimbal_description/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/src/rm_gimbal_description/README.md b/src/rm_gimbal_description/README.md new file mode 100644 index 0000000..31c85bf --- /dev/null +++ b/src/rm_gimbal_description/README.md @@ -0,0 +1,26 @@ +# rm_gimbal_description +RoboMaster 视觉自瞄系统所需的 URDF + +rm_vision + +该项目为 [rm_vision](https://github.com/chenjunnn/rm_vision) 的子模块 + +## 坐标系定义 + +单位和方向请参考 https://www.ros.org/reps/rep-0103.html + +odom: 以云台中心为原点的惯性系 + +yaw_joint: 表述云台的 yaw 轴与惯性系的旋转关系 + +pitch_joint: 表述云台的 pitch 轴与惯性系的旋转关系 + +camera_joint: 表述相机到惯性系的变换关系 + +camera_optical_joint: 表述以 z 轴为前方的相机坐标系转换为 x 轴为前方的相机坐标系的旋转关系 + +## 使用方法 + +修改 [urdf/rm_gimbal.urdf.xacro](urdf/rm_gimbal.urdf.xacro) 中的 `gimbal_camera_transfrom` + +xyz 与 rpy 对应机器人云台上相机到云台中心的平移与旋转关系,可以由机械图纸测量得到,或在机器人上直接测量 diff --git a/src/rm_gimbal_description/docs/rm_vision.svg b/src/rm_gimbal_description/docs/rm_vision.svg new file mode 100644 index 0000000..a9d40c6 --- /dev/null +++ b/src/rm_gimbal_description/docs/rm_vision.svg @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + diff --git a/src/rm_gimbal_description/package.xml b/src/rm_gimbal_description/package.xml new file mode 100644 index 0000000..863b9a5 --- /dev/null +++ b/src/rm_gimbal_description/package.xml @@ -0,0 +1,21 @@ + + + + rm_gimbal_description + 0.0.0 + TODO: Package description + chenjun + TODO: License declaration + + ament_cmake + + launch_ros + xacro + + ament_lint_auto + ament_lint_common + + 
+ ament_cmake + + diff --git a/src/rm_gimbal_description/urdf/rm_gimbal.urdf.xacro b/src/rm_gimbal_description/urdf/rm_gimbal.urdf.xacro new file mode 100644 index 0000000..906b6fb --- /dev/null +++ b/src/rm_gimbal_description/urdf/rm_gimbal.urdf.xacro @@ -0,0 +1,35 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/rm_serial_driver/.clang-format b/src/rm_serial_driver/.clang-format new file mode 100644 index 0000000..2f8d64b --- /dev/null +++ b/src/rm_serial_driver/.clang-format @@ -0,0 +1,18 @@ +--- +Language: Cpp +BasedOnStyle: Google + +AccessModifierOffset: -2 +AlignAfterOpenBracket: AlwaysBreak +BraceWrapping: + AfterClass: true + AfterFunction: true + AfterNamespace: true + AfterStruct: true +BreakBeforeBraces: Custom +ColumnLimit: 100 +ConstructorInitializerIndentWidth: 0 +ContinuationIndentWidth: 2 +DerivePointerAlignment: false +PointerAlignment: Middle +ReflowComments: false \ No newline at end of file diff --git a/src/rm_serial_driver/.gitignore b/src/rm_serial_driver/.gitignore new file mode 100644 index 0000000..35d74bb --- /dev/null +++ b/src/rm_serial_driver/.gitignore @@ -0,0 +1,51 @@ +devel/ +logs/ +build/ +bin/ +lib/ +msg_gen/ +srv_gen/ +msg/*Action.msg +msg/*ActionFeedback.msg +msg/*ActionGoal.msg +msg/*ActionResult.msg +msg/*Feedback.msg +msg/*Goal.msg +msg/*Result.msg +msg/_*.py +build_isolated/ +devel_isolated/ + +# Generated by dynamic reconfigure +*.cfgc +/cfg/cpp/ +/cfg/*.py + +# Ignore generated docs +*.dox +*.wikidoc + +# eclipse stuff +.project +.cproject + +# qcreator stuff +CMakeLists.txt.user + +srv/_*.py +*.pcd +*.pyc +qtcreator-* +*.user + +/planning/cfg +/planning/docs +/planning/src + +*~ + +# Emacs +.#* + +# Catkin custom files +CATKIN_IGNORE diff --git a/src/rm_serial_driver/CMakeLists.txt b/src/rm_serial_driver/CMakeLists.txt new file mode 100644 index 0000000..d7c27bd --- /dev/null +++ b/src/rm_serial_driver/CMakeLists.txt @@ -0,0 +1,57 @@ +cmake_minimum_required(VERSION 3.10) 
+project(rm_serial_driver) + +## Use C++14 +set(CMAKE_CXX_STANDARD 14) +set(CMAKE_CXX_STANDARD_REQUIRED ON) + +## By adding -Wall and -Werror, the compiler does not ignore warnings anymore, +## enforcing cleaner code. +add_definitions(-Wall -Werror) + +## Export compile commands for clangd +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) + +####################### +## Find dependencies ## +####################### + +find_package(ament_cmake_auto REQUIRED) +ament_auto_find_build_dependencies() + +########### +## Build ## +########### + +ament_auto_add_library(${PROJECT_NAME} SHARED + src/rm_serial_driver.cpp + src/crc.cpp +) + +rclcpp_components_register_node(${PROJECT_NAME} + PLUGIN rm_serial_driver::RMSerialDriver + EXECUTABLE ${PROJECT_NAME}_node +) + +############# +## Testing ## +############# + +if(BUILD_TESTING) + find_package(ament_lint_auto REQUIRED) + list(APPEND AMENT_LINT_AUTO_EXCLUDE + ament_cmake_copyright + ament_cmake_uncrustify + ) + ament_lint_auto_find_test_dependencies() +endif() + +############# +## Install ## +############# + +ament_auto_package( + INSTALL_TO_SHARE + config + launch +) diff --git a/src/rm_serial_driver/LICENSE b/src/rm_serial_driver/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/src/rm_serial_driver/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/src/rm_serial_driver/README.md b/src/rm_serial_driver/README.md new file mode 100644 index 0000000..27dea24 --- /dev/null +++ b/src/rm_serial_driver/README.md @@ -0,0 +1,36 @@ +# rm_serial_driver +RoboMaster 视觉系统与电控系统的串口通讯模块 + +rm_vision + +该项目为 [rm_vision](https://github.com/chenjunnn/rm_vision) 的子模块 + +## Overview + +本模块基于 [transport_drivers](https://github.com/ros-drivers/transport_drivers) 实现了 [rm_auto_aim](https://github.com/chenjunnn/rm_auto_aim) 项目与电控部分通讯的功能,也可作为使用 ros2 作为开发框架的参赛队的串口通讯模块参考 + +若有帮助请Star这个项目,感谢~ + +## 使用指南 + +安装依赖 `sudo apt install ros-humble-serial-driver` + +更改 [serial_driver.yaml](config/serial_driver.yaml) 中的参数以匹配与电控通讯的串口 + +启动串口模块 `ros2 launch rm_serial_driver serial_driver.launch.py` + +## 发送和接收 + +详情请参考 [packet.hpp](include/rm_serial_driver/packet.hpp) + +从电控端需要接受 + +- 机器人的自身颜色 `robot_color` 以判断对应的识别目标颜色 +- 云台姿态 `pitch` 和 `yaw`, 单位和方向请参考 https://www.ros.org/reps/rep-0103.html +- 当前云台瞄准的位置 `aim_x, aim_y, aim_z`,用于发布可视化 Marker + +视觉端发送 armor_tracker 的输出,即对于目标机器人的观测,具体的运动预测、装甲板选择、弹道解算在电控端完成 + +## 电控端的处理 + +TBD diff --git a/src/rm_serial_driver/config/serial_driver.yaml b/src/rm_serial_driver/config/serial_driver.yaml new file mode 100644 index 0000000..b902ef9 --- /dev/null +++ b/src/rm_serial_driver/config/serial_driver.yaml @@ -0,0 +1,8 @@ +/rm_serial_driver: + ros__parameters: + device_name: /dev/ttyACM0 + baud_rate: 115200 + flow_control: none + 
parity: none + stop_bits: "1" + diff --git a/src/rm_serial_driver/docs/rm_vision.svg b/src/rm_serial_driver/docs/rm_vision.svg new file mode 100644 index 0000000..a9d40c6 --- /dev/null +++ b/src/rm_serial_driver/docs/rm_vision.svg @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + diff --git a/src/rm_serial_driver/include/rm_serial_driver/crc.hpp b/src/rm_serial_driver/include/rm_serial_driver/crc.hpp new file mode 100644 index 0000000..da01b09 --- /dev/null +++ b/src/rm_serial_driver/include/rm_serial_driver/crc.hpp @@ -0,0 +1,29 @@ +// Copyright (c) 2022 ChenJun +// Licensed under the Apache-2.0 License. + +#ifndef RM_SERIAL_DRIVER__CRC_HPP_ +#define RM_SERIAL_DRIVER__CRC_HPP_ + +#include + +namespace crc16 +{ +/** + * @brief CRC16 Verify function + * @param[in] pchMessage : Data to Verify, + * @param[in] dwLength : Stream length = Data + checksum + * @return : True or False (CRC Verify Result) + */ +uint32_t Verify_CRC16_Check_Sum(const uint8_t * pchMessage, uint32_t dwLength); + +/** + * @brief Append CRC16 value to the end of the buffer + * @param[in] pchMessage : Data to Verify, + * @param[in] dwLength : Stream length = Data + checksum + * @return none + */ +void Append_CRC16_Check_Sum(uint8_t * pchMessage, uint32_t dwLength); + +} // namespace crc16 + +#endif // RM_SERIAL_DRIVER__CRC_HPP_ diff --git a/src/rm_serial_driver/include/rm_serial_driver/packet.hpp b/src/rm_serial_driver/include/rm_serial_driver/packet.hpp new file mode 100644 index 0000000..13a929b --- /dev/null +++ b/src/rm_serial_driver/include/rm_serial_driver/packet.hpp @@ -0,0 +1,67 @@ +// Copyright (c) 2022 ChenJun +// Licensed under the Apache-2.0 License. 
+ +#ifndef RM_SERIAL_DRIVER__PACKET_HPP_ +#define RM_SERIAL_DRIVER__PACKET_HPP_ + +#include +#include +#include + +namespace rm_serial_driver +{ +struct ReceivePacket +{ + uint8_t header = 0x5A; + uint8_t detect_color : 1; // 0-red 1-blue + bool reset_tracker : 1; + uint8_t reserved : 6; + float roll; + float pitch; + float yaw; + float aim_x; + float aim_y; + float aim_z; + uint16_t checksum = 0; +} __attribute__((packed)); + +struct SendPacket +{ + uint8_t header = 0xA5; + bool tracking : 1; + uint8_t id : 3; // 0-outpost 6-guard 7-base + uint8_t armors_num : 3; // 2-balance 3-outpost 4-normal + uint8_t reserved : 1; + float x; + float y; + float z; + float yaw; + float vx; + float vy; + float vz; + float v_yaw; + float r1; + float r2; + float dz; + uint16_t checksum = 0; +} __attribute__((packed)); + +inline ReceivePacket fromVector(const std::vector & data) +{ + ReceivePacket packet; + std::copy(data.begin(), data.end(), reinterpret_cast(&packet)); + return packet; +} + +inline std::vector toVector(const SendPacket & data) +{ + std::vector packet(sizeof(SendPacket)); + std::copy( + reinterpret_cast(&data), + reinterpret_cast(&data) + sizeof(SendPacket), packet.begin()); + return packet; +} + +} // namespace rm_serial_driver + +#endif // RM_SERIAL_DRIVER__PACKET_HPP_ diff --git a/src/rm_serial_driver/include/rm_serial_driver/rm_serial_driver.hpp b/src/rm_serial_driver/include/rm_serial_driver/rm_serial_driver.hpp new file mode 100644 index 0000000..06f22cf --- /dev/null +++ b/src/rm_serial_driver/include/rm_serial_driver/rm_serial_driver.hpp @@ -0,0 +1,82 @@ +// Copyright (c) 2022 ChenJun +// Licensed under the Apache-2.0 License. 
+ +#ifndef RM_SERIAL_DRIVER__RM_SERIAL_DRIVER_HPP_ +#define RM_SERIAL_DRIVER__RM_SERIAL_DRIVER_HPP_ + +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +// C++ system +#include +#include +#include +#include +#include + +#include "auto_aim_interfaces/msg/target.hpp" + +namespace rm_serial_driver +{ +class RMSerialDriver : public rclcpp::Node +{ +public: + explicit RMSerialDriver(const rclcpp::NodeOptions & options); + + ~RMSerialDriver() override; + +private: + void getParams(); + + void receiveData(); + + void sendData(auto_aim_interfaces::msg::Target::SharedPtr msg); + + void reopenPort(); + + void setParam(const rclcpp::Parameter & param); + + void resetTracker(); + + // Serial port + std::unique_ptr owned_ctx_; + std::string device_name_; + std::unique_ptr device_config_; + std::unique_ptr serial_driver_; + + // Param client to set detect_colr + using ResultFuturePtr = std::shared_future>; + bool initial_set_param_ = false; + uint8_t previous_receive_color_ = 0; + rclcpp::AsyncParametersClient::SharedPtr detector_param_client_; + ResultFuturePtr set_param_future_; + + // Service client to reset tracker + rclcpp::Client::SharedPtr reset_tracker_client_; + + // Aimimg point receiving from serial port for visualization + visualization_msgs::msg::Marker aiming_point_; + + // Broadcast tf from odom to gimbal_link + double timestamp_offset_ = 0; + std::unique_ptr tf_broadcaster_; + + rclcpp::Subscription::SharedPtr target_sub_; + + // For debug usage + rclcpp::Publisher::SharedPtr latency_pub_; + rclcpp::Publisher::SharedPtr marker_pub_; + + std::thread receive_thread_; +}; +} // namespace rm_serial_driver + +#endif // RM_SERIAL_DRIVER__RM_SERIAL_DRIVER_HPP_ diff --git a/src/rm_serial_driver/launch/serial_driver.launch.py b/src/rm_serial_driver/launch/serial_driver.launch.py new file mode 100644 index 0000000..65bc3d7 --- /dev/null +++ b/src/rm_serial_driver/launch/serial_driver.launch.py @@ -0,0 +1,21 @@ +import os + +from 
ament_index_python.packages import get_package_share_directory +from launch import LaunchDescription +from launch_ros.actions import Node + + +def generate_launch_description(): + config = os.path.join( + get_package_share_directory('rm_serial_driver'), 'config', 'serial_driver.yaml') + + rm_serial_driver_node = Node( + package='rm_serial_driver', + executable='rm_serial_driver_node', + namespace='', + output='screen', + emulate_tty=True, + parameters=[config], + ) + + return LaunchDescription([rm_serial_driver_node]) diff --git a/src/rm_serial_driver/package.xml b/src/rm_serial_driver/package.xml new file mode 100644 index 0000000..61f995c --- /dev/null +++ b/src/rm_serial_driver/package.xml @@ -0,0 +1,36 @@ + + + + rm_serial_driver + 0.1.0 + A template for ROS packages. + Chen Jun + MIT + https://github.com/chenjunnn/ros2_mindvision_camera + https://github.com/chenjunnn/ros2_mindvision_camera/issues + Chen Jun + + + ament_cmake + + + rclcpp + rclcpp_components + serial_driver + geometry_msgs + tf2_ros + tf2_geometry_msgs + visualization_msgs + auto_aim_interfaces + std_srvs + + ament_lint_auto + ament_lint_common + ament_cmake_clang_format + + + ament_cmake + + diff --git a/src/rm_serial_driver/src/crc.cpp b/src/rm_serial_driver/src/crc.cpp new file mode 100644 index 0000000..1273dd6 --- /dev/null +++ b/src/rm_serial_driver/src/crc.cpp @@ -0,0 +1,93 @@ +// Copyright (c) 2022 ChenJun +// Licensed under the Apache-2.0 License. 
+ +#include "rm_serial_driver/crc.hpp" + +#include + +namespace crc16 +{ +constexpr uint16_t CRC16_INIT = 0xFFFF; + +constexpr uint16_t W_CRC_TABLE[256] = { + 0x0000, 0x1189, 0x2312, 0x329b, 0x4624, 0x57ad, 0x6536, 0x74bf, 0x8c48, 0x9dc1, 0xaf5a, 0xbed3, + 0xca6c, 0xdbe5, 0xe97e, 0xf8f7, 0x1081, 0x0108, 0x3393, 0x221a, 0x56a5, 0x472c, 0x75b7, 0x643e, + 0x9cc9, 0x8d40, 0xbfdb, 0xae52, 0xdaed, 0xcb64, 0xf9ff, 0xe876, 0x2102, 0x308b, 0x0210, 0x1399, + 0x6726, 0x76af, 0x4434, 0x55bd, 0xad4a, 0xbcc3, 0x8e58, 0x9fd1, 0xeb6e, 0xfae7, 0xc87c, 0xd9f5, + 0x3183, 0x200a, 0x1291, 0x0318, 0x77a7, 0x662e, 0x54b5, 0x453c, 0xbdcb, 0xac42, 0x9ed9, 0x8f50, + 0xfbef, 0xea66, 0xd8fd, 0xc974, 0x4204, 0x538d, 0x6116, 0x709f, 0x0420, 0x15a9, 0x2732, 0x36bb, + 0xce4c, 0xdfc5, 0xed5e, 0xfcd7, 0x8868, 0x99e1, 0xab7a, 0xbaf3, 0x5285, 0x430c, 0x7197, 0x601e, + 0x14a1, 0x0528, 0x37b3, 0x263a, 0xdecd, 0xcf44, 0xfddf, 0xec56, 0x98e9, 0x8960, 0xbbfb, 0xaa72, + 0x6306, 0x728f, 0x4014, 0x519d, 0x2522, 0x34ab, 0x0630, 0x17b9, 0xef4e, 0xfec7, 0xcc5c, 0xddd5, + 0xa96a, 0xb8e3, 0x8a78, 0x9bf1, 0x7387, 0x620e, 0x5095, 0x411c, 0x35a3, 0x242a, 0x16b1, 0x0738, + 0xffcf, 0xee46, 0xdcdd, 0xcd54, 0xb9eb, 0xa862, 0x9af9, 0x8b70, 0x8408, 0x9581, 0xa71a, 0xb693, + 0xc22c, 0xd3a5, 0xe13e, 0xf0b7, 0x0840, 0x19c9, 0x2b52, 0x3adb, 0x4e64, 0x5fed, 0x6d76, 0x7cff, + 0x9489, 0x8500, 0xb79b, 0xa612, 0xd2ad, 0xc324, 0xf1bf, 0xe036, 0x18c1, 0x0948, 0x3bd3, 0x2a5a, + 0x5ee5, 0x4f6c, 0x7df7, 0x6c7e, 0xa50a, 0xb483, 0x8618, 0x9791, 0xe32e, 0xf2a7, 0xc03c, 0xd1b5, + 0x2942, 0x38cb, 0x0a50, 0x1bd9, 0x6f66, 0x7eef, 0x4c74, 0x5dfd, 0xb58b, 0xa402, 0x9699, 0x8710, + 0xf3af, 0xe226, 0xd0bd, 0xc134, 0x39c3, 0x284a, 0x1ad1, 0x0b58, 0x7fe7, 0x6e6e, 0x5cf5, 0x4d7c, + 0xc60c, 0xd785, 0xe51e, 0xf497, 0x8028, 0x91a1, 0xa33a, 0xb2b3, 0x4a44, 0x5bcd, 0x6956, 0x78df, + 0x0c60, 0x1de9, 0x2f72, 0x3efb, 0xd68d, 0xc704, 0xf59f, 0xe416, 0x90a9, 0x8120, 0xb3bb, 0xa232, + 0x5ac5, 0x4b4c, 0x79d7, 0x685e, 0x1ce1, 0x0d68, 0x3ff3, 0x2e7a, 0xe70e, 
0xf687, 0xc41c, 0xd595, + 0xa12a, 0xb0a3, 0x8238, 0x93b1, 0x6b46, 0x7acf, 0x4854, 0x59dd, 0x2d62, 0x3ceb, 0x0e70, 0x1ff9, + 0xf78f, 0xe606, 0xd49d, 0xc514, 0xb1ab, 0xa022, 0x92b9, 0x8330, 0x7bc7, 0x6a4e, 0x58d5, 0x495c, + 0x3de3, 0x2c6a, 0x1ef1, 0x0f78}; + +/** + * @brief CRC16 Caculation function + * @param[in] pchMessage : Data to Verify, + * @param[in] dwLength : Stream length = Data + checksum + * @param[in] wCRC : CRC16 init value(default : 0xFFFF) + * @return : CRC16 checksum + */ +uint16_t Get_CRC16_Check_Sum(const uint8_t * pchMessage, uint32_t dwLength, uint16_t wCRC) +{ + uint8_t ch_data; + + if (pchMessage == nullptr) return 0xFFFF; + while (dwLength--) { + ch_data = *pchMessage++; + (wCRC) = + ((uint16_t)(wCRC) >> 8) ^ W_CRC_TABLE[((uint16_t)(wCRC) ^ (uint16_t)(ch_data)) & 0x00ff]; + } + + return wCRC; +} + +/** + * @brief CRC16 Verify function + * @param[in] pchMessage : Data to Verify, + * @param[in] dwLength : Stream length = Data + checksum + * @return : True or False (CRC Verify Result) + */ +uint32_t Verify_CRC16_Check_Sum(const uint8_t * pchMessage, uint32_t dwLength) +{ + uint16_t w_expected = 0; + + if ((pchMessage == nullptr) || (dwLength <= 2)) return false; + + w_expected = Get_CRC16_Check_Sum(pchMessage, dwLength - 2, CRC16_INIT); + return ( + (w_expected & 0xff) == pchMessage[dwLength - 2] && + ((w_expected >> 8) & 0xff) == pchMessage[dwLength - 1]); +} + +/** + * @brief Append CRC16 value to the end of the buffer + * @param[in] pchMessage : Data to Verify, + * @param[in] dwLength : Stream length = Data + checksum + * @return none + */ +void Append_CRC16_Check_Sum(uint8_t * pchMessage, uint32_t dwLength) +{ + uint16_t w_crc = 0; + + if ((pchMessage == nullptr) || (dwLength <= 2)) return; + + w_crc = Get_CRC16_Check_Sum(reinterpret_cast(pchMessage), dwLength - 2, CRC16_INIT); + + pchMessage[dwLength - 2] = (uint8_t)(w_crc & 0x00ff); + pchMessage[dwLength - 1] = (uint8_t)((w_crc >> 8) & 0x00ff); +} + +} // namespace crc16 diff --git 
a/src/rm_serial_driver/src/rm_serial_driver.cpp b/src/rm_serial_driver/src/rm_serial_driver.cpp new file mode 100644 index 0000000..7bde124 --- /dev/null +++ b/src/rm_serial_driver/src/rm_serial_driver.cpp @@ -0,0 +1,334 @@ +// Copyright (c) 2022 ChenJun +// Licensed under the Apache-2.0 License. + +#include + +#include +#include +#include +#include +#include + +// C++ system +#include +#include +#include +#include +#include +#include + +#include "rm_serial_driver/crc.hpp" +#include "rm_serial_driver/packet.hpp" +#include "rm_serial_driver/rm_serial_driver.hpp" + +namespace rm_serial_driver +{ +RMSerialDriver::RMSerialDriver(const rclcpp::NodeOptions & options) +: Node("rm_serial_driver", options), + owned_ctx_{new IoContext(2)}, + serial_driver_{new drivers::serial_driver::SerialDriver(*owned_ctx_)} +{ + RCLCPP_INFO(get_logger(), "Start RMSerialDriver!"); + + getParams(); + + // TF broadcaster + timestamp_offset_ = this->declare_parameter("timestamp_offset", 0.0); + tf_broadcaster_ = std::make_unique(*this); + + // Create Publisher + latency_pub_ = this->create_publisher("/latency", 10); + marker_pub_ = this->create_publisher("/aiming_point", 10); + + // Detect parameter client + detector_param_client_ = std::make_shared(this, "armor_detector"); + + // Tracker reset service client + reset_tracker_client_ = this->create_client("/tracker/reset"); + + try { + serial_driver_->init_port(device_name_, *device_config_); + if (!serial_driver_->port()->is_open()) { + serial_driver_->port()->open(); + receive_thread_ = std::thread(&RMSerialDriver::receiveData, this); + } + } catch (const std::exception & ex) { + RCLCPP_ERROR( + get_logger(), "Error creating serial port: %s - %s", device_name_.c_str(), ex.what()); + throw ex; + } + + aiming_point_.header.frame_id = "odom"; + aiming_point_.ns = "aiming_point"; + aiming_point_.type = visualization_msgs::msg::Marker::SPHERE; + aiming_point_.action = visualization_msgs::msg::Marker::ADD; + aiming_point_.scale.x = 
aiming_point_.scale.y = aiming_point_.scale.z = 0.12; + aiming_point_.color.r = 1.0; + aiming_point_.color.g = 1.0; + aiming_point_.color.b = 1.0; + aiming_point_.color.a = 1.0; + aiming_point_.lifetime = rclcpp::Duration::from_seconds(0.1); + + // Create Subscription + target_sub_ = this->create_subscription( + "/tracker/target", rclcpp::SensorDataQoS(), + std::bind(&RMSerialDriver::sendData, this, std::placeholders::_1)); +} + +RMSerialDriver::~RMSerialDriver() +{ + if (receive_thread_.joinable()) { + receive_thread_.join(); + } + + if (serial_driver_->port()->is_open()) { + serial_driver_->port()->close(); + } + + if (owned_ctx_) { + owned_ctx_->waitForExit(); + } +} + +void RMSerialDriver::receiveData() +{ + std::vector header(1); + std::vector data; + data.reserve(sizeof(ReceivePacket)); + + while (rclcpp::ok()) { + try { + serial_driver_->port()->receive(header); + + if (header[0] == 0x5A) { + data.resize(sizeof(ReceivePacket) - 1); + serial_driver_->port()->receive(data); + + data.insert(data.begin(), header[0]); + ReceivePacket packet = fromVector(data); + + bool crc_ok = + crc16::Verify_CRC16_Check_Sum(reinterpret_cast(&packet), sizeof(packet)); + if (crc_ok) { + if (!initial_set_param_ || packet.detect_color != previous_receive_color_) { + setParam(rclcpp::Parameter("detect_color", packet.detect_color)); + previous_receive_color_ = packet.detect_color; + } + + if (packet.reset_tracker) { + resetTracker(); + } + + geometry_msgs::msg::TransformStamped t; + timestamp_offset_ = this->get_parameter("timestamp_offset").as_double(); + t.header.stamp = this->now() + rclcpp::Duration::from_seconds(timestamp_offset_); + t.header.frame_id = "odom"; + t.child_frame_id = "gimbal_link"; + tf2::Quaternion q; + q.setRPY(packet.roll, packet.pitch, packet.yaw); + t.transform.rotation = tf2::toMsg(q); + tf_broadcaster_->sendTransform(t); + + if (abs(packet.aim_x) > 0.01) { + aiming_point_.header.stamp = this->now(); + aiming_point_.pose.position.x = packet.aim_x; + 
aiming_point_.pose.position.y = packet.aim_y;
+            aiming_point_.pose.position.z = packet.aim_z;
+            marker_pub_->publish(aiming_point_);
+          }
+        } else {
+          RCLCPP_ERROR(get_logger(), "CRC error!");
+        }
+      } else {
+        RCLCPP_WARN_THROTTLE(get_logger(), *get_clock(), 20, "Invalid header: %02X", header[0]);
+      }
+    } catch (const std::exception & ex) {
+      RCLCPP_ERROR_THROTTLE(
+        get_logger(), *get_clock(), 20, "Error while receiving data: %s", ex.what());
+      reopenPort();
+    }
+  }
+}
+
+void RMSerialDriver::sendData(const auto_aim_interfaces::msg::Target::SharedPtr msg)
+{
+  const static std::map<std::string, uint8_t> id_unit8_map{
+    {"", 0},  {"outpost", 0}, {"1", 1}, {"2", 2},
+    {"3", 3}, {"4", 4},       {"5", 5}, {"guard", 6}, {"base", 7}};
+
+  try {
+    SendPacket packet;
+    packet.tracking = msg->tracking;
+    packet.id = id_unit8_map.at(msg->id);
+    packet.armors_num = msg->armors_num;
+    packet.x = msg->position.x;
+    packet.y = msg->position.y;
+    packet.z = msg->position.z;
+    packet.yaw = msg->yaw;
+    packet.vx = msg->velocity.x;
+    packet.vy = msg->velocity.y;
+    packet.vz = msg->velocity.z;
+    packet.v_yaw = msg->v_yaw;
+    packet.r1 = msg->radius_1;
+    packet.r2 = msg->radius_2;
+    packet.dz = msg->dz;
+    crc16::Append_CRC16_Check_Sum(reinterpret_cast<uint8_t *>(&packet), sizeof(packet));
+
+    std::vector<uint8_t> data = toVector(packet);
+
+    serial_driver_->port()->send(data);
+
+    std_msgs::msg::Float64 latency;
+    latency.data = (this->now() - msg->header.stamp).seconds() * 1000.0;
+    RCLCPP_DEBUG_STREAM(get_logger(), "Total latency: " + std::to_string(latency.data) + "ms");
+    latency_pub_->publish(latency);
+  } catch (const std::exception & ex) {
+    RCLCPP_ERROR(get_logger(), "Error while sending data: %s", ex.what());
+    reopenPort();
+  }
+}
+
+void RMSerialDriver::getParams()
+{
+  using FlowControl = drivers::serial_driver::FlowControl;
+  using Parity = drivers::serial_driver::Parity;
+  using StopBits = drivers::serial_driver::StopBits;
+
+  uint32_t baud_rate{};
+  auto fc = FlowControl::NONE;
+  auto pt = Parity::NONE;
+  
auto sb = StopBits::ONE; + + try { + device_name_ = declare_parameter("device_name", ""); + } catch (rclcpp::ParameterTypeException & ex) { + RCLCPP_ERROR(get_logger(), "The device name provided was invalid"); + throw ex; + } + + try { + baud_rate = declare_parameter("baud_rate", 0); + } catch (rclcpp::ParameterTypeException & ex) { + RCLCPP_ERROR(get_logger(), "The baud_rate provided was invalid"); + throw ex; + } + + try { + const auto fc_string = declare_parameter("flow_control", ""); + + if (fc_string == "none") { + fc = FlowControl::NONE; + } else if (fc_string == "hardware") { + fc = FlowControl::HARDWARE; + } else if (fc_string == "software") { + fc = FlowControl::SOFTWARE; + } else { + throw std::invalid_argument{ + "The flow_control parameter must be one of: none, software, or hardware."}; + } + } catch (rclcpp::ParameterTypeException & ex) { + RCLCPP_ERROR(get_logger(), "The flow_control provided was invalid"); + throw ex; + } + + try { + const auto pt_string = declare_parameter("parity", ""); + + if (pt_string == "none") { + pt = Parity::NONE; + } else if (pt_string == "odd") { + pt = Parity::ODD; + } else if (pt_string == "even") { + pt = Parity::EVEN; + } else { + throw std::invalid_argument{"The parity parameter must be one of: none, odd, or even."}; + } + } catch (rclcpp::ParameterTypeException & ex) { + RCLCPP_ERROR(get_logger(), "The parity provided was invalid"); + throw ex; + } + + try { + const auto sb_string = declare_parameter("stop_bits", ""); + + if (sb_string == "1" || sb_string == "1.0") { + sb = StopBits::ONE; + } else if (sb_string == "1.5") { + sb = StopBits::ONE_POINT_FIVE; + } else if (sb_string == "2" || sb_string == "2.0") { + sb = StopBits::TWO; + } else { + throw std::invalid_argument{"The stop_bits parameter must be one of: 1, 1.5, or 2."}; + } + } catch (rclcpp::ParameterTypeException & ex) { + RCLCPP_ERROR(get_logger(), "The stop_bits provided was invalid"); + throw ex; + } + + device_config_ = + std::make_unique(baud_rate, fc, 
pt, sb); +} + +void RMSerialDriver::reopenPort() +{ + RCLCPP_WARN(get_logger(), "Attempting to reopen port"); + try { + if (serial_driver_->port()->is_open()) { + serial_driver_->port()->close(); + } + serial_driver_->port()->open(); + RCLCPP_INFO(get_logger(), "Successfully reopened port"); + } catch (const std::exception & ex) { + RCLCPP_ERROR(get_logger(), "Error while reopening port: %s", ex.what()); + if (rclcpp::ok()) { + rclcpp::sleep_for(std::chrono::seconds(1)); + reopenPort(); + } + } +} + +void RMSerialDriver::setParam(const rclcpp::Parameter & param) +{ + if (!detector_param_client_->service_is_ready()) { + RCLCPP_WARN(get_logger(), "Service not ready, skipping parameter set"); + return; + } + + if ( + !set_param_future_.valid() || + set_param_future_.wait_for(std::chrono::seconds(0)) == std::future_status::ready) { + RCLCPP_INFO(get_logger(), "Setting detect_color to %ld...", param.as_int()); + set_param_future_ = detector_param_client_->set_parameters( + {param}, [this, param](const ResultFuturePtr & results) { + for (const auto & result : results.get()) { + if (!result.successful) { + RCLCPP_ERROR(get_logger(), "Failed to set parameter: %s", result.reason.c_str()); + return; + } + } + RCLCPP_INFO(get_logger(), "Successfully set detect_color to %ld!", param.as_int()); + initial_set_param_ = true; + }); + } +} + +void RMSerialDriver::resetTracker() +{ + if (!reset_tracker_client_->service_is_ready()) { + RCLCPP_WARN(get_logger(), "Service not ready, skipping tracker reset"); + return; + } + + auto request = std::make_shared(); + reset_tracker_client_->async_send_request(request); + RCLCPP_INFO(get_logger(), "Reset tracker!"); +} + +} // namespace rm_serial_driver + +#include "rclcpp_components/register_node_macro.hpp" + +// Register the component with class_loader. +// This acts as a sort of entry point, allowing the component to be discoverable when its library +// is being loaded into a running process. 
+RCLCPP_COMPONENTS_REGISTER_NODE(rm_serial_driver::RMSerialDriver) diff --git a/src/rm_vision/.github/workflows/ci.yml b/src/rm_vision/.github/workflows/ci.yml new file mode 100644 index 0000000..cc0c4e1 --- /dev/null +++ b/src/rm_vision/.github/workflows/ci.yml @@ -0,0 +1,33 @@ +name: rm_vision CI + +on: + push: + branches: [ main ] + +jobs: + docker-build-and-push: + if: github.event_name == 'push' + runs-on: ubuntu-latest + permissions: + contents: read + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Login to Docker Hub + uses: docker/login-action@v2 + with: + username: chenjunnn + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + - name: Build and push + uses: docker/build-push-action@v4 + with: + context: . + file: ./Dockerfile + push: true + tags: chenjunnn/rm_vision:lastest diff --git a/src/rm_vision/Dockerfile b/src/rm_vision/Dockerfile new file mode 100644 index 0000000..5ff9c65 --- /dev/null +++ b/src/rm_vision/Dockerfile @@ -0,0 +1,41 @@ +FROM ros:humble-ros-base + +# create workspace +RUN mkdir -p /ros_ws/src +WORKDIR /ros_ws/ + +# clone projects +RUN cd src && git clone https://gitlab.com/rm_vision/rm_auto_aim --depth=1 && \ + git clone https://gitlab.com/rm_vision/ros2-mindvision-camera --depth=1 && \ + git clone https://gitlab.com/rm_vision/ros2-hik-camera --depth=1 && \ + git clone https://gitlab.com/rm_vision/rm_gimbal_description --depth=1 && \ + git clone https://gitlab.com/rm_vision/rm_serial_driver --depth=1 && \ + git clone https://gitlab.com/rm_vision/rm_vision --depth=1 + +# install dependencies and some tools +RUN apt-get update && rosdep install --from-paths src --ignore-src -r -y && \ + apt-get install ros-humble-foxglove-bridge wget htop vim -y && \ + rm -rf /var/lib/apt/lists/* + +# setup zsh +RUN sh -c "$(wget -O- https://github.com/deluan/zsh-in-docker/releases/download/v1.1.2/zsh-in-docker.sh)" -- \ + -t jispwoso -p git \ + -p 
https://github.com/zsh-users/zsh-autosuggestions \ + -p https://github.com/zsh-users/zsh-syntax-highlighting && \ + chsh -s /bin/zsh && \ + rm -rf /var/lib/apt/lists/* + +# build +RUN . /opt/ros/humble/setup.sh && colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release + +# setup .zshrc +RUN echo 'export TERM=xterm-256color\n\ +source /ros_ws/install/setup.zsh\n\ +eval "$(register-python-argcomplete3 ros2)"\n\ +eval "$(register-python-argcomplete3 colcon)"\n'\ +>> /root/.zshrc + +# source entrypoint setup +RUN sed --in-place --expression \ + '$isource "/ros_ws/install/setup.bash"' \ + /ros_entrypoint.sh diff --git a/src/rm_vision/LICENSE b/src/rm_vision/LICENSE new file mode 100644 index 0000000..d1e647e --- /dev/null +++ b/src/rm_vision/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 ChenJun + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/src/rm_vision/README.md b/src/rm_vision/README.md new file mode 100644 index 0000000..a6ee5a4 --- /dev/null +++ b/src/rm_vision/README.md @@ -0,0 +1,60 @@ +# rm_vision + +rm_vision + +## Overview + +本项目不存在任何交流群、交流板块、论坛等。任何自称“rm_vision交流”等的群组、论坛等均为假冒,请注意辨别。 + +[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT) +[![State-of-the-art Shitcode](https://img.shields.io/static/v1?label=State-of-the-art&message=Shitcode&color=7B5804)](https://github.com/trekhleb/state-of-the-art-shitcode) + +如您所见,本项目带有“Shitcode”标志,这说明本项目极其不适合投入实际生产,仅供交流学习之用途,如果擅自使用,后果自负。 + +## 包含项目 + +装甲板自动瞄准算法模块 https://gitlab.com/rm_vision/rm_auto_aim + +MindVision 相机模块 https://gitlab.com/rm_vision/ros2_mindvision_camera + +HikVision 相机模块 https://gitlab.com/rm_vision/ros2_hik_camera + +机器人云台描述文件 https://gitlab.com/rm_vision/rm_gimbal_description + +串口通讯模块 https://gitlab.com/rm_vision/rm_serial_driver + +视觉算法仿真器 https://gitlab.com/rm_vision/rm_vision_simulator + +## 通过 Docker 部署 + +拉取镜像 + +``` +docker pull hezhexi2002/rm_vision:backup +``` + +构建开发容器 + +``` +docker run -it --name rv_devel \ +--privileged --network host \ +-v /dev:/dev -v $HOME/.ros:/root/.ros -v ws:/ros_ws \ +hezhexi2002/rm_vision:backup \ +ros2 launch foxglove_bridge foxglove_bridge_launch.xml +``` + +构建运行容器 + +``` +docker run -it --name rv_runtime \ +--privileged --network host --restart always \ +-v /dev:/dev -v $HOME/.ros:/root/.ros -v ws:/ros_ws \ +hezhexi2002/rm_vision:backup \ +ros2 launch rm_vision_bringup vision_bringup.launch.py +``` + +TBD + +## 源码编译 + +TBD diff --git a/src/rm_vision/docs/rm_vision.svg b/src/rm_vision/docs/rm_vision.svg new file mode 100644 index 0000000..a9d40c6 --- /dev/null +++ b/src/rm_vision/docs/rm_vision.svg @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + diff --git a/src/rm_vision/docs/rm_vision_inside.svg b/src/rm_vision/docs/rm_vision_inside.svg new file mode 100644 index 0000000..9f8c69b --- /dev/null +++ 
b/src/rm_vision/docs/rm_vision_inside.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/src/rm_vision/rm_vision_bringup/CMakeLists.txt b/src/rm_vision/rm_vision_bringup/CMakeLists.txt new file mode 100644 index 0000000..8ff6598 --- /dev/null +++ b/src/rm_vision/rm_vision_bringup/CMakeLists.txt @@ -0,0 +1,16 @@ +cmake_minimum_required(VERSION 3.8) +project(rm_vision_bringup) + +find_package(ament_cmake_auto REQUIRED) + +if(BUILD_TESTING) + find_package(ament_lint_auto REQUIRED) + set(ament_cmake_copyright_FOUND TRUE) + ament_lint_auto_find_test_dependencies() +endif() + +ament_auto_package( + INSTALL_TO_SHARE + launch + config +) diff --git a/src/rm_vision/rm_vision_bringup/config/camera_info.yaml b/src/rm_vision/rm_vision_bringup/config/camera_info.yaml new file mode 100644 index 0000000..b55cf3c --- /dev/null +++ b/src/rm_vision/rm_vision_bringup/config/camera_info.yaml @@ -0,0 +1,26 @@ +image_width: 1440 +image_height: 1080 +camera_name: narrow_stereo +camera_matrix: + rows: 3 + cols: 3 + data: [1807.12121, 0. , 711.11997, + 0. , 1806.46896, 562.49495, + 0. , 0. , 1. ] +distortion_model: plumb_bob +distortion_coefficients: + rows: 1 + cols: 5 + data: [-0.078049, 0.158627, 0.000304, -0.000566, 0.000000] +rectification_matrix: + rows: 3 + cols: 3 + data: [1., 0., 0., + 0., 1., 0., + 0., 0., 1.] +projection_matrix: + rows: 3 + cols: 4 + data: [1790.52356, 0. , 710.04861, 0. , + 0. , 1794.3208 , 562.32704, 0. , + 0. , 0. , 1. , 0. 
] diff --git a/src/rm_vision/rm_vision_bringup/config/launch_params.yaml b/src/rm_vision/rm_vision_bringup/config/launch_params.yaml new file mode 100644 index 0000000..10b60d0 --- /dev/null +++ b/src/rm_vision/rm_vision_bringup/config/launch_params.yaml @@ -0,0 +1,9 @@ +camera: hik + +odom2camera: + xyz: "\"0.10 0.0 0.05\"" + rpy: "\"0.0 0.0 0.0\"" + +detector_log_level: INFO +tracker_log_level: INFO +serial_log_level: INFO diff --git a/src/rm_vision/rm_vision_bringup/config/node_params.yaml b/src/rm_vision/rm_vision_bringup/config/node_params.yaml new file mode 100644 index 0000000..0ee78dc --- /dev/null +++ b/src/rm_vision/rm_vision_bringup/config/node_params.yaml @@ -0,0 +1,47 @@ +/camera_node: + ros__parameters: + camera_info_url: package://rm_vision_bringup/config/camera_info.yaml + exposure_time: 2500 + gain: 8.0 + +/serial_driver: + ros__parameters: + timestamp_offset: 0.006 + device_name: /dev/ttyACM0 + baud_rate: 115200 + flow_control: none + parity: none + stop_bits: "1" + +/armor_detector: + ros__parameters: + debug: true + + detect_color: 0 + binary_thres: 80 + + light.min_ratio: 0.1 + armor.min_light_ratio: 0.8 + + classifier_threshold: 0.8 + ignore_classes: ["negative"] + +/armor_tracker: + ros__parameters: + target_frame: odom + max_armor_distance: 10.0 + + ekf: + sigma2_q_xyz: 0.05 + sigma2_q_yaw: 5.0 + sigma2_q_r: 80.0 + + r_xyz_factor: 4e-4 + r_yaw: 5e-3 + + tracker: + max_match_distance: 0.5 + max_match_yaw_diff: 1.0 + + tracking_thres: 5 + lost_time_thres: 1.0 diff --git a/src/rm_vision/rm_vision_bringup/launch/common.py b/src/rm_vision/rm_vision_bringup/launch/common.py new file mode 100644 index 0000000..c731a60 --- /dev/null +++ b/src/rm_vision/rm_vision_bringup/launch/common.py @@ -0,0 +1,32 @@ +import os +import yaml + +from ament_index_python.packages import get_package_share_directory +from launch.substitutions import Command +from launch_ros.actions import Node + +launch_params = yaml.safe_load(open(os.path.join( + 
get_package_share_directory('rm_vision_bringup'), 'config', 'launch_params.yaml'))) + +robot_description = Command(['xacro ', os.path.join( + get_package_share_directory('rm_gimbal_description'), 'urdf', 'rm_gimbal.urdf.xacro'), + ' xyz:=', launch_params['odom2camera']['xyz'], ' rpy:=', launch_params['odom2camera']['rpy']]) + +robot_state_publisher = Node( + package='robot_state_publisher', + executable='robot_state_publisher', + parameters=[{'robot_description': robot_description, + 'publish_frequency': 1000.0}] +) + +node_params = os.path.join( + get_package_share_directory('rm_vision_bringup'), 'config', 'node_params.yaml') + +tracker_node = Node( + package='armor_tracker', + executable='armor_tracker_node', + output='both', + emulate_tty=True, + parameters=[node_params], + ros_arguments=['--log-level', 'armor_tracker:='+launch_params['tracker_log_level']], +) diff --git a/src/rm_vision/rm_vision_bringup/launch/no_hardware.launch.py b/src/rm_vision/rm_vision_bringup/launch/no_hardware.launch.py new file mode 100644 index 0000000..c550693 --- /dev/null +++ b/src/rm_vision/rm_vision_bringup/launch/no_hardware.launch.py @@ -0,0 +1,27 @@ +import os +import sys +from ament_index_python.packages import get_package_share_directory +sys.path.append(os.path.join(get_package_share_directory('rm_vision_bringup'), 'launch')) + + +def generate_launch_description(): + + from common import launch_params, robot_state_publisher, node_params, tracker_node + from launch_ros.actions import Node + from launch import LaunchDescription + + detector_node = Node( + package='armor_detector', + executable='armor_detector_node', + emulate_tty=True, + output='both', + parameters=[node_params], + arguments=['--ros-args', '--log-level', + 'armor_detector:='+launch_params['detector_log_level']], + ) + + return LaunchDescription([ + robot_state_publisher, + detector_node, + tracker_node, + ]) diff --git a/src/rm_vision/rm_vision_bringup/launch/vision_bringup.launch.py 
b/src/rm_vision/rm_vision_bringup/launch/vision_bringup.launch.py new file mode 100644 index 0000000..07585a9 --- /dev/null +++ b/src/rm_vision/rm_vision_bringup/launch/vision_bringup.launch.py @@ -0,0 +1,82 @@ +import os +import sys +from ament_index_python.packages import get_package_share_directory +sys.path.append(os.path.join(get_package_share_directory('rm_vision_bringup'), 'launch')) + + +def generate_launch_description(): + + from common import node_params, launch_params, robot_state_publisher, tracker_node + from launch_ros.descriptions import ComposableNode + from launch_ros.actions import ComposableNodeContainer, Node + from launch.actions import TimerAction, Shutdown + from launch import LaunchDescription + + def get_camera_node(package, plugin): + return ComposableNode( + package=package, + plugin=plugin, + name='camera_node', + parameters=[node_params], + extra_arguments=[{'use_intra_process_comms': True}] + ) + + def get_camera_detector_container(camera_node): + return ComposableNodeContainer( + name='camera_detector_container', + namespace='', + package='rclcpp_components', + executable='component_container', + composable_node_descriptions=[ + camera_node, + ComposableNode( + package='armor_detector', + plugin='rm_auto_aim::ArmorDetectorNode', + name='armor_detector', + parameters=[node_params], + extra_arguments=[{'use_intra_process_comms': True}] + ) + ], + output='both', + emulate_tty=True, + ros_arguments=['--ros-args', '--log-level', + 'armor_detector:='+launch_params['detector_log_level']], + on_exit=Shutdown(), + ) + + hik_camera_node = get_camera_node('hik_camera', 'hik_camera::HikCameraNode') + mv_camera_node = get_camera_node('mindvision_camera', 'mindvision_camera::MVCameraNode') + + if (launch_params['camera'] == 'hik'): + cam_detector = get_camera_detector_container(hik_camera_node) + elif (launch_params['camera'] == 'mv'): + cam_detector = get_camera_detector_container(mv_camera_node) + + serial_driver_node = Node( + 
package='rm_serial_driver', + executable='rm_serial_driver_node', + name='serial_driver', + output='both', + emulate_tty=True, + parameters=[node_params], + on_exit=Shutdown(), + ros_arguments=['--ros-args', '--log-level', + 'serial_driver:='+launch_params['serial_log_level']], + ) + + delay_serial_node = TimerAction( + period=1.5, + actions=[serial_driver_node], + ) + + delay_tracker_node = TimerAction( + period=2.0, + actions=[tracker_node], + ) + + return LaunchDescription([ + robot_state_publisher, + cam_detector, + delay_serial_node, + delay_tracker_node, + ]) diff --git a/src/rm_vision/rm_vision_bringup/package.xml b/src/rm_vision/rm_vision_bringup/package.xml new file mode 100644 index 0000000..0a7710a --- /dev/null +++ b/src/rm_vision/rm_vision_bringup/package.xml @@ -0,0 +1,18 @@ + + + + rm_vision_bringup + 0.0.0 + TODO: Package description + chenjun + TODO: License declaration + + ament_cmake + + rm_auto_aim + rm_serial_driver + + + ament_cmake + + diff --git a/src/ros2-hik-camera/.clang-format b/src/ros2-hik-camera/.clang-format new file mode 100644 index 0000000..2f8d64b --- /dev/null +++ b/src/ros2-hik-camera/.clang-format @@ -0,0 +1,18 @@ +--- +Language: Cpp +BasedOnStyle: Google + +AccessModifierOffset: -2 +AlignAfterOpenBracket: AlwaysBreak +BraceWrapping: + AfterClass: true + AfterFunction: true + AfterNamespace: true + AfterStruct: true +BreakBeforeBraces: Custom +ColumnLimit: 100 +ConstructorInitializerIndentWidth: 0 +ContinuationIndentWidth: 2 +DerivePointerAlignment: false +PointerAlignment: Middle +ReflowComments: false \ No newline at end of file diff --git a/src/ros2-hik-camera/.clang-tidy b/src/ros2-hik-camera/.clang-tidy new file mode 100644 index 0000000..bf3d848 --- /dev/null +++ b/src/ros2-hik-camera/.clang-tidy @@ -0,0 +1,55 @@ +--- +Checks: '-*, + performance-*, + -performance-unnecessary-value-param, + llvm-namespace-comment, + modernize-redundant-void-arg, + modernize-use-nullptr, + modernize-use-default, + modernize-use-override, + 
modernize-loop-convert, + modernize-make-shared, + modernize-make-unique, + misc-unused-parameters, + readability-named-parameter, + readability-redundant-smartptr-get, + readability-redundant-string-cstr, + readability-simplify-boolean-expr, + readability-container-size-empty, + readability-identifier-naming, + ' +HeaderFilterRegex: '' +AnalyzeTemporaryDtors: false +CheckOptions: + - key: llvm-namespace-comment.ShortNamespaceLines + value: '10' + - key: llvm-namespace-comment.SpacesBeforeComments + value: '2' + - key: misc-unused-parameters.StrictMode + value: '1' + - key: readability-braces-around-statements.ShortStatementLines + value: '2' + # type names + - key: readability-identifier-naming.ClassCase + value: CamelCase + - key: readability-identifier-naming.EnumCase + value: CamelCase + - key: readability-identifier-naming.UnionCase + value: CamelCase + # method names + - key: readability-identifier-naming.MethodCase + value: camelBack + # variable names + - key: readability-identifier-naming.VariableCase + value: lower_case + - key: readability-identifier-naming.ClassMemberSuffix + value: '_' + # const static or global variables are UPPER_CASE + - key: readability-identifier-naming.EnumConstantCase + value: UPPER_CASE + - key: readability-identifier-naming.StaticConstantCase + value: UPPER_CASE + - key: readability-identifier-naming.ClassConstantCase + value: UPPER_CASE + - key: readability-identifier-naming.GlobalVariableCase + value: UPPER_CASE \ No newline at end of file diff --git a/src/ros2-hik-camera/CMakeLists.txt b/src/ros2-hik-camera/CMakeLists.txt new file mode 100644 index 0000000..a142fdb --- /dev/null +++ b/src/ros2-hik-camera/CMakeLists.txt @@ -0,0 +1,67 @@ +cmake_minimum_required(VERSION 3.8) +project(hik_camera) + +## Use C++14 +set(CMAKE_CXX_STANDARD 14) +set(CMAKE_CXX_STANDARD_REQUIRED ON) + +## By adding -Wall and -Werror, the compiler does not ignore warnings anymore, +## enforcing cleaner code. 
+add_definitions(-Wall -Werror) + +## Export compile commands for clangd +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) + +find_package(ament_cmake_auto REQUIRED) +ament_auto_find_build_dependencies() + +ament_auto_add_library(${PROJECT_NAME} SHARED + src/hik_camera_node.cpp +) + +target_include_directories(${PROJECT_NAME} PUBLIC hikSDK/include) + +if(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64") + target_link_directories(${PROJECT_NAME} PUBLIC hikSDK/lib/amd64) + install( + DIRECTORY hikSDK/lib/amd64/ + DESTINATION lib + ) +elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64") + target_link_directories(${PROJECT_NAME} PUBLIC hikSDK/lib/arm64) + install( + DIRECTORY hikSDK/lib/arm64/ + DESTINATION lib + ) +else() + message(FATAL_ERROR "Unsupport host system architecture: ${CMAKE_HOST_SYSTEM_PROCESSOR}!") +endif() + +target_link_libraries(${PROJECT_NAME} + FormatConversion + MediaProcess + MvCameraControl + MVRender + MvUsb3vTL +) + +rclcpp_components_register_node(${PROJECT_NAME} + PLUGIN hik_camera::HikCameraNode + EXECUTABLE ${PROJECT_NAME}_node +) + +if(BUILD_TESTING) + find_package(ament_lint_auto REQUIRED) + list(APPEND AMENT_LINT_AUTO_EXCLUDE + ament_cmake_copyright + ament_cmake_cpplint + ament_cmake_uncrustify + ) + ament_lint_auto_find_test_dependencies() +endif() + +ament_auto_package( + INSTALL_TO_SHARE + config + launch +) diff --git a/src/ros2-hik-camera/README.md b/src/ros2-hik-camera/README.md new file mode 100644 index 0000000..b913395 --- /dev/null +++ b/src/ros2-hik-camera/README.md @@ -0,0 +1,14 @@ +# ros2_hik_camera + +A ROS2 packge for Hikvision USB3.0 industrial camera + +## Usage + +``` +ros2 launch hik_camera hik_camera.launch.py +``` + +## Params + +- exposure_time +- gain diff --git a/src/ros2-hik-camera/config/camera_info.yaml b/src/ros2-hik-camera/config/camera_info.yaml new file mode 100644 index 0000000..0f509df --- /dev/null +++ b/src/ros2-hik-camera/config/camera_info.yaml @@ -0,0 +1,20 @@ +image_width: 640 +image_height: 480 +camera_name: 
narrow_stereo +camera_matrix: + rows: 3 + cols: 3 + data: [1320.127401, 0.000000, 609.902940, 0.000000, 1329.050651, 457.308236, 0.000000, 0.000000, 1.000000] +distortion_model: plumb_bob +distortion_coefficients: + rows: 1 + cols: 5 + data: [-0.034135, 0.131210, -0.015866, -0.004433, 0.000000] +rectification_matrix: + rows: 3 + cols: 3 + data: [1.000000, 0.000000, 0.000000, 0.000000, 1.000000, 0.000000, 0.000000, 0.000000, 1.000000] +projection_matrix: + rows: 3 + cols: 4 + data: [1337.148438, 0.000000, 605.640265, 0.000000, 0.000000, 1333.494019, 444.618912, 0.000000, 0.000000, 0.000000, 1.000000, 0.000000] diff --git a/src/ros2-hik-camera/config/camera_params.yaml b/src/ros2-hik-camera/config/camera_params.yaml new file mode 100644 index 0000000..7f5f3cd --- /dev/null +++ b/src/ros2-hik-camera/config/camera_params.yaml @@ -0,0 +1,6 @@ +/hik_camera: + ros__parameters: + camera_name: narrow_stereo + + exposure_time: 6000 + gain: 32.0 diff --git a/src/ros2-hik-camera/hikSDK/include/CameraParams.h b/src/ros2-hik-camera/hikSDK/include/CameraParams.h new file mode 100644 index 0000000..e4b1428 --- /dev/null +++ b/src/ros2-hik-camera/hikSDK/include/CameraParams.h @@ -0,0 +1,1081 @@ + +#ifndef _MV_CAMERA_PARAMS_H_ +#define _MV_CAMERA_PARAMS_H_ + +#include "PixelType.h" + +#ifndef __cplusplus +typedef char bool; +#define true 1 +#define false 0 +#endif + +/// \~chinese +/// 设备类型定义 +/// \~english +/// Device Type Definition +#define MV_UNKNOW_DEVICE 0x00000000 ///< \~chinese 未知设备类型,保留意义 \~english Unknown Device Type, Reserved +#define MV_GIGE_DEVICE 0x00000001 ///< \~chinese GigE设备 \~english GigE Device +#define MV_1394_DEVICE 0x00000002 ///< \~chinese 1394-a/b 设备 \~english 1394-a/b Device +#define MV_USB_DEVICE 0x00000004 ///< \~chinese USB 设备 \~english USB Device +#define MV_CAMERALINK_DEVICE 0x00000008 ///< \~chinese CamLink设备 \~english CamLink Device + +/// \~chinese GigE设备信息 \~english GigE device info +typedef struct _MV_GIGE_DEVICE_INFO_ +{ + unsigned int 
nIpCfgOption;
+    unsigned int        nIpCfgCurrent;       ///< \~chinese \~english IP configuration:bit31-static bit30-dhcp bit29-lla
+    unsigned int        nCurrentIp;
+    unsigned int        nCurrentSubNetMask;  ///< \~chinese \~english current subnet mask
+    unsigned int        nDefultGateWay;      ///< \~chinese \~english current gateway (NOTE(review): "Defult" typo is part of the published SDK ABI; do not rename)
+    unsigned char       chManufacturerName[32];
+    unsigned char       chModelName[32];
+    unsigned char       chDeviceVersion[32];
+    unsigned char       chManufacturerSpecificInfo[48];
+    unsigned char       chSerialNumber[16];
+    unsigned char       chUserDefinedName[16];
+    unsigned int        nNetExport;          ///< \~chinese 网口IP地址 \~english NetWork IP Address
+
+    unsigned int        nReserved[4];
+}MV_GIGE_DEVICE_INFO;
+
+#define INFO_MAX_BUFFER_SIZE 64
+
+/// \~chinese USB设备信息 \~english USB device info
+typedef struct _MV_USB3_DEVICE_INFO_
+{
+    unsigned char       CrtlInEndPoint;      ///< \~chinese 控制输入端点 \~english Control input endpoint
+    unsigned char       CrtlOutEndPoint;     ///< \~chinese 控制输出端点 \~english Control output endpoint
+    unsigned char       StreamEndPoint;      ///< \~chinese 流端点 \~english Flow endpoint
+    unsigned char       EventEndPoint;       ///< \~chinese 事件端点 \~english Event endpoint
+    unsigned short      idVendor;            ///< \~chinese 供应商ID号 \~english Vendor ID Number
+    unsigned short      idProduct;           ///< \~chinese 产品ID号 \~english Device ID Number
+    unsigned int        nDeviceNumber;       ///< \~chinese 设备序列号 \~english Device Serial Number
+    unsigned char       chDeviceGUID[INFO_MAX_BUFFER_SIZE];       ///< \~chinese 设备GUID号 \~english Device GUID Number
+    unsigned char       chVendorName[INFO_MAX_BUFFER_SIZE];       ///< \~chinese 供应商名字 \~english Vendor Name
+    unsigned char       chModelName[INFO_MAX_BUFFER_SIZE];        ///< \~chinese 型号名字 \~english Model Name
+    unsigned char       chFamilyName[INFO_MAX_BUFFER_SIZE];       ///< \~chinese 家族名字 \~english Family Name
+    unsigned char       chDeviceVersion[INFO_MAX_BUFFER_SIZE];    ///< \~chinese 设备版本号 \~english Device Version
+    unsigned char       chManufacturerName[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 制造商名字 \~english Manufacturer Name
+    unsigned char       chSerialNumber[INFO_MAX_BUFFER_SIZE];     ///< \~chinese 序列号 \~english Serial Number
+    unsigned char       chUserDefinedName[INFO_MAX_BUFFER_SIZE];  ///< \~chinese 用户自定义名字 \~english User Defined Name
+    unsigned int        nbcdUSB;             ///< \~chinese 支持的USB协议 \~english Support USB Protocol
+
+    unsigned int        nReserved[3];        ///< \~chinese 保留字节 \~english Reserved bytes
+}MV_USB3_DEVICE_INFO;
+
+/// \~chinese
+/// \brief CamLink设备信息
+/// \~english
+/// \brief CamLink device info
+typedef struct _MV_CamL_DEV_INFO_
+{
+    unsigned char       chPortID[INFO_MAX_BUFFER_SIZE];
+    unsigned char       chModelName[INFO_MAX_BUFFER_SIZE];
+    unsigned char       chFamilyName[INFO_MAX_BUFFER_SIZE];
+    unsigned char       chDeviceVersion[INFO_MAX_BUFFER_SIZE];
+    unsigned char       chManufacturerName[INFO_MAX_BUFFER_SIZE];
+    unsigned char       chSerialNumber[INFO_MAX_BUFFER_SIZE];
+
+    unsigned int        nReserved[38];
+}MV_CamL_DEV_INFO;
+
+/// \~chinese
+/// \brief 设备信息
+/// \~english
+/// \brief Device info
+typedef struct _MV_CC_DEVICE_INFO_
+{
+    unsigned short      nMajorVer;
+    unsigned short      nMinorVer;
+    unsigned int        nMacAddrHigh;        ///< \~chinese MAC 地址\~english MAC Address
+    unsigned int        nMacAddrLow;
+
+    unsigned int        nTLayerType;         ///< \~chinese 设备传输层协议类型,e.g. MV_GIGE_DEVICE\~english Device Transport Layer Protocol Type, e.g. MV_GIGE_DEVICE
+
+    unsigned int        nReserved[4];
+
+    // \~english Transport-layer-specific info; which member is valid is selected by nTLayerType above.
+    union
+    {
+        MV_GIGE_DEVICE_INFO stGigEInfo;
+        MV_USB3_DEVICE_INFO stUsb3VInfo;
+        MV_CamL_DEV_INFO    stCamLInfo;
+        // more ...
+    }SpecialInfo;
+
+}MV_CC_DEVICE_INFO;
+
+/// \~chinese 网络传输的相关信息\~english Network transmission information
+typedef struct _MV_NETTRANS_INFO_
+{
+    int64_t         nReviceDataSize;            ///< \~chinese 已接收数据大小 [统计StartGrabbing和StopGrabbing之间的数据量]\~english Received Data Size [Calculate the Data Size between StartGrabbing and StopGrabbing]
+    int             nThrowFrameCount;           ///< \~chinese 丢帧数量\~english Throw frame number
+    unsigned int    nNetRecvFrameCount;
+    int64_t         nRequestResendPacketCount;  ///< \~chinese 请求重发包数 \~english Request resend packet count
+    int64_t         nResendPacketCount;         ///< \~chinese 重发包数 \~english Resend packet count
+
+}MV_NETTRANS_INFO;
+
+#define MV_MAX_TLS_NUM      8       ///< \~chinese 最多支持的传输层实例个数\~english The maximum number of supported transport layer instances
+#define MV_MAX_DEVICE_NUM   256     ///< \~chinese 最大支持的设备个数\~english The maximum number of supported devices
+
+/// \~chinese
+/// \brief 设备信息列表
+/// \~english
+/// \brief Device Information List
+typedef struct _MV_CC_DEVICE_INFO_LIST_
+{
+    unsigned int        nDeviceNum;                         ///< \~chinese 在线设备数量\~english Online Device Number
+    MV_CC_DEVICE_INFO*  pDeviceInfo[MV_MAX_DEVICE_NUM];     ///< \~chinese 支持最多256个设备\~english Support up to 256 devices
+
+}MV_CC_DEVICE_INFO_LIST;
+
+
+/// \~chinese Chunk内容 \~english The content of ChunkData
+typedef struct _MV_CHUNK_DATA_CONTENT_
+{
+    unsigned char*  pChunkData;
+    unsigned int    nChunkID;
+    unsigned int    nChunkLen;
+
+    unsigned int    nReserved[8];       // \~chinese 保留 \~english Reserved
+
+}MV_CHUNK_DATA_CONTENT;
+
+/// \~chinese 输出帧的信息\~english Output Frame Information
+typedef struct _MV_FRAME_OUT_INFO_
+{
+    unsigned short      nWidth;             ///< \~chinese 图像宽 \~english Image Width
+    unsigned short      nHeight;            ///< \~chinese 图像高 \~english Image Height
+    enum MvGvspPixelType enPixelType;       ///< \~chinese 像素格式 \~english Pixel Type
+
+    unsigned int        nFrameNum;          ///< \~chinese 帧号 \~english Frame Number
+    unsigned int        nDevTimeStampHigh;  ///< \~chinese 时间戳高32位\~english Timestamp high 32 bits
+    unsigned int        nDevTimeStampLow;   ///< \~chinese 时间戳低32位\~english Timestamp low 32 bits
+    unsigned int        nReserved0;         ///< \~chinese 保留,8字节对齐\~english Reserved, 8-byte aligned
+    int64_t             nHostTimeStamp;     ///< \~chinese 主机生成的时间戳\~english Host-generated timestamp
+
+    unsigned int        nFrameLen;
+
+    unsigned int        nLostPacket;        ///< \~chinese 本帧丢包数\~english Lost Packet Number In This Frame
+    unsigned int        nReserved[2];
+}MV_FRAME_OUT_INFO;
+
+/// \~chinese 输出帧的信息\~english Output Frame Information
+typedef struct _MV_FRAME_OUT_INFO_EX_
+{
+    unsigned short      nWidth;             ///< \~chinese 图像宽 \~english Image Width
+    unsigned short      nHeight;            ///< \~chinese 图像高 \~english Image Height
+    enum MvGvspPixelType enPixelType;       ///< \~chinese 像素格式 \~english Pixel Type
+
+    unsigned int        nFrameNum;          ///< \~chinese 帧号 \~english Frame Number
+    unsigned int        nDevTimeStampHigh;  ///< \~chinese 时间戳高32位\~english Timestamp high 32 bits
+    unsigned int        nDevTimeStampLow;   ///< \~chinese 时间戳低32位\~english Timestamp low 32 bits
+    unsigned int        nReserved0;         ///< \~chinese 保留,8字节对齐\~english Reserved, 8-byte aligned
+    int64_t             nHostTimeStamp;     ///< \~chinese 主机生成的时间戳\~english Host-generated timestamp
+
+    unsigned int        nFrameLen;
+
+    /// \~chinese 设备水印时标\~english Device frame-specific time scale
+    unsigned int        nSecondCount;
+    unsigned int        nCycleCount;
+    unsigned int        nCycleOffset;
+
+    float               fGain;
+    float               fExposureTime;
+    unsigned int        nAverageBrightness; ///< \~chinese 平均亮度\~english Average brightness
+
+    /// \~chinese 白平衡相关\~english White balance
+    unsigned int        nRed;
+    unsigned int        nGreen;
+    unsigned int        nBlue;
+
+    unsigned int        nFrameCounter;
+    unsigned int        nTriggerIndex;      ///< \~chinese 触发计数\~english Trigger Counting
+
+    unsigned int        nInput;             ///< \~chinese 输入\~english Input
+    unsigned int        nOutput;            ///< \~chinese 输出\~english Output
+
+    /// \~chinese ROI区域\~english ROI Region
+    unsigned short      nOffsetX;
+    unsigned short      nOffsetY;
+    unsigned short      nChunkWidth;
+    unsigned short      nChunkHeight;
+
+    unsigned int        nLostPacket;        ///< \~chinese 本帧丢包数\~english Lost Packet Number In This Frame
+
+    unsigned int        nUnparsedChunkNum;  ///< \~chinese 未解析的Chunkdata个数 \~english Number of unparsed chunk data entries
+    union
+    {
+
MV_CHUNK_DATA_CONTENT* pUnparsedChunkContent;
+        int64_t nAligning;
+    }UnparsedChunkList;
+
+    unsigned int        nReserved[36];      // \~chinese 保留 \~english Reserved
+}MV_FRAME_OUT_INFO_EX;
+
+/// \~chinese 图像结构体,输出图像指针地址及图像信息\~english Image Struct, output the pointer of Image and the information of the specific image
+typedef struct _MV_FRAME_OUT_
+{
+    unsigned char*      pBufAddr;       ///< \~chinese 图像指针地址\~english pointer of image
+    MV_FRAME_OUT_INFO_EX stFrameInfo;   ///< \~chinese 图像信息\~english information of the specific image
+
+    unsigned int        nRes[16];       ///< \~chinese 保留\~english reserved
+}MV_FRAME_OUT;
+
+
+typedef struct _MV_DISPLAY_FRAME_INFO_
+{
+    void*               hWnd;           ///< \~chinese 窗口句柄\~english HWND
+    unsigned char*      pData;          ///< \~chinese 显示的数据\~english Data Buffer
+    unsigned int        nDataLen;       ///< \~chinese 数据长度\~english Data Size
+    unsigned short      nWidth;         ///< \~chinese 图像宽\~english Width
+    unsigned short      nHeight;        ///< \~chinese 图像高\~english Height
+    enum MvGvspPixelType enPixelType;   ///< \~chinese 像素格式\~english Pixel format
+    unsigned int        nRes[4];
+
+}MV_DISPLAY_FRAME_INFO;
+
+/// \~chinese 保存图片格式\~english Save image type
+// NOTE(review): "IAMGE" is a long-standing typo in the published SDK name; it must stay as-is for API compatibility.
+enum MV_SAVE_IAMGE_TYPE
+{
+    MV_Image_Undefined = 0,
+    MV_Image_Bmp       = 1,
+    MV_Image_Jpeg      = 2,
+    MV_Image_Png       = 3,
+    MV_Image_Tif       = 4,
+};
+
+/// \~chinese 图片保存参数\~english Save Image Parameters
+typedef struct _MV_SAVE_IMAGE_PARAM_T_
+{
+    unsigned char*      pData;          ///< [IN] \~chinese 输入数据缓存\~english Input Data Buffer
+    unsigned int        nDataLen;       ///< [IN] \~chinese 输入数据大小\~english Input Data Size
+    enum MvGvspPixelType enPixelType;   ///< [IN] \~chinese 输入数据的像素格式\~english Input Data Pixel Format
+    unsigned short      nWidth;         ///< [IN] \~chinese 图像宽\~english Image Width
+    unsigned short      nHeight;        ///< [IN] \~chinese 图像高\~english Image Height
+
+    unsigned char*      pImageBuffer;   ///< [OUT] \~chinese 输出图片缓存\~english Output Image Buffer
+    unsigned int        nImageLen;      ///< [OUT] \~chinese 输出图片大小\~english Output Image Size
+    unsigned int        nBufferSize;    ///< [IN] \~chinese 提供的输出缓冲区大小\~english Output buffer size provided
+    enum MV_SAVE_IAMGE_TYPE enImageType;    ///< [IN] \~chinese 输出图片格式\~english Output Image Format
+
+}MV_SAVE_IMAGE_PARAM;
+
+/// \~chinese 图片保存参数\~english Save Image Parameters
+typedef struct _MV_SAVE_IMAGE_PARAM_T_EX_
+{
+    unsigned char*      pData;          ///< [IN] \~chinese 输入数据缓存\~english Input Data Buffer
+    unsigned int        nDataLen;       ///< [IN] \~chinese 输入数据大小\~english Input Data Size
+    enum MvGvspPixelType enPixelType;   ///< [IN] \~chinese 输入数据的像素格式\~english Input Data Pixel Format
+    unsigned short      nWidth;         ///< [IN] \~chinese 图像宽\~english Image Width
+    unsigned short      nHeight;        ///< [IN] \~chinese 图像高\~english Image Height
+
+    unsigned char*      pImageBuffer;   ///< [OUT] \~chinese 输出图片缓存\~english Output Image Buffer
+    unsigned int        nImageLen;      ///< [OUT] \~chinese 输出图片大小\~english Output Image Size
+    unsigned int        nBufferSize;    ///< [IN] \~chinese 提供的输出缓冲区大小\~english Output buffer size provided
+    enum MV_SAVE_IAMGE_TYPE enImageType;    ///< [IN] \~chinese 输出图片格式\~english Output Image Format
+    unsigned int        nJpgQuality;    ///< [IN] \~chinese JPG编码质量(50-99],其它格式无效\~english Encoding quality(50-99],Other formats are invalid
+
+    ///< [IN] \~chinese Bayer格式转为RGB24的插值方法 0-最近邻 1-双线性 2-Hamilton (如果传入其它值则默认为最近邻)
+    ///< [IN] \~english Interpolation method of convert Bayer to RGB24 0-nearest neighbour 1-bilinearity 2-Hamilton (other values fall back to nearest neighbour)
+    unsigned int        iMethodValue;
+    unsigned int        nReserved[3];
+
+}MV_SAVE_IMAGE_PARAM_EX;
+
+/// \~chinese 旋转角度 \~english Rotation angle
+typedef enum _MV_IMG_ROTATION_ANGLE_
+{
+    MV_IMAGE_ROTATE_90  = 1,
+    MV_IMAGE_ROTATE_180 = 2,
+    MV_IMAGE_ROTATE_270 = 3,
+
+}MV_IMG_ROTATION_ANGLE;
+
+/// \~chinese 图像旋转结构体 \~english Rotate image structure
+typedef struct _MV_CC_ROTATE_IMAGE_PARAM_T_
+{
+    enum MvGvspPixelType enPixelType;   ///< [IN] \~chinese 像素格式 \~english Pixel format
+    unsigned int        nWidth;         ///< [IN][OUT] \~chinese 图像宽 \~english Width
+    unsigned int        nHeight;        ///< [IN][OUT] \~chinese 图像高 \~english Height
+
+    unsigned char*      pSrcData;       ///< [IN] \~chinese 输入数据缓存 \~english Input data buffer
+
unsigned int nSrcDataLen;               ///< [IN] \~chinese 输入数据长度 \~english Input data length
+
+    unsigned char*      pDstBuf;        ///< [OUT] \~chinese 输出数据缓存 \~english Output data buffer
+    unsigned int        nDstBufLen;     ///< [OUT] \~chinese 输出数据长度 \~english Output data length
+    unsigned int        nDstBufSize;    ///< [IN] \~chinese 提供的输出缓冲区大小 \~english Provided output buffer size
+
+    MV_IMG_ROTATION_ANGLE enRotationAngle;  ///< [IN] \~chinese 旋转角度 \~english Rotation angle
+
+    unsigned int        nRes[8];        ///< \~chinese 预留 \~english Reserved
+
+}MV_CC_ROTATE_IMAGE_PARAM;
+
+/// \~chinese 翻转类型 \~english Flip type
+typedef enum _MV_IMG_FLIP_TYPE_
+{
+    MV_FLIP_VERTICAL   = 1,
+    MV_FLIP_HORIZONTAL = 2,
+
+}MV_IMG_FLIP_TYPE;
+
+/// \~chinese 图像翻转结构体 \~english Flip image structure
+typedef struct _MV_CC_FLIP_IMAGE_PARAM_T_
+{
+    enum MvGvspPixelType enPixelType;   ///< [IN] \~chinese 像素格式 \~english Pixel format
+    unsigned int        nWidth;         ///< [IN] \~chinese 图像宽 \~english Width
+    unsigned int        nHeight;        ///< [IN] \~chinese 图像高 \~english Height
+
+    unsigned char*      pSrcData;       ///< [IN] \~chinese 输入数据缓存 \~english Input data buffer
+    unsigned int        nSrcDataLen;    ///< [IN] \~chinese 输入数据长度 \~english Input data length
+
+    unsigned char*      pDstBuf;        ///< [OUT] \~chinese 输出数据缓存 \~english Output data buffer
+    unsigned int        nDstBufLen;     ///< [OUT] \~chinese 输出数据长度 \~english Output data length
+    unsigned int        nDstBufSize;    ///< [IN] \~chinese 提供的输出缓冲区大小 \~english Provided output buffer size
+
+    MV_IMG_FLIP_TYPE    enFlipType;     ///< [IN] \~chinese 翻转类型 \~english Flip type
+
+    unsigned int        nRes[8];        ///< \~chinese 预留 \~english Reserved
+
+}MV_CC_FLIP_IMAGE_PARAM;
+
+
+/// \~chinese 图像转换结构体 \~english Pixel convert structure
+typedef struct _MV_PIXEL_CONVERT_PARAM_T_
+{
+    unsigned short      nWidth;         ///< [IN] \~chinese 图像宽 \~english Width
+    unsigned short      nHeight;        ///< [IN] \~chinese 图像高\~english Height
+
+    enum MvGvspPixelType enSrcPixelType;    ///< [IN] \~chinese 源像素格式\~english Source pixel format
+    unsigned char*      pSrcData;       ///< [IN] \~chinese 输入数据缓存\~english Input data buffer
+    unsigned int        nSrcDataLen;    ///< [IN] \~chinese 输入数据大小\~english Input data size
+
+    enum MvGvspPixelType enDstPixelType;    ///< [IN] \~chinese 目标像素格式\~english Destination pixel format
+    unsigned char*      pDstBuffer;     ///< [OUT] \~chinese 输出数据缓存\~english Output data buffer
+    unsigned int        nDstLen;        ///< [OUT] \~chinese 输出数据大小\~english Output data size
+    unsigned int        nDstBufferSize; ///< [IN] \~chinese 提供的输出缓冲区大小\~english Provided output buffer size
+
+    unsigned int        nRes[4];
+}MV_CC_PIXEL_CONVERT_PARAM;
+
+/// \~chinese Gamma类型 \~english Gamma type
+typedef enum _MV_CC_GAMMA_TYPE_
+{
+    MV_CC_GAMMA_TYPE_NONE       = 0,    ///< \~chinese 不启用 \~english Disable
+    MV_CC_GAMMA_TYPE_VALUE      = 1,    ///< \~chinese Gamma值 \~english Gamma value
+    MV_CC_GAMMA_TYPE_USER_CURVE = 2,    ///< \~chinese Gamma曲线 \~english Gamma curve
+                                        ///< \~chinese 长度:256*sizeof(unsigned char) \~english 8bit,length:256*sizeof(unsigned char)
+    MV_CC_GAMMA_TYPE_LRGB2SRGB  = 3,    ///< \~chinese linear RGB to sRGB \~english linear RGB to sRGB
+    MV_CC_GAMMA_TYPE_SRGB2LRGB  = 4,    ///< \~chinese sRGB to linear RGB(仅色彩插值时支持,色彩校正时无效) \~english sRGB to linear RGB
+
+}MV_CC_GAMMA_TYPE;
+
+// \~chinese Gamma信息 \~english Gamma information
+/// \~chinese Gamma信息结构体 \~english Gamma info structure
+typedef struct _MV_CC_GAMMA_PARAM_T_
+{
+    MV_CC_GAMMA_TYPE    enGammaType;    ///< [IN] \~chinese Gamma类型 \~english Gamma type
+    float               fGammaValue;    ///< [IN] \~chinese Gamma值[0.1,4.0] \~english Gamma value[0.1,4.0]
+    unsigned char*      pGammaCurveBuf; ///< [IN] \~chinese Gamma曲线缓存 \~english Gamma curve buffer
+    unsigned int        nGammaCurveBufLen;  ///< [IN] \~chinese Gamma曲线长度 \~english Gamma curve buffer size
+
+    unsigned int        nRes[8];        ///< \~chinese 预留 \~english Reserved
+
+}MV_CC_GAMMA_PARAM;
+
+/// \~chinese 水印信息 \~english Frame-specific information
+typedef struct _MV_CC_FRAME_SPEC_INFO_
+{
+    /// \~chinese 设备水印时标 \~english Device frame-specific time scale
+    unsigned int        nSecondCount;   ///< [OUT] \~chinese 秒数 \~english The Seconds
+    unsigned int        nCycleCount;    ///< [OUT] \~chinese 周期数
///< \~english The Count of Cycle
+    unsigned int        nCycleOffset;   ///< [OUT] \~chinese 周期偏移量 \~english The Offset of Cycle
+
+    float               fGain;          ///< [OUT] \~chinese 增益 \~english Gain
+    float               fExposureTime;  ///< [OUT] \~chinese 曝光时间 \~english Exposure Time
+    unsigned int        nAverageBrightness; ///< [OUT] \~chinese 平均亮度 \~english Average brightness
+
+    /// \~chinese 白平衡相关 \~english White balance
+    unsigned int        nRed;           ///< [OUT] \~chinese 红色 \~english Red
+    unsigned int        nGreen;         ///< [OUT] \~chinese 绿色 \~english Green
+    unsigned int        nBlue;          ///< [OUT] \~chinese 蓝色 \~english Blue
+
+    unsigned int        nFrameCounter;  ///< [OUT] \~chinese 总帧数 \~english Frame Counter
+    unsigned int        nTriggerIndex;  ///< [OUT] \~chinese 触发计数 \~english Trigger Counting
+
+    unsigned int        nInput;         ///< [OUT] \~chinese 输入 \~english Input
+    unsigned int        nOutput;        ///< [OUT] \~chinese 输出 \~english Output
+
+    /// \~chinese ROI区域 \~english ROI Region
+    unsigned short      nOffsetX;       ///< [OUT] \~chinese 水平偏移量 \~english OffsetX
+    unsigned short      nOffsetY;       ///< [OUT] \~chinese 垂直偏移量 \~english OffsetY
+    unsigned short      nFrameWidth;    ///< [OUT] \~chinese 水印宽 \~english The Width of Chunk
+    unsigned short      nFrameHeight;   ///< [OUT] \~chinese 水印高 \~english The Height of Chunk
+
+    unsigned int        nReserved[16];  ///< \~chinese 预留 \~english Reserved
+
+}MV_CC_FRAME_SPEC_INFO;
+
+/// \~chinese 无损解码参数 \~english High Bandwidth decode structure
+typedef struct _MV_CC_HB_DECODE_PARAM_T_
+{
+    unsigned char*      pSrcBuf;        ///< [IN] \~chinese 输入数据缓存 \~english Input data buffer
+    unsigned int        nSrcLen;        ///< [IN] \~chinese 输入数据大小 \~english Input data size
+
+    unsigned int        nWidth;         ///< [OUT] \~chinese 图像宽 \~english Width
+    unsigned int        nHeight;        ///< [OUT] \~chinese 图像高 \~english Height
+    unsigned char*      pDstBuf;        ///< [OUT] \~chinese 输出数据缓存 \~english Output data buffer
+    unsigned int        nDstBufSize;    ///< [IN] \~chinese 提供的输出缓冲区大小 \~english Provided output buffer size
+    unsigned int        nDstBufLen;     ///< [OUT] \~chinese 输出数据大小 \~english Output data size
+    enum MvGvspPixelType enDstPixelType;    ///< [OUT] \~chinese 输出的像素格式 \~english Output pixel format
+
+    MV_CC_FRAME_SPEC_INFO stFrameSpecInfo;  ///< [OUT] \~chinese 水印信息 \~english Frame Spec Info
+
+    unsigned int        nRes[8];        ///< \~chinese 预留 \~english Reserved
+
+}MV_CC_HB_DECODE_PARAM;
+
+/// \~chinese 录像格式定义\~english Record Format Type
+typedef enum _MV_RECORD_FORMAT_TYPE_
+{
+    MV_FormatType_Undefined = 0,
+    MV_FormatType_AVI       = 1,
+
+}MV_RECORD_FORMAT_TYPE;
+
+/// \~chinese 录像参数\~english Record Parameters
+typedef struct _MV_CC_RECORD_PARAM_T_
+{
+    enum MvGvspPixelType enPixelType;   ///< [IN] \~chinese 输入数据的像素格式 \~english Input data pixel format
+
+    unsigned short      nWidth;         ///< [IN] \~chinese 图像宽(指定目标参数时需为2的倍数) \~english Image width (must be a multiple of 2 when specifying target parameters)
+    unsigned short      nHeight;        ///< [IN] \~chinese 图像高(指定目标参数时需为2的倍数) \~english Image height (must be a multiple of 2 when specifying target parameters)
+
+    float               fFrameRate;     ///< [IN] \~chinese 帧率fps(1/16-120) \~english Frame rate in fps (1/16-120)
+    unsigned int        nBitRate;       ///< [IN] \~chinese 码率kbps(128kbps-16Mbps) \~english Bit rate in kbps (128kbps-16Mbps)
+
+    MV_RECORD_FORMAT_TYPE enRecordFmtType;  ///< [IN] \~chinese 录像格式 \~english Record format
+
+    char*               strFilePath;    ///< [IN] \~chinese 录像文件存放路径(如果路径中存在中文,需转成utf-8) \~english Record file path (must be UTF-8 encoded if it contains Chinese characters)
+
+    unsigned int        nRes[8];
+
+}MV_CC_RECORD_PARAM;
+
+/// \~chinese 录像数据\~english Record Data
+typedef struct _MV_CC_INPUT_FRAME_INFO_T_
+{
+    unsigned char*      pData;          ///< [IN] \~chinese 图像数据指针 \~english Image data pointer
+    unsigned int        nDataLen;       ///< [IN] \~chinese 图像大小 \~english Image size
+
+    unsigned int        nRes[8];
+
+}MV_CC_INPUT_FRAME_INFO;
+
+/// \~chinese 采集模式\~english Acquisition mode
+typedef enum _MV_CAM_ACQUISITION_MODE_
+{
+    MV_ACQ_MODE_SINGLE     = 0,         ///< \~chinese 单帧模式\~english Single Mode
+    MV_ACQ_MODE_MUTLI      = 1,         ///< \~chinese 多帧模式\~english Multi Mode
+    MV_ACQ_MODE_CONTINUOUS = 2,         ///< \~chinese 持续采集模式\~english Continuous Mode
+
+}MV_CAM_ACQUISITION_MODE;
+
+/// \~chinese 增益模式\~english Gain Mode
+typedef enum _MV_CAM_GAIN_MODE_
+{
+    MV_GAIN_MODE_OFF        = 0,        ///< \~chinese 关闭\~english Off
+    MV_GAIN_MODE_ONCE       = 1,        ///< \~chinese 一次\~english Once
+    MV_GAIN_MODE_CONTINUOUS = 2,        ///< \~chinese 连续\~english Continuous
+
+}MV_CAM_GAIN_MODE;
+
+/// \~chinese 曝光模式\~english Exposure Mode
+typedef enum _MV_CAM_EXPOSURE_MODE_
+{
+
MV_EXPOSURE_MODE_TIMED              = 0,    ///< Timed
+    MV_EXPOSURE_MODE_TRIGGER_WIDTH  = 1,    ///< TriggerWidth
+}MV_CAM_EXPOSURE_MODE;
+
+/// \~chinese 自动曝光模式 \~english Auto Exposure Mode
+typedef enum _MV_CAM_EXPOSURE_AUTO_MODE_
+{
+    MV_EXPOSURE_AUTO_MODE_OFF        = 0,   ///< \~chinese 关闭\~english Off
+    MV_EXPOSURE_AUTO_MODE_ONCE       = 1,   ///< \~chinese 一次\~english Once
+    MV_EXPOSURE_AUTO_MODE_CONTINUOUS = 2,   ///< \~chinese 连续\~english Continuous
+
+}MV_CAM_EXPOSURE_AUTO_MODE;
+
+/// \~chinese 触发模式 \~english Trigger Mode
+typedef enum _MV_CAM_TRIGGER_MODE_
+{
+    MV_TRIGGER_MODE_OFF = 0,                ///< \~chinese 关闭\~english Off
+    MV_TRIGGER_MODE_ON  = 1,                ///< \~chinese 打开\~english ON
+
+}MV_CAM_TRIGGER_MODE;
+
+typedef enum _MV_CAM_GAMMA_SELECTOR_
+{
+    MV_GAMMA_SELECTOR_USER = 1,
+    MV_GAMMA_SELECTOR_SRGB = 2,
+
+}MV_CAM_GAMMA_SELECTOR;
+
+typedef enum _MV_CAM_BALANCEWHITE_AUTO_
+{
+    MV_BALANCEWHITE_AUTO_OFF        = 0,
+    // NOTE(review): in this SDK "Once" is 2 and "Continuous" is 1 — the values are intentional, do not "fix" them.
+    MV_BALANCEWHITE_AUTO_ONCE       = 2,
+    MV_BALANCEWHITE_AUTO_CONTINUOUS = 1,    ///< \~chinese 连续\~english Continuous
+
+}MV_CAM_BALANCEWHITE_AUTO;
+
+typedef enum _MV_CAM_TRIGGER_SOURCE_
+{
+    MV_TRIGGER_SOURCE_LINE0     = 0,
+    MV_TRIGGER_SOURCE_LINE1     = 1,
+    MV_TRIGGER_SOURCE_LINE2     = 2,
+    MV_TRIGGER_SOURCE_LINE3     = 3,
+    MV_TRIGGER_SOURCE_COUNTER0  = 4,
+
+    MV_TRIGGER_SOURCE_SOFTWARE  = 7,
+    MV_TRIGGER_SOURCE_FrequencyConverter= 8,
+
+}MV_CAM_TRIGGER_SOURCE;
+
+typedef enum _MV_GIGE_TRANSMISSION_TYPE_
+{
+    MV_GIGE_TRANSTYPE_UNICAST                = 0x0,         ///< \~chinese 表示单播(默认)\~english Unicast mode
+    MV_GIGE_TRANSTYPE_MULTICAST              = 0x1,         ///< \~chinese 表示组播\~english Multicast mode
+    MV_GIGE_TRANSTYPE_LIMITEDBROADCAST       = 0x2,         ///< \~chinese 表示局域网内广播,暂不支持\~english Limited broadcast mode,not support
+    MV_GIGE_TRANSTYPE_SUBNETBROADCAST        = 0x3,         ///< \~chinese 表示子网内广播,暂不支持\~english Subnet broadcast mode,not support
+    MV_GIGE_TRANSTYPE_CAMERADEFINED          = 0x4,         ///< \~chinese 表示从相机获取,暂不支持\~english Transtype from camera,not support
+    MV_GIGE_TRANSTYPE_UNICAST_DEFINED_PORT   = 0x5,         ///< \~chinese 表示用户自定义应用端接收图像数据Port号\~english User Defined Receive Data Port
+    MV_GIGE_TRANSTYPE_UNICAST_WITHOUT_RECV   = 0x00010000,  ///< \~chinese 表示设置了单播,但本实例不接收图像数据\~english Unicast without receive data
+    MV_GIGE_TRANSTYPE_MULTICAST_WITHOUT_RECV = 0x00010001,  ///< \~chinese 表示组播模式,但本实例不接收图像数据\~english Multicast without receive data
+}MV_GIGE_TRANSMISSION_TYPE;
+// GigEVision IP Configuration
+#define MV_IP_CFG_STATIC        0x05000000
+#define MV_IP_CFG_DHCP          0x06000000
+#define MV_IP_CFG_LLA           0x04000000
+
+// GigEVision Net Transfer Mode
+#define MV_NET_TRANS_DRIVER     0x00000001
+#define MV_NET_TRANS_SOCKET     0x00000002
+
+// CameraLink Baud Rates (CLUINT32)
+#define MV_CAML_BAUDRATE_9600    0x00000001
+#define MV_CAML_BAUDRATE_19200   0x00000002
+#define MV_CAML_BAUDRATE_38400   0x00000004
+#define MV_CAML_BAUDRATE_57600   0x00000008
+#define MV_CAML_BAUDRATE_115200  0x00000010
+#define MV_CAML_BAUDRATE_230400  0x00000020
+#define MV_CAML_BAUDRATE_460800  0x00000040
+#define MV_CAML_BAUDRATE_921600  0x00000080
+#define MV_CAML_BAUDRATE_AUTOMAX 0x40000000
+
+/// \~chinese 信息类型\~english Information Type
+#define MV_MATCH_TYPE_NET_DETECT 0x00000001     ///< \~chinese 网络流量和丢包信息\~english Network traffic and packet loss information
+#define MV_MATCH_TYPE_USB_DETECT 0x00000002     ///< \~chinese host接收到来自U3V设备的字节总数\~english The total number of bytes host received from U3V device
+
+/// \~chinese 某个节点对应的子节点个数最大值\~english The maximum number of child nodes corresponding to a node
+#define MV_MAX_XML_NODE_NUM_C        128
+
+/// \~chinese 节点名称字符串最大长度\~english The maximum length of node name string
+#define MV_MAX_XML_NODE_STRLEN_C     64
+
+/// \~chinese 节点String值最大长度\~english The maximum length of Node String
+#define MV_MAX_XML_STRVALUE_STRLEN_C 64
+
+/// \~chinese 节点描述字段最大长度\~english The maximum length of the node description field
+#define MV_MAX_XML_DISC_STRLEN_C     512
+
+/// \~chinese 最多的单元数\~english The maximum number of units
+#define MV_MAX_XML_ENTRY_NUM         10
+
+/// \~chinese 父节点个数上限\~english The maximum number of parent nodes
+#define MV_MAX_XML_PARENTS_NUM       8
+
+///
/// \~chinese 每个已经实现单元的名称长度\~english The length of the name of each unit that has been implemented
+#define MV_MAX_XML_SYMBOLIC_STRLEN_C 64
+
+#define MV_MAX_XML_SYMBOLIC_NUM      64
+
+
+/// \~chinese 全匹配的一种信息结构体\~english A fully matched information structure
+typedef struct _MV_ALL_MATCH_INFO_
+{
+    unsigned int    nType;      ///< \~chinese 需要输出的信息类型,e.g. MV_MATCH_TYPE_NET_DETECT\~english Information type need to output ,e.g. MV_MATCH_TYPE_NET_DETECT
+    void*           pInfo;      ///< \~chinese 输出的信息缓存,由调用者分配\~english Output information cache, which is allocated by the caller
+    unsigned int    nInfoSize;  ///< \~chinese 信息缓存的大小\~english Information cache size
+
+}MV_ALL_MATCH_INFO;
+
+
+
+/// \~chinese 网络流量和丢包信息反馈结构体,对应类型为 MV_MATCH_TYPE_NET_DETECT
+/// \~english Network traffic and packet loss feedback structure, the corresponding type is MV_MATCH_TYPE_NET_DETECT
+typedef struct _MV_MATCH_INFO_NET_DETECT_
+{
+    int64_t         nReviceDataSize;            ///< \~chinese 已接收数据大小 [统计StartGrabbing和StopGrabbing之间的数据量]\~english Received data size
+    int64_t         nLostPacketCount;           ///< \~chinese 丢失的包数量\~english Number of packets lost
+    unsigned int    nLostFrameCount;            ///< \~chinese 丢帧数量\~english Number of frames lost
+    unsigned int    nNetRecvFrameCount;         ///< \~chinese 保留\~english Reserved
+    int64_t         nRequestResendPacketCount;  ///< \~chinese 请求重发包数 \~english Request resend packet count
+    int64_t         nResendPacketCount;         ///< \~chinese 重发包数 \~english Resend packet count
+}MV_MATCH_INFO_NET_DETECT;
+
+/// \~chinese host收到从u3v设备端的总字节数,对应类型为 MV_MATCH_TYPE_USB_DETECT\~english The total number of bytes host received from the u3v device side, the corresponding type is MV_MATCH_TYPE_USB_DETECT
+typedef struct _MV_MATCH_INFO_USB_DETECT_
+{
+    int64_t         nReviceDataSize;    ///< \~chinese 已接收数据大小 [统计OpenDevicce和CloseDevice之间的数据量]\~english Received data size
+    unsigned int    nRevicedFrameCount; ///< \~chinese 已收到的帧数\~english Number of frames received
+    unsigned int    nErrorFrameCount;   ///< \~chinese 错误帧数\~english Number of error frames
+    unsigned int    nReserved[2];       ///< \~chinese 保留\~english Reserved
+}MV_MATCH_INFO_USB_DETECT;
+
+/// \~english Basic image parameters: width/height/framerate current values and ranges, plus supported pixel formats.
+typedef struct _MV_IMAGE_BASIC_INFO_
+{
+    // width
+    // NOTE(review): nWidthValue/nWidthMin are unsigned short while the other range fields are unsigned int;
+    // this asymmetry is part of the published layout and must not be changed.
+    unsigned short  nWidthValue;
+    unsigned short  nWidthMin;
+    unsigned int    nWidthMax;
+    unsigned int    nWidthInc;
+
+    // height
+    unsigned int    nHeightValue;
+    unsigned int    nHeightMin;
+    unsigned int    nHeightMax;
+    unsigned int    nHeightInc;
+
+    // framerate
+    float           fFrameRateValue;
+    float           fFrameRateMin;
+    float           fFrameRateMax;
+
+    /// \~chinese 像素格式\~english pixel format
+    unsigned int    enPixelType;            ///< \~chinese 当前的像素格式\~english Current pixel format
+    unsigned int    nSupportedPixelFmtNum;  ///< \~chinese 支持的像素格式种类\~english Support pixel format
+    unsigned int    enPixelList[MV_MAX_XML_SYMBOLIC_NUM];
+    unsigned int    nReserved[8];
+
+}MV_IMAGE_BASIC_INFO;
+
+/// \~chinese 异常消息类型\~english Exception message type
+#define MV_EXCEPTION_DEV_DISCONNECT 0x00008001  ///< \~chinese 设备断开连接\~english The device is disconnected
+#define MV_EXCEPTION_VERSION_CHECK  0x00008002  ///< \~chinese SDK与驱动版本不匹配\~english SDK does not match the driver version
+/// \~chinese 设备的访问模式\~english Device Access Mode
+/// \~chinese 独占权限,其他APP只允许读CCP寄存器\~english Exclusive authority, other APP is only allowed to read the CCP register
+#define MV_ACCESS_Exclusive 1
+/// \~chinese 可以从5模式下抢占权限,然后以独占权限打开\~english You can seize the authority from the 5 mode, and then open with exclusive authority
+#define MV_ACCESS_ExclusiveWithSwitch 2
+/// \~chinese 控制权限,其他APP允许读所有寄存器\~english Control authority, allows other APP reading all registers
+#define MV_ACCESS_Control 3
+/// \~chinese 可以从5的模式下抢占权限,然后以控制权限打开\~english You can seize the authority from the 5 mode, and then open with control authority
+#define MV_ACCESS_ControlWithSwitch 4
+/// \~chinese 以可被抢占的控制权限打开\~english Open with seized control authority
+#define MV_ACCESS_ControlSwitchEnable 5
+/// \~chinese 可以从5的模式下抢占权限,然后以可被抢占的控制权限打开\~english You can seize the authority from the 5 mode, and then open with seized control authority
+#define MV_ACCESS_ControlSwitchEnableWithKey 6
+/// \~chinese 读模式打开设备,适用于控制权限下\~english Open
/// with read mode and is available under control authority
+#define MV_ACCESS_Monitor 7
+
+
+/************************************************************************/
+/* 封装了GenICam的C接口相关参数定义 */
+/* Package of GenICam C interface-related parameters definition */
+/************************************************************************/
+
+/// \~chinese 每个节点对应的接口类型\~english Interface type corresponds to each node
+enum MV_XML_InterfaceType
+{
+    IFT_IValue,         //!> IValue interface
+    IFT_IBase,          //!> IBase interface
+    IFT_IInteger,       //!> IInteger interface
+    IFT_IBoolean,       //!> IBoolean interface
+    IFT_ICommand,       //!> ICommand interface
+    IFT_IFloat,         //!> IFloat interface
+    IFT_IString,        //!> IString interface
+    IFT_IRegister,      //!> IRegister interface
+    IFT_ICategory,      //!> ICategory interface
+    IFT_IEnumeration,   //!> IEnumeration interface
+    IFT_IEnumEntry,     //!> IEnumEntry interface
+    IFT_IPort,          //!> IPort interface
+};
+
+/// \~chinese 节点的访问模式\~english Node Access Mode
+enum MV_XML_AccessMode
+{
+    AM_NI,              //!< Not implemented
+    AM_NA,              //!< Not available
+    AM_WO,              //!< Write Only
+    AM_RO,              //!< Read Only
+    AM_RW,              //!< Read and Write
+    AM_Undefined,       //!< Object is not yet initialized
+    AM_CycleDetect,     //!< used internally for AccessMode cycle detection
+};
+
+/// \~chinese 节点的可见性权限\~english Node Visible Permission
+enum MV_XML_Visibility
+{
+    V_Beginner  = 0,    //!< Always visible
+    V_Expert    = 1,    //!< Visible for experts or Gurus
+    V_Guru      = 2,    //!< Visible for Gurus
+    V_Invisible = 3,    //!< Not Visible
+    V_Undefined = 99    //!< Object is not yet initialized
+};
+
+/// \~chinese Event事件回调信息\~english Event callback information
+#define MAX_EVENT_NAME_SIZE 128     // \~chinese 相机Event事件名称最大长度\~english Max length of event name
+
+typedef struct _MV_EVENT_OUT_INFO_
+{
+    char            EventName[MAX_EVENT_NAME_SIZE]; ///< \~chinese Event名称\~english Event name
+
+    unsigned short  nEventID;           ///< \~chinese Event号\~english Event ID
+    unsigned short  nStreamChannel;     ///< \~chinese 流通道序号\~english Circulation number
+
+    unsigned int    nBlockIdHigh;       ///< \~chinese 帧号高位\~english BlockId high
+    unsigned int    nBlockIdLow;        ///< \~chinese 帧号低位\~english BlockId low
+
+    unsigned int    nTimestampHigh;     ///< \~chinese 时间戳高位\~english Timestamp high
+    unsigned int    nTimestampLow;      ///< \~chinese 时间戳低位\~english Timestamp low
+
+    void*           pEventData;         ///< \~chinese Event数据\~english Event data
+    unsigned int    nEventDataSize;     ///< \~chinese Event数据长度\~english Event data len
+
+    unsigned int    nReserved[16];      ///< \~chinese 预留\~english Reserved
+}MV_EVENT_OUT_INFO;
+
+/// \~chinese 文件存取\~english File Access
+typedef struct _MV_CC_FILE_ACCESS_T
+{
+    const char*     pUserFileName;      ///< \~chinese 用户文件名\~english User file name
+    const char*     pDevFileName;       ///< \~chinese 设备文件名\~english Device file name
+
+    unsigned int    nReserved[32];      ///< \~chinese 预留\~english Reserved
+}MV_CC_FILE_ACCESS;
+
+/// \~chinese 文件存取进度\~english File Access Progress
+typedef struct _MV_CC_FILE_ACCESS_PROGRESS_T
+{
+    int64_t         nCompleted;         ///< \~chinese 已完成的长度\~english Completed Length
+    int64_t         nTotal;             ///< \~chinese 总长度\~english Total Length
+
+    unsigned int    nReserved[8];       ///< \~chinese 预留\~english Reserved
+}MV_CC_FILE_ACCESS_PROGRESS;
+
+
+
+/// \~chinese 传输模式,可以为单播模式、组播模式等\~english Transmission type
+typedef struct _MV_TRANSMISSION_TYPE_T
+{
+    MV_GIGE_TRANSMISSION_TYPE enTransmissionType;   ///< \~chinese 传输模式\~english Transmission type
+    unsigned int    nDestIp;            ///< \~chinese 目标IP,组播模式下有意义\~english Destination IP
+    unsigned short  nDestPort;          ///< \~chinese 目标Port,组播模式下有意义\~english Destination port
+
+    unsigned int    nReserved[32];      ///< \~chinese 预留\~english Reserved
+}MV_TRANSMISSION_TYPE;
+
+/// \~chinese 动作命令信息\~english Action Command
+typedef struct _MV_ACTION_CMD_INFO_T
+{
+    unsigned int    nDeviceKey;         ///< \~chinese 设备密钥 \~english Device key
+    unsigned int    nGroupKey;          ///< \~chinese 组键 \~english Group key
+    unsigned int    nGroupMask;         ///< \~chinese 组掩码 \~english Group mask
+
+    unsigned int    bActionTimeEnable;  ///< \~chinese 只有设置成1时Action Time才有效,非1时无效 \~english Action Time is only valid when this is set to 1
+    int64_t         nActionTime;        ///< \~chinese 预定的时间,和主频有关 \~english Scheduled time, related to the device main frequency
+
+    const
char* pBroadcastAddress;                ///< \~chinese 广播包地址 \~english Broadcast packet address
+    unsigned int    nTimeOut;           ///< \~chinese 等待ACK的超时时间,如果为0表示不需要ACK \~english Timeout waiting for ACK; 0 means no ACK is required
+
+    unsigned int    bSpecialNetEnable;  ///< [IN] \~chinese 只有设置成1时指定的网卡IP才有效,非1时无效 \~english Special IP Enable
+    unsigned int    nSpecialNetIP;      ///< [IN] \~chinese 指定的网卡IP \~english Special Net IP address
+
+    unsigned int    nReserved[14];      ///< \~chinese 预留\~english Reserved
+
+}MV_ACTION_CMD_INFO;
+
+/// \~chinese 动作命令返回信息\~english Action Command Result
+typedef struct _MV_ACTION_CMD_RESULT_T
+{
+    unsigned char   strDeviceAddress[12 + 3 + 1];   ///< IP address of the device
+
+    //status code returned by the device
+    int nStatus;//1.0x0000:success.
+                //2.0x8001:Command is not supported by the device.
+                //3.0x8013:The device is not synchronized to a master clock to be used as time reference.
+                //4.0x8015:A device queue or packet data has overflowed.
+                //5.0x8016:The requested scheduled action command was requested at a time that is already past.
+
+    unsigned int    nReserved[4];       ///< \~chinese 预留\~english Reserved
+
+}MV_ACTION_CMD_RESULT;
+
+/// \~chinese 动作命令返回信息列表\~english Action Command Result List
+typedef struct _MV_ACTION_CMD_RESULT_LIST_T
+{
+    unsigned int            nNumResults;    ///< \~chinese 返回值个数 \~english Number of returned results
+    MV_ACTION_CMD_RESULT*   pResults;
+
+}MV_ACTION_CMD_RESULT_LIST;
+
+// ch:单个节点基本属性 | en:Single Node Basic Attributes
+typedef struct _MV_XML_NODE_FEATURE_
+{
+    enum MV_XML_InterfaceType enType;               ///< \~chinese 节点类型\~english Node Type
+    enum MV_XML_Visibility enVisivility;            ///< \~chinese 是否可见\~english Is visibility
+    char strDescription[MV_MAX_XML_DISC_STRLEN_C];  ///< \~chinese 节点描述,目前暂不支持\~english Node Description, NOT SUPPORT NOW
+    char strDisplayName[MV_MAX_XML_NODE_STRLEN_C];  ///< \~chinese 显示名称\~english Display Name
+    char strName[MV_MAX_XML_NODE_STRLEN_C];         ///< \~chinese 节点名\~english Node Name
+    char strToolTip[MV_MAX_XML_DISC_STRLEN_C];      ///< \~chinese 提示\~english Notice
+
+    unsigned int nReserved[4];
+}MV_XML_NODE_FEATURE;
+
+/// \~chinese 节点列表\~english Node List
+typedef struct _MV_XML_NODES_LIST_
+{
+    unsigned int nNodeNum;              ///< \~chinese 节点个数\~english Node Number
+    MV_XML_NODE_FEATURE stNodes[MV_MAX_XML_NODE_NUM_C];
+}MV_XML_NODES_LIST;
+
+
+
+typedef struct _MV_XML_FEATURE_Value_
+{
+    enum MV_XML_InterfaceType enType;               ///< \~chinese 节点类型\~english Node Type
+    char strDescription[MV_MAX_XML_DISC_STRLEN_C];  ///< \~chinese 节点描述 目前暂不支持\~english Node Description, NOT SUPPORT NOW
+    char strDisplayName[MV_MAX_XML_NODE_STRLEN_C];  ///< \~chinese 显示名称\~english Display Name
+    char strName[MV_MAX_XML_NODE_STRLEN_C];         ///< \~chinese 节点名\~english Node Name
+    char strToolTip[MV_MAX_XML_DISC_STRLEN_C];      ///< \~chinese 提示\~english Notice
+    unsigned int nReserved[4];
+}MV_XML_FEATURE_Value;
+
+typedef struct _MV_XML_FEATURE_Base_
+{
+    enum MV_XML_AccessMode enAccessMode;    ///< \~chinese 访问模式\~english Access Mode
+}MV_XML_FEATURE_Base;
+
+typedef struct _MV_XML_FEATURE_Integer_
+{
+    char strName[MV_MAX_XML_NODE_STRLEN_C];
+    char strDisplayName[MV_MAX_XML_NODE_STRLEN_C];
+    char strDescription[MV_MAX_XML_DISC_STRLEN_C];  ///< \~chinese 目前暂不支持\~english NOT SUPPORT NOW
+    char strToolTip[MV_MAX_XML_DISC_STRLEN_C];
+
+    enum MV_XML_Visibility enVisivility;    ///< \~chinese 是否可见\~english Visible
+    enum MV_XML_AccessMode enAccessMode;    ///< \~chinese 访问模式\~english Access Mode
+    int bIsLocked;                          ///< \~chinese 是否锁定。0-否;1-是 目前暂不支持\~english Locked.
0-NO; 1-YES, NOT SUPPORT NOW + int64_t nValue; ///< \~chinese 当前值\~english Current Value + int64_t nMinValue; ///< \~chinese 最小值\~english Min Value + int64_t nMaxValue; ///< \~chinese 最大值\~english Max Value + int64_t nIncrement; ///< \~chinese 增量\~english Increment + + unsigned int nReserved[4]; + +}MV_XML_FEATURE_Integer; + +typedef struct _MV_XML_FEATURE_Boolean_ +{ + char strName[MV_MAX_XML_NODE_STRLEN_C]; + char strDisplayName[MV_MAX_XML_NODE_STRLEN_C]; + char strDescription[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 目前暂不支持\~english NOT SUPPORT NOW + char strToolTip[MV_MAX_XML_DISC_STRLEN_C]; + + enum MV_XML_Visibility enVisivility; ///< \~chinese 是否可见\~english Visible + enum MV_XML_AccessMode enAccessMode; ///< \~chinese 访问模式\~english Access Mode + int bIsLocked; ///< \~chinese 是否锁定。0-否;1-是 目前暂不支持\~english Locked. 0-NO; 1-YES, NOT SUPPORT NOW + bool bValue; ///< \~chinese 当前值\~english Current Value + + unsigned int nReserved[4]; +}MV_XML_FEATURE_Boolean; + +typedef struct _MV_XML_FEATURE_Command_ +{ + char strName[MV_MAX_XML_NODE_STRLEN_C]; + char strDisplayName[MV_MAX_XML_NODE_STRLEN_C]; + char strDescription[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 目前暂不支持\~english NOT SUPPORT NOW + char strToolTip[MV_MAX_XML_DISC_STRLEN_C]; + + enum MV_XML_Visibility enVisivility; ///< \~chinese 是否可见\~english Visible + enum MV_XML_AccessMode enAccessMode; ///< \~chinese 访问模式\~english Access Mode + int bIsLocked; ///< \~chinese 是否锁定。0-否;1-是 目前暂不支持\~english Locked. 
0-NO; 1-YES, NOT SUPPORT NOW + + unsigned int nReserved[4]; +}MV_XML_FEATURE_Command; + +typedef struct _MV_XML_FEATURE_Float_ +{ + char strName[MV_MAX_XML_NODE_STRLEN_C]; + char strDisplayName[MV_MAX_XML_NODE_STRLEN_C]; + char strDescription[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 目前暂不支持\~english NOT SUPPORT NOW + char strToolTip[MV_MAX_XML_DISC_STRLEN_C]; + + enum MV_XML_Visibility enVisivility; ///< \~chinese 是否可见\~english Visible + enum MV_XML_AccessMode enAccessMode; ///< \~chinese 访问模式\~english Access Mode + int bIsLocked; ///< \~chinese 是否锁定。0-否;1-是 目前暂不支持\~english Locked. 0-NO; 1-YES, NOT SUPPORT NOW + double dfValue; ///< \~chinese 当前值\~english Current Value + double dfMinValue; ///< \~chinese 最小值\~english Min Value + double dfMaxValue; ///< \~chinese 最大值\~english Max Value + double dfIncrement; ///< \~chinese 增量\~english Increment + + unsigned int nReserved[4]; +}MV_XML_FEATURE_Float; + +typedef struct _MV_XML_FEATURE_String_ +{ + char strName[MV_MAX_XML_NODE_STRLEN_C]; + char strDisplayName[MV_MAX_XML_NODE_STRLEN_C]; + char strDescription[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 目前暂不支持\~english NOT SUPPORT NOW + char strToolTip[MV_MAX_XML_DISC_STRLEN_C]; + + enum MV_XML_Visibility enVisivility; ///< \~chinese 是否可见\~english Visible + enum MV_XML_AccessMode enAccessMode; ///< \~chinese 访问模式\~english Access Mode + int bIsLocked; ///< \~chinese 是否锁定。0-否;1-是 目前暂不支持\~english Locked. 
0-NO; 1-YES, NOT SUPPORT NOW + char strValue[MV_MAX_XML_STRVALUE_STRLEN_C]; ///< \~chinese 当前值\~english Current Value + + unsigned int nReserved[4]; +}MV_XML_FEATURE_String; + +typedef struct _MV_XML_FEATURE_Register_ +{ + char strName[MV_MAX_XML_NODE_STRLEN_C]; + char strDisplayName[MV_MAX_XML_NODE_STRLEN_C]; + char strDescription[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 目前暂不支持\~english NOT SUPPORT NOW + char strToolTip[MV_MAX_XML_DISC_STRLEN_C]; + + enum MV_XML_Visibility enVisivility; ///< \~chinese 是否可见\~english Visible + enum MV_XML_AccessMode enAccessMode; ///< \~chinese 访问模式\~english Access Mode + int bIsLocked; ///< \~chinese 是否锁定。0-否;1-是 目前暂不支持\~english Locked. 0-NO; 1-YES, NOT SUPPORT NOW + int64_t nAddrValue; ///< \~chinese 当前值\~english Current Value + + unsigned int nReserved[4]; +}MV_XML_FEATURE_Register; + +typedef struct _MV_XML_FEATURE_Category_ +{ + char strDescription[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 节点描述 目前暂不支持\~english Node Description, NOT SUPPORT NOW + char strDisplayName[MV_MAX_XML_NODE_STRLEN_C]; ///< \~chinese 显示名称\~english Display Name + char strName[MV_MAX_XML_NODE_STRLEN_C]; ///< \~chinese 节点名\~english Node Name + char strToolTip[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 提示\~english Notice + + enum MV_XML_Visibility enVisivility; ///< \~chinese 是否可见\~english Visible + + unsigned int nReserved[4]; +}MV_XML_FEATURE_Category; + +typedef struct _MV_XML_FEATURE_EnumEntry_ +{ + char strName[MV_MAX_XML_NODE_STRLEN_C]; + char strDisplayName[MV_MAX_XML_NODE_STRLEN_C]; + char strDescription[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 目前暂不支持\~english NOT SUPPORT NOW + char strToolTip[MV_MAX_XML_DISC_STRLEN_C]; + int bIsImplemented; + int nParentsNum; + MV_XML_NODE_FEATURE stParentsList[MV_MAX_XML_PARENTS_NUM]; + + enum MV_XML_Visibility enVisivility; ///< \~chinese 是否可见\~english Visible + int64_t nValue; ///< \~chinese 当前值\~english Current Value + enum MV_XML_AccessMode enAccessMode; ///< \~chinese 访问模式\~english Access Mode + int bIsLocked; 
///< \~chinese 是否锁定。0-否;1-是 目前暂不支持\~english Locked. 0-NO; 1-YES, NOT SUPPORT NOW + int nReserved[8]; + +}MV_XML_FEATURE_EnumEntry; + + +typedef struct _MV_XML_FEATURE_Enumeration_ +{ + enum MV_XML_Visibility enVisivility; ///< \~chinese 是否可见\~english Visible + char strDescription[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 节点描述 目前暂不支持\~english Node Description, NOT SUPPORT NOW + char strDisplayName[MV_MAX_XML_NODE_STRLEN_C]; ///< \~chinese 显示名称\~english Display Name + char strName[MV_MAX_XML_NODE_STRLEN_C]; ///< \~chinese 节点名\~english Node Name + char strToolTip[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 提示\~english Notice + + int nSymbolicNum; ///< \~chinese Symbolic数\~english Symbolic Number + char strCurrentSymbolic[MV_MAX_XML_SYMBOLIC_STRLEN_C]; ///< \~chinese 当前Symbolic索引\~english Current Symbolic Index + char strSymbolic[MV_MAX_XML_SYMBOLIC_NUM][MV_MAX_XML_SYMBOLIC_STRLEN_C]; + enum MV_XML_AccessMode enAccessMode; ///< \~chinese 访问模式\~english Access Mode + int bIsLocked; ///< \~chinese 是否锁定。0-否;1-是 目前暂不支持\~english Locked. 0-NO; 1-YES, NOT SUPPORT NOW + int64_t nValue; ///< \~chinese 当前值\~english Current Value + + unsigned int nReserved[4]; +}MV_XML_FEATURE_Enumeration; + + +typedef struct _MV_XML_FEATURE_Port_ +{ + enum MV_XML_Visibility enVisivility; ///< \~chinese 是否可见\~english Visible + char strDescription[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 节点描述 目前暂不支持\~english Node Description, NOT SUPPORT NOW + char strDisplayName[MV_MAX_XML_NODE_STRLEN_C]; ///< \~chinese 显示名称\~english Display Name + char strName[MV_MAX_XML_NODE_STRLEN_C]; ///< \~chinese 节点名\~english Node Name + char strToolTip[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 提示\~english Notice + + enum MV_XML_AccessMode enAccessMode; ///< \~chinese 访问模式\~english Access Mode + int bIsLocked; ///< \~chinese 是否锁定。0-否;1-是 目前暂不支持\~english Locked. 
0-NO; 1-YES, NOT SUPPORT NOW + + unsigned int nReserved[4]; +}MV_XML_FEATURE_Port; + +typedef struct _MV_XML_CAMERA_FEATURE_ +{ + enum MV_XML_InterfaceType enType; + union + { + MV_XML_FEATURE_Integer stIntegerFeature; + MV_XML_FEATURE_Float stFloatFeature; + MV_XML_FEATURE_Enumeration stEnumerationFeature; + MV_XML_FEATURE_String stStringFeature; + }SpecialFeature; + +}MV_XML_CAMERA_FEATURE; + +typedef struct _MVCC_ENUMVALUE_T +{ + unsigned int nCurValue; ///< \~chinese 当前值\~english Current Value + unsigned int nSupportedNum; ///< \~chinese 数据的有效数据个数\~english Number of valid data + unsigned int nSupportValue[MV_MAX_XML_SYMBOLIC_NUM]; + + unsigned int nReserved[4]; +}MVCC_ENUMVALUE; + +typedef struct _MVCC_INTVALUE_T +{ + unsigned int nCurValue; ///< \~chinese 当前值\~english Current Value + unsigned int nMax; + unsigned int nMin; + unsigned int nInc; + + unsigned int nReserved[4]; +}MVCC_INTVALUE; + +typedef struct _MVCC_INTVALUE_EX_T +{ + int64_t nCurValue; ///< \~chinese 当前值\~english Current Value + int64_t nMax; + int64_t nMin; + int64_t nInc; + + unsigned int nReserved[16]; +}MVCC_INTVALUE_EX; + +typedef struct _MVCC_FLOATVALUE_T +{ + float fCurValue; ///< \~chinese 当前值\~english Current Value + float fMax; + float fMin; + + unsigned int nReserved[4]; +}MVCC_FLOATVALUE; + +typedef struct _MVCC_STRINGVALUE_T +{ + char chCurValue[256]; ///< \~chinese 当前值\~english Current Value + + int64_t nMaxLength; + unsigned int nReserved[2]; +}MVCC_STRINGVALUE; + +#endif /* _MV_CAMERA_PARAMS_H_ */ diff --git a/src/ros2-hik-camera/hikSDK/include/MvCameraControl.h b/src/ros2-hik-camera/hikSDK/include/MvCameraControl.h new file mode 100644 index 0000000..2bd89d6 --- /dev/null +++ b/src/ros2-hik-camera/hikSDK/include/MvCameraControl.h @@ -0,0 +1,3398 @@ + +#ifndef _MV_CAMERA_CTRL_H_ +#define _MV_CAMERA_CTRL_H_ + +#include "MvErrorDefine.h" +#include "CameraParams.h" + +/** +* @brief 动态库导入导出定义 +* @brief Import and export definition of the dynamic library +*/ +#ifndef MV_CAMCTRL_API + 
+ #if (defined (_WIN32) || defined(WIN64)) + #if defined(MV_CAMCTRL_EXPORTS) + #define MV_CAMCTRL_API __declspec(dllexport) + #else + #define MV_CAMCTRL_API __declspec(dllimport) + #endif + #else + #ifndef __stdcall + #define __stdcall + #endif + + #if defined(MV_CAMCTRL_EXPORTS) + #define MV_CAMCTRL_API __attribute__((visibility("default"))) + #else + #define MV_CAMCTRL_API + #endif + #endif + +#endif + +#ifndef IN + #define IN +#endif + +#ifndef OUT + #define OUT +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +/************************************************************************/ +/* 相机的基本指令和操作 */ +/* Camera basic instructions and operations */ +/************************************************************************/ +/********************************************************************//** + * @~chinese + * @brief 获取SDK版本号 + * @return 始终返回4字节版本号 + | 主 | 次 | 修正 | 测试 | + | :---: | :---: | :---: | :---: | + | 8bits | 8bits | 8bits | 8bits | + * @remarks 比如返回值为0x01000001,即SDK版本号为V1.0.0.1。 + + * @~english + * @brief Get SDK Version + * @return Always return 4 Bytes of version number + | Main | Sub | Rev | Test | + | :---: | :---: | :---: | :---: | + | 8bits | 8bits | 8bits | 8bits | + * @remarks For example, if the return value is 0x01000001, the SDK version is V1.0.0.1. 
+ ************************************************************************/ +MV_CAMCTRL_API unsigned int __stdcall MV_CC_GetSDKVersion(); + +/********************************************************************//** + * @~chinese + * @brief 获取支持的传输层 + * @return 支持的传输层编号 + + * @~english + * @brief Get supported Transport Layer + * @return Supported Transport Layer number + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_EnumerateTls(); + +/********************************************************************//** + * @~chinese + * @brief 枚举设备 + * @param nTLayerType [IN] 枚举传输层 + * @param pstDevList [OUT] 设备列表 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks 设备列表的内存是在SDK内部分配的,多线程调用该接口时会进行设备列表内存的释放和申请.\n + 建议尽量避免多线程枚举操作。 + + * @~english + * @brief Enumerate Device + * @param nTLayerType [IN] Enumerate TLs + * @param pstDevList [OUT] Device List + * @return Success, return #MV_OK. Failure, return error code + * @remarks @remarks The memory of the device list is allocated within the SDK. When the interface is invoked by multiple threads, the memory of the device list will be released and applied.\n + It is recommended to avoid multithreaded enumeration operations as much as possible. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_EnumDevices(IN unsigned int nTLayerType, IN OUT MV_CC_DEVICE_INFO_LIST* pstDevList); + +/********************************************************************//** + * @~chinese + * @brief 根据厂商名字枚举设备 + * @param nTLayerType [IN] 枚举传输层 + * @param pstDevList [OUT] 设备列表 + * @param pManufacturerName [IN] 厂商名字 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks + + * @~english + * @brief Enumerate device according to manufacture name + * @param nTLayerType [IN] Transmission layer of enumeration + * @param pstDevList [OUT] Device list + * @param pManufacturerName [IN] Manufacture Name + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_EnumDevicesEx(IN unsigned int nTLayerType, IN OUT MV_CC_DEVICE_INFO_LIST* pstDevList, IN const char* pManufacturerName); + +/********************************************************************//** + * @~chinese + * @brief 设备是否可达 + * @param pstDevInfo [IN] 设备信息结构体 + * @param nAccessMode [IN] 访问权限 + * @return 可达,返回true;不可达,返回false + * @remarks 读取设备CCP寄存器的值,判断当前状态是否具有某种访问权限。 \n + 如果设备不支持 #MV_ACCESS_ExclusiveWithSwitch、 #MV_ACCESS_ControlWithSwitch、 #MV_ACCESS_ControlSwitchEnableWithKey这三种模式,接口返回false。目前设备不支持这3种抢占模式,国际上主流的厂商的相机也都暂不支持这3种模式。 \n + 该接口不支持CameraLink设备。 + + * @~english + * @brief Is the device accessible + * @param pstDevInfo [IN] Device Information Structure + * @param nAccessMode [IN] Access Right + * @return Access, return true. Not access, return false + @remarks Read device CCP register value and determine current access permission.\n + Return false if the device does not support the modes #MV_ACCESS_ExclusiveWithSwitch, #MV_ACCESS_ControlWithSwitch, #MV_ACCESS_ControlSwitchEnableWithKey. Currently the device does not support the 3 preemption modes, neither do the devices from other mainstream manufacturers. \n + This API is not supported by CameraLink device. + ************************************************************************/ +MV_CAMCTRL_API bool __stdcall MV_CC_IsDeviceAccessible(IN MV_CC_DEVICE_INFO* pstDevInfo, IN unsigned int nAccessMode); + +/********************************************************************//** + * @~chinese + * @brief 设置SDK日志路径(如果日志服务MvLogServer已启用,则该接口无效,默认日志服务为开启状态) + * @param pSDKLogPath [IN] SDK日志路径 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks 设置路径之后,可以指定路径存放日志。\n + v2.4.1版本新增日志服务,开启服务之后该接口无效。 + + * @~english + * @brief Set SDK log path + * @param pSDKLogPath [IN] SDK log path + * @return Access, return true. 
Not access, return false + * @remarks For version V2.4.1, added log service, this API is invalid when the service is enabled.And The logging service is enabled by default\n + This API is used to set the log file storing path. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetSDKLogPath(IN const char * pSDKLogPath); + +/********************************************************************//** + * @~chinese + * @brief 创建设备句柄 + * @param handle [OUT] 设备句柄 + * @param pstDevInfo [IN] 设备信息结构体 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks 根据输入的设备信息,创建库内部必须的资源和初始化内部模块。通过该接口创建句柄,调用SDK接口,会默认生成SDK日志文件,保存在当前可执行程序路径下的MvSdkLog文件夹,如果不需要生成日志文件,可以通过MV_CC_CreateHandleWithoutLog创建句柄。 + + * @~english + * @brief Create Device Handle + * @param handle [OUT] Device handle + * @param pstDevInfo [IN] Device Information Structure + * @return Success, return #MV_OK. Failure, return error code + * @remarks Create required resources within library and initialize internal module according to input device information. Create handle and call SDK interface through this interface, and SDK log file will be created by default. Creating handle through #MV_CC_CreateHandleWithoutLog will not generate log files. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_CreateHandle(OUT void ** handle, IN const MV_CC_DEVICE_INFO* pstDevInfo); + +/********************************************************************//** + * @~chinese + * @brief 创建设备句柄,不生成日志 + * @param handle [OUT] 设备句柄 + * @param pstDevInfo [IN] 设备信息结构体 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks 根据输入的设备信息,创建库内部必须的资源和初始化内部模块。通过该接口创建句柄,调用SDK接口,不会默认生成SDK日志文件,如果需要生成日志文件可以通过MV_CC_CreateHandle创建句柄,日志文件自动生成,保存在当前可执行程序路径下的MvSdkLog文件夹。 + + * @~english + * @brief Create Device Handle without log + * @param handle [OUT] Device handle + * @param pstDevInfo [IN] Device Information Structure + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks Create required resources within library and initialize internal module according to input device information. Create handle and call SDK interface through this interface, and SDK log file will not be created. To create logs, create handle through MV_CC_CreateHandle, and log files will be automatically generated. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_CreateHandleWithoutLog(OUT void ** handle, IN const MV_CC_DEVICE_INFO* pstDevInfo); + +/********************************************************************//** + * @~chinese + * @brief 销毁设备句柄 + * @param handle [IN] 设备句柄 + * @return 成功,返回#MV_OK;错误,返回错误码 + + * @~english + * @brief Destroy Device Handle + * @param handle [IN] Device handle + * @return Success, return #MV_OK. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_DestroyHandle(IN void * handle); + +/********************************************************************//** + * @~chinese + * @brief 打开设备 + * @param handle [IN] 设备句柄 + * @param nAccessMode [IN] 访问权限 + * @param nSwitchoverKey [IN] 切换访问权限时的密钥 + * @return 成功,返回#MV_OK ;错误,返回错误码 + * @remarks 根据设置的设备参数,找到对应的设备,连接设备。\n + 调用接口时可不传入nAccessMode和nSwitchoverKey,此时默认设备访问模式为独占权限。目前设备暂不支持#MV_ACCESS_ExclusiveWithSwitch、 #MV_ACCESS_ControlWithSwitch、MV_ACCESS_ControlSwitchEnable、MV_ACCESS_ControlSwitchEnableWithKey这四种抢占模式。\n + 对于U3V设备,nAccessMode、nSwitchoverKey这两个参数无效。 + + * @~english + * @brief Open Device + * @param handle [IN] Device handle + * @param nAccessMode [IN] Access Right + * @param nSwitchoverKey [IN] Switch key of access right + * @return Success, return #MV_OK. Failure, return error code + * @remarks Find specific device and connect according to set device parameters. \n + When calling the interface, the input of nAccessMode and nSwitchoverKey is optional, and the device access mode is exclusive. 
Currently the device does not support the following preemption modes: MV_ACCESS_ExclusiveWithSwitch, MV_ACCESS_ControlWithSwitch, MV_ACCESS_ControlSwitchEnableWithKey.\n + For USB3Vision device, nAccessMode, nSwitchoverKey are invalid. + ************************************************************************/ +#ifndef __cplusplus +MV_CAMCTRL_API int __stdcall MV_CC_OpenDevice(IN void* handle, IN unsigned int nAccessMode, IN unsigned short nSwitchoverKey); +#else +MV_CAMCTRL_API int __stdcall MV_CC_OpenDevice(IN void* handle, IN unsigned int nAccessMode = MV_ACCESS_Exclusive, IN unsigned short nSwitchoverKey = 0); +#endif + +/********************************************************************//** + * @~chinese + * @brief 关闭相机 + * @param handle [IN] 设备句柄 + * @return 成功,返回#MV_OK ;错误,返回错误码 + * @remarks 通过MV_CC_OpenDevice连接设备后,可以通过该接口断开设备连接,释放资源 + + * @~english + * @brief Close Device + * @param handle [IN] Device handle + * @return Success, return #MV_OK. Failure, return error code + * @remarks After connecting to device through MV_CC_OpenDevice, use this interface to disconnect and release resources. + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_CloseDevice(IN void* handle); + +/********************************************************************//** + * @~chinese + * @brief 判断相机是否处于连接状态 + * @param handle [IN] 设备句柄 + * @return 设备处于连接状态,返回true;没连接或失去连接,返回false + + * @~english + * @brief Is The Device Connected + * @param handle [IN] Device handle + * @return Connected, return true. 
Not Connected or DIsconnected, return false + ***********************************************************************/ +MV_CAMCTRL_API bool __stdcall MV_CC_IsDeviceConnected(IN void* handle); + +/********************************************************************//** + * @~chinese + * @brief 注册图像数据回调 + * @param handle [IN] 设备句柄 + * @param cbOutput [IN] 回调函数指针 + * @param pUser [IN] 用户自定义变量 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks 通过该接口可以设置图像数据回调函数,在MV_CC_CreateHandle之后即可调用。 \n + 图像数据采集有两种方式,两种方式不能复用: + - 方式一:调用MV_CC_RegisterImageCallBackEx设置图像数据回调函数,然后调用MV_CC_StartGrabbing开始采集,采集的图像数据在设置的回调函数中返回。 + - 方式二:调用MV_CC_StartGrabbing开始采集,然后在应用层循环调用MV_CC_GetOneFrameTimeout获取指定像素格式的帧数据,获取帧数据时上层应用程序需要根据帧率控制好调用该接口的频率。 + 该接口仅在windows版本和Linux版本下支持。\n + 该接口不支持CameraLink设备。 + + * @~english + * @brief Register the image callback function + * @param handle [IN] Device handle + * @param cbOutput [IN] Callback function pointer + * @param pUser [IN] User defined variable + * @return Success, return #MV_OK. Failure, return error code + * @remarks After MV_CC_CreateHandle, call this interface to set image data callback function.\n + There are two available image data acquisition modes, and cannot be used together: + - Mode 1: Call MV_CC_RegisterImageCallBack to set image data callback function, and then call MV_CC_StartGrabbing to start acquiring. The acquired image data will return in the set callback function. + - Mode 2: Call MV_CC_StartGrabbing to start acquiring, and then call MV_CC_GetOneFrameTimeout repeatedly in application layer to get frame data of specified pixel format. When getting frame data, the frequency of calling this interface should be controlled by upper layer application according to frame rate. + This API is not supported by CameraLink device. 
+ ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_RegisterImageCallBackEx(void* handle, + void(__stdcall* cbOutput)(unsigned char * pData, MV_FRAME_OUT_INFO_EX* pFrameInfo, void* pUser), + void* pUser); + +/********************************************************************//** + * @~chinese + * @brief 注册图像数据回调,RGB + * @param handle [IN] 设备句柄 + * @param cbOutput [IN] 回调函数指针 + * @param pUser [IN] 用户自定义变量 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks 通过该接口可以设置图像数据回调函数,在MV_CC_CreateHandle之后即可调用。 \n + 图像数据采集有两种方式,两种方式不能复用: + - 方式一:调用MV_CC_RegisterImageCallBackForRGB设置RGB24格式图像数据回调函数,然后调用MV_CC_StartGrabbing开始采集,采集的图像数据在设置的回调函数中返回。 + - 方式二:调用MV_CC_StartGrabbing开始采集,然后在应用层循环调用MV_CC_GetImageForRGB获取RGB24格式的帧数据,获取帧数据时上层应用程序需要根据帧率控制好调用该接口的频率。\n\n + 该接口仅在windows版本和Linux版本下支持。 \n + 该接口不支持CameraLink设备。 + + * @~english + * @brief register image data callback, RGB + * @param handle [IN] Device handle + * @param cbOutput [IN] Callback function pointer + * @param pUser [IN] User defined variable + * @return Success, return #MV_OK. Failure, return error code + * @remarks Before calling this API to set image data callback function, you should call this API MV_CC_CreateHandle. \n + There are two image acquisition modes, the two modes cannot be reused: + - Mode 1: Call #MV_CC_RegisterImageCallBackForRGB to set RGB24 format image data callback function, + and then call #MV_CC_StartGrabbing to start acquisition, + the collected image data will be returned in the configured callback function. + - Mode 2: Call MV_CC_StartGrabbing to start acquisition, and the call MV_CC_GetImageForRGB + repeatedly in application layer to get frame data with RGB24 format. + When getting frame data, the upper application program should control the frequency + of calling this API according to frame rate. \n\n + This API is not supported by CameraLink device. 
+ ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_RegisterImageCallBackForRGB(void* handle, + void(__stdcall* cbOutput)(unsigned char * pData, MV_FRAME_OUT_INFO_EX* pFrameInfo, void* pUser), + void* pUser); + +/********************************************************************//** + * @~chinese + * @brief 注册图像数据回调,BGR + * @param handle [IN] 设备句柄 + * @param cbOutput [IN] 回调函数指针 + * @param pUser [IN] 用户自定义变量 + * @return 成功,返回 #MV_OK ;错误,返回错误码 + * @remarks 通过该接口可以设置图像数据回调函数,在MV_CC_CreateHandle之后即可调用。\n + 图像数据采集有两种方式,两种方式不能复用:\n + - 方式一:调用 #MV_CC_RegisterImageCallBackForBGR 设置 #BGR24 图像数据回调函数,然后调用 #MV_CC_StartGrabbing 开始采集,采集的图像数据在设置的回调函数中返回。 + - 方式二:调用 #MV_CC_StartGrabbing 开始采集,然后在应用层循环调用 #MV_CC_GetImageForBGR 获取 #BGR24 格式的帧数据,获取帧数据时上层应用程序需要根据帧率控制好调用该接口的频率。 \n\n + 该接口仅在windows版本和Linux版本下支持。 \n + 该接口不支持CameraLink设备。 + + * @~english + * @brief register image data callback, BGR + * @param handle [IN] Device handle + * @param cbOutput [IN] Callback function pointer + * @param pUser [IN] User defined variable + * @return Success, return #MV_OK. Failure, return error code + * @remarks Before calling this API to set image data callback function, you should call this API MV_CC_CreateHandle. \n + There are two image acquisition modes, the two modes cannot be reused: \n + - Mode 1: Call MV_CC_RegisterImageCallBackForBGR to set RGB24 format image data callback function, and then call MV_CC_StartGrabbing to start acquisition, the collected image data will be returned in the configured callback function.\n + - Mode 2: Call MV_CC_StartGrabbing to start acquisition, and the call MV_CC_GetImageForBGR repeatedly in application layer to get frame data with BGR24 format. When getting frame data, the upper application program should control the frequency of calling this API according to frame rate.\n + This API is not supported by CameraLink device. 
+ ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_RegisterImageCallBackForBGR(void* handle, + void(__stdcall* cbOutput)(unsigned char * pData, MV_FRAME_OUT_INFO_EX* pFrameInfo, void* pUser), + void* pUser); + +/********************************************************************//** + * @~chinese + * @brief 开始取流 + * @param handle [IN] 设备句柄 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks 该接口不支持CameraLink设备。 + + * @~english + * @brief Start Grabbing + * @param handle [IN] Device handle + * @return Success, return #MV_OK. Failure, return error code + * @remarks This API is not supported by CameraLink device. + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_StartGrabbing(IN void* handle); + +/********************************************************************//** + * @~chinese + * @brief 停止取流 + * @param handle [IN] 设备句柄 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks 该接口不支持CameraLink设备。 + + * @~english + * @brief Stop Grabbing + * @param handle [IN] Device handle + * @return Success, return #MV_OK. Failure, return error code + * @remarks This API is not supported by CameraLink device. 
+ ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_StopGrabbing(IN void* handle); + +/********************************************************************//** + * @~chinese + * @brief 获取一帧RGB数据,此函数为查询式获取,每次调用查询内部 + 缓存有无数据,有数据则获取数据,无数据返回错误码 + * @param handle [IN] 设备句柄 + * @param pData [OUT] 图像数据接收指针 + * @param nDataSize [IN] 接收缓存大小 + * @param pFrameInfo [OUT] 图像信息结构体 + * @param nMsec [IN] 等待超时时间 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks 每次调用该接口,将查询内部缓存是否有数据,如果有数据则转换成RGB24格式返回,如果没有数据则返回错误码。因为图像转换成RGB24格式有耗时,所以当数据帧率过高时该接口可能会导致丢帧。\n + 调用该接口获取图像数据帧之前需要先调用MV_CC_StartGrabbing启动图像采集。该接口为主动式获取帧数据,上层应用程序需要根据帧率,控制好调用该接口的频率。 \n + 该接口不支持CameraLink设备。 \n + 该接口仅在windows版本和Linux版本下支持。 + + * @~english + * @brief Get one frame of RGB data, this function is using query to get data + query whether the internal cache has data, get data if there has, return error code if no data + * @param handle [IN] Device handle + * @param pData [OUT] Image data receiving buffer + * @param nDataSize [IN] Buffer size + * @param pFrameInfo [OUT] Image information structure + * @param nMsec [IN] Waiting timeout + * @return Success, return #MV_OK. Failure, return error code + * @remarks Before calling this API to set image data callback function, you should call this API MV_CC_CreateHandle. \n + There are two image acquisition modes, the two modes cannot be reused: \n + - Mode 1: Call MV_CC_RegisterImageCallBackForBGR to set RGB24 format image data callback function, and then call MV_CC_StartGrabbing to start acquisition, the collected image data will be returned in the configured callback function.\n + - Mode 2: Call MV_CC_StartGrabbing to start acquisition, and the call MV_CC_GetImageForBGR repeatedly in application layer to get frame data with BGR24 format. When getting frame data, the upper application program should control the frequency of calling this API according to frame rate. \n + This API is not supported by CameraLink device. 
+ ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetImageForRGB(IN void* handle, IN OUT unsigned char * pData , IN unsigned int nDataSize, IN OUT MV_FRAME_OUT_INFO_EX* pFrameInfo, int nMsec); + +/********************************************************************//** + * @~chinese + * @brief 获取一帧BGR数据,此函数为查询式获取,每次调用查询内部 + 缓存有无数据,有数据则获取数据,无数据返回错误码 + * @param handle [IN] 设备句柄 + * @param pData [OUT] 图像数据接收指针 + * @param nDataSize [IN] 接收缓存大小 + * @param pFrameInfo [OUT] 图像信息结构体 + * @param nMsec [IN] 等待超时时间 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks 每次调用该接口,将查询内部缓存是否有数据,如果有数据则转换成BGR24格式返回,如果没有数据则返回错误码。因为图像转换成BGR24格式有耗时,所以当数据帧率过高时该接口可能会导致丢帧。 \n + 调用该接口获取图像数据帧之前需要先调用MV_CC_StartGrabbing启动图像采集。该接口为主动式获取帧数据,上层应用程序需要根据帧率,控制好调用该接口的频率。\n + 该接口不支持CameraLink设备。\n + 该接口仅在windows版本和Linux版本下支持。 + + + * @~english + * @brief Get one frame of BGR data, this function is using query to get data + query whether the internal cache has data, get data if there has, return error code if no data + * @param handle [IN] Device handle + * @param pData [OUT] Image data receiving buffer + * @param nDataSize [IN] Buffer size + * @param pFrameInfo [OUT] Image information structure + * @param nMsec [IN] Waiting timeout + * @return Success, return #MV_OK. Failure, return error code + * @remarks Before calling this API to set image data callback function, you should call this API MV_CC_CreateHandle. \n + There are two image acquisition modes, the two modes cannot be reused: \n + - Mode 1: Call MV_CC_RegisterImageCallBackForBGR to set RGB24 format image data callback function, and then call MV_CC_StartGrabbing to start acquisition, the collected image data will be returned in the configured callback function.\n + - Mode 2: Call MV_CC_StartGrabbing to start acquisition, and the call MV_CC_GetImageForBGR repeatedly in application layer to get frame data with BGR24 format. 
When getting frame data, the upper application program should control the frequency of calling this API according to frame rate. \n + This API is not supported by CameraLink device. + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetImageForBGR(IN void* handle, IN OUT unsigned char * pData , IN unsigned int nDataSize, IN OUT MV_FRAME_OUT_INFO_EX* pFrameInfo, int nMsec); + +/********************************************************************//** + * @~chinese + * @brief 使用内部缓存获取一帧图片(与MV_CC_Display不能同时使用) + * @param handle [IN] 设备句柄 + * @param pFrame [OUT] 图像数据和图像信息 + * @param nMsec [IN] 等待超时时间,输入INFINITE时表示无限等待,直到收到一帧数据或者停止取流 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks 调用该接口获取图像数据帧之前需要先调用MV_CC_StartGrabbing启动图像采集。该接口为主动式获取帧数据,上层应用程序需要根据帧率,控制好调用该接口的频率。该接口支持设置超时时间,SDK内部等待直到有数据时返回,可以增加取流平稳性,适合用于对平稳性要求较高的场合。 \n + 该接口与MV_CC_FreeImageBuffer配套使用,当处理完取到的数据后,需要用MV_CC_FreeImageBuffer接口将pFrame内的数据指针权限进行释放。 \n + 该接口与MV_CC_GetOneFrameTimeout相比,有着更高的效率。且其取流缓存的分配是由sdk内部自动分配的,而MV_CC_GetOneFrameTimeout接口是需要客户自行分配。\n + 该接口在调用MV_CC_Display后无法取流。 \n + 该接口对于U3V、GIGE设备均可支持。 \n + 该接口不支持CameraLink设备。 + + * @~english + * @brief Get a frame of an image using an internal cache + * @param handle [IN] Device handle + * @param pFrame [OUT] Image data and image information + * @param nMsec [IN] Waiting timeout + * @return Success, return #MV_OK. Failure, return error code + * @remarks Before calling this API to get image data frame, you should call MV_CC_StartGrabbing to start image acquisition. This API can get frame data actively, the upper layer program should control the frequency of calling this API according to the frame rate. This API supports setting timeout, and SDK will wait to return until data appears. This function will increase the streaming stability, which can be used in the situation with high stability requirement. 
\n + This API and MV_CC_FreeImageBuffer should be called in pairs, after processing the acquired data, you should call MV_CC_FreeImageBuffer to release the data pointer permission of pFrame. \n + This interface is more efficient than MV_CC_GetOneFrameTimeout. The allocation of the stream cache is automatically allocated within the SDK.The MV_CC_GetOneFrameTimeout interface needs to be allocated by customers themselves. \n + This API cannot be called to stream after calling MV_CC_Display. \n + This API is not supported by CameraLink device. \n + This API is supported by both USB3 vision camera and GigE camera. \n + **********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetImageBuffer(IN void* handle, OUT MV_FRAME_OUT* pFrame, IN unsigned int nMsec); + +/********************************************************************//** + * @~chinese + * @brief 释放图像缓存(此接口用于释放不再使用的图像缓存,与MV_CC_GetImageBuffer配套使用) + * @param handle [IN] 设备句柄 + * @param pFrame [IN] 图像数据和图像信息 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks 该接口与MV_CC_GetImageBuffer配套使用,使用MV_CC_GetImageBuffer接口取到的图像数据pFrame,需要用 #MV_CC_FreeImageBuffer 接口进行权限释放。 \n + 该接口对于取流效率高于GetOneFrameTimeout接口,且GetImageBuffer在不进行Free的情况下,最大支持输出的节点数与 #MV_CC_SetImageNodeNum 接口所设置的节点数相等,默认节点数是1。\n + 该接口对于U3V、GIGE设备均可支持。 \n + 该接口不支持CameraLink设备。 + + * @~english + * @brief Free image buffer(this interface can free image buffer, used with MV_CC_GetImageBuffer) + * @param handle [IN] Device handle + * @param pFrame [IN] Image data and image information + * @return Success, return #MV_OK. Failure, return error code + * @remarks This API and MV_CC_GetImageBuffer should be called in pairs: after processing the image data pFrame obtained by MV_CC_GetImageBuffer, call #MV_CC_FreeImageBuffer to release the permission. \n + Compared with API MV_CC_GetOneFrameTimeout, this API has higher efficiency of image acquisition. The max. 
number of nodes can be outputted is same as the "nNum" of API #MV_CC_SetImageNodeNum, default value is 1. \n + This API is not supported by CameraLink device. \n + This API is supported by both USB3 vision camera and GigE camera. + **********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_FreeImageBuffer(IN void* handle, IN MV_FRAME_OUT* pFrame); + +/********************************************************************//** + * @~chinese + * @brief 采用超时机制获取一帧图片,SDK内部等待直到有数据时返回 + * @param handle [IN] 设备句柄 + * @param pData [OUT] 图像数据接收指针 + * @param nDataSize [IN] 接收缓存大小 + * @param pFrameInfo [OUT] 图像信息结构体 + * @param nMsec [IN] 等待超时时间 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks 调用该接口获取图像数据帧之前需要先调用MV_CC_StartGrabbing启动图像采集。该接口为主动式获取帧数据,上层应用程序需要根据帧率,控制好调用该接口的频率。该接口支持设置超时时间,SDK内部等待直到有数据时返回,可以增加取流平稳性,适合用于对平稳性要求较高的场合。\n + 该接口对于U3V、GIGE设备均可支持。\n + 该接口不支持CameraLink设备。 + + * @~english + * @brief Timeout mechanism is used to get image, and the SDK waits inside until the data is returned + * @param handle [IN] Device handle + * @param pData [OUT] Image data receiving buffer + * @param nDataSize [IN] Buffer size + * @param pFrameInfo [OUT] Image information structure + * @param nMsec [IN] Waiting timeout + * @return Success, return #MV_OK. Failure, return error code + * @remarks Before calling this API to get image data frame, call MV_CC_StartGrabbing to start image acquisition. This API can get frame data actively, the upper layer program should control the frequency of calling this API according to the frame rate. This API supports setting timeout, SDK will wait to return until data appears. This function will increase the streaming stability, which can be used in the situation with high stability requirement. \n + Both the USB3Vision and GIGE camera can support this API. \n + This API is not supported by CameraLink device. 
+ ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetOneFrameTimeout(IN void* handle, IN OUT unsigned char * pData , IN unsigned int nDataSize, IN OUT MV_FRAME_OUT_INFO_EX* pFrameInfo, unsigned int nMsec); + +/********************************************************************//** + * @~chinese + * @brief 清除取流数据缓存 + * @param handle [IN] 设备句柄 + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 该接口允许用户在不停止取流的时候,就能清除缓存中不需要的图像。\n + 该接口在连续模式切触发模式后,可以清除历史数据。 + + * @~english + * @brief if Image buffers has retrieved the data,Clear them + * @param handle [IN] Device handle + * @return Success, return MV_OK. Failure, return error code + * @remarks This interface allows user to clear the unnecessary images from the buffer memory without stopping acquisition. \n + This interface allows user to clear previous data after switching from continuous mode to trigger mode. + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_ClearImageBuffer(IN void* handle); + +/********************************************************************//** + * @~chinese + * @brief 显示图像,注册显示窗口,内部自动显示(与MV_CC_GetImageBuffer不能同时使用) + * @param handle [IN] 句柄 + * @param hWnd [IN] 显示窗口句柄 + * @return 成功,返回#MV_OK;错误,返回错误码 + + * @~english + * @brief Display one frame image, register display window, automatic display internally + * @param handle [IN] Handle + * @param hWnd [IN] Display Window Handle + * @return Success, return #MV_OK. 
Failure, return error code + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_Display(IN void* handle, void* hWnd); + +/********************************************************************//** + * @~chinese + * @brief 显示一帧图像 + * @param handle [IN] 设备句柄 + * @param pDisplayInfo [IN] 图像信息 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks 该接口对于U3V、GIGE设备均可支持。\n + 该接口不支持CameraLink设备。 + + * @~english + * @brief Display one frame image + * @param handle [IN] Device handle + * @param pDisplayInfo [IN] Frame Info + * @return Success, return #MV_OK. Failure, return error code + * @remarks This API is valid for USB3Vision camera and GIGE camera. \n + This API is not supported by CameraLink device. + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_DisplayOneFrame(IN void* handle, IN MV_DISPLAY_FRAME_INFO* pDisplayInfo); + +/********************************************************************//** + * @~chinese + * @brief 设置SDK内部图像缓存节点个数,大于等于1,在抓图前调用 + * @param handle [IN] 设备句柄 + * @param nNum [IN] 缓存节点个数,范围[1,30],默认为1 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks 调用该接口可以设置SDK内部图像缓存节点个数,在调用MV_CC_StartGrabbing开始抓图前调用。\n + 增加图像缓存节点个数会增加SDK使用的内存,但是可以有效避免某些性能差的ARM板出现的跳帧现象。\n + 该接口不支持CameraLink设备。 + + * @~english + * @brief Set the number of the internal image cache nodes in SDK, Greater than or equal to 1, to be called before the capture + * @param handle [IN] Device handle + * @param nNum [IN] Image Node Number, range[1,30], default 1 + * @return Success, return #MV_OK. Failure, return error code + * @remarks Call this interface to set the number of SDK internal image buffer nodes. + The interface should be called before calling MV_CC_StartGrabbing for capturing. \n + Increasing the number of image cache nodes will increase the memory used by the SDK, + but it can effectively avoid frame skipping on some ARM boards with poor performance. 
\n + This API is not supported by CameraLink device. + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetImageNodeNum(IN void* handle, unsigned int nNum); + +/********************************************************************//** + * @~chinese + * @brief 获取设备信息,取流之前调用 + * @param handle [IN] 设备句柄 + * @param pstDevInfo [IN][OUT] 返回给调用者有关相机设备信息结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 支持用户在打开设备后获取设备信息。\n + 若该设备是GigE相机,则调用该接口存在阻塞风险,因此不建议在取流过程中调用该接口。 + + * @~english + * @brief Get device information + * @param handle [IN] Device handle + * @param pstDevInfo [IN][OUT] Structure pointer of device information + * @return Success, return #MV_OK. Failure, return error code + * @remarks The API support users to access device information after opening the device. \n + If the device is a GigE camera, there is a blocking risk in calling the interface, so it is not recommended to call the interface during the fetching process. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetDeviceInfo(IN void * handle, IN OUT MV_CC_DEVICE_INFO* pstDevInfo); + +/********************************************************************//** + * @~chinese + * @brief 获取各种类型的信息 + * @param handle [IN] 设备句柄 + * @param pstInfo [IN][OUT] 返回给调用者有关相机各种类型的信息结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 接口里面输入需要获取的信息类型(指定MV_ALL_MATCH_INFO结构体中的nType类型),获取对应的信息(在MV_ALL_MATCH_INFO结构体中pInfo里返回)。 \n + 该接口的调用前置条件取决于所获取的信息类型,获取GigE设备的MV_MATCH_TYPE_NET_DETECT信息需在开启抓图之后调用,获取U3V设备的MV_MATCH_TYPE_USB_DETECT信息需在打开设备之后调用。 \n + 该接口不支持CameraLink设备。 + + * @~english + * @brief Get various type of information + * @param handle [IN] Device handle + * @param pstInfo [IN][OUT] Structure pointer of various type of information + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks Input required information type (specify nType in structure MV_ALL_MATCH_INFO) in the interface and get corresponding information (return in pInfo of structure MV_ALL_MATCH_INFO). \n + The calling precondition of this interface is determined by obtained information type. Call after enabling capture to get MV_MATCH_TYPE_NET_DETECT information of GigE device, and call after starting device to get MV_MATCH_TYPE_USB_DETECT information of USB3Vision device. \n + This API is not supported by CameraLink device. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetAllMatchInfo(IN void* handle, IN OUT MV_ALL_MATCH_INFO* pstInfo); + +/************************************************************************/ +/* 设置和获取相机参数的万能接口 */ +/* General interface for getting and setting camera parameters */ +/************************************************************************/ +/********************************************************************//** + * @~chinese + * @brief 获取Integer属性值 + * @param handle [IN] 设备句柄 + * @param strKey [IN] 属性键值,如获取宽度信息则为"Width" + * @param pIntValue [IN][OUT] 返回给调用者有关相机属性结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 连接设备之后调用该接口可以获取int类型的指定节点的值。strKey取值可以参考XML节点参数类型列表, + * 表格里面数据类型为“IInteger”的节点值都可以通过该接口获取,strKey参数取值对应列表里面的“名称”一列。 + + * @~english + * @brief Get Integer value + * @param handle [IN] Device handle + * @param strKey [IN] Key value, for example, using "Width" to get width + * @param pIntValue [IN][OUT] Structure pointer of camera features + * @return Success, return #MV_OK. Failure, return error code + * @remarks You can call this API to get the value of camera node with integer type after connecting the device. + * For strKey value, refer to MvCameraNode. All the node values of "IInteger" in the list + * can be obtained via this API. strKey corresponds to the Name column. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetIntValue(IN void* handle,IN const char* strKey,OUT MVCC_INTVALUE *pIntValue); + +/********************************************************************//** + * @~chinese + * @brief 获取Integer属性值 + * @param handle [IN] 设备句柄 + * @param strKey [IN] 属性键值,如获取宽度信息则为"Width" + * @param pIntValue [IN][OUT] 返回给调用者有关相机属性结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 连接设备之后调用该接口可以获取int类型的指定节点的值。strKey取值可以参考XML节点参数类型列表, + * 表格里面数据类型为“IInteger”的节点值都可以通过该接口获取,strKey参数取值对应列表里面的“名称”一列。 + + * @~english + * @brief Get Integer value + * @param handle [IN] Device handle + * @param strKey [IN] Key value, for example, using "Width" to get width + * @param pIntValue [IN][OUT] Structure pointer of camera features + * @return Success, return #MV_OK. Failure, return error code + * @remarks You can call this API to get the value of camera node with integer type after connecting the device. + * For strKey value, refer to MvCameraNode. All the node values of "IInteger" in the list + * can be obtained via this API. strKey corresponds to the Name column. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetIntValueEx(IN void* handle,IN const char* strKey,OUT MVCC_INTVALUE_EX *pIntValue); + +/********************************************************************//** + * @~chinese + * @brief 设置Integer型属性值 + * @param handle [IN] 设备句柄 + * @param strKey [IN] 属性键值,如获取宽度信息则为"Width" + * @param nValue [IN] 想要设置的相机的属性值 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 连接设备之后调用该接口可以设置int类型的指定节点的值。strKey取值可以参考XML节点参数类型列表,表格里面数据类型为“IInteger”的节点值都可以通过该接口设置,strKey参数取值对应列表里面的“名称”一列。 + + * @~english + * @brief Set Integer value + * @param handle [IN] Device handle + * @param strKey [IN] Key value, for example, using "Width" to set width + * @param nValue [IN] Feature value to set + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks You can call this API to set the value of camera node with integer type after connecting the device. For strKey value, refer to MvCameraNode. All the node values of "IInteger" in the list can be obtained via this API. strKey corresponds to the Name column. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetIntValue(IN void* handle,IN const char* strKey,IN unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 设置Integer型属性值 + * @param handle [IN] 设备句柄 + * @param strKey [IN] 属性键值,如获取宽度信息则为"Width" + * @param nValue [IN] 想要设置的相机的属性值 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 连接设备之后调用该接口可以设置int类型的指定节点的值。strKey取值可以参考XML节点参数类型列表,表格里面数据类型为“IInteger”的节点值都可以通过该接口设置,strKey参数取值对应列表里面的“名称”一列。 + + * @~english + * @brief Set Integer value + * @param handle [IN] Device handle + * @param strKey [IN] Key value, for example, using "Width" to set width + * @param nValue [IN] Feature value to set + * @return Success, return #MV_OK. Failure, return error code + * @remarks You can call this API to set the value of camera node with integer type after connecting the device. For strKey value, refer to MvCameraNode. All the node values of "IInteger" in the list can be obtained via this API. strKey corresponds to the Name column. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetIntValueEx(IN void* handle,IN const char* strKey,IN int64_t nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取Enum属性值 + * @param handle [IN] 设备句柄 + * @param strKey [IN] 属性键值,如获取像素格式信息则为"PixelFormat" + * @param pEnumValue [IN][OUT] 返回给调用者有关相机属性结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 连接设备之后调用该接口可以获取Enum类型的指定节点的值。strKey取值可以参考XML节点参数类型列表,表格里面数据类型为“IEnumeration”的节点值都可以通过该接口获取,strKey参数取值对应列表里面的“名称”一列。 + + * @~english + * @brief Get Enum value + * @param handle [IN] Device handle + * @param strKey [IN] Key value, for example, using "PixelFormat" to get pixel format + * @param pEnumValue [IN][OUT] Structure pointer of camera features + * @return Success, return #MV_OK. Failure, return error code + * @remarks After the device is connected, call this interface to get specified Enum nodes. For value of strKey, see MvCameraNode, The node values of IEnumeration can be obtained through this interface, strKey value corresponds to the Name column. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetEnumValue(IN void* handle,IN const char* strKey,OUT MVCC_ENUMVALUE *pEnumValue); + +/********************************************************************//** + * @~chinese + * @brief 设置Enum型属性值 + * @param handle [IN] 设备句柄 + * @param strKey [IN] 属性键值,如获取像素格式信息则为"PixelFormat" + * @param nValue [IN] 想要设置的相机的属性值 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 连接设备之后调用该接口可以设置Enum类型的指定节点的值。strKey取值可以参考XML节点参数类型列表,表格里面数据类型为“IEnumeration”的节点值都可以通过该接口设置,strKey参数取值对应列表里面的“名称”一列。 + + * @~english + * @brief Set Enum value + * @param handle [IN] Device handle + * @param strKey [IN] Key value, for example, using "PixelFormat" to set pixel format + * @param nValue [IN] Feature value to set + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks After the device is connected, call this interface to set specified Enum nodes. For value of strKey, see MvCameraNode, The node values of IEnumeration can be obtained through this interface, strKey value corresponds to the Name column. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetEnumValue(IN void* handle,IN const char* strKey,IN unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 设置Enum型属性值 + * @param handle [IN] 设备句柄 + * @param strKey [IN] 属性键值,如获取像素格式信息则为"PixelFormat" + * @param sValue [IN] 想要设置的相机的属性字符串 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 连接设备之后调用该接口可以设置Enum类型的指定节点的值。strKey取值可以参考XML节点参数类型列表,表格里面数据类型为“IEnumeration”的节点值都可以通过该接口设置,strKey参数取值对应列表里面的“名称”一列。 + + * @~english + * @brief Set Enum value + * @param handle [IN] Device handle + * @param strKey [IN] Key value, for example, using "PixelFormat" to set pixel format + * @param sValue [IN] Feature String to set + * @return Success, return #MV_OK. Failure, return error code + * @remarks Call this API after connecting the device. All the values of nodes with IEnumeration type can be set via this API. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetEnumValueByString(IN void* handle,IN const char* strKey,IN const char* sValue); + +/********************************************************************//** + * @~chinese + * @brief 获取Float属性值 + * @param handle [IN] 设备句柄 + * @param strKey [IN] 属性键值 + * @param pFloatValue [IN][OUT] 返回给调用者有关相机属性结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 连接设备之后调用该接口可以获取float类型的指定节点的值。strKey取值可以参考XML节点参数类型列表,表格里面数据类型为“IFloat”的节点值都可以通过该接口获取,strKey参数取值对应列表里面的“名称”一列。 + + * @~english + * @brief Get Float value + * @param handle [IN] Device handle + * @param strKey [IN] Key value + * @param pFloatValue [IN][OUT] Structure pointer of camera features + * @return Success, return #MV_OK. Failure, return error code + * @remarks After the device is connected, call this interface to get specified float node. For detailed strKey value see: MvCameraNode. The node values of IFloat can be obtained through this interface, strKey value corresponds to the Name column. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetFloatValue(IN void* handle,IN const char* strKey,OUT MVCC_FLOATVALUE *pFloatValue); + +/********************************************************************//** + * @~chinese + * @brief 设置float型属性值 + * @param handle [IN] 设备句柄 + * @param strKey [IN] 属性键值 + * @param fValue [IN] 想要设置的相机的属性值 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 连接设备之后调用该接口可以设置float类型的指定节点的值。strKey取值可以参考XML节点参数类型列表,表格里面数据类型为“IFloat”的节点值都可以通过该接口设置,strKey参数取值对应列表里面的“名称”一列。 + + * @~english + * @brief Set float value + * @param handle [IN] Device handle + * @param strKey [IN] Key value + * @param fValue [IN] Feature value to set + * @return Success, return #MV_OK. Failure, return error code + * @remarks After the device is connected, call this interface to set specified float node. For detailed strKey value see: MvCameraNode. 
The node values of IFloat can be set through this interface, strKey value corresponds to the Name column. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetFloatValue(IN void* handle,IN const char* strKey,IN float fValue); + +/********************************************************************//** + * @~chinese + * @brief 获取Boolean属性值 + * @param handle [IN] 设备句柄 + * @param strKey [IN] 属性键值 + * @param pBoolValue [IN][OUT] 返回给调用者有关相机属性值 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 连接设备之后调用该接口可以获取bool类型的指定节点的值。strKey取值可以参考XML节点参数类型列表,表格里面数据类型为“IBoolean”的节点值都可以通过该接口获取,strKey参数取值对应列表里面的“名称”一列。 + + * @~english + * @brief Get Boolean value + * @param handle [IN] Device handle + * @param strKey [IN] Key value + * @param pBoolValue [IN][OUT] Structure pointer of camera features + * @return Success, return #MV_OK. Failure, return error code + * @remarks After the device is connected, call this interface to get specified bool nodes. For value of strKey, see MvCameraNode. The node values of IBoolean can be obtained through this interface, strKey value corresponds to the Name column. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetBoolValue(IN void* handle,IN const char* strKey,OUT bool *pBoolValue); + +/********************************************************************//** + * @~chinese + * @brief 设置Boolean型属性值 + * @param handle [IN] 设备句柄 + * @param strKey [IN] 属性键值 + * @param bValue [IN] 想要设置的相机的属性值 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 连接设备之后调用该接口可以设置bool类型的指定节点的值。strKey取值可以参考XML节点参数类型列表,表格里面数据类型为“IBoolean”的节点值都可以通过该接口设置,strKey参数取值对应列表里面的“名称”一列。 + + * @~english + * @brief Set Boolean value + * @param handle [IN] Device handle + * @param strKey [IN] Key value + * @param bValue [IN] Feature value to set + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks After the device is connected, call this interface to set specified bool nodes. For value of strKey, see MvCameraNode. The node values of IBoolean can be set through this interface, strKey value corresponds to the Name column. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetBoolValue(IN void* handle,IN const char* strKey,IN bool bValue); + +/********************************************************************//** + * @~chinese + * @brief 获取String属性值 + * @param handle [IN] 设备句柄 + * @param strKey [IN] 属性键值 + * @param pStringValue [IN][OUT] 返回给调用者有关相机属性结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 连接设备之后调用该接口可以获取string类型的指定节点的值。strKey取值可以参考XML节点参数类型列表,表格里面数据类型为“IString”的节点值都可以通过该接口获取,strKey参数取值对应列表里面的“名称”一列。 + + * @~english + * @brief Get String value + * @param handle [IN] Device handle + * @param strKey [IN] Key value + * @param pStringValue [IN][OUT] Structure pointer of camera features + * @return Success, return #MV_OK. Failure, return error code + * @remarks After the device is connected, call this interface to get specified string nodes. For value of strKey, see MvCameraNode. The node values of IString can be obtained through this interface, strKey value corresponds to the Name column. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetStringValue(IN void* handle,IN const char* strKey,OUT MVCC_STRINGVALUE *pStringValue); + +/********************************************************************//** + * @~chinese + * @brief 设置String型属性值 + * @param handle [IN] 设备句柄 + * @param strKey [IN] 属性键值 + * @param sValue [IN] 想要设置的相机的属性值 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 连接设备之后调用该接口可以设置string类型的指定节点的值。strKey取值可以参考XML节点参数类型列表,表格里面数据类型为“IString”的节点值都可以通过该接口设置,strKey参数取值对应列表里面的“名称”一列。 + + * @~english + * @brief Set String value + * @param handle [IN] Device handle + * @param strKey [IN] Key value + * @param sValue [IN] Feature value to set + * @return Success, return #MV_OK. Failure, return error code + * @remarks After the device is connected, call this interface to set specified string nodes. For value of strKey, see MvCameraNode. The node values of IString can be set through this interface, strKey value corresponds to the Name column. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetStringValue(IN void* handle,IN const char* strKey,IN const char * sValue); + +/********************************************************************//** + * @~chinese + * @brief 设置Command型属性值 + * @param handle [IN] 设备句柄 + * @param strKey [IN] 属性键值 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 连接设备之后调用该接口可以设置指定的Command类型节点。strKey取值可以参考XML节点参数类型列表,表格里面数据类型为“ICommand”的节点都可以通过该接口设置,strKey参数取值对应列表里面的“名称”一列。 + + * @~english + * @brief Send Command + * @param handle [IN] Device handle + * @param strKey [IN] Key value + * @return Success, return #MV_OK. Failure, return error code + * @remarks After the device is connected, call this interface to set specified Command nodes. For value of strKey, see MvCameraNode. The node values of ICommand can be set through this interface, strKey value corresponds to the Name column. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetCommandValue(IN void* handle,IN const char* strKey); + +/********************************************************************//** + * @~chinese + * @brief 清除GenICam节点缓存 + * @param handle [IN] 设备句柄 + * @return 成功,返回#MV_OK;错误,返回错误码 + + * @~english + * @brief Invalidate GenICam Nodes + * @param handle [IN] Device handle + * @return Success, return #MV_OK. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_InvalidateNodes(IN void* handle); + + +/************************************************************************/ +/* 设备升级 和 寄存器读写 和异常、事件回调 */ +/* Device upgrade, register read and write and exception callback */ +/************************************************************************/ +/********************************************************************//** + * @~chinese + * @brief 设备本地升级 + * @param handle [IN] 设备句柄 + * @param pFilePathName [IN] 文件名 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 通过该接口可以将升级固件文件发送给设备进行升级。该接口需要等待升级固件文件成功传给设备端之后再返回,响应时间可能较长。 + + * @~english + * @brief Device Local Upgrade + * @param handle [IN] Device handle + * @param pFilePathName [IN] File name + * @return Success, return #MV_OK. Failure, return error code + * @remarks Call this API to send the upgrade firmware file to the device for upgrading. This API waits until the upgrade firmware file has been successfully sent to the device before returning, so the response time may be long. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_LocalUpgrade(IN void* handle, const void *pFilePathName); + +/********************************************************************//** + * @~chinese + * @brief 获取升级进度 + * @param handle [IN] 设备句柄 + * @param pnProcess [OUT] 进度接收地址 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 获取升级进度百分值。 + + * @~english + * @brief Get Upgrade Progress + * @param handle [IN] Device handle + * @param pnProcess [OUT] Progress receiving address + * @return Success, return #MV_OK. 
 Failure, return error code + * @remarks Get the upgrade progress as a percentage value. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetUpgradeProcess(IN void* handle, unsigned int* pnProcess); + +/********************************************************************//** + * @~chinese + * @brief 读内存 + * @param handle [IN] 设备句柄 + * @param pBuffer [IN][OUT] 作为返回值使用,保存读到的内存值(内存值是按照大端模式存储的) + * @param nAddress [IN] 待读取的内存地址,该地址可以从设备的Camera.xml文件中获取,形如xxx_RegAddr的xml节点值 + * @param nLength [IN] 待读取的内存长度 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 访问设备,读取某段寄存器的数据。 + + * @~english + * @brief Read Memory + * @param handle [IN] Device Handle + * @param pBuffer [IN][OUT] Used as a return value, save the read-in memory value ( Memory value is stored in accordance with the big end model) + * @param nAddress [IN] Memory address to be read, which can be obtained from the Camera.xml file of the device, the form xml node value of xxx_RegAddr + * @param nLength [IN] Length of the memory to be read + * @return Success, return #MV_OK. Failure, return error code + * @remarks Access device, read the data from certain register. 
+*************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_ReadMemory(IN void* handle , void *pBuffer, int64_t nAddress, int64_t nLength); + +/********************************************************************//** + * @~chinese + * @brief 写内存 + * @param handle [IN] 设备句柄 + * @param pBuffer [IN] 待写入的内存值(注意内存值要按照大端模式存储) + * @param nAddress [IN] 待写入的内存地址,该地址可以从设备的Camera.xml文件中获取,形如xxx_RegAddr的xml节点值 + * @param nLength [IN] 待写入的内存长度 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 访问设备,把一段数据写入某段寄存器。 + + * @~english + * @brief Write Memory + * @param handle [IN] Device Handle + * @param pBuffer [IN] Memory value to be written ( Note the memory value to be stored in accordance with the big end model) + * @param nAddress [IN] Memory address to be written, which can be obtained from the Camera.xml file of the device, the form xml node value of xxx_RegAddr + * @param nLength [IN] Length of the memory to be written + * @return Success, return #MV_OK. Failure, return error code + * @remarks Access device, write a piece of data into a certain segment of register. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_WriteMemory(IN void* handle , const void *pBuffer, int64_t nAddress, int64_t nLength); + +/********************************************************************//** + * @~chinese + * @brief 注册异常消息回调,在打开设备之后调用 + * @param handle [IN] 设备句柄 + * @param cbException [IN] 异常回调函数指针 + * @param pUser [IN] 用户自定义变量 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 该接口需要在MV_CC_OpenDevice打开设备之后调用。设备异常断开连接后可以在回调里面获取到异常消息,GigE设备掉线之后需要先调用MV_CC_CloseDevice接口关闭设备,再调用MV_CC_OpenDevice接口重新打开设备。 + + * @~english + * @brief Register Exception Message CallBack, call after open device + * @param handle [IN] Device handle + * @param cbException [IN] Exception Message CallBack Function Pointer + * @param pUser [IN] User defined variable + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks Call this interface after the device is opened by MV_CC_OpenDevice. When device is exceptionally disconnected, the exception message can be obtained from callback function. For Disconnected GigE device, first call MV_CC_CloseDevice to shut device, and then call MV_CC_OpenDevice to reopen the device. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_RegisterExceptionCallBack(IN void* handle, + void(__stdcall* cbException)(unsigned int nMsgType, void* pUser), void* pUser); + +/********************************************************************//** + * @~chinese + * @brief 注册全部事件回调,在打开设备之后调用 + * @param handle [IN] 设备句柄 + * @param cbEvent [IN] 事件回调函数指针 + * @param pUser [IN] 用户自定义变量 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 通过该接口设置事件回调,可以在回调函数里面获取采集、曝光等事件信息。 \n + 该接口不支持CameraLink设备。\n + 该接口仅在windows版本和Linux版本下支持。 + + * @~english + * @brief Register event callback, which is called after the device is opened + * @param handle [IN] Device handle + * @param cbEvent [IN] Event CallBack Function Pointer + * @param pUser [IN] User defined variable + * @return Success, return #MV_OK. Failure, return error code + * @remarks Call this API to set the event callback function to get the event information, e.g., acquisition, exposure, and so on. \n + This API is not supported by CameraLink device. 
+************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_RegisterAllEventCallBack(void* handle, void(__stdcall* cbEvent)(MV_EVENT_OUT_INFO * pEventInfo, void* pUser), void* pUser); + +/********************************************************************//** + * @~chinese + * @brief 注册单个事件回调,在打开设备之后调用 + * @param handle [IN] 设备句柄 + * @param pEventName [IN] 事件名称 + * @param cbEvent [IN] 事件回调函数指针 + * @param pUser [IN] 用户自定义变量 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 通过该接口设置事件回调,可以在回调函数里面获取采集、曝光等事件信息。\n + 该接口不支持CameraLink设备,仅支持"设备掉线"这一种事件。 + + * @~english + * @brief Register single event callback, which is called after the device is opened + * @param handle [IN] Device handle + * @param pEventName [IN] Event name + * @param cbEvent [IN] Event CallBack Function Pointer + * @param pUser [IN] User defined variable + * @return Success, return #MV_OK. Failure, return error code + * @remarks Call this API to set the event callback function to get the event information, e.g., acquisition, exposure, and so on. \n + This API is not supported by CameraLink device . 
+************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_RegisterEventCallBackEx(void* handle, const char* pEventName, + void(__stdcall* cbEvent)(MV_EVENT_OUT_INFO * pEventInfo, void* pUser), void* pUser); + + +/************************************************************************/ +/* GigEVision 设备独有的接口 */ +/* GigEVision device specific interface */ +/************************************************************************/ +/********************************************************************//** + * @~chinese + * @brief 强制IP + * @param handle [IN] 设备句柄 + * @param nIP [IN] 设置的IP + * @param nSubNetMask [IN] 子网掩码 + * @param nDefaultGateWay [IN] 默认网关 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 强制设置相机网络参数(包括IP、子网掩码、默认网关),强制设置之后将需要重新创建设备句柄,仅GigEVision相机支持。\n + 如果设备为DHCP的状态,调用该接口强制设置相机网络参数之后设备将会重启。 + + * @~english + * @brief Force IP + * @param handle [IN] Device handle + * @param nIP [IN] IP to set + * @param nSubNetMask [IN] Subnet mask + * @param nDefaultGateWay [IN] Default gateway + * @return Success, return #MV_OK. Failure, return error code + * @remarks Force setting camera network parameter (including IP address, subnet mask, default gateway). After forced setting, device handle should be created again. This function is only supported by GigEVision camera.\n + If device is in DHCP status, after calling this API to force setting camera network parameter, the device will restart. 
+************************************************************************/
+MV_CAMCTRL_API int __stdcall MV_GIGE_ForceIpEx(IN void* handle, unsigned int nIP, unsigned int nSubNetMask, unsigned int nDefaultGateWay);
+
+/********************************************************************//**
+ * @~chinese
+ * @brief 配置IP方式
+ * @param handle [IN] 设备句柄
+ * @param nType [IN] IP类型,见MV_IP_CFG_x
+ * @return 成功,返回#MV_OK,失败,返回错误码
+ * @remarks 发送命令设置相机的IP方式,如DHCP、LLA等,仅GigEVision相机支持。
+
+ * @~english
+ * @brief IP configuration method
+ * @param handle [IN] Device handle
+ * @param nType [IN] IP type, refer to MV_IP_CFG_x
+ * @return Success, return #MV_OK. Failure, return error code
+ * @remarks Send command to set camera IP mode, such as DHCP and LLA, only supported by GigEVision.
+************************************************************************/
+MV_CAMCTRL_API int __stdcall MV_GIGE_SetIpConfig(IN void* handle, unsigned int nType);
+
+/********************************************************************//**
+ * @~chinese
+ * @brief 设置仅使用某种模式,type: MV_NET_TRANS_x,不设置时,默认优先使用driver
+ * @param handle [IN] 设备句柄
+ * @param nType [IN] 网络传输模式,见MV_NET_TRANS_x
+ * @return 成功,返回#MV_OK,失败,返回错误码
+ * @remarks 通过该接口可以设置SDK内部优先使用的网络模式,默认优先使用驱动模式,仅GigEVision相机支持。
+
+ * @~english
+ * @brief Set to use only one mode, type: MV_NET_TRANS_x. When not set, the driver mode is preferred by default
+ * @param handle [IN] Device handle
+ * @param nType [IN] Net transmission mode, refer to MV_NET_TRANS_x
+ * @return Success, return #MV_OK. Failure, return error code
+ * @remarks Set SDK internal priority network mode through this interface, driver mode by default, only supported by GigEVision camera. 
+************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetNetTransMode(IN void* handle, unsigned int nType); + +/********************************************************************//** + * @~chinese + * @brief 获取网络传输信息 + * @param handle [IN] 设备句柄 + * @param pstInfo [OUT] 信息结构体 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 通过该接口可以获取网络传输相关信息,包括已接收数据大小、丢帧数量等,在MV_CC_StartGrabbing开启采集之后调用。仅GigEVision相机支持。 + + * @~english + * @brief Get net transmission information + * @param handle [IN] Device handle + * @param pstInfo [OUT] Information Structure + * @return Success, return #MV_OK. Failure, return error code + * @remarks Get network transmission information through this API, including received data size, number of lost frames. Call this API after starting image acquiring through MV_CC_StartGrabbing. This API is supported only by GigEVision Camera. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_GetNetTransInfo(IN void* handle, MV_NETTRANS_INFO* pstInfo); + +/********************************************************************//** + * @~chinese + * @brief 设置GVSP取流超时时间 + * @param handle [IN] 设备句柄 + * @param nMillisec [IN] 超时时间,默认300ms,范围:>10ms + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 连接设备之后,取流动作发生前,调用该接口可以设置GVSP取流超时时间。GVSP取流超时设置过短可能造成图像异常,设置过长可能造成取流时间变长。 + + * @~english + * @brief Set GVSP streaming timeout + * @param handle [IN] Device handle + * @param nMillisec [IN] Timeout, default 300ms, range: >10ms + * @return Success, return MV_OK. Failure, return error code + * @remarks After the device is connected, and just before start streaming, + * call this interface to set GVSP streaming timeout value. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetGvspTimeout(void* handle, unsigned int nMillisec); + +/********************************************************************//** + * @~chinese + * @brief 获取GVSP取流超时时间 + * @param handle [IN] 设备句柄 + * @param pnMillisec [IN] 超时时间指针,以毫秒位单位 + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 该接口用于获取当前的GVSP取流超时时间 + + * @~english + * @brief Get GVSP streaming timeout + * @param handle [IN] Device handle + * @param pnMillisec [IN] Timeout, ms as unit + * @return Success, return MV_OK. Failure, return error code + * @remarks This interface is used to get the current GVSP streaming timeout. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_GetGvspTimeout(IN void* handle, unsigned int* pnMillisec); + +/********************************************************************//** + * @~chinese + * @brief 设置GVCP命令超时时间 + * @param handle [IN] 设备句柄 + * @param nMillisec [IN] 超时时间,默认500ms,范围:0-10000ms + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 连接设备之后调用该接口可以设置GVCP命令超时时间。 + + * @~english + * @brief Set GVCP cammand timeout + * @param handle [IN] Device handle + * @param nMillisec [IN] Timeout, default 500ms, range: 0-10000ms + * @return Success, return MV_OK. Failure, return error code + * @remarks After the device is connected, call this interface to set GVCP command timeout time. 
+ ************************************************************************/
+MV_CAMCTRL_API int __stdcall MV_GIGE_SetGvcpTimeout(void* handle, unsigned int nMillisec);
+
+/********************************************************************//**
+ * @~chinese
+ * @brief 获取GVCP命令超时时间
+ * @param handle [IN] 设备句柄
+ * @param pnMillisec [OUT] 超时时间指针,以毫秒为单位
+ * @return 成功,返回MV_OK;错误,返回错误码
+ * @remarks 该接口用于获取当前的GVCP超时时间。
+
+ * @~english
+ * @brief Get GVCP command timeout
+ * @param handle [IN] Device handle
+ * @param pnMillisec [OUT] Timeout, ms as unit
+ * @return Success, return MV_OK. Failure, return error code
+ * @remarks This interface is used to get the current GVCP timeout.
+ ************************************************************************/
+MV_CAMCTRL_API int __stdcall MV_GIGE_GetGvcpTimeout(IN void* handle, unsigned int* pnMillisec);
+
+/********************************************************************//**
+ * @~chinese
+ * @brief 设置重传GVCP命令次数
+ * @param handle [IN] 设备句柄
+ * @param nRetryGvcpTimes [IN] 重传次数,范围:0-100
+ * @return 成功,返回MV_OK;错误,返回错误码
+ * @remarks 该接口用于在GVCP包传输异常时,增加重传的次数,在一定程度上可以避免设备掉线,范围为0-100。
+
+ * @~english
+ * @brief Set the number of retry GVCP command
+ * @param handle [IN] Device handle
+ * @param nRetryGvcpTimes [IN] The number of retries, range: 0-100
+ * @return Success, return MV_OK. Failure, return error code
+ * @remarks This interface is used to increase the number of retransmissions when GVCP packet transmission is abnormal, and to some extent, it can avoid dropping the camera, with a range of 0-100. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetRetryGvcpTimes(IN void* handle, unsigned int nRetryGvcpTimes); + +/********************************************************************//** + * @~chinese + * @brief 获取重传GVCP命令次数 + * @param handle [IN] 设备句柄 + * @param pnRetryGvcpTimes [OUT] 重传次数指针 + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 该接口用于获取当前的GVCP重传次数,默认3次。 + + * @~english + * @brief Get the number of retry GVCP cammand + * @param handle [IN] Device handle + * @param pnRetryGvcpTimes [OUT] The number of retries + * @return Success, return MV_OK. Failure, return error code + * @remarks This interface is used to get the current number of GVCP retransmissions, which defaults to 3. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_GetRetryGvcpTimes(IN void* handle, unsigned int* pnRetryGvcpTimes); + +/********************************************************************//** + * @~chinese + * @brief 获取最佳的packet size,该接口目前只支持GigE设备 + * @param handle [IN] 设备句柄 + * @return 最佳packetsize + * @remarks 获取最佳的packet size,对应GigEVision设备是SCPS,对应U3V设备是每次从驱动读取的包大小,该大小即网络上传输一个包的大小。该接口需要在MV_CC_OpenDevice之后、MV_CC_StartGrabbing之前调用。 \n + 该接口不支持CameraLink设备。 + + * @~english + * @brief Get the optimal Packet Size, Only support GigE Camera + * @param handle [IN] Device handle + * @return Optimal packetsize + * @remarks To get optimized packet size, for GigEVision device is SCPS, for USB3Vision device is the size of packet read from drive each time, and it is the size of a packet transported on the network. The interface should be called after MV_CC_OpenDevice and before MV_CC_StartGrabbing. \n + This API is not supported by CameraLink device. 
+ ************************************************************************/
+MV_CAMCTRL_API int __stdcall MV_CC_GetOptimalPacketSize(IN void* handle);
+
+/********************************************************************//**
+ * @~chinese
+ * @brief 设置是否打开重发包支持,及重发包设置
+ * @param handle [IN] 设备句柄
+ * @param bEnable [IN] 是否支持重发包
+ * @param nMaxResendPercent [IN] 最大重发比
+ * @param nResendTimeout [IN] 重发超时时间
+ * @return 成功,返回#MV_OK;错误,返回错误码
+ * @remarks 连接设备之后调用该接口可以设置重发包属性,仅GigEVision相机支持。 \n
+ 该接口仅在windows版本和Linux版本下支持。
+
+ * @~english
+ * @brief Set whether to enable resend, and set resend
+ * @param handle [IN] Device handle
+ * @param bEnable [IN] enable resend
+ * @param nMaxResendPercent [IN] Max resend percent
+ * @param nResendTimeout [IN] Resend timeout
+ * @return Success, return #MV_OK. Failure, return error code
+ * @remarks After the device is connected, call this interface to set resend packet properties, only supported by GigEVision camera.
+ ************************************************************************/
+#ifndef __cplusplus
+MV_CAMCTRL_API int __stdcall MV_GIGE_SetResend(void* handle, unsigned int bEnable, unsigned int nMaxResendPercent, unsigned int nResendTimeout);
+#else
+MV_CAMCTRL_API int __stdcall MV_GIGE_SetResend(void* handle, unsigned int bEnable, unsigned int nMaxResendPercent = 10, unsigned int nResendTimeout = 50);
+#endif
+
+
+/********************************************************************//**
+ * @~chinese
+ * @brief 设置重传命令最大尝试次数
+ * @param handle [IN] 设备句柄
+ * @param nRetryTimes [IN] 重传命令最大尝试次数,默认20
+ * @return 成功,返回MV_OK;错误,返回错误码
+ * @remarks 该接口必须在MV_CC_StartGrabbing之前调用,否则返回错误码MV_E_CALLORDER。
+
+ * @~english
+ * @brief set the max resend retry times
+ * @param handle [IN] Device handle
+ * @param nRetryTimes [IN] the max times to retry resending lost packets, default 20
+ * @return Success, return MV_OK. Failure, return error code
+ * @remarks This interface must be called before MV_CC_StartGrabbing. 
Otherwise return MV_E_CALLORDER. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetResendMaxRetryTimes(void* handle, unsigned int nRetryTimes); + +/********************************************************************//** + * @~chinese + * @brief 获取重传命令最大尝试次数 + * @param handle [IN] 设备句柄 + * @param pnRetryTimes [IN] 重传命令最大尝试次数 + * @return 成功,返回MV_OK;错误,返回错误码 + + * @~english + * @brief get the max resend retry times + * @param handle [IN] Device handle + * @param pnRetryTimes [OUT] the max times to retry resending lost packets + * @return Success, return MV_OK. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_GetResendMaxRetryTimes(void* handle, unsigned int* pnRetryTimes); + +/********************************************************************//** + * @~chinese + * @brief 设置同一重传包多次请求之间的时间间隔 + * @param handle [IN] 设备句柄 + * @param nMillisec [IN] 同一重传包多次请求之间的时间间隔,默认10ms + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 该接口必须在MV_CC_StartGrabbing之前调用,否则返回错误码MV_E_CALLORDER。 + + * @~english + * @brief set time interval between same resend requests + * @param handle [IN] Device handle + * @param nMillisec [OUT] the time interval between same resend requests, default 10ms + * @return Success, return MV_OK. Failure, return error code + * @remarks This interface must be called before MV_CC_StartGrabbing. Otherwise return MV_E_CALLORDER. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetResendTimeInterval(void* handle, unsigned int nMillisec); + +/********************************************************************//** + * @~chinese + * @brief 获取同一重传包多次请求之间的时间间隔 + * @param handle [IN] 设备句柄 + * @param pnMillisec [IN] 同一重传包多次请求之间的时间间隔 + * @return 成功,返回MV_OK;错误,返回错误码 + + * @~english + * @brief get time interval between same resend requests + * @param handle [IN] Device handle + * @param pnMillisec [OUT] the time interval between same resend requests + * @return Success, return MV_OK. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_GetResendTimeInterval(void* handle, unsigned int* pnMillisec); + + +/********************************************************************//** + * @~chinese + * @brief 设置传输模式,可以为单播模式、组播模式等 + * @param handle [IN] 设备句柄 + * @param pstTransmissionType [IN] 传输模式结构体 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks 通过该接口可以设置传输模式为单播、组播等模式,仅GigEVision相机支持。 + + * @~english + * @brief Set transmission type,Unicast or Multicast + * @param handle [IN] Device handle + * @param pstTransmissionType [IN] Struct of transmission type + * @return Success, return #MV_OK. Failure, return error code + * @remarks Call this API to set the transmission mode as single cast mode and multicast mode. And this API is only valid for GigEVision camera. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetTransmissionType(void* handle, MV_TRANSMISSION_TYPE * pstTransmissionType); + +/********************************************************************//** + * @~chinese + * @brief 发出动作命令 + * @param pstActionCmdInfo [IN] 动作命令信息 + * @param pstActionCmdResults [OUT] 动作命令返回信息列表 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks 仅GigEVision相机支持。 + + * @~english + * @brief Issue Action Command + * @param pstActionCmdInfo [IN] Action Command + * @param pstActionCmdResults [OUT] Action Command Result List + * @return Success, return #MV_OK. Failure, return error code + * @remarks This API is supported only by GigEVision camera. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_IssueActionCommand(IN MV_ACTION_CMD_INFO* pstActionCmdInfo, OUT MV_ACTION_CMD_RESULT_LIST* pstActionCmdResults); + +/************************************************************************/ +/* XML解析树的生成 */ +/* XML parse tree generation */ +/************************************************************************/ +/********************************************************************//** + * @~chinese + * @brief 获取相机属性树XML + * @param handle [IN] 设备句柄 + * @param pData [OUT] XML数据接收缓存 + * @param nDataSize [IN] 接收缓存大小 + * @param pnDataLen [OUT] 实际数据大小 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks 当pData为NULL或nDataSize比实际的xml文件小时,不拷贝数据,由pnDataLen返回xml文件大小;\n + 当pData为有效缓存地址,且缓存足够大时,拷贝完整数据保存在该缓存里面,并由pnDataLen返回xml文件实际大小。 + + * @~english + * @brief Get camera feature tree XML + * @param handle [IN] Device handle + * @param pData [OUT] XML data receiving buffer + * @param nDataSize [IN] Buffer size + * @param pnDataLen [OUT] Actual data length + * @return Success, return #MV_OK. 
Failure, return error code
+ * @remarks When pData is NULL or nDataSize is smaller than the actual XML file, the data is not copied, and pnDataLen returns the XML file size.\n
+ When pData is a valid cache address and the cache is large enough, copy the full data into the cache, and pnDataLen returns the actual size of the XML file.
+ ***********************************************************************/
+MV_CAMCTRL_API int __stdcall MV_XML_GetGenICamXML(IN void* handle, IN OUT unsigned char* pData, IN unsigned int nDataSize, OUT unsigned int* pnDataLen);
+
+/************************************************************************/
+/* 附加接口 */
+/* Additional interface */
+/************************************************************************/
+/********************************************************************//**
+ * @~chinese
+ * @brief 保存图片,支持Bmp和Jpeg.编码质量在50-99之间
+ * @param handle [IN] 设备句柄
+ * @param pSaveParam [IN][OUT] 保存图片参数结构体
+ * @return 成功,返回#MV_OK;错误,返回错误码
+ * @remarks 通过该接口可以将从设备采集到的原始图像数据转换成JPEG或者BMP等格式并存放在指定内存中,然后用户可以将转换之后的数据直接保存成图片文件。该接口调用无接口顺序要求,有图像源数据就可以进行转换,可以先调用MV_CC_GetOneFrameTimeout或者MV_CC_RegisterImageCallBackEx设置回调函数,获取一帧图像数据,然后再通过该接口转换格式。 \n
+ MV_CC_SaveImageEx2比MV_CC_SaveImageEx增加参数handle,为了保证与其他接口的统一。\n
+ 该接口仅在windows版本和Linux版本下支持。
+
+ * @~english
+ * @brief Save image, support Bmp and Jpeg. Encoding quality (50-99)
+ * @param handle [IN] Device handle
+ * @param pSaveParam [IN][OUT] Save image parameters structure
+ * @return Success, return #MV_OK. Failure, return error code
+ * @remarks Once there is image data, you can call this API to convert the data. \n
+ You can also call MV_CC_GetOneFrameTimeout or MV_CC_RegisterImageCallBackEx or MV_CC_GetImageBuffer to get one image frame and set the callback function, and then call this API to convert the format. \n
+ Comparing with the API MV_CC_SaveImageEx, this API added the parameter handle to ensure the unity with other API. 
+ + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SaveImageEx2(IN void* handle, MV_SAVE_IMAGE_PARAM_EX* pSaveParam); + + +/********************************************************************//** + * @~chinese + * @brief 图像旋转 + * @param handle [IN] 设备句柄 + * @param pstRotateParam [IN][OUT] 图像旋转参数结构体 + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 该接口只支持MONO8/RGB24/BGR24格式数据的90/180/270度旋转。 + + * @~english + * @brief Rotate Image + * @param handle [IN] Device handle + * @param pstRotateParam [IN][OUT] Rotate image parameter structure + * @return Success, return MV_OK. Failure, return error code + * @remarks This API only support 90/180/270 rotation of data in the MONO8/RGB24/BGR24 format. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_RotateImage(IN void* handle, IN OUT MV_CC_ROTATE_IMAGE_PARAM* pstRotateParam); + +/********************************************************************//** + * @~chinese + * @brief 图像翻转 + * @param handle [IN] 设备句柄 + * @param pstFlipParam [IN][OUT] 图像翻转参数结构体 + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 该接口只支持MONO8/RGB24/BGR24格式数据的垂直和水平翻转。 + + * @~english + * @brief Flip Image + * @param handle [IN] Device handle + * @param pstFlipParam [IN][OUT] Flip image parameter structure + * @return Success, return MV_OK. Failure, return error code + * @remarks This API only support vertical and horizontal reverse of data in the MONO8/RGB24/BGR24 format. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_FlipImage(IN void* handle, IN OUT MV_CC_FLIP_IMAGE_PARAM* pstFlipParam); + +/********************************************************************//** + * @~chinese + * @brief 像素格式转换 + * @param handle [IN] 设备句柄 + * @param pstCvtParam [IN][OUT] 像素格式转换参数结构体 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks 通过将接口可以将从设备采集到的原始图像数据转换成用户所需的像素格式并存放在指定内存中。该接口调用无接口顺序要求,有图像源数据就可以进行转换,可以先调用MV_CC_GetOneFrameTimeout或者MV_CC_RegisterImageCallBack设置回调函数,获取一帧图像数据,然后再通过该接口转换格式。如果相机当前采集图像是JPEG压缩的格式,则不支持调用该接口进行显示。 \n + 该接口仅在windows版本和Linux版本下支持。 + + * @~english + * @brief Pixel format conversion + * @param handle [IN] Device handle + * @param pstCvtParam [IN][OUT] Convert Pixel Type parameter structure + * @return Success, return #MV_OK. Failure, return error code + * @remarks This API is used to transform the collected original data to pixel format and save to specified memory. There is no order requirement to call this API, the transformation will execute when there is image data. First call MV_CC_GetOneFrameTimeout or MV_CC_RegisterImageCallBackEx to set callback function, and get a frame of image data, then call this API to transform the format. 
\n
+ ************************************************************************/
+MV_CAMCTRL_API int __stdcall MV_CC_ConvertPixelType(IN void* handle, IN OUT MV_CC_PIXEL_CONVERT_PARAM* pstCvtParam);
+
+/********************************************************************//**
+ * @~chinese
+ * @brief 插值算法类型设置
+ * @param handle [IN] 设备句柄
+ * @param BayerCvtQuality [IN] Bayer的插值方法 0-最近邻 1-双线性 2-Hamilton
+ * @return 成功,返回#MV_OK;错误,返回错误码
+ * @remarks 设置内部图像转换接口的贝尔插值质量参数,MV_CC_ConvertPixelType、MV_CC_SaveImageEx2接口内部使用的插值算法是该接口所设定的。
+
+ * @~english
+ * @brief Interpolation algorithm type setting
+ * @param handle [IN] Device handle
+ * @param BayerCvtQuality [IN] Bayer interpolation method 0-nearest neighbour 1-bilinear 2-Hamilton
+ * @return Success, return #MV_OK. Failure, return error code
+ * @remarks Set the Bayer interpolation quality parameters of the internal image conversion interface; the interpolation algorithm used inside the MV_CC_ConvertPixelType and MV_CC_SaveImageEx2 interfaces is the one set by this interface.
+ ************************************************************************/
+MV_CAMCTRL_API int __stdcall MV_CC_SetBayerCvtQuality(IN void* handle, IN unsigned int BayerCvtQuality);
+
+/********************************************************************//**
+ * @~chinese
+ * @brief 设置Bayer格式的Gamma信息
+ * @param handle [IN] 设备句柄
+ * @param pstGammaParam [IN] Gamma信息
+ * @return 成功,返回MV_OK;错误,返回错误码
+ * @remarks 设置该信息后,在调用MV_CC_ConvertPixelType、MV_CC_SaveImageEx2接口将Bayer8/10/12/16格式转成RGB24/48, RGBA32/64,BGR24/48,BGRA32/64时起效。
+
+ * @~english
+ * @brief Set Gamma param
+ * @param handle [IN] Device handle
+ * @param pstGammaParam [IN] Gamma param
+ * @return Success, return MV_OK. Failure, return error code
+ * @remarks After setting this param, it takes effect when calling the MV_CC_ConvertPixelType\MV_CC_SaveImageEx2 APIs to convert Bayer8/10/12/16 to RGB24/48, RGBA32/64,BGR24/48,BGRA32/64. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetBayerGammaParam(IN void* handle, IN MV_CC_GAMMA_PARAM* pstGammaParam); + + +/********************************************************************//** + * @~chinese + * @brief 无损解码 + * @param handle [IN] 设备句柄 + * @param pstDecodeParam [IN][OUT] 无损解码参数结构体 + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 将从相机中取到的无损压缩码流解码成裸数据,同时支持解析当前相机实时图像的水印信息(如果输入的无损码流不是当前相机或者不是实时取流的,则水印解析可能异常) + + * @~english + * @brief High Bandwidth Decode + * @param handle [IN] Device handle + * @param pstDecodeParam [IN][OUT] High Bandwidth Decode parameter structure + * @return Success, return MV_OK. Failure, return error code + * @remarks Decode the lossless compressed data from the camera into raw data + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_HB_Decode(IN void* handle, IN OUT MV_CC_HB_DECODE_PARAM* pstDecodeParam); + +/********************************************************************//** + * @~chinese + * @brief 保存相机属性 + * @param handle [IN] 设备句柄 + * @param pFileName [IN] 属性文件名 + * @return 成功,返回#MV_OK;错误,返回错误码 + + * @~english + * @brief Save camera feature + * @param handle [IN] Device handle + * @param pFileName [IN] File name + * @return Success, return #MV_OK. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_FeatureSave(IN void* handle, IN const char* pFileName); + +/********************************************************************//** + * @~chinese + * @brief 导入相机属性 + * @param handle [IN] 设备句柄 + * @param pFileName [IN] 属性文件名 + * @return 成功,返回#MV_OK;错误,返回错误码 + + * @~english + * @brief Load camera feature + * @param handle [IN] Device handle + * @param pFileName [IN] File name + * @return Success, return #MV_OK. 
Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_FeatureLoad(IN void* handle, IN const char* pFileName); + +/********************************************************************//** + * @~chinese + * @brief 从相机读取文件 + * @param handle [IN] 设备句柄 + * @param pstFileAccess [IN] 文件存取结构体 + * @return 成功,返回#MV_OK;错误,返回错误码 + + * @~english + * @brief Read the file from the camera + * @param handle [IN] Device handle + * @param pstFileAccess [IN] File access structure + * @return Success, return #MV_OK. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_FileAccessRead(IN void* handle, IN MV_CC_FILE_ACCESS * pstFileAccess); + +/********************************************************************//** + * @~chinese + * @brief 将文件写入相机 + * @param handle [IN] 设备句柄 + * @param pstFileAccess [IN] 文件存取结构体 + * @return 成功,返回#MV_OK ;错误,返回错误码 + + * @~english + * @brief Write the file to camera + * @param handle [IN] Device handle + * @param pstFileAccess [IN] File access structure + * @return Success, return #MV_OK. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_FileAccessWrite(IN void* handle, IN MV_CC_FILE_ACCESS * pstFileAccess); + +/********************************************************************//** + * @~chinese + * @brief 获取文件存取的进度 + * @param handle [IN] 设备句柄 + * @param pstFileAccessProgress [IN] 进度内容 + * @return 成功,返回#MV_OK ;错误,返回错误码 (当前文件存取的状态) + + * @~english + * @brief Get File Access Progress + * @param handle [IN] Device handle + * @param pstFileAccessProgress [IN] File access Progress + * @return Success, return #MV_OK. 
Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetFileAccessProgress(IN void* handle, OUT MV_CC_FILE_ACCESS_PROGRESS * pstFileAccessProgress); + +/********************************************************************//** + * @~chinese + * @brief 开始录像 + * @param handle [IN] 设备句柄 + * @param pstRecordParam [IN] 录像参数结构体 + * @return 成功,返回#MV_OK;错误,返回错误码 + + * @~english + * @brief Start Record + * @param handle [IN] Device handle + * @param pstRecordParam [IN] Record param structure + * @return Success, return #MV_OK. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_StartRecord(IN void* handle, IN MV_CC_RECORD_PARAM* pstRecordParam); + +/********************************************************************//** + * @~chinese + * @brief 输入录像数据 + * @param handle [IN] 设备句柄 + * @param pstInputFrameInfo [IN] 录像数据结构体 + * @return 成功,返回#MV_OK;错误,返回错误码 + + * @~english + * @brief Input RAW data to Record + * @param handle [IN] Device handle + * @param pstInputFrameInfo [IN] Record data structure + * @return Success, return #MV_OK. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_InputOneFrame(IN void* handle, IN MV_CC_INPUT_FRAME_INFO * pstInputFrameInfo); + +/********************************************************************//** + * @~chinese + * @brief 停止录像 + * @param handle [IN] 设备句柄 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks + + * @~english + * @brief Stop Record + * @param handle [IN] Device handle + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_StopRecord(IN void* handle); + + + +/************************************************************************/ +/* 不建议使用的接口 */ +/* Interfaces not recommended */ +/************************************************************************/ +/********************************************************************//** + * @~chinese + * @brief 获取图像基本信息 + * @param handle [IN] 相机句柄 + * @param pstInfo [IN][OUT] 返回给调用者有关相机图像基本信息结构体指针 + * @return 成功,返回 #MV_OK ,失败,返回错误码 + * @remarks 参考 CameraParams.h 中的 #MV_IMAGE_BASIC_INFO 定义 + + * @~english + * @brief Get basic information of image + * @param handle [IN] Handle + * @param pstInfo [IN][OUT] Structure pointer of image basic information + * @return Success, return #MV_OK. Failure, return error code + * @remarks Refer to the definition of #MV_IMAGE_BASIC_INFO in CameraParams.h + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetImageInfo(IN void* handle, IN OUT MV_IMAGE_BASIC_INFO* pstInfo); + +/********************************************************************//** + * @~chinese + * @brief 获取GenICam代理 + * @param handle [IN] 句柄地址 + * @return GenICam代理类指针 ,正常返回值非NULL;异常返回NULL + + * @~english + * @brief Get GenICam proxy + * @param handle [IN] Handle address + * @return GenICam proxy pointer, normal, return non-NULL; exception, return NULL + ************************************************************************/ +MV_CAMCTRL_API void* __stdcall MV_CC_GetTlProxy(IN void* handle); + +/********************************************************************//** + * @~chinese + * @brief 获取根节点 + * @param handle [IN] 句柄 + * @param pstNode [OUT] 根节点信息结构体 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks + + * @~english + * @brief Get root node + * @param handle [IN] Handle + * @param pstNode [OUT] Root node information structure + * 
@return Success, return #MV_OK. Failure, return error code + * @remarks + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_XML_GetRootNode(IN void* handle, IN OUT MV_XML_NODE_FEATURE* pstNode); + +/********************************************************************//** + * @~chinese + * @brief 从xml中获取指定节点的所有子节点,根节点为Root + * @param handle [IN] 句柄 + * @param pstNode [IN] 根节点信息结构体 + * @param pstNodesList [OUT] 节点列表结构体 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks + + * @~english + * @brief Get all children node of specific node from xml, root node is Root + * @param handle [IN] Handle + * @param pstNode [IN] Root node information structure + * @param pstNodesList [OUT] Node information structure + * @return Success, return #MV_OK. Failure, return error code + * @remarks + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_XML_GetChildren(IN void* handle, IN MV_XML_NODE_FEATURE* pstNode, IN OUT MV_XML_NODES_LIST* pstNodesList); + +/********************************************************************//** + * @~chinese + * @brief 获得当前节点的属性 + * @param handle [IN] 句柄 + * @param pstNode [IN] 根节点信息结构体 + * @param pstFeature [OUT] 当前节点属性结构体, + pstFeature 具体结构体内容参考 MV_XML_FEATURE_x + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks + + * @~english + * @brief Get current node feature + * @param handle [IN] Handle + * @param pstNode [IN] Root node information structure + * @param pstFeature [OUT] Current node feature structure + Details of pstFeature refer to MV_XML_FEATURE_x + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_XML_GetNodeFeature(IN void* handle, IN MV_XML_NODE_FEATURE* pstNode, IN OUT void* pstFeature); + +/********************************************************************//** + * @~chinese + * @brief 更新节点 + * @param handle [IN] 句柄 + * @param enType [IN] 节点类型 + * @param pstFeature [OUT] 当前节点属性结构体 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks + + * @~english + * @brief Update node + * @param handle [IN] Handle + * @param enType [IN] Node type + * @param pstFeature [OUT] Current node feature structure + * @return Success, return #MV_OK. Failure, return error code + * @remarks + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_XML_UpdateNodeFeature(IN void* handle, IN enum MV_XML_InterfaceType enType, IN void* pstFeature); + +// 有节点需要更新时的回调函数 +// 当调用MV_XML_UpdateNodeFeature接口更新节点属性时,注册的回调函数cbUpdate会在pstNodesList中返回与之相关联的节点 +/********************************************************************//** + * @~chinese + * @fn MV_XML_RegisterUpdateCallBack + * @brief 注册更新回调 + * @param handle [IN] 句柄 + * @param cbUpdate [IN] 回调函数指针 + * @param pUser [IN] 用户自定义变量 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks + + * @~english + * @brief Register update callback + * @param handle [IN] Handle + * @param cbUpdate [IN] Callback function pointer + * @param pUser [IN] User defined variable + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_XML_RegisterUpdateCallBack(IN void* handle, + IN void(__stdcall* cbUpdate)(enum MV_XML_InterfaceType enType, void* pstFeature, MV_XML_NODES_LIST* pstNodesList, void* pUser), + IN void* pUser); + + +/************************************************************************/ +/* 弃用的接口 */ +/* Abandoned interface */ +/************************************************************************/ +/********************************************************************//** + * @~chinese + * @brief 获取一帧图像,此函数为查询式获取,每次调用查询内部缓存有 + 无数据,有数据则范围数据,无数据返回错误码 + (该接口已弃用,建议改用 MV_CC_GetOneFrameTimeOut接口) + * @param handle [IN] 句柄 + * @param pData [OUT] 图像数据接收指针 + * @param nDataSize [IN] 接收缓存大小 + * @param pFrameInfo [OUT] 图像信息结构体 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks + + * @~english + * @brief Get one frame data, this function is using query to get data, + query whether the internal cache has data, return data if there has, return error code if no data + (This interface is abandoned, it is recommended to use the MV_CC_GetOneFrameTimeOut) + * @param handle [IN] Handle + * @param pData [OUT] Recevied image data pointer + * @param nDataSize [IN] Recevied buffer size + * @param pFrameInfo [OUT] Image information structure + * @return Success, return #MV_OK. 
Failure, return error code
+ * @remarks 
+ ***********************************************************************/
+MV_CAMCTRL_API int __stdcall MV_CC_GetOneFrame(IN void* handle, IN OUT unsigned char * pData , IN unsigned int nDataSize, IN OUT MV_FRAME_OUT_INFO* pFrameInfo);
+
+/********************************************************************//**
+ * @~chinese
+ * @brief  获取一帧chunk数据,此函数为查询式获取,每次调用查询内部
+           缓存有无数据,有数据则返回数据,无数据返回错误码
+           (该接口已弃用,建议改用 MV_CC_GetOneFrameTimeOut接口)
+ * @param  handle                 [IN]           句柄
+ * @param  pData                  [OUT]          图像数据接收指针
+ * @param  nDataSize              [IN]           接收缓存大小
+ * @param  pFrameInfo             [OUT]          图像信息结构体
+ * @return 成功,返回#MV_OK;错误,返回错误码
+ * @remarks 
+
+ * @~english
+ * @brief  Get one frame of chunk data, this function is using query to get data,
+           query whether the internal cache has data, return data if there has, return error code if no data
+           (This interface is abandoned, it is recommended to use the MV_CC_GetOneFrameTimeOut)
+ * @param  handle                 [IN]           Handle
+ * @param  pData                  [OUT]          Received image data pointer
+ * @param  nDataSize              [IN]           Received buffer size
+ * @param  pFrameInfo             [OUT]          Image information structure
+ * @return Success, return #MV_OK. 
Failure, return error code + * @remarks + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetOneFrameEx(IN void* handle, IN OUT unsigned char * pData , IN unsigned int nDataSize, IN OUT MV_FRAME_OUT_INFO_EX* pFrameInfo); + +/********************************************************************//** + * @~chinese + * @brief 注册图像数据回调(该接口已弃用,建议改用 MV_CC_RegisterImageCallBackEx接口) + * @param handle [IN] 句柄 + * @param cbOutput [IN] 回调函数指针 + * @param pUser [IN] 用户自定义变量 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks + + * @~english + * @brief Register image data callback (This interface is abandoned, it is recommended to use the MV_CC_RegisterImageCallBackEx) + * @param handle [IN] Handle + * @param cbOutput [IN] Callback function pointer + * @param pUser [IN] User defined variable + * @return Success, return #MV_OK. Failure, return error code + * @remarks + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_RegisterImageCallBack(void* handle, + void(__stdcall* cbOutput)(unsigned char * pData, MV_FRAME_OUT_INFO* pFrameInfo, void* pUser), + void* pUser); + +/********************************************************************//** + * @~chinese + * @brief 保存图片(该接口已弃用,建议改用 MV_CC_SaveImageEx2接口) + * @param pSaveParam [IN][OUT] 保存图片参数结构体 + - pData; // [IN] 输入数据缓存 + - nDataLen; // [IN] 输入数据大小 + - enPixelType; // [IN] 输入数据的像素格式 + - nWidth; // [IN] 图像宽 + - nHeight; // [IN] 图像高 + - pImageBuffer; // [OUT] 输出图片缓存 + - nImageLen; // [OUT] 输出图片大小 + - nBufferSize; // [IN] 提供的输出缓冲区大小 + - enImageType; // [IN] 输出图片格式 + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks + + * @~english + * @brief Save image (This interface is abandoned, it is recommended to use the MV_CC_SaveImageEx2) + * @param pSaveParam [IN][OUT] Save image parameters structure + - pData; // [IN] Input data buffer + - nDataLen; // [IN] Input data size + - enPixelType; // [IN] Input data pixel format + - nWidth; // 
[IN] Width + - nHeight; // [IN] Height + - pImageBuffer; // [OUT] Output image buffer + - nImageLen; // [OUT] Output image size + - nBufferSize; // [IN] Provided output buffer size + - enImageType; // [IN] Output image type + * @return Success, return #MV_OK. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SaveImage(IN OUT MV_SAVE_IMAGE_PARAM* pSaveParam); + +/********************************************************************//** + * @~chinese + * @brief 保存图片,支持Bmp和Jpeg.编码质量在50-99之前 (该接口已弃用,建议改用 MV_CC_SaveImageEx2接口) + * @param pSaveParam [IN][OUT] 保存图片参数结构体 + pData; // [IN] 输入数据缓存 + nDataLen; // [IN] 输入数据大小 + enPixelType; // [IN] 输入数据的像素格式 + nWidth; // [IN] 图像宽 + nHeight; // [IN] 图像高 + pImageBuffer; // [OUT] 输出图片缓存 + nImageLen; // [OUT] 输出图片大小 + nBufferSize; // [IN] 提供的输出缓冲区大小 + enImageType; // [IN] 输出图片格式 + nJpgQuality; // [IN] 编码质量, (50-99] + nReserved[4]; + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks + + * @~english + * @brief Save image, support Bmp and Jpeg. Encoding quality, (50-99] + * @param pSaveParam [IN][OUT] Save image parameters structure + pData; // [IN] Input data buffer + nDataLen; // [IN] Input data size + enPixelType; // [IN] Pixel format of input data + nWidth; // [IN] Image width + nHeight; // [IN] Image height + pImageBuffer; // [OUT] Output image buffer + nImageLen; // [OUT] Output image size + nBufferSize; // [IN] Output buffer size provided + enImageType; // [IN] Output image format + nJpgQuality; // [IN] Encoding quality, (50-99] + nReserved[4]; + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SaveImageEx(IN OUT MV_SAVE_IMAGE_PARAM_EX* pSaveParam); + +/********************************************************************//** + * @~chinese + * @brief 强制IP(该接口已弃用,建议改用 MV_GIGE_ForceIpEx接口) + * @param handle [IN] 设备句柄 + * @param nIP [IN] 设置的IP + * @return 见返回错误码 + * @remarks + + * @~english + * @brief Force IP (This interface is abandoned, it is recommended to use the MV_GIGE_ForceIpEx) + * @param handle [IN] Handle + * @param nIP [IN] IP to set + * @return Refer to error code + * @remarks +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_ForceIp(IN void* handle, unsigned int nIP); + +/********************************************************************//** + * @~chinese + * @brief 注册事件回调(该接口已弃用,建议改用 MV_CC_RegisterEventCallBackEx接口) + * @param handle [IN] 设备句柄 + * @param cbEvent [IN] 事件回调函数指针 + * @param pUser [IN] 用户自定义变量 + * @return 见返回错误码 + * @remarks + + * @~english + * @brief Register event callback (this interface has been deprecated and is recommended to be converted to the MV_CC_RegisterEventCallBackEx interface) + * @param handle [IN] Handle + * @param cbEvent [IN] event callback pointer + * @param pUser [IN] User defined value + * @return Refer to error code + * @remarks +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_RegisterEventCallBack(void* handle, + void(__stdcall* cbEvent)(unsigned int nExternalEventId, void* pUser), + void* pUser); + + +/************************************************************************/ +/* 相机参数获取和设置,此模块的所有接口,将逐步废弃,建议用万能接口代替 */ +/* Get and set camara parameters, all interfaces of this module will be replaced by general interface*/ +/************************************************************************/ 
+/********************************************************************//** + * @~chinese + * @brief 获取图像宽度 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机宽度的信息结构体指针 + * 返回的pstValue结构体的意义 + * - unsigned int nCurValue; // 代表相机当前的宽度值 + * - unsigned int nMax; // 表示相机允许的最大可设置的宽度值 + * - unsigned int nMin; // 表示相机允许的最小可设置的宽度值 + * - unsigned int nInc; // 表示相机设置的宽度增量必须是nInc的倍数,否则无效 + * @return 成功,返回#MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * @remarks 其他整型结构体参数的接口可参照此接口 + + * @~english + * @brief Get image width + * @param handle [IN] Camera Handle + * @param pstValue [IN][OUT] Returns the information structure pointer about the camera's width for the caller + * The meaning of returns pstValue structure + * - unsigned int nCurValue; // Represents the current width value of the camera + * - unsigned int nMax; // Indicates the maximum settable width value allowed by the camera + * - unsigned int nMin; // Indicates the minimum settable width value allowed by the camera + * - unsigned int nInc; // Indicates that the width increment set by the camera must be a multiple of nInc, otherwise it is invalid + * @return Success, return #MV_OK, and get the structure of the corresponding parameters. 
Failure, return error code + * @remarks Other Integer structure parameters interface can refer to this interface + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetWidth(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置图像宽度 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 想要设置的相机宽度的值,注意此宽度值必须是MV_CC_GetWidth接口返回的pstValue中的nInc的倍数才能设置成功 + * @return 成功,返回#MV_OK,并且相机宽度将会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set image width + * @param handle [IN] Camera Handle + * @param nValue [IN] To set the value of the camera width, note that the width value must be a multiple of nInc in the pstValue returned by the MV_CC_GetWidth interface + * @return Success, return #MV_OK, and the camera width will change to the corresponding value. Failure, return error code + * @remarks +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetWidth(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取图像高度 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机高度的信息结构体指针 + * @return 成功,返回#MV_OK,并将高度信息返回到结构体中,失败,返回错误码 + * @remarks 可参照接口#MV_CC_GetWidth + + * @~english + * @brief Get image height + * @param handle [IN] Camera handle + * @param pstValue [IN][OUT] Return pointer of information structure related to camera height to user + * @return Success, return #MV_OK, and return height information to the structure. 
Failure, return error code
+ * @remarks Refer to #MV_CC_GetWidth
+ ************************************************************************/
+MV_CAMCTRL_API int __stdcall MV_CC_GetHeight(IN void* handle, IN OUT MVCC_INTVALUE* pstValue);
+
+/********************************************************************//**
+ * @~chinese
+ * @brief  设置图像高度
+ * @param  handle                 [IN]           相机句柄
+ * @param  nValue                 [IN]           想要设置的相机高度的值,注意此高度值必须是MV_CC_GetHeight接口返回的pstValue中的nInc的倍数才能设置成功
+ * @return 成功,返回#MV_OK,并且相机高度将会更改为相应值,失败,返回错误码
+ * @remarks
+
+ * @~english
+ * @brief  Set image height
+ * @param  handle                 [IN]           Camera Handle
+ * @param  nValue                 [IN]           Camera height value to set, note that this value must be times of nInc of pstValue returned by MV_CC_GetHeight
+ * @return Success, return #MV_OK, and the camera height will change to the corresponding value. Failure, return error code
+ * @remarks
+ ************************************************************************/
+MV_CAMCTRL_API int __stdcall MV_CC_SetHeight(IN void* handle, IN const unsigned int nValue);
+
+/********************************************************************//**
+ * @~chinese
+ * @brief  获取图像X偏移
+ * @param  handle                 [IN]           相机句柄
+ * @param  pstValue               [IN][OUT]      返回给调用者有关相机X偏移的信息结构体指针
+ * @return 成功,返回#MV_OK,失败,返回错误码
+ * @remarks 可参照接口#MV_CC_GetWidth
+
+ * @~english
+ * @brief  Get image X offset
+ * @param  handle                 [IN]           Camera Handle
+ * @param  pstValue               [IN][OUT]      Return pointer of information structure related to camera X offset to user
+ * @return Success, return #MV_OK. 
Failure, return error code + * @remarks Refer to #MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetAOIoffsetX(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置图像AOI偏移 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 想要设置的相机AOI的值 + * @return 成功,返回#MV_OK,并且相机AOI偏移将会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set image X offset + * @param handle [IN] Camera Handle + * @param nValue [IN] Camera X offset value to set + * @return Success, return #MV_OK, and the camera X offset will change to the corresponding value. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetAOIoffsetX(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取图像Y偏移 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机Y偏移的信息结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 可参照接口#MV_CC_GetWidth + + * @~english + * @brief Get image Y offset + * @param handle [IN] Camera Handle + * @param pstValue [IN][OUT] Return pointer of information structure related to camera Y offset to user + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks Refer to #MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetAOIoffsetY(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置图像AOI偏移 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 想要设置的相机AOI的值 + * @return 成功,返回#MV_OK,并且相机AOI偏移将会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set image Y offset + * @param handle [IN] Camera Handle + * @param nValue [IN] Camera Y offset value to set + * @return Success, return #MV_OK, and the camera Y offset will change to the corresponding value. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetAOIoffsetY(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取曝光下限 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机曝光值下限结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 可参照接口#MV_CC_GetWidth + + * @~english + * @brief Get exposure lower limit + * @param handle [IN] Camera Handle + * @param pstValue [IN][OUT] Return pointer of information structure related to camera exposure lower to user + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks Refer to #MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetAutoExposureTimeLower(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置曝光值下限 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 想要设置的曝光值下限 + * @return 成功,返回#MV_OK,并且相机曝光下限将会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set exposure lower limit + * @param handle [IN] Camera Handle + * @param nValue [IN] Exposure lower to set + * @return Success, return #MV_OK, and the camera exposure time lower limit value will change to the corresponding value. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetAutoExposureTimeLower(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取曝光上限 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机曝光值上限结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 可参照接口#MV_CC_GetWidth + + * @~english + * @brief Get exposure upper limit + * @param handle [IN] Camera Handle + * @param pstValue [IN][OUT] Return pointer of information structure related to camera exposure upper to user + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks Refer to #MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetAutoExposureTimeUpper(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置曝光值上限 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 想要设置的曝光值上限 + * @return 成功,返回#MV_OK,并且相机曝光上限将会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set exposure upper limit + * @param handle [IN] Camera Handle + * @param nValue [IN] Exposure upper to set + * @return Success, return #MV_OK, and the camera exposure time upper limit value will change to the corresponding value. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetAutoExposureTimeUpper(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取亮度值 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机亮度结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 可参照接口#MV_CC_GetWidth + + * @~english + * @brief Get brightness + * @param handle [IN] Camera Handle + * @param pstValue [IN][OUT] Return pointer of information structure related to camera brightness to user + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks Refer to #MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetBrightness(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置亮度值 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 想要设置的亮度值 + * @return 成功,返回#MV_OK,并且相机亮度将会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set brightness + * @param handle [IN] Camera Handle + * @param nValue [IN] Brightness upper to set + * @return Success, return #MV_OK, and the camera brightness value will change to the corresponding value. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetBrightness(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取帧率 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机帧率的信息结构体指针 + * 返回的pstValue结构体的意义 + * - float fCurValue; // 表示相机当前的帧率 + * - float fMax; // 表示相机允许设置的最大帧率 + * - float fMin; // 表示相机允许设置的最小帧率 + * @return 成功,返回#MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * @remarks 其他浮点型结构体参数的接口可参照此接口 + + * @~english + * @brief Get Frame Rate + * @param handle [IN] Camera Handle + * @param pstValue [IN][OUT] Return pointer of information structure related to camera frame rate to user + * The meaning of returns pstValue structure + * - float fCurValue; // Indicates the current frame rate of the camera + * - float fMax; // Indicates the maximum frame rate allowed by the camera + * - float fMin; // Indicates the minimum frame rate allowed by the camera + * @return Success, return #MV_OK, and get the structure of the corresponding parameters. 
Failure, return error code + * @remarks Other interface of Float structure parameters can refer to this interface + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetFrameRate(IN void* handle, IN OUT MVCC_FLOATVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置帧率 + * @param handle [IN] 相机句柄 + * @param fValue [IN] 想要设置的相机帧率 + * @return 成功,返回#MV_OK,并且相机帧率将会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set frame rate + * @param handle [IN] Camera Handle + * @param fValue [IN] Camera frame rate to set + * @return Success, return #MV_OK, and camera frame rate will be changed to the corresponding value. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetFrameRate(IN void* handle, IN const float fValue); + +/********************************************************************//** + * @~chinese + * @brief 获取增益 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机增益的信息结构体指针 + * 返回的pstValue结构体的意义 + * - float fCurValue; // 表示相机当前的帧率 + * - float fMax; // 表示相机允许设置的最大帧率 + * - float fMin; // 表示相机允许设置的最小帧率 + * @return 成功,返回#MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * @remarks 其他浮点型结构体参数的接口可参照此接口 + + * @~english + * @brief Get Gain + * @param handle [IN] Camera Handle + * @param pstValue [IN][OUT] Return pointer of information structure related to gain to user + * @return Success, return #MV_OK, and get the structure of the corresponding parameters. Failure, return error code + * - float fCurValue; // Camera current gain + * - float fMax; // The maximum gain camera allowed + * - float fMin; // The minimum gain camera allowed + * @return Success, return #MV_OK, and get the structure of the corresponding parameters. 
Failure, return error code
+ * @remarks Other interface of Float structure parameters can refer to this interface
+ ************************************************************************/
+MV_CAMCTRL_API int __stdcall MV_CC_GetGain(IN void* handle, IN OUT MVCC_FLOATVALUE* pstValue);
+
+/********************************************************************//**
+ * @~chinese
+ * @brief  设置增益
+ * @param  handle                 [IN]           相机句柄
+ * @param  fValue                 [IN]           想要设置的相机增益值
+ * @return 成功,返回#MV_OK,并且相机增益将会更改为相应值,失败,返回错误码
+ * @remarks
+
+ * @~english
+ * @brief  Set Gain
+ * @param  handle                 [IN]           Camera Handle
+ * @param  fValue                 [IN]           Gain value to set
+ * @return Success, return #MV_OK, and the camera gain value will change to the corresponding value. Failure, return error code
+ * @remarks
+ ************************************************************************/
+MV_CAMCTRL_API int __stdcall MV_CC_SetGain(IN void* handle, IN const float fValue);
+
+/********************************************************************//**
+ * @~chinese
+ * @brief  获取曝光时间
+ * @param  handle                 [IN]           相机句柄
+ * @param  pstValue               [IN][OUT]      返回给调用者有关相机曝光时间的信息结构体指针
+ *                                               返回的pstValue结构体的意义
+ *                                               - float           fCurValue;      // 表示相机当前的曝光时间
+ *                                               - float           fMax;           // 表示相机允许设置的最大曝光时间
+ *                                               - float           fMin;           // 表示相机允许设置的最小曝光时间
+ * @return 成功,返回#MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码
+ * @remarks 其他浮点型结构体参数的接口可参照此接口
+
+ * @~english
+ * @brief  Get exposure time
+ * @param  handle                 [IN]           Camera Handle
+ * @param  pstValue               [IN][OUT]      Return pointer of information structure related to exposure time to user
+ * @return Success, return #MV_OK, and get the structure of the corresponding parameters. Failure, return error code
+ *                                               - float           fCurValue;      // Camera current exposure time
+ *                                               - float           fMax;           // The maximum exposure time camera allowed
+ *                                               - float           fMin;           // The minimum exposure time camera allowed
+ * @return Success, return #MV_OK, and get the structure of the corresponding parameters. 
Failure, return error code + * @remarks Other interface of Float structure parameters can refer to this interface + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetExposureTime(IN void* handle, IN OUT MVCC_FLOATVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置曝光时间 + * @param handle [IN] 相机句柄 + * @param fValue [IN] 想要设置的相机帧率 + * @return 成功,返回#MV_OK,并且相机帧率将会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set exposure time + * @param handle [IN] Camera Handle + * @param fValue [IN] Exposure time to set + * @return Success, return #MV_OK, and the camera exposure time value will change to the corresponding value. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetExposureTime(IN void* handle, IN const float fValue); + +/********************************************************************//** + * @~chinese + * @brief 获取像素格式 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者的有关像素格式的信息结构体指针 \n + * 返回的pstValue结构体的意义 + * - unsigned int nCurValue; // 相机当前的像素格式,是枚举类型,比如说PixelType_Gvsp_Mono8, 这里获得的是其整型值,具体数值参照PixelType.h的MvGvspPixelType枚举类型 + * - unsigned int nSupportedNum; // 相机支持的像素格式的个数 + * - unsigned int nSupportValue[MV_MAX_XML_SYMBOLIC_NUM]; // 相机所有支持的像素格式对应的整型值列表,后面要设置像素格式时,参数必须是这个数组中的一种,否则无效 + * @return 成功,返回#MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * @remarks 其他枚举类型参数接口可参照此接口,有关相应参数的枚举类型对应的整型值请查找PixelType.h 和 CameraParams.h中相应的定义 + + * @~english + * @brief Get Pixel Format + * @param handle [IN] Camera Handle + * @param pstValue [IN][OUT] Returns the information structure pointer about pixel format for the caller \n + * The meaning of returns pstValue structure + * - unsigned int nCurValue; // The current pixel format of the camera, is the enumeration type, such as #PixelType_Gvsp_Mono8, here is the integer value, the 
specific value please refer to MvGvspPixelType enumeration type in PixelType.h + * - unsigned int nSupportedNum; // Number of pixel formats supported by the camera + * - unsigned int nSupportValue[MV_MAX_XML_SYMBOLIC_NUM]; // The integer values list correspond to all supported pixel formats of the camera, followed by when set the pixel format, the parameter must be one of this list, otherwise invalid + * @return Success, return #MV_OK, and get the structure of the corresponding parameters. Failure, return error code + * @remarks Other interface of Enumeration structure parameters can refer to this interface, look for the corresponding definition in PixelType.h and CameraParams.h for the integer values of the enum type parameter + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetPixelFormat(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置像素格式 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 要设置的像素格式对应的整型值,调用此接口时可以直接填写枚举值,如#MV_CC_SetPixelFormat(m_handle, #PixelType_Gvsp_RGB8_Packed); + * @return 成功,返回#MV_OK,并且相机像素格式将会更改为相应值,失败,返回错误码 + * @remarks 要设置的枚举类型必须是Get接口返回的nSupportValue[MV_MAX_XML_SYMBOLIC_NUM]中的一种,否则会失败 + + * @~english + * @brief Set Pixel Format + * @param handle [IN] Camera Handle + * @param nValue [IN] The corresponding integer value for pixel format to be set, when calling this interface can be directly filled in enumeration values, such as MV_CC_SetPixelFormat(m_handle, PixelType_Gvsp_RGB8_Packed); + * @return Success, return #MV_OK, and the camera pixel format will change to the corresponding value. 
Failure, return error code
+ * @remarks Other interface of Enumeration structure parameters can refer to this interface, the enumeration type to be set must be one of the nSupportValue [#MV_MAX_XML_SYMBOLIC_NUM] returned by the Get interface, otherwise it will fail
+ ************************************************************************/
+MV_CAMCTRL_API int __stdcall MV_CC_SetPixelFormat(IN void* handle, IN const unsigned int nValue);
+
+/********************************************************************//**
+ * @~chinese
+ * @brief  获取采集模式
+ * @param  handle                 [IN]           相机句柄
+ * @param  pstValue               [IN][OUT]      返回给调用者的有关采集模式的信息结构体指针
+ * @return 成功,返回#MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码
+ * @remarks 可参照接口#MV_CC_GetPixelFormat,参考 CameraParams.h 中的#MV_CAM_ACQUISITION_MODE 定义
+
+ * @~english
+ * @brief  Get acquisition mode
+ * @param  handle                 [IN]           Handle
+ * @param  pstValue               [IN][OUT]      Structure pointer of acquisition mode
+ * @return Success, return #MV_OK, and get the structure of the corresponding parameters. Failure, return error code
+ * @remarks Refer to #MV_CC_GetPixelFormat and definition of #MV_CAM_ACQUISITION_MODE in CameraParams.h
+ ************************************************************************/
+MV_CAMCTRL_API int __stdcall MV_CC_GetAcquisitionMode(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue);
+
+/********************************************************************//**
+ * @~chinese
+ * @brief  设置采集模式
+ * @param  handle                 [IN]           相机句柄
+ * @param  nValue                 [IN]           要设置的采集模式对应的整型值
+ * @return 成功,返回#MV_OK,并且相机采集模式将会更改为相应值,失败,返回错误码
+ * @remarks
+
+ * @~english
+ * @brief  Set acquisition mode
+ * @param  handle                 [IN]           Handle
+ * @param  nValue                 [IN]           Integer value to set corresponding to acquisition mode
+ * @return Success, return #MV_OK, and the camera acquisition mode will change to the corresponding value. 
Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetAcquisitionMode(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取增益模式 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者的有关增益模式的信息结构体指针 + * @return 成功,返回#MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * @remarks 可参照接口#MV_CC_GetPixelFormat,参考 CameraParams.h 中的 MV_CAM_GAIN_MODE 定义 + + * @~english + * @brief Get gain mode + * @param handle [IN] Handle + * @param pstValue [IN][OUT] Structure pointer of gain mode + * @return Success, return #MV_OK, and get the structure of the corresponding parameters. Failure, return error code + * @remarks Refer to #MV_CC_GetPixelFormat and definition of #MV_CAM_GAIN_MODE in CameraParams.h + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetGainMode(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置增益模式 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 要设置的增益模式对应的整型值 + * @return 成功,返回#MV_OK,并且相机增益模式将会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set gain mode + * @param handle [IN] Handle + * @param nValue [IN] Integer value to set corresponding to gain mode + * @return Success, return #MV_OK, and the camera gain mode will change to the corresponding value. 
Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetGainMode(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取自动曝光模式 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者的有关自动曝光模式的信息结构体指针 + * @return 成功,返回#MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * @remarks 可参照接口#MV_CC_GetPixelFormat,参考 CameraParams.h 中的#MV_CAM_EXPOSURE_AUTO_MODE 定义 + + * @~english + * @brief Get auto exposure mode + * @param handle [IN] Handle + * @param pstValue [IN][OUT] Structure pointer of auto exposure mode + * @return Success, return #MV_OK, and get the structure of the corresponding parameters. Failure, return error code + * @remarks Refer to #MV_CC_GetPixelFormat and definition of #MV_CAM_EXPOSURE_AUTO_MODE in CameraParams.h + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetExposureAutoMode(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置自动曝光模式 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 要设置的自动曝光模式对应的整型值 + * @return 成功,返回#MV_OK,并且相机自动曝光模式将会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set auto exposure mode + * @param handle [IN] Handle + * @param nValue [IN] Integer value to set corresponding to auto exposure mode + * @return Success, return #MV_OK, and the camera auto exposure mode will change to the corresponding value. 
Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetExposureAutoMode(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取触发模式 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者的有关触发模式的信息结构体指针 + * @return 成功,返回#MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * @remarks 可参照接口#MV_CC_GetPixelFormat,参考 CameraParams.h 中的#MV_CAM_TRIGGER_MODE 定义 + + * @~english + * @brief Get trigger mode + * @param handle [IN] Handle + * @param pstValue [IN][OUT] Structure pointer of trigger mode + * @return Success, return #MV_OK, and get the structure of the corresponding parameters. Failure, return error code + * @remarks Refer to #MV_CC_GetPixelFormat and definition of #MV_CAM_TRIGGER_MODE in CameraParams.h + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetTriggerMode(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置触发模式 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 要设置的触发模式对应的整型值 + * @return 成功,返回#MV_OK,并且相机触发模式将会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set trigger mode + * @param handle [IN] Handle + * @param nValue [IN] Integer value to set corresponding to trigger mode + * @return Success, return #MV_OK, and the camera trigger mode will change to the corresponding value. 
Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetTriggerMode(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取触发延时 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机触发延时的信息结构体指针 + * @return 成功,返回#MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * @remarks 可参照接口MV_CC_GetFrameRate + + * @~english + * @brief Get trigger delay + * @param handle [IN] Handle + * @param pstValue [IN][OUT] Structure pointer of trigger delay + * @return Success, return #MV_OK, and get the structure of the corresponding parameters. Failure, return error code + * @remarks Refer to MV_CC_GetFrameRate + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetTriggerDelay(IN void* handle, IN OUT MVCC_FLOATVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置触发延时 + * @param handle [IN] 相机句柄 + * @param fValue [IN] 想要设置的相机触发延时 + * @return 成功,返回#MV_OK,并且相机触发延时将会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set trigger delay + * @param handle [IN] Handle + * @param fValue [IN] Trigger delay to set + * @return Success, return #MV_OK, and the camera trigger delay will change to the corresponding value. 
Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetTriggerDelay(IN void* handle, IN const float fValue); + +/********************************************************************//** + * @~chinese + * @brief 获取触发源 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者的有关触发源的信息结构体指针 + * @return 成功,返回#MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * @remarks 可参照接口MV_CC_GetPixelFormat,参考 CameraParams.h 中的 MV_CAM_TRIGGER_SOURCE 定义 + + * @~english + * @brief Get trigger source + * @param handle [IN] Handle + * @param pstValue [IN][OUT] Structure pointer of trigger source + * @return Success, return #MV_OK, and get the structure of the corresponding parameters. Failure, return error code + * @remarks Refer to MV_CC_GetPixelFormat and definition of MV_CAM_TRIGGER_SOURCE in CameraParams.h + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetTriggerSource(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置触发源 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 要设置的触发源对应的整型值 + * @return 成功,返回#MV_OK,并且相机触发源将会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set trigger source + * @param handle [IN] Handle + * @param nValue [IN] Integer value to set corresponding to trigger source + * @return Success, return #MV_OK, and the camera trigger source will change to the corresponding value. 
Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetTriggerSource(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 软触发一次(接口仅在已选择的触发源为软件触发时有效) + * @param handle [IN] 相机句柄 + * @return 成功,返回#MV_OK, 失败,返回错误码 + * @remarks + + * @~english + * @brief Execute software trigger once (this interface only valid when the trigger source is set to software) + * @param handle [IN] Handle + * @return Success, return #MV_OK. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_TriggerSoftwareExecute(IN void* handle); + +/********************************************************************//** + * @~chinese + * @brief 获取Gamma类型 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者的有关Gamma类型的信息结构体指针 + * @return 成功,返回#MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * @remarks 可参照接口MV_CC_GetPixelFormat,参考 CameraParams.h 中的 MV_CAM_GAMMA_SELECTOR 定义 + + * @~english + * @brief Get Gamma mode + * @param handle [IN] Handle + * @param pstValue [IN][OUT] Structure pointer of gamma mode + * @return Success, return #MV_OK, and get the structure of the corresponding parameters. 
Failure, return error code + * @remarks Refer to MV_CC_GetPixelFormat and definition of MV_CAM_GAMMA_SELECTOR in CameraParams.h + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetGammaSelector(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置Gamma类型 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 要设置的Gamma类型对应的整型值 + * @return 成功,返回#MV_OK,并且相机Gamma类型将会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set Gamma mode + * @param handle [IN] Handle + * @param nValue [IN] Integer value to set corresponding to gamma mode + * @return Success, return #MV_OK, and the camera gamma mode will change to the corresponding value. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetGammaSelector(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取Gamma值 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机Gamma值的信息结构体指针 + * @return 成功,返回#MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * @remarks 可参照接口MV_CC_GetExposureTime + + * @~english + * @brief Get Gamma value + * @param handle [IN] Handle + * @param pstValue [IN][OUT] Structure pointer of gamma value + * @return Success, return #MV_OK, and get the structure of the corresponding parameters. 
Failure, return error code + * @remarks Refer to MV_CC_GetFrameRate + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetGamma(IN void* handle, IN OUT MVCC_FLOATVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置Gamma值 + * @param handle [IN] 相机句柄 + * @param fValue [IN] 想要设置的相机Gamma值 + * @return 成功,返回#MV_OK,并且相机Gamma值将会更改为相应值,失败,返回错误码 + + * @~english + * @brief Set Gamma value + * @param handle [IN] Handle + * @param fValue [IN] Gamma value to set + * @return Success, return #MV_OK, and the camera gamma value will change to the corresponding value. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetGamma(IN void* handle, IN const float fValue); + +/********************************************************************//** + * @~chinese + * @brief 获取锐度 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机锐度结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 可参照接口MV_CC_GetWidth + + * @~english + * @brief Get sharpness + * @param handle [IN] Handle + * @param pstValue [IN][OUT] Structure pointer of sharpness + * @return Success, return #MV_OK. Failure, return error code + * @remarks Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetSharpness(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置锐度 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 想要设置的锐度 + * @return 成功,返回#MV_OK,并且相机锐度将会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set sharpness + * @param handle [IN] Handle + * @param nValue [IN] Sharpness to set + * @return Success, return #MV_OK, and the camera sharpness will change to the corresponding value. 
Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetSharpness(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取灰度 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机灰度结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 可参照接口MV_CC_GetWidth + + * @~english + * @brief Get Hue + * @param handle [IN] Handle + * @param pstValue [IN][OUT] Structure pointer of Hue + * @return Success, return #MV_OK. Failure, return error code + * @remarks Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetHue(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置灰度 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 想要设置的灰度 + * @return 成功,返回#MV_OK,并且相机灰度将会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set Hue + * @param handle [IN] Handle + * @param nValue [IN] Hue to set + * @return Success, return #MV_OK, and the camera Hue will change to the corresponding value. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetHue(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取饱和度 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机饱和度结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 可参照接口MV_CC_GetWidth + + * @~english + * @brief Get Saturation + * @param handle [IN] Handle + * @param pstValue [IN][OUT] Structure pointer of Saturation + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetSaturation(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置饱和度 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 想要设置的饱和度 + * @return 成功,返回#MV_OK,并且相机饱和度将会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set Saturation + * @param handle [IN] Handle + * @param nValue [IN] Saturation to set + * @return Success, return #MV_OK, and the camera Saturation will change to the corresponding value. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetSaturation(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取自动白平衡 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者的有关自动白平衡的信息结构体指针 + * @return 成功,返回#MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * @remarks 可参照接口MV_CC_GetPixelFormat,参考 CameraParams.h 中的 MV_CAM_BALANCEWHITE_AUTO 定义 + + + * @~english + * @brief Get Auto white balance + * @param handle [IN] Handle + * @param pstValue [IN][OUT] Structure pointer of auto white balance + * @return Success, return #MV_OK, and get the structure of the corresponding parameters. 
Failure, return error code + * @remarks Refer to MV_CC_GetPixelFormat and definition of MV_CAM_BALANCEWHITE_AUTO in CameraParams.h + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetBalanceWhiteAuto(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置自动白平衡 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 要设置的自动白平衡对应的整型值 + * @return 成功,返回#MV_OK,并且相机自动白平衡将会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set Auto white balance + * @param handle [IN] Handle + * @param nValue [IN] Integer value to set corresponding to auto white balance + * @return Success, return #MV_OK, and the camera auto white balance will change to the corresponding value. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetBalanceWhiteAuto(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取白平衡 红 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机白平衡 红结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 可参照接口MV_CC_GetWidth + + * @~english + * @brief Get white balance red + * @param handle [IN] Handle + * @param pstValue [IN][OUT] Structure pointer of white balance red + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetBalanceRatioRed(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置白平衡 红 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 想要设置的白平衡 红 + * @return 成功,返回#MV_OK,并且相机白平衡 红将会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set white balance red + * @param handle [IN] Handle + * @param nValue [IN] White balance red to set + * @return Success, return #MV_OK, and the camera white balance red will change to the corresponding value. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetBalanceRatioRed(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取白平衡 绿 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机白平衡 绿结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 可参照接口MV_CC_GetWidth + + * @~english + * @brief Get white balance green + * @param handle [IN] Handle + * @param pstValue [IN][OUT] Structure pointer of white balance green + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetBalanceRatioGreen(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置白平衡 绿 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 想要设置的白平衡 绿 + * @return 成功,返回#MV_OK,并且相机白平衡 绿将会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set white balance green + * @param handle [IN] Handle + * @param nValue [IN] White balance green to set + * @return Success, return #MV_OK, and the camera white balance green will change to the corresponding value. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetBalanceRatioGreen(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取白平衡 蓝 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机白平衡 蓝结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 可参照接口MV_CC_GetWidth + + * @~english + * @brief Get white balance blue + * @param handle [IN] Handle + * @param pstValue [IN][OUT] Structure pointer of white balance blue + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetBalanceRatioBlue(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置白平衡 蓝 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 想要设置的白平衡 蓝 + * @return 成功,返回#MV_OK,并且相机白平衡 蓝将会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set white balance blue + * @param handle [IN] Handle + * @param nValue [IN] White balance blue to set + * @return Success, return #MV_OK, and the camera white balance blue will change to the corresponding value. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetBalanceRatioBlue(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取水印信息内包含的信息类型 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机水印信息内包含的信息类型结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 可参照接口MV_CC_GetWidth + + * @~english + * @brief Get information type included by frame stamp + * @param handle [IN] Handle + * @param pstValue [IN][OUT] Structure pointer of information type included by frame stamp + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetFrameSpecInfoAbility(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置水印信息内包含的信息类型 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 想要设置的水印信息内包含的信息类型 + * @return 成功,返回#MV_OK,并且相机水印信息内包含的信息类型会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set information type included by frame stamp + * @param handle [IN] Handle + * @param nValue [IN] Information type included by frame stamp to set + * @return Success, return #MV_OK, and the camera information type included by frame stamp will change to the corresponding value. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetFrameSpecInfoAbility(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取设备自定义名字 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机名字结构体指针 + * @return 成功,返回#MV_OK,并且获取到相机的自定义名字,失败,返回错误码 + * @remarks + + * @~english + * @brief Get device user defined name + * @param handle [IN] Handle + * @param pstValue [IN][OUT] Structure pointer of device name + * @return Success, return #MV_OK, and get the camera user defined name. 
Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetDeviceUserID(IN void* handle, IN OUT MVCC_STRINGVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置设备自定义名字 + * @param handle [IN] 相机句柄 + * @param chValue [IN] 设备名字 + * @return 成功,返回#MV_OK,并且设置设备自定义名字,失败,返回错误码 + * @remarks + + * @~english + * @brief Set device user defined name + * @param handle [IN] Handle + * @param chValue [IN] Device name + * @return Success, return #MV_OK, and set the camera user defined name. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetDeviceUserID(IN void* handle, IN const char* chValue); + +/********************************************************************//** + * @~chinese + * @brief 获取一次触发的帧数 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机一次触发的帧数结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 可参照接口MV_CC_GetWidth + + * @~english + * @brief Get frame number trigger by once + * @param handle [IN] Handle + * @param pstValue [IN][OUT] Structure pointer of frame number trigger by once + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetBurstFrameCount(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置一次触发的帧数 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 想要设置的一次触发的帧数 + * @return 成功,返回#MV_OK,并且相机一次触发的帧数会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set frame number trigger by once + * @param handle [IN] Handle + * @param nValue [IN] Frame number trigger by once to set + * @return Success, return #MV_OK, and the camera frame number trigger by once will change to the corresponding value. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetBurstFrameCount(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取行频 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机行频结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 可参照接口MV_CC_GetWidth + + * @~english + * @brief Get line rate + * @param handle [IN] Handle + * @param pstValue [IN][OUT] Structure pointer of line rate + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetAcquisitionLineRate(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置行频 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 想要设置的行频 + * @return 成功,返回#MV_OK,并且相机行频会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set line rate + * @param handle [IN] Handle + * @param nValue [IN] Line rate to set + * @return Success, return #MV_OK, and the camera line rate will change to the corresponding value. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetAcquisitionLineRate(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取心跳信息 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机心跳信息结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 可参照接口MV_CC_GetWidth + + * @~english + * @brief Get heartbeat information + * @param handle [IN] Handle + * @param pstValue [IN][OUT] Structure pointer of heartbeat information + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetHeartBeatTimeout(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置心跳信息 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 想要设置的心跳信息 + * @return 成功,返回#MV_OK,并且相机心跳信息会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set heartbeat information + * @param handle [IN] Handle + * @param nValue [IN] Heartbeat information to set + * @return Success, return #MV_OK, and the camera heartbeat information will change to the corresponding value. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetHeartBeatTimeout(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取网络包大小 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机网络包大小结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 可参照接口MV_CC_GetWidth + + * @~english + * @brief Get network packet size + * @param handle [IN] Handle + * @param pstValue [IN][OUT] Structure pointer of network packet size + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_GetGevSCPSPacketSize(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置网络包大小 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 想要设置的网络包大小 + * @return 成功,返回#MV_OK,并且相机网络包大小会更改为相应值,失败,返回错误码 + + * @~english + * @brief Set network packet size + * @param handle [IN] Handle + * @param nValue [IN] Packet size to set + * @return Success, return #MV_OK, and change packet size to setting value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetGevSCPSPacketSize(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取网络包发送间隔 + * @param handle [IN] 相机句柄 + * @param pstValue [IN][OUT] 返回给调用者有关相机网络包发送间隔结构体指针 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 可参照接口MV_CC_GetWidth + + * @~english + * @brief Get network packet sending delay + * @param handle [IN] Handle + * @param pstValue [IN][OUT] Structure pointer of network packet sending delay + * @return Success, return #MV_OK. 
Failure, return error code + * @remarks Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_GetGevSCPD(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/********************************************************************//** + * @~chinese + * @brief 设置网络包发送间隔 + * @param handle [IN] 相机句柄 + * @param nValue [IN] 想要设置的网络包发送间隔 + * @return 成功,返回#MV_OK,并且相机网络包发送间隔会更改为相应值,失败,返回错误码 + + * @~english + * @brief Set network packet sending delay + * @param handle [IN] Handle + * @param nValue [IN] Packet delay to set + * @return Success, return #MV_OK, and change packet delay to setting value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetGevSCPD(IN void* handle, IN const unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取接收端IP地址,0xa9fe0102 表示 169.254.1.2 + * @param handle [IN] 相机句柄 + * @param pnIP [IN][OUT] 返回给调用者接收端IP地址 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks + + * @~english + * @brief Get receiver IP address, 0xa9fe0102 indicates 169.254.1.2 + * @param handle [IN] Handle + * @param pnIP [IN][OUT] Receiver IP address + * @return Success, return #MV_OK. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_GetGevSCDA(IN void* handle, unsigned int* pnIP); + +/********************************************************************//** + * @~chinese + * @brief 设置接收端IP地址 + * @param handle [IN] 相机句柄 + * @param nIP [IN] 想要设置的接收端IP地址 + * @return 成功,返回#MV_OK,并且相机接收端IP地址会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set receiver IP address + * @param handle [IN] Handle + * @param nIP [IN] Receiver IP address to set + * @return Success, return #MV_OK, and change receiver IP address to setting value. 
Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetGevSCDA(IN void* handle, unsigned int nIP); + +/********************************************************************//** + * @~chinese + * @brief 获取发送端的端口号 + * @param handle [IN] 相机句柄 + * @param pnPort [IN][OUT] 返回给调用者发送端的端口号 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks + + * @~english + * @brief Get transmitter port number + * @param handle [IN] Handle + * @param pnPort [IN][OUT] Transmitter port number + * @return Success, return #MV_OK. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_GetGevSCSP(IN void* handle, unsigned int* pnPort); + +/********************************************************************//** + * @~chinese + * @brief 设置发送端的端口号 + * @param handle [IN] 相机句柄 + * @param nPort [IN] 想要设置的发送端的端口号 + * @return 成功,返回#MV_OK,并且相机发送端的端口号会更改为相应值,失败,返回错误码 + * @remarks + + * @~english + * @brief Set transmitter port number + * @param handle [IN] Handle + * @param nPort [IN] Transmitter port number to set + * @return Success, return #MV_OK, and change transmitter port number to setting value. 
Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetGevSCSP(IN void* handle, unsigned int nPort); + + +/************************************************************************/ +/* CameraLink 设备独有的接口,Linux 平台不支持 */ +/* APIs only support CameraLink device, not supported on Linux */ +/************************************************************************/ +/********************************************************************//** + * @~chinese + * @brief 设置设备波特率 + * @param handle [IN] 设备句柄 + * @param nBaudrate [IN] 设置的波特率值,数值参考CameraParams.h中宏定义,如#define MV_CAML_BAUDRATE_9600 0x00000001 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 该接口接口支持在设备未连接时调用。 + + * @~english + * @brief Set device bauderate using one of the CL_BAUDRATE_XXXX value + * @param handle [IN] Device handle + * @param nBaudrate [IN] baud rate to set. Refer to the CameraParams.h for parameter definitions, for example, #define MV_CAML_BAUDRATE_9600 0x00000001 + * @return Success, return #MV_OK. Failure, return error code + * @remarks This API is supported only by CameraLink device.\n + This API supports calls when devices are not connected. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CAML_SetDeviceBauderate(IN void* handle, unsigned int nBaudrate); + +/********************************************************************//** + * @~chinese + * @brief 获取设备波特率 + * @param handle [IN] 设备句柄 + * @param pnCurrentBaudrate [OUT] 波特率信息指针,数值参考CameraParams.h中宏定义,如#define MV_CAML_BAUDRATE_9600 0x00000001 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 该接口接口支持在设备未连接时调用。 + + * @~english + * @brief Returns the current device bauderate, using one of the CL_BAUDRATE_XXXX value + * @param handle [IN] Device handle + * @param pnCurrentBaudrate [OUT] Return pointer of baud rate to user. 
Refer to the CameraParams.h for parameter definitions, for example, #define MV_CAML_BAUDRATE_9600 0x00000001 + * @return Success, return #MV_OK. Failure, return error code + * @remarks This API is supported only by CameraLink device.\n + This API supports calls when devices are not connected. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CAML_GetDeviceBauderate(IN void* handle, unsigned int* pnCurrentBaudrate); + +/********************************************************************//** + * @~chinese + * @brief 获取设备与主机间连接支持的波特率 + * @param handle [IN] 设备句柄 + * @param pnBaudrateAblity [OUT] 支持的波特率信息的指针。所支持波特率的或运算结果,单个数值参考CameraParams.h中宏定义,如#define MV_CAML_BAUDRATE_9600 0x00000001 + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks 该接口接口支持在设备未连接时调用。 + + * @~english + * @brief Returns supported bauderates of the combined device and host interface + * @param handle [IN] Device handle + * @param pnBaudrateAblity [OUT] Return pointer of the supported bauderates to user. 'OR' operation results of the supported bauderates. Refer to the CameraParams.h for single value definitions, for example, #define MV_CAML_BAUDRATE_9600 0x00000001 + * @return Success, return #MV_OK. Failure, return error code + * @remarks This API is supported only by CameraLink device.\n + This API supports calls when devices are not connected. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CAML_GetSupportBauderates(IN void* handle, unsigned int* pnBaudrateAblity); + +/********************************************************************//** + * @~chinese + * @brief 设置串口操作等待时长 + * @param handle [IN] 设备句柄 + * @param nMillisec [IN] 串口操作的等待时长, ms + * @return 成功,返回#MV_OK,失败,返回错误码 + * @remarks + + * @~english + * @brief Sets the timeout for operations on the serial port + * @param handle [IN] Device handle + * @param nMillisec [IN] Timeout in [ms] for operations on the serial port. 
+ * @return Success, return #MV_OK. Failure, return error code + * @return Success, return MV_OK. Failure, return error code +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CAML_SetGenCPTimeOut(IN void* handle, unsigned int nMillisec); + +/************************************************************************/ +/* U3V 设备独有的接口 */ +/************************************************************************/ +/********************************************************************//** + * @~chinese + * @brief 设置U3V的传输包大小 + * @param handle [IN] 设备句柄 + * @param nTransferSize [IN] 传输的包大小, Byte,默认为1M,rang:>=0x10000 + * @return 成功,返回MV_OK,失败,返回错误码 + * @remarks 增加传输包大小可以适当降低取流时的CPU占用率。但不同的PC和不同USB扩展卡存在不同的兼容性,如果该参数设置过大可能会出现取不到图像的风险。 + + * @~english + * @brief Set transfer size of U3V device + * @param handle [IN] Device handle + * @param nTransferSize [IN] Transfer size,Byte,default:1M,rang:>=0x10000 + * @return Success, return MV_OK. Failure, return error code + * @remarks Increasing the transmission packet size can reduce the CPU utilization at the time of fetching. However, different PCS and different USB extension CARDS have different compatibility, and if this parameter is set too large, there may be the risk of not getting the image. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_USB_SetTransferSize(IN void* handle, unsigned int nTransferSize); + +/********************************************************************//** + * @~chinese + * @brief 获取U3V的传输包大小 + * @param handle [IN] 设备句柄 + * @param pnTransferSize [OUT] 传输的包大小指针, Byte + * @return 成功,返回MV_OK,失败,返回错误码 + * @remarks 该接口用于获取当前的U3V传输包大小,默认1M。 + + * @~english + * @brief Get transfer size of U3V device + * @param handle [IN] Device handle + * @param pnTransferSize [OUT] Transfer size,Byte + * @return Success, return MV_OK. 
Failure, return error code + * @remarks This interface is used to get the current U3V transfer packet size, default 1M. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_USB_GetTransferSize(IN void* handle, unsigned int* pnTransferSize); + +/********************************************************************//** + * @~chinese + * @brief 设置U3V的传输通道个数 + * @param handle [IN] 设备句柄 + * @param nTransferWays [IN] 传输通道个数,范围:1-10 + * @return 成功,返回MV_OK,失败,返回错误码 + * @remarks 用户可以根据PC的性能、设备出图帧率、图像大小和内存使用率等因素对该参数进行调节。但不同的PC和不同的USB扩展卡存在不同的兼容性。 + + * @~english + * @brief Set transfer ways of U3V device + * @param handle [IN] Device handle + * @param nTransferWays [IN] Transfer ways,rang:1-10 + * @return Success, return MV_OK. Failure, return error code + * @remarks Users can adjust this parameter according to PC performance, camera image frame rate, image size, memory utilization and other factors. But different PCS and different USB expansion CARDS have different compatibility. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_USB_SetTransferWays(IN void* handle, unsigned int nTransferWays); + +/********************************************************************//** + * @~chinese + * @brief 获取U3V的传输通道个数 + * @param handle [IN] 设备句柄 + * @param pnTransferWays [OUT] 传输通道个数指针 + * @return 成功,返回MV_OK,失败,返回错误码 + * @remarks 该接口用于获取当前的U3V异步取流节点个数,2000W设备的MONO8默认为3个,YUV为默认2个,RGB为默认1个,其它情况默认8个节点。 + + * @~english + * @brief Get transfer ways of U3V device + * @param handle [IN] Device handle + * @param pnTransferWays [OUT] Transfer ways + * @return Success, return MV_OK. Failure, return error code + * @remarks This interface is used to get the current number of U3V asynchronous feed nodes. For 2000W camera, MONO8 defaults to 3, YUV defaults to 2, RGB defaults to 1, and other cases default to 8 nodes. 
+************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_USB_GetTransferWays(IN void* handle, unsigned int* pnTransferWays); + +#ifdef __cplusplus +} +#endif + +#endif //_MV_CAMERA_CTRL_H_ diff --git a/src/ros2-hik-camera/hikSDK/include/MvErrorDefine.h b/src/ros2-hik-camera/hikSDK/include/MvErrorDefine.h new file mode 100644 index 0000000..a71cd79 --- /dev/null +++ b/src/ros2-hik-camera/hikSDK/include/MvErrorDefine.h @@ -0,0 +1,104 @@ + +#ifndef _MV_ERROR_DEFINE_H_ +#define _MV_ERROR_DEFINE_H_ + +/********************************************************************/ +/// \~chinese +/// \name 正确码定义 +/// @{ +/// \~english +/// \name Definition of correct code +/// @{ +#define MV_OK 0x00000000 ///< \~chinese 成功,无错误 \~english Successed, no error +/// @} + +/********************************************************************/ +/// \~chinese +/// \name 通用错误码定义:范围0x80000000-0x800000FF +/// @{ +/// \~english +/// \name Definition of General error code +/// @{ +#define MV_E_HANDLE 0x80000000 ///< \~chinese 错误或无效的句柄 \~english Error or invalid handle +#define MV_E_SUPPORT 0x80000001 ///< \~chinese 不支持的功能 \~english Not supported function +#define MV_E_BUFOVER 0x80000002 ///< \~chinese 缓存已满 \~english Buffer overflow +#define MV_E_CALLORDER 0x80000003 ///< \~chinese 函数调用顺序错误 \~english Function calling order error +#define MV_E_PARAMETER 0x80000004 ///< \~chinese 错误的参数 \~english Incorrect parameter +#define MV_E_RESOURCE 0x80000006 ///< \~chinese 资源申请失败 \~english Applying resource failed +#define MV_E_NODATA 0x80000007 ///< \~chinese 无数据 \~english No data +#define MV_E_PRECONDITION 0x80000008 ///< \~chinese 前置条件有误,或运行环境已发生变化 \~english Precondition error, or running environment changed +#define MV_E_VERSION 0x80000009 ///< \~chinese 版本不匹配 \~english Version mismatches +#define MV_E_NOENOUGH_BUF 0x8000000A ///< \~chinese 传入的内存空间不足 \~english Insufficient memory +#define MV_E_ABNORMAL_IMAGE 0x8000000B ///< \~chinese 异常图像,可能是丢包导致图像不完整 
\~english Abnormal image, maybe incomplete image because of lost packet +#define MV_E_LOAD_LIBRARY 0x8000000C ///< \~chinese 动态导入DLL失败 \~english Load library failed +#define MV_E_NOOUTBUF 0x8000000D ///< \~chinese 没有可输出的缓存 \~english No Avaliable Buffer +#define MV_E_UNKNOW 0x800000FF ///< \~chinese 未知的错误 \~english Unknown error +/// @} + +/********************************************************************/ +/// \~chinese +/// \name GenICam系列错误:范围0x80000100-0x800001FF +/// @{ +/// \~english +/// \name GenICam Series Error Codes: Range from 0x80000100 to 0x800001FF +/// @{ +#define MV_E_GC_GENERIC 0x80000100 ///< \~chinese 通用错误 \~english General error +#define MV_E_GC_ARGUMENT 0x80000101 ///< \~chinese 参数非法 \~english Illegal parameters +#define MV_E_GC_RANGE 0x80000102 ///< \~chinese 值超出范围 \~english The value is out of range +#define MV_E_GC_PROPERTY 0x80000103 ///< \~chinese 属性 \~english Property +#define MV_E_GC_RUNTIME 0x80000104 ///< \~chinese 运行环境有问题 \~english Running environment error +#define MV_E_GC_LOGICAL 0x80000105 ///< \~chinese 逻辑错误 \~english Logical error +#define MV_E_GC_ACCESS 0x80000106 ///< \~chinese 节点访问条件有误 \~english Node accessing condition error +#define MV_E_GC_TIMEOUT 0x80000107 ///< \~chinese 超时 \~english Timeout +#define MV_E_GC_DYNAMICCAST 0x80000108 ///< \~chinese 转换异常 \~english Transformation exception +#define MV_E_GC_UNKNOW 0x800001FF ///< \~chinese GenICam未知错误 \~english GenICam unknown error +/// @} + +/********************************************************************/ +/// \~chinese +/// \name GigE_STATUS对应的错误码:范围0x80000200-0x800002FF +/// @{ +/// \~english +/// \name GigE_STATUS Error Codes: Range from 0x80000200 to 0x800002FF +/// @{ +#define MV_E_NOT_IMPLEMENTED 0x80000200 ///< \~chinese 命令不被设备支持 \~english The command is not supported by device +#define MV_E_INVALID_ADDRESS 0x80000201 ///< \~chinese 访问的目标地址不存在 \~english The target address being accessed does not exist +#define MV_E_WRITE_PROTECT 0x80000202 ///< \~chinese 
目标地址不可写 \~english The target address is not writable +#define MV_E_ACCESS_DENIED 0x80000203 ///< \~chinese 设备无访问权限 \~english No permission +#define MV_E_BUSY 0x80000204 ///< \~chinese 设备忙,或网络断开 \~english Device is busy, or network disconnected +#define MV_E_PACKET 0x80000205 ///< \~chinese 网络包数据错误 \~english Network data packet error +#define MV_E_NETER 0x80000206 ///< \~chinese 网络相关错误 \~english Network error +#define MV_E_IP_CONFLICT 0x80000221 ///< \~chinese 设备IP冲突 \~english Device IP conflict +/// @} + +/********************************************************************/ +/// \~chinese +/// \name USB_STATUS对应的错误码:范围0x80000300-0x800003FF +/// @{ +/// \~english +/// \name USB_STATUS Error Codes: Range from 0x80000300 to 0x800003FF +/// @{ +#define MV_E_USB_READ 0x80000300 ///< \~chinese 读usb出错 \~english Reading USB error +#define MV_E_USB_WRITE 0x80000301 ///< \~chinese 写usb出错 \~english Writing USB error +#define MV_E_USB_DEVICE 0x80000302 ///< \~chinese 设备异常 \~english Device exception +#define MV_E_USB_GENICAM 0x80000303 ///< \~chinese GenICam相关错误 \~english GenICam error +#define MV_E_USB_BANDWIDTH 0x80000304 ///< \~chinese 带宽不足 该错误码新增 \~english Insufficient bandwidth, this error code is newly added +#define MV_E_USB_DRIVER 0x80000305 ///< \~chinese 驱动不匹配或者未装驱动 \~english Driver mismatch or unmounted drive +#define MV_E_USB_UNKNOW 0x800003FF ///< \~chinese USB未知的错误 \~english USB unknown error +/// @} + +/********************************************************************/ +/// \~chinese +/// \name 升级时对应的错误码:范围0x80000400-0x800004FF +/// @{ +/// \~english +/// \name Upgrade Error Codes: Range from 0x80000400 to 0x800004FF +/// @{ +#define MV_E_UPG_FILE_MISMATCH 0x80000400 ///< \~chinese 升级固件不匹配 \~english Firmware mismatches +#define MV_E_UPG_LANGUSGE_MISMATCH 0x80000401 ///< \~chinese 升级固件语言不匹配 \~english Firmware language mismatches +#define MV_E_UPG_CONFLICT 0x80000402 ///< \~chinese 升级冲突(设备已经在升级了再次请求升级即返回此错误) \~english Upgrading conflicted (repeated upgrading 
requests during device upgrade) +#define MV_E_UPG_INNER_ERR 0x80000403 ///< \~chinese 升级时相机内部出现错误 \~english Camera internal error during upgrade +#define MV_E_UPG_UNKNOW 0x800004FF ///< \~chinese 升级时未知错误 \~english Unknown error during upgrade +/// @} + +#endif //_MV_ERROR_DEFINE_H_ diff --git a/src/ros2-hik-camera/hikSDK/include/MvISPErrorDefine.h b/src/ros2-hik-camera/hikSDK/include/MvISPErrorDefine.h new file mode 100644 index 0000000..8f4d096 --- /dev/null +++ b/src/ros2-hik-camera/hikSDK/include/MvISPErrorDefine.h @@ -0,0 +1,93 @@ + +#ifndef _MV_ISP_ERROR_DEFINE_H_ +#define _MV_ISP_ERROR_DEFINE_H_ + +/************************************************************************ +* ISP㷨Ĵ +************************************************************************/ +// ͨ +#define MV_ALG_OK 0x00000000 //ȷ +#define MV_ALG_ERR 0x10000000 //ȷʹ + +// +#define MV_ALG_E_ABILITY_ARG 0x10000001 //дЧ + +// ڴ +#define MV_ALG_E_MEM_NULL 0x10000002 //ڴַΪ +#define MV_ALG_E_MEM_ALIGN 0x10000003 //ڴ벻Ҫ +#define MV_ALG_E_MEM_LACK 0x10000004 //ڴռС +#define MV_ALG_E_MEM_SIZE_ALIGN 0x10000005 //ڴռСҪ +#define MV_ALG_E_MEM_ADDR_ALIGN 0x10000006 //ڴַҪ + +// ͼ +#define MV_ALG_E_IMG_FORMAT 0x10000007 //ͼʽȷ߲֧ +#define MV_ALG_E_IMG_SIZE 0x10000008 //ͼ߲ȷ߳Χ +#define MV_ALG_E_IMG_STEP 0x10000009 //ͼstepƥ +#define MV_ALG_E_IMG_DATA_NULL 0x1000000A //ͼݴ洢ַΪ + +// +#define MV_ALG_E_CFG_TYPE 0x1000000B //û߻ȡͲȷ +#define MV_ALG_E_CFG_SIZE 0x1000000C //û߻ȡ롢ṹСȷ +#define MV_ALG_E_PRC_TYPE 0x1000000D //Ͳȷ +#define MV_ALG_E_PRC_SIZE 0x1000000E //ʱ롢Сȷ +#define MV_ALG_E_FUNC_TYPE 0x1000000F //ӴͲȷ +#define MV_ALG_E_FUNC_SIZE 0x10000010 //Ӵʱ롢Сȷ + +// в +#define MV_ALG_E_PARAM_INDEX 0x10000011 //indexȷ +#define MV_ALG_E_PARAM_VALUE 0x10000012 //valueȷ߳Χ +#define MV_ALG_E_PARAM_NUM 0x10000013 //param_numȷ + +// ӿڵü +#define MV_ALG_E_NULL_PTR 0x10000014 //ָΪ +#define MV_ALG_E_OVER_MAX_MEM 0x10000015 //޶ڴ +#define MV_ALG_E_CALL_BACK 0x10000016 //ص + +// 㷨ؼ +#define MV_ALG_E_ENCRYPT 0x10000017 //ܴ +#define MV_ALG_E_EXPIRE 
0x10000018 //㷨ʹ޴ + +// ڲģ鷵صĻ +#define MV_ALG_E_BAD_ARG 0x10000019 //Χȷ +#define MV_ALG_E_DATA_SIZE 0x1000001A //ݴСȷ +#define MV_ALG_E_STEP 0x1000001B //stepȷ + +// cpuָִ֧ +#define MV_ALG_E_CPUID 0x1000001C //cpu֧Żеָ + +#define MV_ALG_WARNING 0x1000001D // + +#define MV_ALG_E_TIME_OUT 0x1000001E //㷨ⳬʱ +#define MV_ALG_E_LIB_VERSION 0x1000001F //㷨汾ų +#define MV_ALG_E_MODEL_VERSION 0x10000020 //ģͰ汾ų +#define MV_ALG_E_GPU_MEM_ALLOC 0x10000021 //GPUڴ +#define MV_ALG_E_FILE_NON_EXIST 0x10000022 //ļ +#define MV_ALG_E_NONE_STRING 0x10000023 //ַΪ +#define MV_ALG_E_IMAGE_CODEC 0x10000024 //ͼ +#define MV_ALG_E_FILE_OPEN 0x10000025 //ļ +#define MV_ALG_E_FILE_READ 0x10000026 //ļȡ +#define MV_ALG_E_FILE_WRITE 0x10000027 //ļд +#define MV_ALG_E_FILE_READ_SIZE 0x10000028 //ļȡС +#define MV_ALG_E_FILE_TYPE 0x10000029 //ļʹ +#define MV_ALG_E_MODEL_TYPE 0x1000002A //ģʹ +#define MV_ALG_E_MALLOC_MEM 0x1000002B //ڴ +#define MV_ALG_E_BIND_CORE_FAILED 0x1000002C //̰߳ʧ + +// д +#define MV_ALG_E_DENOISE_NE_IMG_FORMAT 0x10402001 //ͼʽ +#define MV_ALG_E_DENOISE_NE_FEATURE_TYPE 0x10402002 //ʹ +#define MV_ALG_E_DENOISE_NE_PROFILE_NUM 0x10402003 //Ը +#define MV_ALG_E_DENOISE_NE_GAIN_NUM 0x10402004 // +#define MV_ALG_E_DENOISE_NE_GAIN_VAL 0x10402005 //ֵ +#define MV_ALG_E_DENOISE_NE_BIN_NUM 0x10402006 // +#define MV_ALG_E_DENOISE_NE_INIT_GAIN 0x10402007 //Ƴʼô +#define MV_ALG_E_DENOISE_NE_NOT_INIT 0x10402008 //δʼ +#define MV_ALG_E_DENOISE_COLOR_MODE 0x10402009 //ɫռģʽ +#define MV_ALG_E_DENOISE_ROI_NUM 0x1040200a //ͼROI +#define MV_ALG_E_DENOISE_ROI_ORI_PT 0x1040200b //ͼROIԭ +#define MV_ALG_E_DENOISE_ROI_SIZE 0x1040200c //ͼROIС +#define MV_ALG_E_DENOISE_GAIN_NOT_EXIST 0x1040200d //治(Ѵ) +#define MV_ALG_E_DENOISE_GAIN_BEYOND_RANGE 0x1040200e //治ڷΧ +#define MV_ALG_E_DENOISE_NP_BUF_SIZE 0x1040200f //ڴС + +#endif //_MV_ISP_ERROR_DEFINE_H_ diff --git a/src/ros2-hik-camera/hikSDK/include/PixelType.h b/src/ros2-hik-camera/hikSDK/include/PixelType.h new file mode 100644 index 0000000..d83f9de --- /dev/null +++ 
b/src/ros2-hik-camera/hikSDK/include/PixelType.h @@ -0,0 +1,202 @@ + +#ifndef _MV_PIXEL_TYPE_H_ +#define _MV_PIXEL_TYPE_H_ + +//#include "Base/GCTypes.h" + + +/************************************************************************/ +/* GigE Vision (2.0.03) PIXEL FORMATS */ +/************************************************************************/ + +// Indicate if pixel is monochrome or RGB +#define MV_GVSP_PIX_MONO 0x01000000 +#define MV_GVSP_PIX_RGB 0x02000000 // deprecated in version 1.1 +#define MV_GVSP_PIX_COLOR 0x02000000 +#define MV_GVSP_PIX_CUSTOM 0x80000000 +#define MV_GVSP_PIX_COLOR_MASK 0xFF000000 + +// Indicate effective number of bits occupied by the pixel (including padding). +// This can be used to compute amount of memory required to store an image. +#define MV_PIXEL_BIT_COUNT(n) ((n) << 16) + +#define MV_GVSP_PIX_EFFECTIVE_PIXEL_SIZE_MASK 0x00FF0000 +#define MV_GVSP_PIX_EFFECTIVE_PIXEL_SIZE_SHIFT 16 + +// Pixel ID: lower 16-bit of the pixel formats +#define MV_GVSP_PIX_ID_MASK 0x0000FFFF +#define MV_GVSP_PIX_COUNT 0x46 // next Pixel ID available + + +enum MvGvspPixelType +{ + // Undefined pixel type +#ifdef WIN32 + PixelType_Gvsp_Undefined = 0xFFFFFFFF, +#else + PixelType_Gvsp_Undefined = -1, +#endif + // Mono buffer format defines + PixelType_Gvsp_Mono1p = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(1) | 0x0037), + PixelType_Gvsp_Mono2p = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(2) | 0x0038), + PixelType_Gvsp_Mono4p = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(4) | 0x0039), + PixelType_Gvsp_Mono8 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x0001), + PixelType_Gvsp_Mono8_Signed = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x0002), + PixelType_Gvsp_Mono10 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0003), + PixelType_Gvsp_Mono10_Packed = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0004), + PixelType_Gvsp_Mono12 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0005), + PixelType_Gvsp_Mono12_Packed = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 
0x0006), + PixelType_Gvsp_Mono14 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0025), + PixelType_Gvsp_Mono16 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0007), + + // Bayer buffer format defines + PixelType_Gvsp_BayerGR8 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x0008), + PixelType_Gvsp_BayerRG8 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x0009), + PixelType_Gvsp_BayerGB8 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x000A), + PixelType_Gvsp_BayerBG8 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x000B), + PixelType_Gvsp_BayerGR10 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x000C), + PixelType_Gvsp_BayerRG10 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x000D), + PixelType_Gvsp_BayerGB10 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x000E), + PixelType_Gvsp_BayerBG10 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x000F), + PixelType_Gvsp_BayerGR12 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0010), + PixelType_Gvsp_BayerRG12 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0011), + PixelType_Gvsp_BayerGB12 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0012), + PixelType_Gvsp_BayerBG12 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0013), + PixelType_Gvsp_BayerGR10_Packed = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0026), + PixelType_Gvsp_BayerRG10_Packed = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0027), + PixelType_Gvsp_BayerGB10_Packed = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0028), + PixelType_Gvsp_BayerBG10_Packed = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0029), + PixelType_Gvsp_BayerGR12_Packed = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x002A), + PixelType_Gvsp_BayerRG12_Packed = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x002B), + PixelType_Gvsp_BayerGB12_Packed = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x002C), + PixelType_Gvsp_BayerBG12_Packed = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x002D), + PixelType_Gvsp_BayerGR16 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x002E), + 
PixelType_Gvsp_BayerRG16 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x002F), + PixelType_Gvsp_BayerGB16 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0030), + PixelType_Gvsp_BayerBG16 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0031), + + // RGB Packed buffer format defines + PixelType_Gvsp_RGB8_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(24) | 0x0014), + PixelType_Gvsp_BGR8_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(24) | 0x0015), + PixelType_Gvsp_RGBA8_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(32) | 0x0016), + PixelType_Gvsp_BGRA8_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(32) | 0x0017), + PixelType_Gvsp_RGB10_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x0018), + PixelType_Gvsp_BGR10_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x0019), + PixelType_Gvsp_RGB12_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x001A), + PixelType_Gvsp_BGR12_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x001B), + PixelType_Gvsp_RGB16_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x0033), + PixelType_Gvsp_BGR16_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x004B), + PixelType_Gvsp_RGBA16_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(64) | 0x0064), + PixelType_Gvsp_BGRA16_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(64) | 0x0051), + PixelType_Gvsp_RGB10V1_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(32) | 0x001C), + PixelType_Gvsp_RGB10V2_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(32) | 0x001D), + PixelType_Gvsp_RGB12V1_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(36) | 0X0034), + PixelType_Gvsp_RGB565_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0x0035), + PixelType_Gvsp_BGR565_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0X0036), + + // YUV Packed buffer format defines + PixelType_Gvsp_YUV411_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(12) | 0x001E), + PixelType_Gvsp_YUV422_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0x001F), + 
PixelType_Gvsp_YUV422_YUYV_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0x0032), + PixelType_Gvsp_YUV444_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(24) | 0x0020), + PixelType_Gvsp_YCBCR8_CBYCR = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(24) | 0x003A), + PixelType_Gvsp_YCBCR422_8 = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0x003B), + PixelType_Gvsp_YCBCR422_8_CBYCRY = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0x0043), + PixelType_Gvsp_YCBCR411_8_CBYYCRYY = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(12) | 0x003C), + PixelType_Gvsp_YCBCR601_8_CBYCR = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(24) | 0x003D), + PixelType_Gvsp_YCBCR601_422_8 = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0x003E), + PixelType_Gvsp_YCBCR601_422_8_CBYCRY = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0x0044), + PixelType_Gvsp_YCBCR601_411_8_CBYYCRYY = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(12) | 0x003F), + PixelType_Gvsp_YCBCR709_8_CBYCR = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(24) | 0x0040), + PixelType_Gvsp_YCBCR709_422_8 = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0x0041), + PixelType_Gvsp_YCBCR709_422_8_CBYCRY = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0x0045), + PixelType_Gvsp_YCBCR709_411_8_CBYYCRYY = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(12) | 0x0042), + + // RGB Planar buffer format defines + PixelType_Gvsp_RGB8_Planar = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(24) | 0x0021), + PixelType_Gvsp_RGB10_Planar = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x0022), + PixelType_Gvsp_RGB12_Planar = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x0023), + PixelType_Gvsp_RGB16_Planar = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x0024), + + // 自定义的图片格式 + PixelType_Gvsp_Jpeg = (MV_GVSP_PIX_CUSTOM | MV_PIXEL_BIT_COUNT(24) | 0x0001), + + PixelType_Gvsp_Coord3D_ABC32f = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(96) | 0x00C0),//0x026000C0 + PixelType_Gvsp_Coord3D_ABC32f_Planar = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(96) | 0x00C1),//0x026000C1 + + // 
该值被废弃,请参考PixelType_Gvsp_Coord3D_AC32f_64; the value is discarded + PixelType_Gvsp_Coord3D_AC32f = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(40) | 0x00C2), + // 该值被废弃; the value is discarded (已放入Chunkdata) + PixelType_Gvsp_COORD3D_DEPTH_PLUS_MASK = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(28) | 0x0001), + + PixelType_Gvsp_Coord3D_ABC32 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(96) | 0x3001),//0x82603001 + PixelType_Gvsp_Coord3D_AB32f = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(64) | 0x3002),//0x82403002 + PixelType_Gvsp_Coord3D_AB32 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(64) | 0x3003),//0x82403003 + PixelType_Gvsp_Coord3D_AC32f_64 = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(64) | 0x00C2),//0x024000C2 + PixelType_Gvsp_Coord3D_AC32f_Planar = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(64) | 0x00C3),//0x024000C3 + PixelType_Gvsp_Coord3D_AC32 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(64) | 0x3004),//0x82403004 + PixelType_Gvsp_Coord3D_A32f = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(32) | 0x00BD),//0x012000BD + PixelType_Gvsp_Coord3D_A32 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(32) | 0x3005),//0x81203005 + PixelType_Gvsp_Coord3D_C32f = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(32) | 0x00BF),//0x012000BF + PixelType_Gvsp_Coord3D_C32 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(32) | 0x3006),//0x81203006 + + PixelType_Gvsp_Coord3D_ABC16 = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x00B9),//0x023000B9 + PixelType_Gvsp_Coord3D_C16 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x00B8),//0x011000B8 + + //无损压缩像素格式定义 + PixelType_Gvsp_HB_Mono8 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x0001), + PixelType_Gvsp_HB_Mono10 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0003), + PixelType_Gvsp_HB_Mono10_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0004), + 
PixelType_Gvsp_HB_Mono12 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0005), + PixelType_Gvsp_HB_Mono12_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0006), + PixelType_Gvsp_HB_Mono16 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0007), + PixelType_Gvsp_HB_BayerGR8 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x0008), + PixelType_Gvsp_HB_BayerRG8 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x0009), + PixelType_Gvsp_HB_BayerGB8 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x000A), + PixelType_Gvsp_HB_BayerBG8 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x000B), + PixelType_Gvsp_HB_BayerRBGG8 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x0046), + PixelType_Gvsp_HB_BayerGR10 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x000C), + PixelType_Gvsp_HB_BayerRG10 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x000D), + PixelType_Gvsp_HB_BayerGB10 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x000E), + PixelType_Gvsp_HB_BayerBG10 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x000F), + PixelType_Gvsp_HB_BayerGR12 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0010), + PixelType_Gvsp_HB_BayerRG12 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0011), + PixelType_Gvsp_HB_BayerGB12 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0012), + PixelType_Gvsp_HB_BayerBG12 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0013), + PixelType_Gvsp_HB_BayerGR10_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0026), + PixelType_Gvsp_HB_BayerRG10_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0027), + PixelType_Gvsp_HB_BayerGB10_Packed = (MV_GVSP_PIX_CUSTOM | 
MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0028), + PixelType_Gvsp_HB_BayerBG10_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0029), + PixelType_Gvsp_HB_BayerGR12_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x002A), + PixelType_Gvsp_HB_BayerRG12_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x002B), + PixelType_Gvsp_HB_BayerGB12_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x002C), + PixelType_Gvsp_HB_BayerBG12_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x002D), + PixelType_Gvsp_HB_YUV422_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0x001F), + PixelType_Gvsp_HB_YUV422_YUYV_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0x0032), + PixelType_Gvsp_HB_RGB8_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(24) | 0x0014), + PixelType_Gvsp_HB_BGR8_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(24) | 0x0015), + PixelType_Gvsp_HB_RGBA8_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(32) | 0x0016), + PixelType_Gvsp_HB_BGRA8_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(32) | 0x0017), + PixelType_Gvsp_HB_RGB16_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x0033), + PixelType_Gvsp_HB_BGR16_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x004B), + PixelType_Gvsp_HB_RGBA16_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(64) | 0x0064), + PixelType_Gvsp_HB_BGRA16_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(64) | 0x0051), + +}; + +//enum MvUsbPixelType +//{ +// +//}; + +//跨平台定义 +//Cross Platform Definition +#ifdef WIN32 +typedef __int64 int64_t; +typedef unsigned __int64 uint64_t; +#else +#include +#endif + + +#endif /* _MV_PIXEL_TYPE_H_ */ diff --git 
a/src/ros2-hik-camera/hikSDK/lib/amd64/libFormatConversion.so b/src/ros2-hik-camera/hikSDK/lib/amd64/libFormatConversion.so new file mode 100644 index 0000000..10d4e0b Binary files /dev/null and b/src/ros2-hik-camera/hikSDK/lib/amd64/libFormatConversion.so differ diff --git a/src/ros2-hik-camera/hikSDK/lib/amd64/libMVRender.so b/src/ros2-hik-camera/hikSDK/lib/amd64/libMVRender.so new file mode 100644 index 0000000..b09a02b Binary files /dev/null and b/src/ros2-hik-camera/hikSDK/lib/amd64/libMVRender.so differ diff --git a/src/ros2-hik-camera/hikSDK/lib/amd64/libMediaProcess.so b/src/ros2-hik-camera/hikSDK/lib/amd64/libMediaProcess.so new file mode 100644 index 0000000..6fd3f71 Binary files /dev/null and b/src/ros2-hik-camera/hikSDK/lib/amd64/libMediaProcess.so differ diff --git a/src/ros2-hik-camera/hikSDK/lib/amd64/libMvCameraControl.so b/src/ros2-hik-camera/hikSDK/lib/amd64/libMvCameraControl.so new file mode 100644 index 0000000..88c139b Binary files /dev/null and b/src/ros2-hik-camera/hikSDK/lib/amd64/libMvCameraControl.so differ diff --git a/src/ros2-hik-camera/hikSDK/lib/amd64/libMvUsb3vTL.so b/src/ros2-hik-camera/hikSDK/lib/amd64/libMvUsb3vTL.so new file mode 100644 index 0000000..810e863 Binary files /dev/null and b/src/ros2-hik-camera/hikSDK/lib/amd64/libMvUsb3vTL.so differ diff --git a/src/ros2-hik-camera/hikSDK/lib/arm64/libFormatConversion.so b/src/ros2-hik-camera/hikSDK/lib/arm64/libFormatConversion.so new file mode 100644 index 0000000..793ac90 Binary files /dev/null and b/src/ros2-hik-camera/hikSDK/lib/arm64/libFormatConversion.so differ diff --git a/src/ros2-hik-camera/hikSDK/lib/arm64/libMVRender.so b/src/ros2-hik-camera/hikSDK/lib/arm64/libMVRender.so new file mode 100644 index 0000000..4ed543c Binary files /dev/null and b/src/ros2-hik-camera/hikSDK/lib/arm64/libMVRender.so differ diff --git a/src/ros2-hik-camera/hikSDK/lib/arm64/libMediaProcess.so b/src/ros2-hik-camera/hikSDK/lib/arm64/libMediaProcess.so new file mode 100644 index 
import os

from ament_index_python.packages import get_package_share_directory, get_package_prefix
from launch import LaunchDescription
from launch.actions import DeclareLaunchArgument
from launch.substitutions import LaunchConfiguration
from launch_ros.actions import Node


def generate_launch_description():
    """Launch the hik_camera node with overridable params file and camera info URL."""
    default_params_file = os.path.join(
        get_package_share_directory('hik_camera'), 'config', 'camera_params.yaml')
    default_camera_info_url = 'package://hik_camera/config/camera_info.yaml'

    launch_arguments = [
        DeclareLaunchArgument(name='params_file',
                              default_value=default_params_file),
        DeclareLaunchArgument(name='camera_info_url',
                              default_value=default_camera_info_url),
        DeclareLaunchArgument(name='use_sensor_data_qos',
                              default_value='false'),
    ]

    camera_node = Node(
        package='hik_camera',
        executable='hik_camera_node',
        output='screen',
        emulate_tty=True,
        parameters=[LaunchConfiguration('params_file'), {
            'camera_info_url': LaunchConfiguration('camera_info_url'),
            'use_sensor_data_qos': LaunchConfiguration('use_sensor_data_qos'),
        }],
    )

    return LaunchDescription(launch_arguments + [camera_node])
b/src/ros2-hik-camera/package.xml new file mode 100644 index 0000000..22a1956 --- /dev/null +++ b/src/ros2-hik-camera/package.xml @@ -0,0 +1,28 @@ + + + + hik_camera + 0.0.0 + TODO: Package description + nolem + TODO: License declaration + + + ament_cmake + + + rclcpp + rclcpp_components + sensor_msgs + image_transport + image_transport_plugins + camera_info_manager + + camera_calibration + + ament_lint_auto + ament_lint_common + + ament_cmake + + diff --git a/src/ros2-hik-camera/src/hik_camera_node.cpp b/src/ros2-hik-camera/src/hik_camera_node.cpp new file mode 100644 index 0000000..d02d953 --- /dev/null +++ b/src/ros2-hik-camera/src/hik_camera_node.cpp @@ -0,0 +1,202 @@ +#include "MvCameraControl.h" +// ROS +#include +#include +#include +#include +#include +#include +#include + +namespace hik_camera +{ +class HikCameraNode : public rclcpp::Node +{ +public: + explicit HikCameraNode(const rclcpp::NodeOptions & options) : Node("hik_camera", options) + { + RCLCPP_INFO(this->get_logger(), "Starting HikCameraNode!"); + + MV_CC_DEVICE_INFO_LIST device_list; + // enum device + nRet = MV_CC_EnumDevices(MV_USB_DEVICE, &device_list); + RCLCPP_INFO(this->get_logger(), "Found camera count = %d", device_list.nDeviceNum); + + while (device_list.nDeviceNum == 0 && rclcpp::ok()) { + RCLCPP_ERROR(this->get_logger(), "No camera found!"); + RCLCPP_INFO(this->get_logger(), "Enum state: [%x]", nRet); + std::this_thread::sleep_for(std::chrono::seconds(1)); + nRet = MV_CC_EnumDevices(MV_USB_DEVICE, &device_list); + } + + MV_CC_CreateHandle(&camera_handle_, device_list.pDeviceInfo[0]); + + MV_CC_OpenDevice(camera_handle_); + + // Get camera infomation + MV_CC_GetImageInfo(camera_handle_, &img_info_); + image_msg_.data.reserve(img_info_.nHeightMax * img_info_.nWidthMax * 3); + + // Init convert param + convert_param_.nWidth = img_info_.nWidthValue; + convert_param_.nHeight = img_info_.nHeightValue; + convert_param_.enDstPixelType = PixelType_Gvsp_RGB8_Packed; + + bool use_sensor_data_qos 
= this->declare_parameter("use_sensor_data_qos", true); + auto qos = use_sensor_data_qos ? rmw_qos_profile_sensor_data : rmw_qos_profile_default; + camera_pub_ = image_transport::create_camera_publisher(this, "image_raw", qos); + + declareParameters(); + + MV_CC_StartGrabbing(camera_handle_); + + // Load camera info + camera_name_ = this->declare_parameter("camera_name", "narrow_stereo"); + camera_info_manager_ = + std::make_unique(this, camera_name_); + auto camera_info_url = + this->declare_parameter("camera_info_url", "package://hik_camera/config/camera_info.yaml"); + if (camera_info_manager_->validateURL(camera_info_url)) { + camera_info_manager_->loadCameraInfo(camera_info_url); + camera_info_msg_ = camera_info_manager_->getCameraInfo(); + } else { + RCLCPP_WARN(this->get_logger(), "Invalid camera info URL: %s", camera_info_url.c_str()); + } + + params_callback_handle_ = this->add_on_set_parameters_callback( + std::bind(&HikCameraNode::parametersCallback, this, std::placeholders::_1)); + + capture_thread_ = std::thread{[this]() -> void { + MV_FRAME_OUT out_frame; + + RCLCPP_INFO(this->get_logger(), "Publishing image!"); + + image_msg_.header.frame_id = "camera_optical_frame"; + image_msg_.encoding = "rgb8"; + + while (rclcpp::ok()) { + nRet = MV_CC_GetImageBuffer(camera_handle_, &out_frame, 1000); + if (MV_OK == nRet) { + convert_param_.pDstBuffer = image_msg_.data.data(); + convert_param_.nDstBufferSize = image_msg_.data.size(); + convert_param_.pSrcData = out_frame.pBufAddr; + convert_param_.nSrcDataLen = out_frame.stFrameInfo.nFrameLen; + convert_param_.enSrcPixelType = out_frame.stFrameInfo.enPixelType; + + MV_CC_ConvertPixelType(camera_handle_, &convert_param_); + + image_msg_.header.stamp = this->now(); + image_msg_.height = out_frame.stFrameInfo.nHeight; + image_msg_.width = out_frame.stFrameInfo.nWidth; + image_msg_.step = out_frame.stFrameInfo.nWidth * 3; + image_msg_.data.resize(image_msg_.width * image_msg_.height * 3); + + camera_info_msg_.header = 
image_msg_.header; + camera_pub_.publish(image_msg_, camera_info_msg_); + + MV_CC_FreeImageBuffer(camera_handle_, &out_frame); + fail_conut_ = 0; + } else { + RCLCPP_WARN(this->get_logger(), "Get buffer failed! nRet: [%x]", nRet); + MV_CC_StopGrabbing(camera_handle_); + MV_CC_StartGrabbing(camera_handle_); + fail_conut_++; + } + + if (fail_conut_ > 5) { + RCLCPP_FATAL(this->get_logger(), "Camera failed!"); + rclcpp::shutdown(); + } + } + }}; + } + + ~HikCameraNode() override + { + if (capture_thread_.joinable()) { + capture_thread_.join(); + } + if (camera_handle_) { + MV_CC_StopGrabbing(camera_handle_); + MV_CC_CloseDevice(camera_handle_); + MV_CC_DestroyHandle(&camera_handle_); + } + RCLCPP_INFO(this->get_logger(), "HikCameraNode destroyed!"); + } + +private: + void declareParameters() + { + rcl_interfaces::msg::ParameterDescriptor param_desc; + MVCC_FLOATVALUE f_value; + param_desc.integer_range.resize(1); + param_desc.integer_range[0].step = 1; + // Exposure time + param_desc.description = "Exposure time in microseconds"; + MV_CC_GetFloatValue(camera_handle_, "ExposureTime", &f_value); + param_desc.integer_range[0].from_value = f_value.fMin; + param_desc.integer_range[0].to_value = f_value.fMax; + double exposure_time = this->declare_parameter("exposure_time", 5000, param_desc); + MV_CC_SetFloatValue(camera_handle_, "ExposureTime", exposure_time); + RCLCPP_INFO(this->get_logger(), "Exposure time: %f", exposure_time); + + // Gain + param_desc.description = "Gain"; + MV_CC_GetFloatValue(camera_handle_, "Gain", &f_value); + param_desc.integer_range[0].from_value = f_value.fMin; + param_desc.integer_range[0].to_value = f_value.fMax; + double gain = this->declare_parameter("gain", f_value.fCurValue, param_desc); + MV_CC_SetFloatValue(camera_handle_, "Gain", gain); + RCLCPP_INFO(this->get_logger(), "Gain: %f", gain); + } + + rcl_interfaces::msg::SetParametersResult parametersCallback( + const std::vector & parameters) + { + rcl_interfaces::msg::SetParametersResult 
result; + result.successful = true; + for (const auto & param : parameters) { + if (param.get_name() == "exposure_time") { + int status = MV_CC_SetFloatValue(camera_handle_, "ExposureTime", param.as_int()); + if (MV_OK != status) { + result.successful = false; + result.reason = "Failed to set exposure time, status = " + std::to_string(status); + } + } else if (param.get_name() == "gain") { + int status = MV_CC_SetFloatValue(camera_handle_, "Gain", param.as_double()); + if (MV_OK != status) { + result.successful = false; + result.reason = "Failed to set gain, status = " + std::to_string(status); + } + } else { + result.successful = false; + result.reason = "Unknown parameter: " + param.get_name(); + } + } + return result; + } + + sensor_msgs::msg::Image image_msg_; + + image_transport::CameraPublisher camera_pub_; + + int nRet = MV_OK; + void * camera_handle_; + MV_IMAGE_BASIC_INFO img_info_; + + MV_CC_PIXEL_CONVERT_PARAM convert_param_; + + std::string camera_name_; + std::unique_ptr camera_info_manager_; + sensor_msgs::msg::CameraInfo camera_info_msg_; + + int fail_conut_ = 0; + std::thread capture_thread_; + + OnSetParametersCallbackHandle::SharedPtr params_callback_handle_; +}; +} // namespace hik_camera + +#include "rclcpp_components/register_node_macro.hpp" + +RCLCPP_COMPONENTS_REGISTER_NODE(hik_camera::HikCameraNode)