From 7d8f4c66a32a1338a7f4cd36127bc8f7f930a185 Mon Sep 17 00:00:00 2001
From: Mirek Burkon
Date: Mon, 1 Apr 2024 03:32:02 -0700
Subject: [PATCH 1/3] make sure latest ros-humble-depthai is installed

---
 Dockerfile | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/Dockerfile b/Dockerfile
index 41150839..e8d510a1 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -6,6 +6,8 @@ ENV DEBIAN_FRONTEND=noninteractive
 RUN apt-get update \
     && apt-get -y install --no-install-recommends software-properties-common git libusb-1.0-0-dev wget zsh python3-colcon-common-extensions
 
+# install latest
+RUN apt-get install ros-${ROS_DISTRO}-depthai
 ENV DEBIAN_FRONTEND=dialog
 
 RUN sh -c "$(wget https://raw.github.com/ohmyzsh/ohmyzsh/master/tools/install.sh -O -)"

From e3d83d5ce7da01ecbcc8a3fde60616a6ce6f8917 Mon Sep 17 00:00:00 2001
From: Mirek Burkon
Date: Mon, 1 Apr 2024 03:39:04 -0700
Subject: [PATCH 2/3] h.264 ROS output for RGB and Mono nodes

---
 depthai_bridge/CMakeLists.txt                 |  2 +
 .../include/depthai_bridge/ImageConverter.hpp |  7 ++
 depthai_bridge/src/ImageConverter.cpp         | 57 +++++++++++++-
 depthai_ros_driver/CMakeLists.txt             |  1 +
 .../dai_nodes/sensors/mono.hpp                | 10 ++-
 .../dai_nodes/sensors/rgb.hpp                 | 10 ++-
 .../dai_nodes/sensors/sensor_helpers.hpp      |  9 +++
 depthai_ros_driver/package.xml                |  1 +
 .../src/dai_nodes/sensors/mono.cpp            | 63 ++++++++++++----
 .../src/dai_nodes/sensors/rgb.cpp             | 75 ++++++++++++-------
 .../src/dai_nodes/sensors/sensor_helpers.cpp  | 16 ++++
 .../param_handlers/sensor_param_handler.cpp   |  2 +
 12 files changed, 203 insertions(+), 50 deletions(-)

diff --git a/depthai_bridge/CMakeLists.txt b/depthai_bridge/CMakeLists.txt
index d39393ae..d384e437 100644
--- a/depthai_bridge/CMakeLists.txt
+++ b/depthai_bridge/CMakeLists.txt
@@ -41,6 +41,7 @@ find_package(tf2_ros REQUIRED)
 find_package(tf2 REQUIRED)
 find_package(tf2_geometry_msgs REQUIRED)
 find_package(composition_interfaces REQUIRED)
+find_package(ffmpeg_image_transport_msgs REQUIRED)
 
 set(dependencies
   camera_info_manager
@@ -56,6 +57,7 @@ set(dependencies
   tf2_geometry_msgs
   tf2
   composition_interfaces
+  ffmpeg_image_transport_msgs
 )
 
 include_directories(
diff --git a/depthai_bridge/include/depthai_bridge/ImageConverter.hpp b/depthai_bridge/include/depthai_bridge/ImageConverter.hpp
index 358b2cd7..89bc5c5f 100644
--- a/depthai_bridge/include/depthai_bridge/ImageConverter.hpp
+++ b/depthai_bridge/include/depthai_bridge/ImageConverter.hpp
@@ -11,10 +11,12 @@
 #include "depthai-shared/common/Point2f.hpp"
 #include "depthai/device/CalibrationHandler.hpp"
 #include "depthai/pipeline/datatype/ImgFrame.hpp"
+#include "depthai/pipeline/datatype/EncodedFrame.hpp"
 #include "rclcpp/time.hpp"
 #include "sensor_msgs/msg/camera_info.hpp"
 #include "sensor_msgs/msg/image.hpp"
 #include "std_msgs/msg/header.hpp"
+#include "ffmpeg_image_transport_msgs/msg/ffmpeg_packet.hpp"
 
 namespace dai {
 
@@ -22,7 +24,9 @@ namespace ros {
 
 namespace StdMsgs = std_msgs::msg;
 namespace ImageMsgs = sensor_msgs::msg;
+namespace FFMPEGMsgs = ffmpeg_image_transport_msgs::msg;
 using ImagePtr = ImageMsgs::Image::SharedPtr;
+using FFMPEGPacketPtr = FFMPEGMsgs::FFMPEGPacket::SharedPtr;
 
 using TimePoint = std::chrono::time_point<std::chrono::steady_clock, std::chrono::steady_clock::duration>;
 
@@ -84,6 +88,9 @@ class ImageConverter {
     ImageMsgs::Image toRosMsgRawPtr(std::shared_ptr<dai::ImgFrame> inData, const sensor_msgs::msg::CameraInfo& info = sensor_msgs::msg::CameraInfo());
     ImagePtr toRosMsgPtr(std::shared_ptr<dai::ImgFrame> inData);
+    FFMPEGMsgs::FFMPEGPacket toRosVideoMsgRawPtr(std::shared_ptr<dai::EncodedFrame> inData, int fw, int fh);
+    // FFMPEGPacketPtr toRosMsgH264Ptr(std::shared_ptr<dai::EncodedFrame> inData);
+
     void toDaiMsg(const ImageMsgs::Image& inMsg, dai::ImgFrame& outData);
 
     /** TODO(sachin): Add support for ros msg to cv mat since we have some
diff --git a/depthai_bridge/src/ImageConverter.cpp b/depthai_bridge/src/ImageConverter.cpp
index 4901a744..6fb76422 100644
--- a/depthai_bridge/src/ImageConverter.cpp
+++ b/depthai_bridge/src/ImageConverter.cpp
@@ -218,11 +218,66 @@ void ImageConverter::toRosMsg(std::shared_ptr<dai::ImgFrame> inData, std::deque<ImageMsgs::Image>& daiMsgs) {
 
 ImagePtr ImageConverter::toRosMsgPtr(std::shared_ptr<dai::ImgFrame> inData) {
     auto msg = toRosMsgRawPtr(inData);
-
     ImagePtr ptr = std::make_shared<ImageMsgs::Image>(msg);
     return ptr;
 }
 
+/**
+ * This should be implemented in dai::EncodedFrame but I'm not gonna
+ * edit another repo for it
+ */
+std::chrono::time_point<std::chrono::steady_clock, std::chrono::steady_clock::duration> getOffsetTimestamp(std::chrono::time_point<std::chrono::steady_clock, std::chrono::steady_clock::duration> ts, CameraExposureOffset offset, std::chrono::microseconds expTime) {
+    switch(offset) {
+        case CameraExposureOffset::START:
+            return ts - expTime;
+        case CameraExposureOffset::MIDDLE:
+            return ts - expTime / 2;
+        case CameraExposureOffset::END:
+        default:
+            return ts;
+    }
+}
+
+FFMPEGMsgs::FFMPEGPacket ImageConverter::toRosVideoMsgRawPtr(std::shared_ptr<dai::EncodedFrame> inData, int fw, int fh) {
+    if(_updateRosBaseTimeOnToRosMsg) {
+        updateRosBaseTime();
+    }
+    std::chrono::_V2::steady_clock::time_point tstamp;
+    if(_getBaseDeviceTimestamp)
+        if(_addExpOffset)
+            tstamp = getOffsetTimestamp(inData->getTimestampDevice(), _expOffset, inData->getExposureTime());
+        else
+            tstamp = inData->getTimestampDevice();
+    else if(_addExpOffset)
+        tstamp = getOffsetTimestamp(inData->getTimestamp(), _expOffset, inData->getExposureTime());
+    else
+        tstamp = inData->getTimestamp();
+
+    FFMPEGMsgs::FFMPEGPacket outFrameMsg;
+    StdMsgs::Header header;
+    header.frame_id = _frameName;
+    header.stamp = getFrameTime(_rosBaseTime, _steadyBaseTime, tstamp);
+    outFrameMsg.header = header;
+
+    auto ft = inData->getFrameType();  // I = keyframe
+
+    outFrameMsg.width = fw;
+    outFrameMsg.height = fh;
+    outFrameMsg.encoding = "h.264";
+    outFrameMsg.pts = header.stamp.sec * 1000000000 + header.stamp.nanosec;
+    outFrameMsg.flags = (int)(ft == RawEncodedFrame::FrameType::I);
+    outFrameMsg.is_bigendian = false;
+    outFrameMsg.data = inData->getData();
+
+    return outFrameMsg;
+}
+
+// FFMPEGPacketPtr ImageConverter::toRosMsgH264Ptr(std::shared_ptr<dai::EncodedFrame> inData) {
+//     auto msg = toRosVideoMsgRawPtr(inData);
+//     FFMPEGPacketPtr ptr = std::make_shared<FFMPEGMsgs::FFMPEGPacket>(msg);
+//     return ptr;
+// }
+
 void ImageConverter::toDaiMsg(const ImageMsgs::Image& inMsg, dai::ImgFrame& outData) {
     std::unordered_map<dai::RawImgFrame::Type, std::string>::iterator revEncodingIter;
     if(_daiInterleaved) {
diff --git a/depthai_ros_driver/CMakeLists.txt b/depthai_ros_driver/CMakeLists.txt
index e6ba0821..f191c94f 100644
--- a/depthai_ros_driver/CMakeLists.txt
+++ b/depthai_ros_driver/CMakeLists.txt
@@ -30,6 +30,7 @@
 rclcpp
 sensor_msgs
 diagnostic_updater
 diagnostic_msgs
+ffmpeg_image_transport_msgs
 )
 set(SENSOR_DEPS
diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/mono.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/mono.hpp
index 5621dada..307236ea 100644
--- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/mono.hpp
+++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/mono.hpp
@@ -6,6 +6,7 @@
 #include "image_transport/image_transport.hpp"
 #include "sensor_msgs/msg/camera_info.hpp"
 #include "sensor_msgs/msg/image.hpp"
+#include "ffmpeg_image_transport_msgs/msg/ffmpeg_packet.hpp"
 
 namespace dai {
 class Pipeline;
@@ -59,16 +60,17 @@ class Mono : public BaseNode {
     std::unique_ptr<dai::ros::ImageConverter> imageConverter;
     image_transport::CameraPublisher monoPubIT;
     rclcpp::Publisher<sensor_msgs::msg::Image>::SharedPtr monoPub;
+    rclcpp::Publisher<ffmpeg_image_transport_msgs::msg::FFMPEGPacket>::SharedPtr h264Pub;
     rclcpp::Publisher<sensor_msgs::msg::CameraInfo>::SharedPtr infoPub;
     std::shared_ptr<camera_info_manager::CameraInfoManager> infoManager;
     std::shared_ptr<dai::node::MonoCamera> monoCamNode;
-    std::shared_ptr<dai::node::VideoEncoder> videoEnc;
+    std::shared_ptr<dai::node::VideoEncoder> videoEnc, videoEncH264;
     std::unique_ptr<param_handlers::SensorParamHandler> ph;
-    std::shared_ptr<dai::DataOutputQueue> monoQ;
+    std::shared_ptr<dai::DataOutputQueue> monoQ, h264Q;
     std::shared_ptr<dai::DataInputQueue> controlQ;
-    std::shared_ptr<dai::node::XLinkOut> xoutMono;
+    std::shared_ptr<dai::node::XLinkOut> xoutMono, xoutH264;
     std::shared_ptr<dai::node::XLinkIn> xinControl;
-    std::string monoQName, controlQName;
+    std::string monoQName, controlQName, h264QName;
 };
 }  // namespace dai_nodes
diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/rgb.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/rgb.hpp
index 864a993e..463f5b3b 100644
--- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/rgb.hpp
+++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/rgb.hpp
@@ -5,6 +5,7 @@
 #include "image_transport/image_transport.hpp"
 #include "sensor_msgs/msg/camera_info.hpp"
 #include "sensor_msgs/msg/image.hpp"
+#include "ffmpeg_image_transport_msgs/msg/ffmpeg_packet.hpp"
 
 namespace dai {
 class Pipeline;
@@ -63,16 +64,17 @@ class RGB : public BaseNode {
     std::unique_ptr<dai::ros::ImageConverter> imageConverter;
     image_transport::CameraPublisher rgbPubIT, previewPubIT;
     rclcpp::Publisher<sensor_msgs::msg::Image>::SharedPtr rgbPub, previewPub;
+    rclcpp::Publisher<ffmpeg_image_transport_msgs::msg::FFMPEGPacket>::SharedPtr h264Pub;
     rclcpp::Publisher<sensor_msgs::msg::CameraInfo>::SharedPtr rgbInfoPub, previewInfoPub;
     std::shared_ptr<camera_info_manager::CameraInfoManager> infoManager, previewInfoManager;
     std::shared_ptr<dai::node::ColorCamera> colorCamNode;
-    std::shared_ptr<dai::node::VideoEncoder> videoEnc;
+    std::shared_ptr<dai::node::VideoEncoder> videoEnc, videoEncH264;
     std::unique_ptr<param_handlers::SensorParamHandler> ph;
-    std::shared_ptr<dai::DataOutputQueue> colorQ, previewQ;
+    std::shared_ptr<dai::DataOutputQueue> colorQ, previewQ, h264Q;
     std::shared_ptr<dai::DataInputQueue> controlQ;
-    std::shared_ptr<dai::node::XLinkOut> xoutColor, xoutPreview;
+    std::shared_ptr<dai::node::XLinkOut> xoutColor, xoutPreview, xoutH264;
     std::shared_ptr<dai::node::XLinkIn> xinControl;
-    std::string ispQName, previewQName, controlQName;
+    std::string ispQName, previewQName, controlQName, h264QName;
 };
 }  // namespace dai_nodes
diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp
index 3066088c..2c1175d8 100644
--- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp
+++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp
@@ -12,6 +12,7 @@
 #include "depthai/pipeline/datatype/CameraControl.hpp"
 #include "image_transport/camera_publisher.hpp"
 #include "sensor_msgs/msg/camera_info.hpp"
+#include "ffmpeg_image_transport_msgs/msg/ffmpeg_packet.hpp"
 
 namespace dai {
 class Device;
@@ -63,6 +64,14 @@ void cameraPub(const std::string& /*name*/,
                std::shared_ptr<camera_info_manager::CameraInfoManager> infoManager,
                bool lazyPub = true);
 
+void videoPub(const std::string& /*name*/,
+              const std::shared_ptr<dai::ADatatype>& data,
+              dai::ros::ImageConverter& converter,
+              rclcpp::Publisher<ffmpeg_image_transport_msgs::msg::FFMPEGPacket>::SharedPtr pub,
+              int w,
+              int h,
+              bool lazyPub = true);
+
 void splitPub(const std::string& /*name*/,
               const std::shared_ptr<dai::ADatatype>& data,
               dai::ros::ImageConverter& converter,
diff --git a/depthai_ros_driver/package.xml b/depthai_ros_driver/package.xml
index f34b3597..4449acb3 100644
--- a/depthai_ros_driver/package.xml
+++ b/depthai_ros_driver/package.xml
@@ -18,6 +18,7 @@
   <depend>vision_msgs</depend>
   <depend>std_msgs</depend>
   <depend>std_srvs</depend>
+  <depend>ffmpeg_image_transport_msgs</depend>
   <depend>cv_bridge</depend>
   <depend>image_transport</depend>
   <depend>image_transport_plugins</depend>
diff --git a/depthai_ros_driver/src/dai_nodes/sensors/mono.cpp b/depthai_ros_driver/src/dai_nodes/sensors/mono.cpp
index e71b5495..1014ab1a 100644
--- a/depthai_ros_driver/src/dai_nodes/sensors/mono.cpp
+++ b/depthai_ros_driver/src/dai_nodes/sensors/mono.cpp
@@ -31,12 +31,13 @@ Mono::Mono(const std::string& daiNodeName,
     ph = std::make_unique<param_handlers::SensorParamHandler>(node, daiNodeName, socket);
     ph->declareParams(monoCamNode, sensor, publish);
     setXinXout(pipeline);
-    RCLCPP_DEBUG(node->get_logger(), "Node %s created", daiNodeName.c_str());
+    RCLCPP_INFO(node->get_logger(), "Node %s created", daiNodeName.c_str());
 }
 Mono::~Mono() = default;
 void Mono::setNames() {
     monoQName = getName() + "_mono";
     controlQName = getName() + "_control";
+    h264QName = getName() + "_h264";
 }
 
 void Mono::setXinXout(std::shared_ptr<dai::Pipeline> pipeline) {
@@ -51,27 +52,42 @@ void Mono::setXinXout(std::shared_ptr<dai::Pipeline> pipeline) {
             monoCamNode->out.link(xoutMono->input);
         }
     }
+    if(ph->getParam<bool>("i_enable_h264")) {
+        RCLCPP_INFO(getROSNode()->get_logger(), "Setting up h264 output %s (q=%d)", h264QName.c_str(), ph->getParam<int>("i_h264_quality"));
+        videoEncH264 = sensor_helpers::createEncoder(pipeline, ph->getParam<int>("i_h264_quality"), dai::VideoEncoderProperties::Profile::H264_HIGH);
+        videoEncH264->setKeyframeFrequency(30);  // one keyframe / second @ 30 fps
+        monoCamNode->out.link(videoEncH264->input);
+
+        xoutH264 = pipeline->create<dai::node::XLinkOut>();
+        xoutH264->setStreamName(h264QName);
+        xoutH264->input.setQueueSize(2);
+        xoutH264->input.setBlocking(false);
+        videoEncH264->out.link(xoutH264->input);
+    }
     xinControl = pipeline->create<dai::node::XLinkIn>();
     xinControl->setStreamName(controlQName);
     xinControl->out.link(monoCamNode->inputControl);
 }
 
 void Mono::setupQueues(std::shared_ptr<dai::Device> device) {
+
+    auto tfPrefix = getTFPrefix(utils::getSocketName(static_cast<dai::CameraBoardSocket>(ph->getParam<int>("i_board_socket_id"))));
+
+    imageConverter = std::make_unique<dai::ros::ImageConverter>(tfPrefix + "_camera_optical_frame", false, ph->getParam<bool>("i_get_base_device_timestamp"));
+    imageConverter->setUpdateRosBaseTimeOnToRosMsg(ph->getParam<bool>("i_update_ros_base_time_on_ros_msg"));
+
+    if(ph->getParam<bool>("i_low_bandwidth")) {
+        imageConverter->convertFromBitstream(dai::RawImgFrame::Type::GRAY8);
+    }
+    if(ph->getParam<bool>("i_add_exposure_offset")) {
+        auto offset = static_cast<dai::CameraExposureOffset>(ph->getParam<int>("i_exposure_offset"));
+        imageConverter->addExposureOffset(offset);
+    }
+    if(ph->getParam<bool>("i_reverse_stereo_socket_order")) {
+        imageConverter->reverseStereoSocketOrder();
+    }
     if(ph->getParam<bool>("i_publish_topic")) {
-        auto tfPrefix = getTFPrefix(utils::getSocketName(static_cast<dai::CameraBoardSocket>(ph->getParam<int>("i_board_socket_id"))));
-        imageConverter =
-            std::make_unique<dai::ros::ImageConverter>(tfPrefix + "_camera_optical_frame", false, ph->getParam<bool>("i_get_base_device_timestamp"));
-        imageConverter->setUpdateRosBaseTimeOnToRosMsg(ph->getParam<bool>("i_update_ros_base_time_on_ros_msg"));
-        if(ph->getParam<bool>("i_low_bandwidth")) {
-            imageConverter->convertFromBitstream(dai::RawImgFrame::Type::GRAY8);
-        }
-        if(ph->getParam<bool>("i_add_exposure_offset")) {
-            auto offset = static_cast<dai::CameraExposureOffset>(ph->getParam<int>("i_exposure_offset"));
-            imageConverter->addExposureOffset(offset);
-        }
-        if(ph->getParam<bool>("i_reverse_stereo_socket_order")) {
-            imageConverter->reverseStereoSocketOrder();
-        }
+        monoQ = device->getOutputQueue(monoQName, ph->getParam<int>("i_max_q_size"), false);
         infoManager = std::make_shared<camera_info_manager::CameraInfoManager>(
             getROSNode()->create_sub_node(std::string(getROSNode()->get_name()) + "/" + getName()).get(), "/" + getName());
         if(ph->getParam<std::string>("i_calibration_file").empty()) {
@@ -84,7 +100,6 @@ void Mono::setupQueues(std::shared_ptr<dai::Device> device) {
         } else {
             infoManager->loadCameraInfo(ph->getParam<std::string>("i_calibration_file"));
         }
-        monoQ = device->getOutputQueue(monoQName, ph->getParam<int>("i_max_q_size"), false);
         if(ipcEnabled()) {
             RCLCPP_DEBUG(getROSNode()->get_logger(), "Enabling intra_process communication!");
             monoPub = getROSNode()->create_publisher<sensor_msgs::msg::Image>("~/" + getName() + "/image_raw", 10);
@@ -109,12 +124,28 @@ void Mono::setupQueues(std::shared_ptr<dai::Device> device) {
                                         ph->getParam<bool>("i_enable_lazy_publisher")));
         }
     }
+    if(ph->getParam<bool>("i_enable_h264")) {
+        RCLCPP_INFO(getROSNode()->get_logger(), "Setting up h264 queue %s", tfPrefix.c_str());
+
+        h264Q = device->getOutputQueue(h264QName, ph->getParam<int>("i_max_q_size"), false);
+        h264Pub = getROSNode()->create_publisher<ffmpeg_image_transport_msgs::msg::FFMPEGPacket>("~/" + getName() + "/h264", 10);
+        h264Q->addCallback(std::bind(sensor_helpers::videoPub,
+                                     std::placeholders::_1,
+                                     std::placeholders::_2,
+                                     *imageConverter,
+                                     h264Pub,
+                                     ph->getParam<int>("i_width"), ph->getParam<int>("i_height"),  // it'd be nice to get these from EncodedFrame
+                                     ph->getParam<bool>("i_enable_lazy_publisher")));
+    }
     controlQ = device->getInputQueue(controlQName);
 }
 
 void Mono::closeQueues() {
     if(ph->getParam<bool>("i_publish_topic")) {
         monoQ->close();
     }
+    if(ph->getParam<bool>("i_enable_h264")) {
+        h264Q->close();
+    }
     controlQ->close();
 }
diff --git a/depthai_ros_driver/src/dai_nodes/sensors/rgb.cpp b/depthai_ros_driver/src/dai_nodes/sensors/rgb.cpp
index 7286d19d..c0693765 100644
--- a/depthai_ros_driver/src/dai_nodes/sensors/rgb.cpp
+++ b/depthai_ros_driver/src/dai_nodes/sensors/rgb.cpp
@@ -31,13 +31,14 @@ RGB::RGB(const std::string& daiNodeName,
     ph = std::make_unique<param_handlers::SensorParamHandler>(node, daiNodeName, socket);
     ph->declareParams(colorCamNode, sensor, publish);
     setXinXout(pipeline);
-    RCLCPP_DEBUG(node->get_logger(), "Node %s created", daiNodeName.c_str());
+    RCLCPP_INFO(node->get_logger(), "Node %s created", daiNodeName.c_str());
 }
 RGB::~RGB() = default;
 void RGB::setNames() {
     ispQName = getName() + "_isp";
     previewQName = getName() + "_preview";
     controlQName = getName() + "_control";
+    h264QName = getName() + "_h264";
 }
 
 void RGB::setXinXout(std::shared_ptr<dai::Pipeline> pipeline) {
@@ -62,31 +63,44 @@ void RGB::setXinXout(std::shared_ptr<dai::Pipeline> pipeline) {
         xoutPreview->input.setBlocking(false);
         colorCamNode->preview.link(xoutPreview->input);
     }
+    if(ph->getParam<bool>("i_enable_h264")) {
+        RCLCPP_INFO(getROSNode()->get_logger(), "Setting up h264 output %s (q=%d)", h264QName.c_str(), ph->getParam<int>("i_h264_quality"));
+        videoEncH264 = sensor_helpers::createEncoder(pipeline, ph->getParam<int>("i_h264_quality"), dai::VideoEncoderProperties::Profile::H264_HIGH);
+        videoEncH264->setKeyframeFrequency(30);  // one keyframe / second @ 30 fps
+        colorCamNode->video.link(videoEncH264->input);
+
+        xoutH264 = pipeline->create<dai::node::XLinkOut>();
+        xoutH264->setStreamName(h264QName);
+        xoutH264->input.setQueueSize(2);
+        xoutH264->input.setBlocking(false);
+        videoEncH264->out.link(xoutH264->input);
+    }
     xinControl = pipeline->create<dai::node::XLinkIn>();
     xinControl->setStreamName(controlQName);
     xinControl->out.link(colorCamNode->inputControl);
 }
 
 void RGB::setupQueues(std::shared_ptr<dai::Device> device) {
+
+    auto tfPrefix = getTFPrefix(utils::getSocketName(static_cast<dai::CameraBoardSocket>(ph->getParam<int>("i_board_socket_id"))));
+
+    imageConverter = std::make_unique<dai::ros::ImageConverter>(tfPrefix + "_camera_optical_frame", false, ph->getParam<bool>("i_get_base_device_timestamp"));
+    imageConverter->setUpdateRosBaseTimeOnToRosMsg(ph->getParam<bool>("i_update_ros_base_time_on_ros_msg"));
+
+    if(ph->getParam<bool>("i_low_bandwidth")) {
+        imageConverter->convertFromBitstream(dai::RawImgFrame::Type::BGR888i);
+    }
+    if(ph->getParam<bool>("i_add_exposure_offset")) {
+        auto offset = static_cast<dai::CameraExposureOffset>(ph->getParam<int>("i_exposure_offset"));
+        imageConverter->addExposureOffset(offset);
+    }
+    if(ph->getParam<bool>("i_reverse_stereo_socket_order")) {
+        imageConverter->reverseStereoSocketOrder();
+    }
     if(ph->getParam<bool>("i_publish_topic")) {
-        auto tfPrefix = getTFPrefix(utils::getSocketName(static_cast<dai::CameraBoardSocket>(ph->getParam<int>("i_board_socket_id"))));
+        colorQ = device->getOutputQueue(ispQName, ph->getParam<int>("i_max_q_size"), false);
         infoManager = std::make_shared<camera_info_manager::CameraInfoManager>(
             getROSNode()->create_sub_node(std::string(getROSNode()->get_name()) + "/" + getName()).get(), "/" + getName());
-        imageConverter =
-            std::make_unique<dai::ros::ImageConverter>(tfPrefix + "_camera_optical_frame", false, ph->getParam<bool>("i_get_base_device_timestamp"));
-        imageConverter->setUpdateRosBaseTimeOnToRosMsg(ph->getParam<bool>("i_update_ros_base_time_on_ros_msg"));
-        if(ph->getParam<bool>("i_low_bandwidth")) {
-            imageConverter->convertFromBitstream(dai::RawImgFrame::Type::BGR888i);
-        }
-        if(ph->getParam<bool>("i_add_exposure_offset")) {
-            auto offset = static_cast<dai::CameraExposureOffset>(ph->getParam<int>("i_exposure_offset"));
-            imageConverter->addExposureOffset(offset);
-        }
-
-        if(ph->getParam<bool>("i_reverse_stereo_socket_order")) {
-            imageConverter->reverseStereoSocketOrder();
-        }
-
         if(ph->getParam<std::string>("i_calibration_file").empty()) {
             infoManager->setCameraInfo(sensor_helpers::getCalibInfo(getROSNode()->get_logger(),
                                                                     *imageConverter,
@@ -97,7 +111,6 @@ void RGB::setupQueues(std::shared_ptr<dai::Device> device) {
         } else {
             infoManager->loadCameraInfo(ph->getParam<std::string>("i_calibration_file"));
         }
-        colorQ = device->getOutputQueue(ispQName, ph->getParam<int>("i_max_q_size"), false);
         if(ipcEnabled()) {
             rgbPub = getROSNode()->create_publisher<sensor_msgs::msg::Image>("~/" + getName() + "/image_raw", 10);
             rgbInfoPub = getROSNode()->create_publisher<sensor_msgs::msg::CameraInfo>("~/" + getName() + "/camera_info", 10);
@@ -123,12 +136,8 @@ void RGB::setupQueues(std::shared_ptr<dai::Device> device) {
     }
     if(ph->getParam<bool>("i_enable_preview")) {
         previewQ = device->getOutputQueue(previewQName, ph->getParam<int>("i_max_q_size"), false);
-
         previewInfoManager = std::make_shared<camera_info_manager::CameraInfoManager>(
             getROSNode()->create_sub_node(std::string(getROSNode()->get_name()) + "/" + previewQName).get(), previewQName);
-        auto tfPrefix = getTFPrefix(utils::getSocketName(static_cast<dai::CameraBoardSocket>(ph->getParam<int>("i_board_socket_id"))));
-        imageConverter = std::make_unique<dai::ros::ImageConverter>(tfPrefix + "_camera_optical_frame", false);
-        imageConverter->setUpdateRosBaseTimeOnToRosMsg(ph->getParam<bool>("i_update_ros_base_time_on_ros_msg"));
         if(ph->getParam<std::string>("i_calibration_file").empty()) {
             previewInfoManager->setCameraInfo(sensor_helpers::getCalibInfo(getROSNode()->get_logger(),
                                                                            *imageConverter,
@@ -156,15 +165,31 @@ void RGB::setupQueues(std::shared_ptr<dai::Device> device) {
                                             ph->getParam<bool>("i_enable_lazy_publisher")));
         }
     };
+    if(ph->getParam<bool>("i_enable_h264")) {
+        RCLCPP_INFO(getROSNode()->get_logger(), "Setting up h264 queue %s", tfPrefix.c_str());
+
+        h264Q = device->getOutputQueue(h264QName, ph->getParam<int>("i_max_q_size"), false);
+        h264Pub = getROSNode()->create_publisher<ffmpeg_image_transport_msgs::msg::FFMPEGPacket>("~/" + getName() + "/h264", 10);
+        h264Q->addCallback(std::bind(sensor_helpers::videoPub,
+                                     std::placeholders::_1,
+                                     std::placeholders::_2,
+                                     *imageConverter,
+                                     h264Pub,
+                                     ph->getParam<int>("i_width"), ph->getParam<int>("i_height"),  // it'd be nice to get these from EncodedFrame
+                                     ph->getParam<bool>("i_enable_lazy_publisher")));
+    }
     controlQ = device->getInputQueue(controlQName);
 }
 
 void RGB::closeQueues() {
     if(ph->getParam<bool>("i_publish_topic")) {
         colorQ->close();
-        if(ph->getParam<bool>("i_enable_preview")) {
-            previewQ->close();
-        }
+    }
+    if(ph->getParam<bool>("i_enable_preview")) {
+        previewQ->close();
+    }
+    if(ph->getParam<bool>("i_enable_h264")) {
+        h264Q->close();
     }
     controlQ->close();
 }
diff --git a/depthai_ros_driver/src/dai_nodes/sensors/sensor_helpers.cpp b/depthai_ros_driver/src/dai_nodes/sensors/sensor_helpers.cpp
index 59e9f8b0..a6a7f19c 100644
--- a/depthai_ros_driver/src/dai_nodes/sensors/sensor_helpers.cpp
+++ b/depthai_ros_driver/src/dai_nodes/sensors/sensor_helpers.cpp
@@ -95,6 +95,21 @@ void cameraPub(const std::string& /*name*/,
     }
 }
 
+void videoPub(const std::string& /*name*/,
+              const std::shared_ptr<dai::ADatatype>& data,
+              dai::ros::ImageConverter& converter,
+              rclcpp::Publisher<ffmpeg_image_transport_msgs::msg::FFMPEGPacket>::SharedPtr pub,
+              int w,
+              int h,
+              bool lazyPub) {
+    if(rclcpp::ok() && (!lazyPub || pub->get_subscription_count() > 0 || pub->get_intra_process_subscription_count() > 0)) {
+        auto f = std::dynamic_pointer_cast<dai::EncodedFrame>(data);
+        auto rawFrameMsg = converter.toRosVideoMsgRawPtr(f, w, h);
+        ffmpeg_image_transport_msgs::msg::FFMPEGPacket::UniquePtr msg = std::make_unique<ffmpeg_image_transport_msgs::msg::FFMPEGPacket>(rawFrameMsg);
+        pub->publish(std::move(msg));
+    }
+}
+
 void splitPub(const std::string& /*name*/,
               const std::shared_ptr<dai::ADatatype>& data,
               dai::ros::ImageConverter& converter,
@@ -129,6 +144,7 @@ sensor_msgs::msg::CameraInfo getCalibInfo(const rclcpp::Logger& logger,
     }
     return info;
 }
+
 std::shared_ptr<dai::node::VideoEncoder> createEncoder(std::shared_ptr<dai::Pipeline> pipeline, int quality, dai::VideoEncoderProperties::Profile profile) {
     auto enc = pipeline->create<dai::node::VideoEncoder>();
     enc->setQuality(quality);
diff --git a/depthai_ros_driver/src/param_handlers/sensor_param_handler.cpp b/depthai_ros_driver/src/param_handlers/sensor_param_handler.cpp
index 5b168899..2b5e3a6b 100644
--- a/depthai_ros_driver/src/param_handlers/sensor_param_handler.cpp
+++ b/depthai_ros_driver/src/param_handlers/sensor_param_handler.cpp
@@ -33,6 +33,8 @@ void SensorParamHandler::declareCommonParams(dai::CameraBoardSocket socket) {
     declareAndLogParam<bool>("i_add_exposure_offset", false);
     declareAndLogParam<int>("i_exposure_offset", 0);
     declareAndLogParam<bool>("i_reverse_stereo_socket_order", false);
+    declareAndLogParam<bool>("i_enable_h264", false);
+    declareAndLogParam<int>("i_h264_quality", 80);
 }
 
 void SensorParamHandler::declareParams(std::shared_ptr<dai::node::MonoCamera> monoCam, dai_nodes::sensor_helpers::ImageSensor sensor, bool publish) {

From c5bf299f3ed6ae7f9a5ee7b952bf7dbbdf27674e Mon Sep 17 00:00:00 2001
From: Mirek Burkon
Date: Mon, 1 Apr 2024 03:44:54 -0700
Subject: [PATCH 3/3] + -y

---
 Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index e8d510a1..8865ecf5 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -7,7 +7,7 @@ RUN apt-get update \
     && apt-get -y install --no-install-recommends software-properties-common git libusb-1.0-0-dev wget zsh python3-colcon-common-extensions
 
 # install latest
-RUN apt-get install ros-${ROS_DISTRO}-depthai
+RUN apt-get -y install ros-${ROS_DISTRO}-depthai
 ENV DEBIAN_FRONTEND=dialog
 
 RUN sh -c "$(wget https://raw.github.com/ohmyzsh/ohmyzsh/master/tools/install.sh -O -)"
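
Usage sketch (not part of the patch series): the two parameters added in sensor_param_handler.cpp enable the stream per sensor, e.g. setting "rgb.i_enable_h264: true" and "rgb.i_h264_quality: 80" on the driver node, after which ffmpeg_image_transport_msgs/msg/FFMPEGPacket messages are published on "~/<sensor>/h264". The node and topic names in the snippet below ("oak", "/oak/rgb/h264") are assumptions based on typical launch defaults, not something these patches define; the code only inspects packets and does no decoding.

    // h264_listener.cpp -- hypothetical consumer of the new h.264 topic.
    #include <memory>

    #include "ffmpeg_image_transport_msgs/msg/ffmpeg_packet.hpp"
    #include "rclcpp/rclcpp.hpp"

    int main(int argc, char** argv) {
        rclcpp::init(argc, argv);
        auto node = rclcpp::Node::make_shared("h264_listener");
        auto logger = node->get_logger();
        // "/oak/rgb/h264" assumes the default "oak" camera name and the RGB sensor node;
        // the driver publishes on "~/<sensor>/h264" relative to its own node name.
        auto sub = node->create_subscription<ffmpeg_image_transport_msgs::msg::FFMPEGPacket>(
            "/oak/rgb/h264", 10, [logger](const ffmpeg_image_transport_msgs::msg::FFMPEGPacket& msg) {
                // flags is set to 1 on keyframes (see toRosVideoMsgRawPtr above).
                RCLCPP_INFO(logger, "h264 packet: %zu bytes, keyframe=%d", msg.data.size(), static_cast<int>(msg.flags));
            });
        rclcpp::spin(node);
        rclcpp::shutdown();
        return 0;
    }

Decoding or viewing the stream (for example through an ffmpeg_image_transport-based republisher or any libav consumer) is outside the scope of these patches.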