diff --git a/lib/src/collection.dart b/lib/src/collection.dart
index 8e12361..dfd2c09 100644
--- a/lib/src/collection.dart
+++ b/lib/src/collection.dart
@@ -13,7 +13,7 @@ CameraDetails getDefaultDetails(CameraName name) => CameraDetails(
   name: name,
   resolutionWidth: 300,
   resolutionHeight: 300,
-  quality: 50,
+  quality: 75,
   fps: 24,
   status: CameraStatus.CAMERA_ENABLED,
 );
diff --git a/lib/src/isolates/child.dart b/lib/src/isolates/child.dart
index 93eafda..cbdafd4 100644
--- a/lib/src/isolates/child.dart
+++ b/lib/src/isolates/child.dart
@@ -19,7 +19,7 @@ abstract class CameraIsolate extends IsolateChild
   /// A timer to periodically send the camera status to the dashboard.
   Timer? statusTimer;
   /// A timer to read from the camera at an FPS given by [details].
-  PeriodicTimer? frameTimer;
+  Timer? frameTimer;
   /// A timer to log out the [fpsCount] every 5 seconds using [sendLog].
   Timer? fpsTimer;
   /// Records how many FPS this camera is actually running at.
@@ -99,7 +99,7 @@ abstract class CameraIsolate extends IsolateChild
     if (details.status != CameraStatus.CAMERA_ENABLED) return;
     sendLog(LogLevel.debug, "Starting camera $name. Status=${details.status}");
     final interval = details.fps == 0 ? Duration.zero : Duration(milliseconds: 1000 ~/ details.fps);
-    frameTimer = PeriodicTimer(interval, sendFrames);
+    frameTimer = Timer.periodic(interval, (_) => sendFrames());
     fpsTimer = Timer.periodic(const Duration(seconds: 5), (_) {
       sendLog(LogLevel.trace, "Camera $name sent ${fpsCount ~/ 5} frames");
       fpsCount = 0;
diff --git a/lib/src/isolates/opencv.dart b/lib/src/isolates/opencv.dart
index dca9bb1..c5aff05 100644
--- a/lib/src/isolates/opencv.dart
+++ b/lib/src/isolates/opencv.dart
@@ -19,6 +19,7 @@ class OpenCVCameraIsolate extends CameraIsolate {
   @override
   void initCamera() {
     camera = getCamera(name);
+    camera.setResolution(details.resolutionWidth, details.resolutionHeight);
     if (!camera.isOpened) {
       sendLog(LogLevel.warning, "Camera $name is not connected");
       updateDetails(CameraDetails(status: CameraStatus.CAMERA_DISCONNECTED));
@@ -38,6 +39,7 @@ class OpenCVCameraIsolate extends CameraIsolate {
   void sendFrames() {
     final matrix = camera.getFrame();
     if (matrix == nullptr) return;
+    // detectAndAnnotateFrames(matrix);
     final frame = encodeJpg(matrix, quality: details.quality);
     matrix.dispose();
     if (frame == null) { // Error getting the frame
diff --git a/lib/src/isolates/realsense.dart b/lib/src/isolates/realsense.dart
index 2a4c3f0..b8c3174 100644
--- a/lib/src/isolates/realsense.dart
+++ b/lib/src/isolates/realsense.dart
@@ -76,24 +76,30 @@ class RealSenseIsolate extends CameraIsolate {
       sendFrame(colorizedJpg);
     }
 
-    // Get RGB frame
-    final Pointer rawRGB = frames.ref.rgb_data;
-    final Pointer rgbMatrix = getMatrix(camera.rgbResolution.height, camera.rgbResolution.width, rawRGB);
-    detectAndAnnotateFrames(rgbMatrix); // detect ArUco tags
-
-    // Compress the RGB frame into a JPG
-    final OpenCVImage? rgbJpg = encodeJpg(rgbMatrix, quality: details.quality);
-    if (rgbJpg == null) {
-      sendLog(LogLevel.debug, "Could not encode RGB frame");
-    } else {
-      final newDetails = details.deepCopy()..name = CameraName.ROVER_FRONT;
-      sendFrame(rgbJpg, detailsOverride: newDetails);
-    }
+    sendRgbFrame(frames.ref.rgb_data);
 
     fpsCount++;
-    // send(DepthFramePayload(frames.address));
+    // send(DepthFramePayload(frames.address)); // For autonomy
     nativeLib.Mat_destroy(colorizedMatrix);
-    nativeLib.Mat_destroy(rgbMatrix);
     frames.dispose();
   }
+
+  /// Sends the RealSense's RGB frame and optionally detects ArUco tags.
+  void sendRgbFrame(Pointer rawRGB) {
+    if (rawRGB == nullptr) return;
+    final Pointer rgbMatrix = getMatrix(camera.rgbResolution.height, camera.rgbResolution.width, rawRGB);
+    //detectAndAnnotateFrames(rgbMatrix); // detect ArUco tags
+
+    // Compress the RGB frame into a JPG
+    if (rgbMatrix != nullptr) {
+      final OpenCVImage? rgbJpg = encodeJpg(rgbMatrix, quality: details.quality);
+      if (rgbJpg == null) {
+        sendLog(LogLevel.debug, "Could not encode RGB frame");
+      } else {
+        final newDetails = details.deepCopy()..name = CameraName.ROVER_FRONT;
+        sendFrame(rgbJpg, detailsOverride: newDetails);
+      }
+      nativeLib.Mat_destroy(rgbMatrix);
+    }
+  }
 }
diff --git a/lib/src/utils/constants.dart b/lib/src/utils/constants.dart
index 5390bd7..54264ed 100644
--- a/lib/src/utils/constants.dart
+++ b/lib/src/utils/constants.dart
@@ -6,12 +6,14 @@ import "package:burt_network/generated.dart";
 ///
 /// Map for MAC or LINUX devices
 Map cameraNames = {
-  CameraName.ROVER_FRONT: "/dev/realsense_rgb",
-  CameraName.ROVER_REAR: "/dev/rear_camera",
-  CameraName.AUTONOMY_DEPTH: "/dev/realsense_depth",
-  CameraName.SUBSYSTEM1: "/dev/subsystem1",
-  CameraName.SUBSYSTEM2: "/dev/subsystem2",
-  CameraName.SUBSYSTEM3: "/dev/subsystem3",
+  CameraName.ROVER_FRONT: "/dev/rover-cam_realsense_rgb",
+  CameraName.ROVER_REAR: "/dev/rover-cam_rear",
+  CameraName.AUTONOMY_DEPTH: "/dev/rover-cam_realsense_depth",
+  CameraName.SUBSYSTEM1: "/dev/rover-cam_bottom-right",
+  CameraName.SUBSYSTEM2: "/dev/rover-cam_bottom-left",
+// CameraName.SUBSYSTEM1: "/dev/rover-cam_subsystem_1",
+// CameraName.SUBSYSTEM2: "/dev/rover-cam_subsystem_2",
+  CameraName.SUBSYSTEM3: "/dev/rover-cam_subsystem_3",
 };
 
 /// Map for WINDOWS devices
diff --git a/src/realsense_internal.cpp b/src/realsense_internal.cpp
index 41b3e6d..c4b2e9c 100644
--- a/src/realsense_internal.cpp
+++ b/src/realsense_internal.cpp
@@ -48,23 +48,26 @@ const char* burt_rs::RealSense::getDeviceName() {
 BurtRsStatus burt_rs::RealSense::startStream() {
   rs2::config rs_config;
   rs_config.enable_stream(RS2_STREAM_DEPTH, DEPTH_WIDTH, HEIGHT);
-  rs_config.enable_stream(RS2_STREAM_COLOR, RGB_WIDTH, HEIGHT, RS2_FORMAT_BGR8);
+  // rs_config.enable_stream(RS2_STREAM_COLOR, RGB_WIDTH, HEIGHT, RS2_FORMAT_BGR8);
   auto profile = pipeline.start(rs_config);
   auto frames = pipeline.wait_for_frames();
   auto depth_frame = frames.get_depth_frame();
-  auto rgb_frame = frames.get_color_frame();
+  // auto rgb_frame = frames.get_color_frame();
   auto depth_width = depth_frame.get_width();
   auto depth_height = depth_frame.get_height();
-  auto rgb_width = rgb_frame.get_width();
-  auto rgb_height = rgb_frame.get_height();
+  // auto rgb_width = rgb_frame.get_width();
+  // auto rgb_height = rgb_frame.get_height();
 
-  if (rgb_width == 0 || rgb_height == 0 || depth_width == 0 || depth_height == 0) {
+  // if (rgb_width == 0 || rgb_height == 0 || depth_width == 0 || depth_height == 0) {
+  if (depth_width == 0 || depth_height == 0) {
     return BurtRsStatus::BurtRsStatus_resolution_unknown;
   } else {
     config.depth_width = depth_width;
     config.depth_height = depth_height;
-    config.rgb_width = rgb_width;
-    config.rgb_height = rgb_height;
+    // config.rgb_width = rgb_width;
+    // config.rgb_height = rgb_height;
+    config.rgb_width = 0;
+    config.rgb_height = 0;
     return BurtRsStatus::BurtRsStatus_ok;
   }
 }
@@ -81,16 +84,16 @@ NativeFrames* burt_rs::RealSense::getDepthFrame() {
   if (!pipeline.poll_for_frames(&frames)) return nullptr;
   rs2::depth_frame depth_frame = frames.get_depth_frame();
   rs2::frame colorized_frame = colorizer.colorize(depth_frame);
-  rs2::frame rgb_frame = frames.get_color_frame();
+  // rs2::frame rgb_frame = frames.get_color_frame();
 
   // Copy both frames -- TODO: optimize this to be a move instead
   int depth_length = depth_frame.get_data_size();
   int colorized_length = colorized_frame.get_data_size();
-  int rgb_length = rgb_frame.get_data_size();
-  if (depth_length == 0 || colorized_length == 0 || rgb_length == 0) return nullptr;
+  // int rgb_length = rgb_frame.get_data_size();
+  // if (depth_length == 0 || colorized_length == 0 || rgb_length == 0) return nullptr;
   uint8_t* depth_copy = new uint8_t[depth_length];
   uint8_t* colorized_copy = new uint8_t[colorized_length];
-  uint8_t* rgb_copy = new uint8_t[rgb_length];
+  // uint8_t* rgb_copy = new uint8_t[rgb_length];
 
   // Copy all the data in the depth frame
   const uint8_t* depth_data = static_cast<const uint8_t*>(depth_frame.get_data());
@@ -105,10 +108,10 @@ NativeFrames* burt_rs::RealSense::getDepthFrame() {
   }
 
   // Copy all the data in the RGB frame
-  const uint8_t* rgb_data = static_cast<const uint8_t*>(rgb_frame.get_data());
+  /* const uint8_t* rgb_data = static_cast<const uint8_t*>(rgb_frame.get_data());
   for (int i = 0; i < rgb_length; i++) {
     rgb_copy[i] = rgb_data[i];
-  }
+  }*/
 
   // Return both frames
   return new NativeFrames {
@@ -116,8 +119,8 @@
     depth_length: depth_length,
     colorized_data: colorized_copy,
     colorized_length: colorized_length,
-    rgb_data: rgb_copy,
-    rgb_length: rgb_length,
+    rgb_data: nullptr,
+    rgb_length: 0,
   };
 }