Disable RGB from RealSense, again #12

Merged · 8 commits · May 17, 2024
2 changes: 1 addition & 1 deletion lib/src/collection.dart
@@ -13,7 +13,7 @@ CameraDetails getDefaultDetails(CameraName name) => CameraDetails(
name: name,
resolutionWidth: 300,
resolutionHeight: 300,
- quality: 50,
+ quality: 75,
fps: 24,
status: CameraStatus.CAMERA_ENABLED,
);
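Note: this `quality` field is the JPEG encoding quality (0-100) that later feeds `encodeJpg(matrix, quality: details.quality)`, so the new default of 75 trades some bandwidth for a sharper image. A minimal usage sketch, assuming the `getDefaultDetails` helper above and the generated `CameraDetails` message:

```dart
// Hypothetical usage, not part of this PR: take the defaults for one camera
// and lower the quality for a bandwidth-constrained link.
final details = getDefaultDetails(CameraName.ROVER_REAR)..quality = 40;
assert(details.resolutionWidth == 300 && details.fps == 24);
```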
4 changes: 2 additions & 2 deletions lib/src/isolates/child.dart
@@ -19,7 +19,7 @@ abstract class CameraIsolate extends IsolateChild<IsolatePayload, VideoCommand>
/// A timer to periodically send the camera status to the dashboard.
Timer? statusTimer;
/// A timer to read from the camera at an FPS given by [details].
- PeriodicTimer? frameTimer;
+ Timer? frameTimer;
/// A timer to log out the [fpsCount] every 5 seconds using [sendLog].
Timer? fpsTimer;
/// Records how many FPS this camera is actually running at.
@@ -99,7 +99,7 @@ abstract class CameraIsolate extends IsolateChild<IsolatePayload, VideoCommand>
if (details.status != CameraStatus.CAMERA_ENABLED) return;
sendLog(LogLevel.debug, "Starting camera $name. Status=${details.status}");
final interval = details.fps == 0 ? Duration.zero : Duration(milliseconds: 1000 ~/ details.fps);
- frameTimer = PeriodicTimer(interval, sendFrames);
+ frameTimer = Timer.periodic(interval, (_) => sendFrames());
fpsTimer = Timer.periodic(const Duration(seconds: 5), (_) {
sendLog(LogLevel.trace, "Camera $name sent ${fpsCount ~/ 5} frames");
fpsCount = 0;
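A note on the interval math above: `1000 ~/ details.fps` is integer division, so 24 fps becomes a 41 ms period (about 24.4 fps in practice), and `fps == 0` maps to `Duration.zero`, which fires as fast as the event loop allows. A small self-contained sketch of the same `Timer.periodic` pattern (the 5-second shutdown timer is only for illustration):

```dart
import "dart:async";

void main() {
  const fps = 24;
  // Same arithmetic as CameraIsolate: whole milliseconds per frame.
  final interval =
      fps == 0 ? Duration.zero : Duration(milliseconds: 1000 ~/ fps);
  var frameCount = 0;
  final frameTimer = Timer.periodic(interval, (_) => frameCount++);
  // A plain Timer keeps firing until it is cancelled, e.g. when the camera stops.
  Timer(const Duration(seconds: 5), () {
    frameTimer.cancel();
    print("~$frameCount frames in 5 seconds"); // roughly 120 at a 41 ms period
  });
}
```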
2 changes: 2 additions & 0 deletions lib/src/isolates/opencv.dart
@@ -19,6 +19,7 @@ class OpenCVCameraIsolate extends CameraIsolate {
@override
void initCamera() {
camera = getCamera(name);
+ camera.setResolution(details.resolutionWidth, details.resolutionHeight);
if (!camera.isOpened) {
sendLog(LogLevel.warning, "Camera $name is not connected");
updateDetails(CameraDetails(status: CameraStatus.CAMERA_DISCONNECTED));
@@ -38,6 +39,7 @@ class OpenCVCameraIsolate extends CameraIsolate {
void sendFrames() {
final matrix = camera.getFrame();
if (matrix == nullptr) return;
+ // detectAndAnnotateFrames(matrix);
final frame = encodeJpg(matrix, quality: details.quality);
matrix.dispose();
if (frame == null) { // Error getting the frame
36 changes: 21 additions & 15 deletions lib/src/isolates/realsense.dart
@@ -76,24 +76,30 @@ class RealSenseIsolate extends CameraIsolate {
sendFrame(colorizedJpg);
}

- // Get RGB frame
- final Pointer<Uint8> rawRGB = frames.ref.rgb_data;
- final Pointer<Mat> rgbMatrix = getMatrix(camera.rgbResolution.height, camera.rgbResolution.width, rawRGB);
- detectAndAnnotateFrames(rgbMatrix); // detect ArUco tags
-
- // Compress the RGB frame into a JPG
- final OpenCVImage? rgbJpg = encodeJpg(rgbMatrix, quality: details.quality);
- if (rgbJpg == null) {
- sendLog(LogLevel.debug, "Could not encode RGB frame");
- } else {
- final newDetails = details.deepCopy()..name = CameraName.ROVER_FRONT;
- sendFrame(rgbJpg, detailsOverride: newDetails);
- }
+ sendRgbFrame(frames.ref.rgb_data);

fpsCount++;
- // send(DepthFramePayload(frames.address));
+ // send(DepthFramePayload(frames.address)); // For autonomy
nativeLib.Mat_destroy(colorizedMatrix);
- nativeLib.Mat_destroy(rgbMatrix);
frames.dispose();
}

+ /// Sends the RealSense's RGB frame and optionally detects ArUco tags.
+ void sendRgbFrame(Pointer<Uint8> rawRGB) {
+ if (rawRGB == nullptr) return;
+ final Pointer<Mat> rgbMatrix = getMatrix(camera.rgbResolution.height, camera.rgbResolution.width, rawRGB);
+ // detectAndAnnotateFrames(rgbMatrix); // detect ArUco tags
+
+ // Compress the RGB frame into a JPG
+ if (rgbMatrix != nullptr) {
+ final OpenCVImage? rgbJpg = encodeJpg(rgbMatrix, quality: details.quality);
+ if (rgbJpg == null) {
+ sendLog(LogLevel.debug, "Could not encode RGB frame");
+ } else {
+ final newDetails = details.deepCopy()..name = CameraName.ROVER_FRONT;
+ sendFrame(rgbJpg, detailsOverride: newDetails);
+ }
+ nativeLib.Mat_destroy(rgbMatrix);
+ }
+ }
}
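One detail worth noting in `sendRgbFrame`: `details.deepCopy()..name = CameraName.ROVER_FRONT` clones the isolate's protobuf message and relabels only the clone, so the RGB frame is published as the ROVER_FRONT feed while the isolate's own `details` (the depth camera) stay untouched. A minimal sketch of that copy-then-cascade idiom, assuming the generated `CameraDetails` message:

```dart
// Hypothetical values; in the isolate these come from its [details] field.
final depthDetails = CameraDetails(
  name: CameraName.AUTONOMY_DEPTH,
  quality: 75,
);
// deepCopy() clones the message; the cascade (..) mutates only the clone.
final rgbDetails = depthDetails.deepCopy()..name = CameraName.ROVER_FRONT;
assert(depthDetails.name == CameraName.AUTONOMY_DEPTH);
assert(rgbDetails.name == CameraName.ROVER_FRONT);
```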
14 changes: 8 additions & 6 deletions lib/src/utils/constants.dart
@@ -6,12 +6,14 @@ import "package:burt_network/generated.dart";
///
/// Map for MAC or LINUX devices
Map<CameraName, String> cameraNames = {
- CameraName.ROVER_FRONT: "/dev/realsense_rgb",
- CameraName.ROVER_REAR: "/dev/rear_camera",
- CameraName.AUTONOMY_DEPTH: "/dev/realsense_depth",
- CameraName.SUBSYSTEM1: "/dev/subsystem1",
- CameraName.SUBSYSTEM2: "/dev/subsystem2",
- CameraName.SUBSYSTEM3: "/dev/subsystem3",
+ CameraName.ROVER_FRONT: "/dev/rover-cam_realsense_rgb",
+ CameraName.ROVER_REAR: "/dev/rover-cam_rear",
+ CameraName.AUTONOMY_DEPTH: "/dev/rover-cam_realsense_depth",
+ CameraName.SUBSYSTEM1: "/dev/rover-cam_bottom-right",
+ CameraName.SUBSYSTEM2: "/dev/rover-cam_bottom-left",
+ // CameraName.SUBSYSTEM1: "/dev/rover-cam_subsystem_1",
+ // CameraName.SUBSYSTEM2: "/dev/rover-cam_subsystem_2",
+ CameraName.SUBSYSTEM3: "/dev/rover-cam_subsystem_3",
};

/// Map for WINDOWS devices
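The `/dev/rover-cam_*` entries above assume persistent device symlinks, typically created by udev rules on the rover (those rules are outside this repository). A minimal, hypothetical sketch of guarding a lookup against a missing symlink with `dart:io` before handing the path to OpenCV:

```dart
import "dart:io";

/// Hypothetical helper, not part of this PR: returns the device path for
/// [name], or null if the expected symlink is missing on this machine.
String? devicePathFor(CameraName name) {
  final path = cameraNames[name];
  if (path == null) return null;
  final exists =
      FileSystemEntity.typeSync(path) != FileSystemEntityType.notFound;
  return exists ? path : null;
}
```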
33 changes: 18 additions & 15 deletions src/realsense_internal.cpp
@@ -48,23 +48,26 @@ const char* burt_rs::RealSense::getDeviceName() {
BurtRsStatus burt_rs::RealSense::startStream() {
rs2::config rs_config;
rs_config.enable_stream(RS2_STREAM_DEPTH, DEPTH_WIDTH, HEIGHT);
- rs_config.enable_stream(RS2_STREAM_COLOR, RGB_WIDTH, HEIGHT, RS2_FORMAT_BGR8);
+ // rs_config.enable_stream(RS2_STREAM_COLOR, RGB_WIDTH, HEIGHT, RS2_FORMAT_BGR8);
auto profile = pipeline.start(rs_config);
auto frames = pipeline.wait_for_frames();
auto depth_frame = frames.get_depth_frame();
- auto rgb_frame = frames.get_color_frame();
+ // auto rgb_frame = frames.get_color_frame();
auto depth_width = depth_frame.get_width();
auto depth_height = depth_frame.get_height();
- auto rgb_width = rgb_frame.get_width();
- auto rgb_height = rgb_frame.get_height();
+ // auto rgb_width = rgb_frame.get_width();
+ // auto rgb_height = rgb_frame.get_height();

- if (rgb_width == 0 || rgb_height == 0 || depth_width == 0 || depth_height == 0) {
+ // if (rgb_width == 0 || rgb_height == 0 || depth_width == 0 || depth_height == 0) {
+ if (depth_width == 0 || depth_height == 0) {
return BurtRsStatus::BurtRsStatus_resolution_unknown;
} else {
config.depth_width = depth_width;
config.depth_height = depth_height;
- config.rgb_width = rgb_width;
- config.rgb_height = rgb_height;
+ // config.rgb_width = rgb_width;
+ // config.rgb_height = rgb_height;
+ config.rgb_width = 0;
+ config.rgb_height = 0;
return BurtRsStatus::BurtRsStatus_ok;
}
}
@@ -81,16 +84,16 @@ NativeFrames* burt_rs::RealSense::getDepthFrame() {
if (!pipeline.poll_for_frames(&frames)) return nullptr;
rs2::depth_frame depth_frame = frames.get_depth_frame();
rs2::frame colorized_frame = colorizer.colorize(depth_frame);
- rs2::frame rgb_frame = frames.get_color_frame();
+ // rs2::frame rgb_frame = frames.get_color_frame();

// Copy both frames -- TODO: optimize this to be a move instead
int depth_length = depth_frame.get_data_size();
int colorized_length = colorized_frame.get_data_size();
- int rgb_length = rgb_frame.get_data_size();
- if (depth_length == 0 || colorized_length == 0 || rgb_length == 0) return nullptr;
+ // int rgb_length = rgb_frame.get_data_size();
+ // if (depth_length == 0 || colorized_length == 0 || rgb_length == 0) return nullptr;
uint8_t* depth_copy = new uint8_t[depth_length];
uint8_t* colorized_copy = new uint8_t[colorized_length];
- uint8_t* rgb_copy = new uint8_t[rgb_length];
+ // uint8_t* rgb_copy = new uint8_t[rgb_length];

// Copy all the data in the depth frame
const uint8_t* depth_data = static_cast<const uint8_t*>(depth_frame.get_data());
@@ -105,19 +108,19 @@ NativeFrames* burt_rs::RealSense::getDepthFrame() {
}

// Copy all the data in the RGB frame
- const uint8_t* rgb_data = static_cast<const uint8_t*>(rgb_frame.get_data());
+ /* const uint8_t* rgb_data = static_cast<const uint8_t*>(rgb_frame.get_data());
for (int i = 0; i < rgb_length; i++) {
rgb_copy[i] = rgb_data[i];
- }
+ }*/

// Return both frames
return new NativeFrames {
depth_data: depth_copy,
depth_length: depth_length,
colorized_data: colorized_copy,
colorized_length: colorized_length,
- rgb_data: rgb_copy,
- rgb_length: rgb_length,
+ rgb_data: nullptr,
+ rgb_length: 0,
};
}
