diff --git a/wrappers/opencv/CMakeLists.txt b/wrappers/opencv/CMakeLists.txt
index 82e1eaa503..4d3afe3911 100644
--- a/wrappers/opencv/CMakeLists.txt
+++ b/wrappers/opencv/CMakeLists.txt
@@ -12,6 +12,7 @@ add_subdirectory(grabcuts)
add_subdirectory(latency-tool)
add_subdirectory(dnn)
add_subdirectory(depth-filter)
+add_subdirectory(rotate-pointcloud)
if(BUILD_CV_KINFU_EXAMPLE)
add_subdirectory(kinfu)
endif()
diff --git a/wrappers/opencv/readme.md b/wrappers/opencv/readme.md
index de4b9a7ec9..184b83f386 100644
--- a/wrappers/opencv/readme.md
+++ b/wrappers/opencv/readme.md
@@ -10,6 +10,7 @@ Examples in this folder are designed to complement existing [SDK examples](../..
3. [Latency-Tool](./latency-tool) - Basic latency estimation using computer vision
4. [DNN](./dnn) - Intel RealSense camera used for real-time object-detection
5. [Depth Filter](./depth-filter) - Depth Filtering for Collision Avoidance
+6. [Rotate Point Cloud](./rotate-pointcloud) - Rotate point cloud before visualization
## Getting Started:
This page is certainly **not** a comprehensive guide to getting started with OpenCV and CMake, but it can help get on the right track.
diff --git a/wrappers/opencv/rotate-pointcloud/CMakeLists.txt b/wrappers/opencv/rotate-pointcloud/CMakeLists.txt
new file mode 100644
index 0000000000..92f4454546
--- /dev/null
+++ b/wrappers/opencv/rotate-pointcloud/CMakeLists.txt
@@ -0,0 +1,20 @@
+# minimum required cmake version: 3.1.0
+cmake_minimum_required(VERSION 3.1.0)
+
+project(rs-rotate-pc)
+
+add_executable( ${PROJECT_NAME} rs-rotate-pc.cpp) # single-source sample executable
+set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11) # librealsense2 API requires C++11
+target_link_libraries(${PROJECT_NAME} ${DEPENDENCIES}) # NOTE(review): DEPENDENCIES presumably set by the parent wrappers/opencv CMakeLists (realsense2 + OpenCV) -- verify
+set_target_properties (${PROJECT_NAME} PROPERTIES
+ FOLDER "Examples/OpenCV"
+) # group with the other OpenCV samples in IDE folder views
+
+install(
+ TARGETS
+
+ ${PROJECT_NAME}
+
+ RUNTIME DESTINATION
+ ${CMAKE_INSTALL_PREFIX}/bin
+)
diff --git a/wrappers/opencv/rotate-pointcloud/readme.md b/wrappers/opencv/rotate-pointcloud/readme.md
new file mode 100644
index 0000000000..8d5c6fb162
--- /dev/null
+++ b/wrappers/opencv/rotate-pointcloud/readme.md
@@ -0,0 +1,25 @@
+# rs-rotate-pc sample
+
+## Overview
+This example shows how to rotate a point cloud before colorization.
+This example requires OpenCV.
+
+
+## Usage:
+ - press 'c' to change mode
+ - press 'q' to exit
+
+
+## Examples:
+1. Ordinary colorization
+
+
+
+2. Rotate point cloud by a roll angle of 90 degrees
+
+
+
+3. Rotate point cloud by a yaw angle of 90 degrees
+
+
+
diff --git a/wrappers/opencv/rotate-pointcloud/res/front.png b/wrappers/opencv/rotate-pointcloud/res/front.png
new file mode 100644
index 0000000000..2392e364a4
Binary files /dev/null and b/wrappers/opencv/rotate-pointcloud/res/front.png differ
diff --git a/wrappers/opencv/rotate-pointcloud/res/roll_90.png b/wrappers/opencv/rotate-pointcloud/res/roll_90.png
new file mode 100644
index 0000000000..349b029dd1
Binary files /dev/null and b/wrappers/opencv/rotate-pointcloud/res/roll_90.png differ
diff --git a/wrappers/opencv/rotate-pointcloud/res/yaw_90.png b/wrappers/opencv/rotate-pointcloud/res/yaw_90.png
new file mode 100644
index 0000000000..f285293e2d
Binary files /dev/null and b/wrappers/opencv/rotate-pointcloud/res/yaw_90.png differ
diff --git a/wrappers/opencv/rotate-pointcloud/rs-rotate-pc.cpp b/wrappers/opencv/rotate-pointcloud/rs-rotate-pc.cpp
new file mode 100644
index 0000000000..148df01fea
--- /dev/null
+++ b/wrappers/opencv/rotate-pointcloud/rs-rotate-pc.cpp
@@ -0,0 +1,174 @@
+
+#include <librealsense2/rs.hpp> // Include RealSense Cross Platform API
+#include <opencv2/opencv.hpp> // Include OpenCV API
+
+using namespace cv;
+
+bool is_yaw{false}; // when true, reproject depth into a top-down view in the processing block
+bool is_roll{false}; // when true, rotate the depth image in-plane by `angle`
+
+cv::RotateFlags angle(cv::RotateFlags::ROTATE_90_CLOCKWISE); // current in-plane rotation, cycled by changeMode()
+
+void changeMode() { // Cycle the global view mode: 0=plain, 1=top-down ("yaw"), 2..4=roll 90/180/270 degrees
+ static int i = 0; // persistent mode index, advances on every call
+
+ i++;
+ if (i==5) i=0; // wrap back to the first mode after the last one
+
+ switch (i) {
+ case 0: // plain colorized depth, no rotation
+ is_yaw = false;
+ is_roll = false;
+ break;
+ case 1: // top-down reprojection (handled in the processing block)
+ is_yaw = true;
+ is_roll = false;
+ break;
+ case 2: // in-plane roll, 90 degrees clockwise
+ is_yaw = false;
+ is_roll = true;
+
+ angle = cv::RotateFlags::ROTATE_90_CLOCKWISE;
+
+ break;
+ case 3: // in-plane roll, 180 degrees
+ is_yaw = false;
+ is_roll = true;
+
+ angle = cv::RotateFlags::ROTATE_180;
+
+ break;
+ case 4: // in-plane roll, 90 degrees counter-clockwise
+ is_yaw = false;
+ is_roll = true;
+
+ angle = cv::RotateFlags::ROTATE_90_COUNTERCLOCKWISE;
+
+ break;
+ default: // unreachable: i is always kept in [0,4]
+ break;
+ }
+}
+
+int main(int argc, char * argv[]) try
+{
+    // Declare depth colorizer for pretty visualization of depth data
+    rs2::colorizer color_map;
+
+    // Align depth frames to the color stream's resolution
+    rs2::align depthToColorAlignment(RS2_STREAM_COLOR);
+
+    // Threshold filter keeps only depth pixels inside [min, max] meters
+    rs2::threshold_filter threshold;
+    float threshold_min = 0.3;
+    float threshold_max = 1.5;
+
+    // Keep dots on the depth frame in range
+    threshold.set_option(RS2_OPTION_MIN_DISTANCE, threshold_min);
+    threshold.set_option(RS2_OPTION_MAX_DISTANCE, threshold_max);
+
+    // Declare RealSense pipeline, encapsulating the actual device and sensors
+    rs2::pipeline pipe;
+    // Start streaming with default recommended configuration
+    pipe.start();
+
+    rs2::processing_block procBlock( [&](rs2::frame f, rs2::frame_source& src )
+    {
+        const int origWidth = f.as<rs2::video_frame>().get_width();
+        const int origHeight = f.as<rs2::video_frame>().get_height();
+
+        cv::Mat image(cv::Size(origWidth, origHeight), CV_16UC1, (void*)f.get_data(), cv::Mat::AUTO_STEP);
+        cv::Mat rotated;
+
+        if ( !is_yaw && !is_roll )
+            rotated = image;
+
+        if ( is_yaw ) {
+            int rotWidth(threshold_max * 1000); // depth is in millimeters; width spans the thresholded range
+
+            rotated = cv::Mat::zeros(cv::Size(rotWidth, image.size().height), CV_16UC1 );
+
+            for(int h = 0; h < rotated.size().height; h++) {
+                for(int w = 0; w < rotated.size().width; w++) {
+
+                    if ( ( h >= image.size().height ) || ( w >= image.size().width ) )
+                        continue;
+
+                    unsigned short value = image.at<unsigned short>(h, w);
+
+                    if ( value == 0 || value >= rotWidth ) // skip holes; clamp to view edge (prevents out-of-bounds write when value == rotWidth)
+                        continue;
+
+                    rotated.at<unsigned short>( h, value ) = w; // depth becomes the x-coordinate: a top-down ("yaw 90") view
+                }
+            }
+        }
+
+        if (is_roll) {
+            cv::rotate( image, rotated, angle );
+        }
+
+        auto res = src.allocate_video_frame(f.get_profile(), f, 0, rotated.size().width, rotated.size().height);
+        memcpy((void*)res.get_data(), rotated.data, rotated.size().width * rotated.size().height * 2); // 2 bytes per CV_16UC1 pixel
+
+        src.frame_ready(res);
+    });
+
+    rs2::frame_queue frame_queue;
+    procBlock.start(frame_queue);
+
+    while ( true )
+    {
+        // get set of frames
+        rs2::frameset frames = pipe.wait_for_frames(); // Wait for next set of frames from the camera
+
+        // align images
+        frames = depthToColorAlignment.process(frames);
+
+        // get depth frames
+        rs2::frame depthFrame = frames.get_depth_frame();
+
+        // keep points in range from threshold_min to threshold_max
+        depthFrame = threshold.process(depthFrame);
+
+        // call processing block for handle cloud point
+        procBlock.invoke( depthFrame );
+        depthFrame = frame_queue.wait_for_frame();
+
+        // Query frame size (width and height)
+        const int w = depthFrame.as<rs2::video_frame>().get_width();
+        const int h = depthFrame.as<rs2::video_frame>().get_height();
+
+        // Create OpenCV matrix of size (w,h) from the colorized depth data
+        cv::Mat image(cv::Size(w, h), CV_8UC3, (void*)depthFrame.apply_filter(color_map).get_data());
+
+        // Rescale image for convenience
+        if ( ( image.size().width > 1000 ) || (image.size().height > 1000) )
+            resize( image, image, Size(), 0.5, 0.5);
+
+        // Update the window with new data
+        imshow("window_name", image);
+        int k = waitKey(1);
+
+        if ( k == 'q' )
+            break;
+
+        if ( k == 'c' )
+            changeMode();
+    }
+
+    return EXIT_SUCCESS;
+}
+catch (const rs2::error & e)
+{
+    std::cerr << "RealSense error calling " << e.get_failed_function() << "(" << e.get_failed_args() << "):\n    " << e.what() << std::endl;
+    return EXIT_FAILURE;
+}
+catch (const std::exception& e)
+{
+    std::cerr << e.what() << std::endl;
+    return EXIT_FAILURE;
+}
+
+
+