diff --git a/latest/_downloads/178240cfa3d90132ff57517a72d31d46/ex-pcicclient_set_io.cpp b/latest/_downloads/178240cfa3d90132ff57517a72d31d46/ex-pcicclient_set_io.cpp
new file mode 100644
index 00000000..37384f42
--- /dev/null
+++ b/latest/_downloads/178240cfa3d90132ff57517a72d31d46/ex-pcicclient_set_io.cpp
@@ -0,0 +1,94 @@
+// -*- c++ -*-
+/*
+ * Copyright (C) 2017 Kuhn & Völkel GmbH
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//
+// ex-pcicclient_set_io.cpp
+//
+// Shows how to use the PCICClient module to control the IOs state
+// of the camera.
+//
+
+#include
+#include
+#include
+#include
+#include
+
+
+// Camera configuration string:
+// * Create and activate application with index 1
+// * Set LogicGraph to control IOs state via PCIC interface
+const char *config = R"CONFIG(
+{
+ "ifm3d":
+ {
+ "Device":
+ {
+ "ActiveApplication": "1"
+ },
+ "Apps":
+ [
+ {
+ "Name": "PCICClient Example",
+ "Description": "Manipulates digital IOs",
+ "Index" : "1",
+ "LogicGraph": "{\n \"IOMap\": {\n \"OUT1\": \"PCIC_OUT\",\n \"OUT2\": \"PCIC_OUT\"\n },\n \"blocks\": {\n \"B00001\": {\n \"pos\": {\n \"x\": 262,\n \"y\": 132\n },\n \"properties\": {\n },\n \"type\": \"PIN_EVENT_PCIC_O_CMD\"\n },\n \"B00003\": {\n \"pos\": {\n \"x\": 600,\n \"y\": 75\n },\n \"properties\": {\n \"pulse_duration\": 0\n },\n \"type\": \"DIGITAL_OUT1\"\n },\n \"B00005\": {\n \"pos\": {\n \"x\": 600,\n \"y\": 200\n },\n \"properties\": {\n \"pulse_duration\": 0\n },\n \"type\": \"DIGITAL_OUT2\"\n }\n },\n \"connectors\": {\n \"C00000\": {\n \"dst\": \"B00003\",\n \"dstEP\": 0,\n \"src\": \"B00001\",\n \"srcEP\": 0\n },\n \"C00001\": {\n \"dst\": \"B00005\",\n \"dstEP\": 0,\n \"src\": \"B00001\",\n \"srcEP\": 0\n }\n }\n}\n"
+ }
+ ]
+ }
+}
+)CONFIG";
+
+int main(int argc, char** argv)
+{
+ // Create camera
+ ifm3d::Camera::Ptr cam = std::make_shared();
+
+ // Configure camera to allow user defined IO state
+ cam->FromJSONStr(config);
+
+ // Create pcic interface
+ ifm3d::PCICClient::Ptr pcic = std::make_shared(cam);
+
+ // Start setting IOs (and led flashing)
+ pcic->Call("o010"); // OUT1 off
+ pcic->Call("o020"); // OUT2 off
+ for(int i = 0; i < 10; ++i)
+ {
+ std::cout << "Pass " << (i+1) << "/" << 10 << std::endl;
+
+ pcic->Call("o011"); // OUT1 on
+ std::cout << "State: " << pcic->Call("O01?") << " " << pcic->Call("O02?") << std::endl;
+ std::this_thread::sleep_for(std::chrono::milliseconds(100));
+
+ pcic->Call("o021"); // OUT2 on
+ std::cout << "State: " << pcic->Call("O01?") << " " << pcic->Call("O02?") << std::endl;
+ std::this_thread::sleep_for(std::chrono::milliseconds(100));
+
+ pcic->Call("o010"); // OUT1 off
+ std::cout << "State: " << pcic->Call("O01?") << " " << pcic->Call("O02?") << std::endl;
+ std::this_thread::sleep_for(std::chrono::milliseconds(100));
+
+ pcic->Call("o020"); // OUT2 off
+ std::cout << "State: " << pcic->Call("O01?") << " " << pcic->Call("O02?") << std::endl;
+ std::this_thread::sleep_for(std::chrono::milliseconds(100));
+
+ std::cout << std::endl;
+ }
+
+ return 0;
+}
diff --git a/latest/_downloads/1c4abee94807a36434d74b9abd9251c2/ex-multi_camera_grabber.cpp b/latest/_downloads/1c4abee94807a36434d74b9abd9251c2/ex-multi_camera_grabber.cpp
new file mode 100644
index 00000000..947ef5d5
--- /dev/null
+++ b/latest/_downloads/1c4abee94807a36434d74b9abd9251c2/ex-multi_camera_grabber.cpp
@@ -0,0 +1,120 @@
+/*
+ * Copyright (C) 2018 ifm electronics, gmbh
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//
+// ex-multi_camera_grabber.cpp
+//
+// Capture frames from multiple ifm 3D cameras which are configured to be triggered through software,
+// and display the time stamp of the frame received.
 +// also measures the time taken to receive the set of frames.
+//
+// Prerequisites:
+// *) Each ifm 3D camera should be configured to use "Process Interface" for trigger.
+// *) You Should be able to ping each of the 3D camera from the PC on which this code executes.
 +// *) In case your network uses a proxy, you will need to configure your system to bypass the proxy for the used IPs.
+//
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+namespace
+{
 + //CHANGE IP addresses to those of the cameras available!!!!
+ const auto CAMERA0 = "192.168.0.70";
+ const auto CAMERA1 = "192.168.0.71";
+ const auto CAMERA2 = "192.168.0.72";
+
+ //Add the IP of cameras to be used.
+ const std::array camera_ips = {CAMERA0, CAMERA1, CAMERA2};
+
+ //Utility function to format the timestamp
+ std::string formatTimestamp(ifm3d::TimePointT timestamp)
+ {
+ std::time_t time = std::chrono::system_clock::to_time_t(
+ std::chrono::time_point_cast(
+ timestamp));
+
+ std::chrono::milliseconds milli = std::chrono::duration_cast(
+ timestamp.time_since_epoch() - std::chrono::duration_cast(
+ timestamp.time_since_epoch()));
+
+ std::ostringstream s;
+ s << std::put_time(std::localtime(&time), "%Y-%m-%d %H:%M:%S")
+ << ":" << std::setw(3) << std::setfill('0') << milli.count();
+
+ return s.str();
+ }
+}
+
+
+int main(int argc, const char **argv)
+{
+ std::chrono::system_clock::time_point start, end;
+
+ //vectors for the objects to be used.
+ std::vector cameras;
+ std::vector frame_grabbers;
+ std::vector image_buffers;
+
+ // Create ifm3d objects of Camera, ImageBuffer and FrameGrabber for each of the camera devices.
+ for(auto camera_ip:camera_ips)
+ {
+ auto cam = ifm3d::Camera::MakeShared(camera_ip);
+ cameras.push_back(cam);
+ image_buffers.push_back(std::make_shared());
+ frame_grabbers.push_back(std::make_shared(cam));
+
+ }
+
+ int count = 0;
+ while (count++ < 10)
+ {
+ //for each of the camera device, software trigger is sent and wait for frame is done sequentially.
+ start = std::chrono::system_clock::now();
+ for(int index = 0; indexSWTrigger();
+ if (!frame_grabber->WaitForFrame(image_buffer.get(), 1000))
+ {
+ std::cerr << "Timeout waiting for camera("<TimeStamp();
+ std::cout << "got camera("< duration_ms = (end - start);
+ std::cout << "total get time in ms: " << duration_ms.count()<< std::endl;
+ }
+
+ return 0;
+}
diff --git a/latest/_downloads/4772efb72001809b0e37f615d891d7f5/time_sync.cpp b/latest/_downloads/4772efb72001809b0e37f615d891d7f5/time_sync.cpp
deleted file mode 100644
index fb6d6151..00000000
--- a/latest/_downloads/4772efb72001809b0e37f615d891d7f5/time_sync.cpp
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * Copyright 2021-present ifm electronic, gmbh
- * SPDX-License-Identifier: Apache-2.0
- *
- * This example mimics a multi-camera synchronization with no trigger
- * functionality: two camera heads are switched to "RUN" mode
- * simultaneously. Frames are grabbed from each head in parallel.
- * The timestamps are used to calculate and display the time delay
- * between the reception of the frames from the two heads.
- */
-#include
-#include
-#include
-#include
-#include
-#include
-
-#include
-#include
-
-std::mutex mutex;
-
-std::string formatTimestamp(ifm3d::TimePointT timestamp)
-{
- /**
- * This function formats the timestamps for proper display
- * a.k.a converts to local time
- */
- using namespace std::chrono;
- std::time_t time = std::chrono::system_clock::to_time_t(
- std::chrono::time_point_cast(
- timestamp));
-
- milliseconds milli = duration_cast(
- timestamp.time_since_epoch() - duration_cast(
- timestamp.time_since_epoch()));
-
- std::ostringstream s;
- s << std::put_time(std::localtime(&time), "%Y-%m-%d %H:%M:%S")
- << ":" << std::setw(3) << std::setfill('0') << milli.count();
-
- return s.str();
-}
-
-int getFrameTime(ifm3d::FrameGrabber::Ptr fg, ifm3d::TimePointT &t, int &ready){
- /**
- * Grab frame from a camera head and update global variable
- * with the timestamp of the frame.
- * When the frame has been captured, switches the global variable ready
- * to release the display thread.
- */
- int i = 0;
- while (true) {
- auto frame = fg->WaitForFrame();
- if (frame.wait_for(std::chrono::milliseconds(1000)) != std::future_status::ready)
- {
- std::cerr << "Timeout waiting for camera!" << std::endl;
- return -1;
- }
- else {
- i+=1;
- if (i == 10){
- std::lock_guard guard(mutex);
- t = frame.get()->TimeStamps().front();
- i = 0;
- ready = 1;
- }
- }
- }
- return 1;
-}
-
-int main(){
- //////////////////////////
- // Declare the objects:
- //////////////////////////
- // Declare the device object (one object only, corresponding to the VPU)
- auto o3r = std::make_shared();
- ifm3d::json conf = o3r->Get();
- // Declare the FrameGrabber and ImageBuffer objects.
- // One FrameGrabber per camera head (define the port number).
- auto fg0 = std::make_shared(o3r, 50012);
- auto fg1 = std::make_shared(o3r, 50013);
-
- //Start the framegrabbber with empty schema
- fg0->Start({});
- fg1->Start({});
-
- std::thread thread0;
- std::thread thread1;
- //////////////////////////
- // Set framerate:
- //////////////////////////
- o3r->Set(ifm3d::json::parse(R"({"ports":{"port2":{"acquisition": {"framerate": 10}},
- "port3":{"acquisition": {"framerate": 10}}}})"));
-
-
- //////////////////////////
- // Start the cameras at
- // the same time
- /////////////////////////
- o3r->Set(ifm3d::json::parse(R"({"ports":{"port2":{"state": "CONF"},
- "port3":{"state": "CONF"}}})"));
- std::this_thread::sleep_for(std::chrono::seconds(1));
- o3r->Set(ifm3d::json::parse(R"({"ports":{"port2":{"state": "RUN"},
- "port3":{"state": "RUN"}}})"));
- std::this_thread::sleep_for(std::chrono::milliseconds(500));
-
- // Check which frame comes first:
- std::vector timestamps;
- std::vector fgs;
- fgs.push_back(fg0); fgs.push_back(fg1);
- for (auto fg: fgs) {
- auto frame = fg->WaitForFrame();
- if (frame.wait_for(std::chrono::milliseconds(1000)) != std::future_status::ready)
- {
- std::cerr << "Timeout waiting for camera!" << std::endl;
- return -1;
- }
- else {
- timestamps.push_back(frame.get()->TimeStamps().front());
- }
- }
- ifm3d::TimePointT t0;
- ifm3d::TimePointT t1;
- auto ready0 = 0;
- auto ready1 = 0;
- // Start grabbing frames in the right order
- if (abs(std::chrono::duration_cast(timestamps[0] - timestamps[1]).count())>=50)
- {
- thread1 = std::thread{ getFrameTime, fg1, std::ref(t1), std::ref(ready1) };
- thread0 = std::thread{ getFrameTime, fg0, std::ref(t0), std::ref(ready0)};
- }
- else {
- thread0 = std::thread{ getFrameTime, fg0, std::ref(t0), std::ref(ready0) };
- thread1 = std::thread{ getFrameTime, fg1, std::ref(t1), std::ref(ready1) };
- }
- using namespace std::chrono_literals;
- // Display delay
- while (true){
- std::this_thread::sleep_for(10ms);
- if (ready0 == 1 && ready1 == 1){
- std::lock_guard guard0(mutex);
- ready0 = 0 ; ready1 = 0;
- fmt::print("Delay (ms): {}\n", abs(std::chrono::duration_cast(t0 - t1).count()));
- }
- }
-
- thread0.join();
- thread1.join();
-
- return 0;
-}
\ No newline at end of file
diff --git a/latest/_downloads/4a5387122df65ea4eefd234a52ca9b16/ex-timestamp.cpp b/latest/_downloads/4a5387122df65ea4eefd234a52ca9b16/ex-timestamp.cpp
new file mode 100644
index 00000000..f71a9023
--- /dev/null
+++ b/latest/_downloads/4a5387122df65ea4eefd234a52ca9b16/ex-timestamp.cpp
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2018 ifm syntron gmbh
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//
 +// ex-timestamp.cpp
+//
+// Request some frames from the camera and write the timestamps to stdout
+//
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+std::string formatTimestamp(ifm3d::TimePointT timestamp)
+{
+ using namespace std::chrono;
+ std::time_t time = std::chrono::system_clock::to_time_t(
+ std::chrono::time_point_cast(
+ timestamp));
+
+ milliseconds milli = duration_cast(
+ timestamp.time_since_epoch() - duration_cast(
+ timestamp.time_since_epoch()));
+
+ std::ostringstream s;
+ s << std::put_time(std::localtime(&time), "%Y-%m-%d %H:%M:%S")
+ << ":" << std::setw(3) << std::setfill('0') << milli.count();
+
+ return s.str();
+}
+
+int main(int argc, const char **argv)
+{
+ auto cam = ifm3d::Camera::MakeShared();
+
+ ifm3d::ImageBuffer::Ptr img = std::make_shared();
+ ifm3d::FrameGrabber::Ptr fg =
+ std::make_shared(
+ cam, ifm3d::IMG_AMP|ifm3d::IMG_CART);
+
+ for (int i = 0; i < 10; i++)
+ {
+ if (!fg->WaitForFrame(img.get(), 1000))
+ {
+ std::cerr << "Error getting frame from camera" << std::endl;
+ continue;
+ }
+
+ ifm3d::TimePointT timestamp = img->TimeStamp();
+ std::cout << "Timestamp of frame "
+ << std::setw(2) << std::setfill('0')
+ << (i+1) << ": " << formatTimestamp(timestamp)
+ << std::endl;
+ }
+
+ return 0;
+}
diff --git a/latest/_downloads/5d5476cc52e7759bbc638f78cede3645/ex-getmac.cpp b/latest/_downloads/5d5476cc52e7759bbc638f78cede3645/ex-getmac.cpp
new file mode 100644
index 00000000..e297d9dc
--- /dev/null
+++ b/latest/_downloads/5d5476cc52e7759bbc638f78cede3645/ex-getmac.cpp
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2016 Love Park Robotics, LLC
+ * Copyright (C) 2017 ifm syntron gmbh
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//
+// ex-getmac.cpp
+//
+// Request the MAC address from the camera. The MAC address can be used as
+// a unique identifier.
+//
+
+#include
+#include
+
+int main(int argc, const char **argv)
+{
+ // get access to the camera
+ auto cam = ifm3d::Camera::MakeShared();
+
+ // get the JSON configuration data from the camera
+ auto jsonConfig = cam->ToJSON();
+
+ // print out the MAC address
+ std::cout << "The MAC address of the camera: "
+ << jsonConfig["ifm3d"]["Net"]["MACAddress"]
+ << std::endl;
+
+ return 0;
+}
diff --git a/latest/_downloads/60a536fcd8b18c8a3a4fcf3cc59c5b36/ex-fast_app_switch.cpp b/latest/_downloads/60a536fcd8b18c8a3a4fcf3cc59c5b36/ex-fast_app_switch.cpp
new file mode 100644
index 00000000..ff1d297f
--- /dev/null
+++ b/latest/_downloads/60a536fcd8b18c8a3a4fcf3cc59c5b36/ex-fast_app_switch.cpp
@@ -0,0 +1,286 @@
+/*
+ * Copyright (C) 2017 Love Park Robotics, LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//
+// ex-fast_app_switch.cpp
+//
+// Shows how to switch between two applications on the camera using PCIC,
+// which should (theoretically) be fast. It also prints out some high-level
+// latency metrics. Please NOTE: this is not a micro-benchmarking test suite,
+// just a first order approximation of the expected latency.
+//
+
+#include
+#include
+#include
+#include
+#include
+#include "ifm3d/fg.h"
+#include
+#include
+
+template
+double timeit(T func)
+{
+ std::vector times;
+ times.reserve(N);
+
+ for (std::size_t i = 0; i < N; ++i)
+ {
+ auto start = std::chrono::steady_clock::now();
+ func();
+ auto stop = std::chrono::steady_clock::now();
+
+ auto diff = std::chrono::duration(stop - start).count();
+ times.push_back(diff);
+ }
+
+ double median = 0.;
+ std::sort(times.begin(), times.end());
+
+ if (N > 0)
+ {
+ if (N % 2 == 0)
+ {
+ median = (times.at(N / 2 - 1) + times.at(N / 2)) / 2;
+ }
+ else
+ {
+ median = times.at(N / 2);
+ }
+ }
+
+ return median;
+}
+
+
+int main(int argc, const char **argv)
+{
+ constexpr std::size_t N = 20;
+
+ std::string json_streaming =
+ R"(
+ {
+ "ifm3d":
+ {
+ "Device":
+ {
+ "ActiveApplication": "1"
+ },
+ "Apps":
+ [
+ {
+ "Name": "23k",
+ "TriggerMode": "1",
+ "Index": "1",
+ "Type": "Camera",
+ "Imager":
+ {
+ "ExposureTime": "5000",
+ "FrameRate": "5",
+ "MinimumAmplitude": "42",
+ "Resolution": "0",
+ "SpatialFilterType": "0",
+ "SymmetryThreshold": "0",
+ "TemporalFilterType": "0",
+ "Type": "upto30m_moderate"
+ }
+ },
+ {
+ "Name": "100k",
+ "TriggerMode": "1",
+ "Index": "2",
+ "Type": "Camera",
+ "Imager":
+ {
+ "ExposureTime": "5000",
+ "FrameRate": "5",
+ "MinimumAmplitude": "42",
+ "Resolution": "1",
+ "SpatialFilterType": "0",
+ "SymmetryThreshold": "0",
+ "TemporalFilterType": "0",
+ "Type": "upto30m_moderate"
+ }
+ }
+ ]
+ }
+ }
+ )";
+
+ std::string json_swtrigger =
+ R"(
+ {
+ "ifm3d":
+ {
+ "Device":
+ {
+ "ActiveApplication": "1"
+ },
+ "Apps":
+ [
+ {
+ "TriggerMode": "2",
+ "Index": "1"
+ },
+ {
+ "TriggerMode": "2",
+ "Index": "2"
+ }
+ ]
+ }
+ }
+ )";
+
+ // instantiate the camera and set the configuration
+ auto cam = ifm3d::Camera::MakeShared();
+ while (cam->ApplicationList().size() < 2)
+ {
+ cam->FromJSONStr(R"({"Apps":[{}]})");
+ }
+
+ std::cout << "Setting camera configuration: " << std::endl
+ << json_streaming << std::endl;
+ cam->FromJSONStr(json_streaming);
+
+ // instantiate our framegrabber
+ auto fg = std::make_shared(cam);
+
+ // create our image buffer to hold frame data from the camera
+ auto img = std::make_shared();
+
+ // instantiate our pcic interface
+ auto pcic = std::make_shared(cam);
+
+ //-----------------------------------------------------
+ // OK, let's run some tests....
+ //-----------------------------------------------------
+
+ auto acquire_frame =
+ [fg, img](cv::Mat& cloud, int resolution, bool sw = false) -> void
+ {
+ for (int i = 0; i < 5; ++i)
+ {
+ if (sw)
+ {
+ fg->SWTrigger();
+ }
+
+ if (!fg->WaitForFrame(img.get(), 1000))
+ {
+ std::cerr << "Timeout waiting for camera!" << std::endl;
+ std::abort();
+ }
+
+ cloud = img->XYZImage();
+
+ if (resolution == 23)
+ {
+ // 23k
+ if ((cloud.rows == 132) && (cloud.cols == 176))
+ {
+ return;
+ }
+ }
+ else
+ {
+ // 100k
+ if ((cloud.rows == 264) && (cloud.cols == 352))
+ {
+ return;
+ }
+ }
+ }
+
+ std::cerr << "Acquired frame is of incorrect resolution!" << std::endl;
+ std::abort();
+ };
+
+ auto assert_pcic_ok = [](std::string const& res) -> void
+ {
+ if (res != "*")
+ {
+ std::cerr << "PCIC switch failed!" << std::endl;
+ std::abort();
+ }
+ };
+
+ cv::Mat cloud;
+ std::string res;
+
+ //
+ // Toggle back and forth N times (NOTE: 2xN switches per loop)
+ // ... in streaming mode
+ //
+ std::cout << "OK, running streaming benchmarks..." << std::endl;
+ std::cout << "Test 0: Streaming mode, toggling 23K <-> 100k "
+ << N << "x (" << 2 * N << " switches)" << std::endl;
+ double switch0 =
+ timeit([&res, pcic, assert_pcic_ok, acquire_frame, &cloud]()
+ {
+ res = pcic->Call("a02");
+ assert_pcic_ok(res);
+ acquire_frame(cloud, 100);
+
+ res = pcic->Call("a01");
+ assert_pcic_ok(res);
+ acquire_frame(cloud, 23);
+ });
+
+ //
+ // Run same test as above but in s/w trigger mode
+ //
+ std::cout << "Setting camera configuration: " << std::endl
+ << json_swtrigger << std::endl;
+ cam->FromJSONStr(json_swtrigger);
+ std::cout << "OK, running s/w trigger benchmarks..." << std::endl;
+
+ std::cout << "Test 1: S/W trigger mode, toggling 23K <-> 100k "
+ << N << "x (" << 2 * N << " switches)" << std::endl;
+ double switch1 =
+ timeit([&res, pcic, assert_pcic_ok, acquire_frame, &cloud]()
+ {
+ res = pcic->Call("a02");
+ assert_pcic_ok(res);
+ acquire_frame(cloud, 100, true);
+
+ res = pcic->Call("a01");
+ assert_pcic_ok(res);
+ acquire_frame(cloud, 23, true);
+ });
+
+ //
+ // Show our results
+ //
+
+ std::cout << std::endl
+ << "*********************************************************"
+ << std::endl << std::endl;
+
+ std::cout << "Streaming mode, median exec time = " << switch0 << " secs ("
+ << switch0 / 2. << " secs per switch + image acquisition)"
+ << std::endl;
+
+ std::cout << "S/W trigger mode, median exec time = " << switch1 << " secs ("
+ << switch1 / 2. << " secs per switch + image acquisition)"
+ << std::endl;
+
+ std::cout << std::endl
+ << "*********************************************************"
+ << std::endl;
+
+ return 0;
+}
diff --git a/latest/_downloads/75351d860c66a9c42413098e7bc7d884/ex-simpleImage_ppm_io.cpp b/latest/_downloads/75351d860c66a9c42413098e7bc7d884/ex-simpleImage_ppm_io.cpp
new file mode 100644
index 00000000..081617f8
--- /dev/null
+++ b/latest/_downloads/75351d860c66a9c42413098e7bc7d884/ex-simpleImage_ppm_io.cpp
@@ -0,0 +1,171 @@
+/*
+ * Copyright (C) 2018 ifm electronic, gmbh
+ *
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ //
+ // ex-simpleimage_ppm_io.cpp
+ //
+ // This example shows how to get the images from ifm3dlib without opencv and PCL dependency
+ // and how to write the ppm images with the data from device. This example scales the data
+ // from camera to unsigned char for storing in the ppm file. For Scaling Distance data
+ // Maximum distance is considered as 2.5m or 2500mm whereas for amplitude data min and max values are calculated
+ // from data (auto scaling)
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+
+
+using namespace std;
+
+
+
+bool writePPMFile(ifm3d::SimpleImageBuffer::Img &img, std::string const& filename)
+{
+ auto const write_width = (size_t)img.width;
+ auto const write_height = (size_t)img.height;
+ auto write_pixels = vector(write_width * write_height * 3); // 3 for RGB channels
+ auto pixel_index = size_t{ 0 };
+ for (auto col = size_t{ 0 }; col < write_height; ++col) {
+ for (auto row = size_t{ 0 }; row < write_width; ++row) {
+ write_pixels[pixel_index * 3 + 0] = static_cast(*(img.data.data() + pixel_index));
+ write_pixels[pixel_index * 3 + 1] = static_cast(*(img.data.data() + pixel_index));
+ write_pixels[pixel_index * 3 + 2] = static_cast(*(img.data.data() + pixel_index));
+ ++pixel_index;
+ }
+ }
+ try
+ {
+ thinks::ppm::writeRgbImage(filename, write_width, write_height, write_pixels);
+ }
+ catch (exception e)
+ {
+ std::cerr << e.what();
+ return false;
+ }
+ return true;
+}
+// scales the data with min max values
+template
+void scaleImageToRGB(ifm3d::SimpleImageBuffer::Img &input, ifm3d::SimpleImageBuffer::Img &confidence, ifm3d::SimpleImageBuffer::Img &output, double min = 0.0f, double max = 0.0f)
+{
+ output.width = input.width;
+ output.height = input.height;
+ output.format = ifm3d::pixel_format::FORMAT_8U;
+ output.data.resize(output.width*output.height);
+
+ float scalingFactor = 255.0 / ((max - min) != 0 ? (max - min) : 1);
+
+ for (int index = 0; index < input.width * input.height; index++)
+ {
+ T value = *((T*)(input.data.data()) + index);
+ if ((*(confidence.data.data() + index) & 0x01) == 0x00) // checking valid pixel
+ *(output.data.data() + index) = (uint8_t)((value - min)* scalingFactor);
+ else
+ {
+ *(output.data.data() + index) = 0; // All invalid pixels
+ }
+ }
+}
+
+// find the min max of the data
+template < typename T >
+void findMinAndMax(ifm3d::SimpleImageBuffer::Img &input, ifm3d::SimpleImageBuffer::Img &confidence, double &min, double &max)
+{
+ max = 0;
+ min = (double)INT_MAX;
+ for (int index = 0; index < input.width * input.height; index++)
+ {
+ T value = *((T*)(input.data.data()) + index);
+ if ((*(confidence.data.data() + index) & 0x01) == 0x00)
+ {
+ min = std::min((T)min, value);
+ }
+ if ((*(confidence.data.data() + index) & 0x01) == 0x00)
+ {
+ max = std::max((T)max, value);
+ }
+ }
+}
+
+int main(int argc, const char **argv)
+{
+ auto cam = ifm3d::Camera::MakeShared();
+
+ ifm3d::SimpleImageBuffer::Ptr img = std::make_shared();
+ ifm3d::FrameGrabber::Ptr fg =
+ std::make_shared(
+ cam, ifm3d::IMG_AMP | ifm3d::IMG_CART | ifm3d::IMG_RDIS);
+
+ if (!fg->WaitForFrame(img.get(), 1000))
+ {
+ std::cerr << "Timeout waiting for camera!" << std::endl;
+ return -1;
+ }
+ // acquiring data from the device
+ ifm3d::SimpleImageBuffer::Img confidence = img->ConfidenceImage();
+ ifm3d::SimpleImageBuffer::Img amplitude = img->AmplitudeImage();
+ ifm3d::SimpleImageBuffer::Img distance = img->DistanceImage();
+
+ // for storing scaled output
+ ifm3d::SimpleImageBuffer::Img distance_scaled;
+ ifm3d::SimpleImageBuffer::Img amplitude_scaled;
+
+ double min = 0.0;
+ double max = 0.0;
 + // max and min distance for scaling the distance image to uint8 format
+ auto const max_distance = 2.5;
+ auto const min_distance = 0.0;
+ // for 32F data O3X camera
+ if (distance.format == ifm3d::pixel_format::FORMAT_32F)
+ {
+ scaleImageToRGB(distance, confidence, distance_scaled, min_distance, max_distance);
+ findMinAndMax(amplitude, confidence, min, max);
+ scaleImageToRGB(amplitude, confidence, amplitude_scaled, min, max);
+ }
+ //for 16u data O3D camera
+ else if(distance.format == ifm3d::pixel_format::FORMAT_16U)
+ { //if data format is 16U then distances are in millimeters, Hence max distance is multiplied by 1000.
+ scaleImageToRGB(distance, confidence, distance_scaled, min_distance, max_distance * 1000);
+ findMinAndMax(amplitude, confidence, min, max);
+ scaleImageToRGB(amplitude, confidence, amplitude_scaled, min, max);
+ }
+ else
+ {
+ std::cerr << "Unknown Format" << std::endl;
+ }
+
 + //writing images to ppm format
+ if (!writePPMFile(distance_scaled, "distanceImage.ppm"))
+ {
+ std::cerr << "Not able to write the distance data in ppm format" << std::endl;
+ return -1;
+ }
+
+ if (!writePPMFile(amplitude_scaled, "amplitudeImage.ppm"))
+ {
+ std::cerr << "Not able to write the amplitude data in ppm format" << std::endl;
+ return -1;
+ }
+
+ std::cout << "Done with simpleimage ppmio example" << std::endl;
+ return 0;
+}
diff --git a/latest/_downloads/bd95fc74c0931a6bbc83b6600b12c0bb/ex-pcicclient_async_messages.cpp b/latest/_downloads/bd95fc74c0931a6bbc83b6600b12c0bb/ex-pcicclient_async_messages.cpp
new file mode 100644
index 00000000..e671ed43
--- /dev/null
+++ b/latest/_downloads/bd95fc74c0931a6bbc83b6600b12c0bb/ex-pcicclient_async_messages.cpp
@@ -0,0 +1,92 @@
+// -*- c++ -*-
+/*
+ * Copyright (C) 2017 Kuhn & Völkel GmbH
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distribted on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//
+// ex-pcicclient_async_messages.cpp
+//
+// Shows how to use the PCICClient module to receive asynchronous
+// notification (and error) messages from the camera.
+//
+
+#include
+#include
+#include
+#include
+#include
+
+// Camera configuration string:
+// * Create two applications with indices 1 and 2
+// and activate application with index 2
+const char *config = R"CONFIG(
+{
+ "ifm3d":
+ {
+ "Device":
+ {
+ "ActiveApplication": "2"
+ },
+ "Apps":
+ [
+ {
+ "Name": "PCICClient Example 2a",
+ "Description": "First application",
+ "Index" : "1"
+ },
+ {
+ "Name": "PCICClient Example 2b",
+ "Description": "Second application",
+ "Index" : "2"
+ }
+ ]
+ }
+}
+)CONFIG";
+
+
+int main(int argc, char** argv)
+{
+ // Create camera
+ ifm3d::Camera::Ptr cam = std::make_shared();
+
+ // Configure two applications on the camera
+// cam->FromJSONStr(config);
+
+ // Create pcic interface
+ ifm3d::PCICClient::Ptr pcic = std::make_shared(cam);
+
+ // Set notification (and error) callbacks, which simply print received messages
+ pcic->SetNotificationCallback([](const std::string& notification)
+ {
+ std::cout << "Notification: " << notification << std::endl;
+ });
+
+ pcic->SetErrorCallback([](const std::string& error)
+ {
+ std::cout << "Error: " << error << std::endl;
+ });
+
+ // Switch between applications (and receive notification 000500000)
+ std::cout << "Switch to application 1" << std::endl;
+ pcic->Call("a01");
+ std::this_thread::sleep_for(std::chrono::seconds(1));
+
+ std::cout << "Switch to application 2" << std::endl;
+ pcic->Call("a02");
+ std::this_thread::sleep_for(std::chrono::seconds(1));
+
+ return 0;
+}
diff --git a/latest/_downloads/ddd652e302f29584910cad75d2331818/ex-exposure_times.cpp b/latest/_downloads/ddd652e302f29584910cad75d2331818/ex-exposure_times.cpp
new file mode 100644
index 00000000..dfe68d90
--- /dev/null
+++ b/latest/_downloads/ddd652e302f29584910cad75d2331818/ex-exposure_times.cpp
@@ -0,0 +1,165 @@
+/*
+ * Copyright (C) 2016 Love Park Robotics, LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//
+// ex-exposure_times.cpp
+//
+// Shows how to change imager exposure times on the fly while streaming in
+// pixel data and validating the setting of the exposure times registered to
+// the frame data.
+//
+
+#include <chrono>
+#include <iostream>
+#include <memory>
+#include <string>
+#include <unordered_map>
+#include <vector>
+#include <ifm3d/camera.h>
+#include <ifm3d/fg.h>
+#include <ifm3d/image.h>
+
+int main(int argc, const char **argv)
+{
+ // example configuration for the camera we will use for exemplary purpose
+ // we will use a double exposure imager.
+ std::string json = R"(
+ {
+ "ifm3d":
+ {
+ "Device":
+ {
+ "ActiveApplication": "1"
+ },
+ "Apps":
+ [
+ {
+ "TriggerMode": "1",
+ "Index": "1",
+ "Imager":
+ {
+ "ExposureTime": "5000",
+ "ExposureTimeList": "125;5000",
+ "ExposureTimeRatio": "40",
+ "Type":"under5m_moderate"
+ }
+ }
+ ]
+ }
+ }
+ )";
+
+
+ // instantiate the camera and set the configuration
+ ifm3d::Camera::Ptr cam = std::make_shared<ifm3d::Camera>();
+ std::cout << "Setting camera configuration: " << std::endl
+ << json << std::endl;
+
+
+ // create our image buffer to hold frame data from the camera
+ ifm3d::ImageBuffer::Ptr img = std::make_shared<ifm3d::ImageBuffer>();
+
+ // instantiate our framegrabber and be sure to explicitly tell it to
+ // stream back the exposure times registered to the frame data
+ ifm3d::FrameGrabber::Ptr fg =
+ std::make_shared<ifm3d::FrameGrabber>(
+ cam, ifm3d::DEFAULT_SCHEMA_MASK|ifm3d::EXP_TIME|ifm3d::ILLU_TEMP);
+
+ // a vector to hold the exposure times (we will just print them to the
+ // screen)
+ std::vector<std::uint64_t> exposure_times;
+
+ // a map use to modulate the `ExposureTime` and `ExposureTimeRatio`
+ // on-the-fly. We seed it with data consistent with our config above
+ std::unordered_map<std::string, std::string> params =
+ {
+ {"imager_001/ExposureTime", "5000"},
+ {"imager_001/ExposureTimeRatio", "40"}
+ };
+
+ // create a session with the camera so we can modulate the exposure times
+ cam->RequestSession();
+
+ // set our session timeout --
+ //
+ // NOTE: I'm going to do nothing with this here. However, in a *real*
+ // application, you will have to send `Heartbeats` at least every `hb_secs`
+ // seconds to the camera. The best technique for doing that is left as an
+ // exercise for the reader.
+ int hb_secs = cam->Heartbeat(300);
+
+ // now we start looping over the image data, every 20 frames, we will
+ // change the exposure times, after 100 frames we will exit.
+ int i = 0;
+ while (true)
+ {
+ if (! fg->WaitForFrame(img.get(), 1000))
+ {
+ std::cerr << "Timeout waiting for camera!" << std::endl;
+ continue;
+ }
+
+ // get the exposure times registered to the frame data
+ exposure_times = img->ExposureTimes();
+ auto illu_temp = img->IlluTemp();
+
+ // depending on your imager config, you can have up to 3 exposure
+ // times. I'll print all three for exemplary purposes, but we know there
+ // are only two valid ones based on our double exposure imager
+ // configuration from above. We expect the third to be 0.
+ std::cout << "Exposure Time 0: " << exposure_times.at(0)
+ << std::endl;
+ std::cout << "Exposure Time 1: " << exposure_times.at(1)
+ << std::endl;
+ std::cout << "Exposure Time 2: " << exposure_times.at(2)
+ << std::endl;
+ std::cout << "Illu Temp: " << illu_temp
+ << std::endl;
+ std::cout << "---" << std::endl;
+
+ i++;
+
+ if (i == 100)
+ {
+ break;
+ }
+
+ if (i % 20 == 0)
+ {
+ std::cout << "Setting long exposure time to: ";
+ if (exposure_times.at(1) == 5000)
+ {
+ std::cout << 10000 << std::endl;
+ params["imager_001/ExposureTime"] = "10000";
+ }
+ else
+ {
+ std::cout << 5000 << std::endl;
+ params["imager_001/ExposureTime"] = "5000";
+ }
+
+ cam->SetTemporaryApplicationParameters(params);
+ }
+ }
+
+ //
+ // In a long-running program, you will need to take care to
+ // clean up your session if necessary. Here we don't worry about it because
+ // the camera dtor will do that for us.
+ //
+ std::cout << "et voila." << std::endl;
+ return 0;
+}
diff --git a/latest/_downloads/e3f3c0903788d9405a5b19d99301bd53/ex-file_io.cpp b/latest/_downloads/e3f3c0903788d9405a5b19d99301bd53/ex-file_io.cpp
new file mode 100644
index 00000000..fb93490d
--- /dev/null
+++ b/latest/_downloads/e3f3c0903788d9405a5b19d99301bd53/ex-file_io.cpp
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2016 Love Park Robotics, LLC
+ * Copyright (C) 2017 ifm syntron gmbh
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//
+// ex-file_io.cpp
+//
+// Capture a frame from the camera, and write the data out to files. For
+// exemplary purposes, we will write the amplitude and radial distance images
+// to PNG files. NOTE: we have removed the PCL I/O from this example for now as
+// we are trying to deprecate PCL from our library.
+//
+
+#include <iostream>
+#include <memory>
+#include <opencv2/opencv.hpp>
+#include <ifm3d/camera.h>
+#include <ifm3d/fg.h>
+#include <ifm3d/image.h>
+
+int main(int argc, const char **argv)
+{
+ auto cam = ifm3d::Camera::MakeShared();
+
+ ifm3d::ImageBuffer::Ptr img = std::make_shared<ifm3d::ImageBuffer>();
+ ifm3d::FrameGrabber::Ptr fg =
+ std::make_shared<ifm3d::FrameGrabber>(
+ cam, ifm3d::IMG_AMP|ifm3d::IMG_CART|ifm3d::IMG_RDIS);
+
+ if (! fg->WaitForFrame(img.get(), 1000))
+ {
+ std::cerr << "Timeout waiting for camera!" << std::endl;
+ return -1;
+ }
+
+ imwrite("amplitude.png", img->AmplitudeImage());
+ imwrite("radial_distance.png", img->DistanceImage());
+
+ return 0;
+}
diff --git a/latest/_downloads/efe3699e16207ab287d02227ab95546b/LICENSE b/latest/_downloads/efe3699e16207ab287d02227ab95546b/LICENSE
new file mode 100644
index 00000000..d6456956
--- /dev/null
+++ b/latest/_downloads/efe3699e16207ab287d02227ab95546b/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/latest/_images/registration1.drawio.svg b/latest/_images/registration1.drawio.svg
new file mode 100644
index 00000000..92a73448
--- /dev/null
+++ b/latest/_images/registration1.drawio.svg
@@ -0,0 +1,111 @@
+
\ No newline at end of file
diff --git a/latest/_sources/content/README.md.txt b/latest/_sources/content/README.md.txt
index 2c15f232..4164b540 100644
--- a/latest/_sources/content/README.md.txt
+++ b/latest/_sources/content/README.md.txt
@@ -65,6 +65,10 @@ Note that we provide 2 tags, *stable* always points to the latest tagged version
For more details on the available containers, see [here](ifm3d/doc/sphinx/content/installation_instructions/install_docker:Docker%20dev%20container).
For more details on docker and the O3R platform see [here](documentation/O3R/Docker/README:Docker%20on%20O3R).
+## Examples
+
+Examples for the ifm3d library can be found in the [ifm3d-examples](https://github.com/ifm/ifm3d-examples) repository.
+
## Report a bug and check the known issues
Please see the [GitHub Issue Tracker](https://github.com/ifm/ifm3d/issues), or contact `support.robotics@ifm.com`.
diff --git a/latest/_sources/examples/o3r/2d_data/2d_data.md.txt b/latest/_sources/content/basic_lib_usage/2d_data/2d_data.md.txt
similarity index 90%
rename from latest/_sources/examples/o3r/2d_data/2d_data.md.txt
rename to latest/_sources/content/basic_lib_usage/2d_data/2d_data.md.txt
index e76bb879..c4804e16 100644
--- a/latest/_sources/examples/o3r/2d_data/2d_data.md.txt
+++ b/latest/_sources/content/basic_lib_usage/2d_data/2d_data.md.txt
@@ -63,13 +63,14 @@ cv::waitKey(0);
## The full example
:::::{tabs}
::::{group-tab} Python
-:::{literalinclude} 2d_data.py
+:::{literalinclude} ../../ifm3d-examples/ovp8xx/python/ovp8xxexamples/core/2d_data.py
+
:language: python
:::
::::
::::{group-tab} C++
-:::{literalinclude} 2d_data.cpp
+:::{literalinclude} ../../ifm3d-examples/ovp8xx/cpp/core/2d_data/2d_data.cpp
:language: cpp
:::
::::
diff --git a/latest/_sources/examples/o3r/configuration/configuration.md.txt b/latest/_sources/content/basic_lib_usage/configuration/configuration.md.txt
similarity index 89%
rename from latest/_sources/examples/o3r/configuration/configuration.md.txt
rename to latest/_sources/content/basic_lib_usage/configuration/configuration.md.txt
index 9a23a010..08b8c7f0 100644
--- a/latest/_sources/examples/o3r/configuration/configuration.md.txt
+++ b/latest/_sources/content/basic_lib_usage/configuration/configuration.md.txt
@@ -68,7 +68,10 @@ json conf = o3r->Get();
## Write a new configuration
-To set a new configuration, you need to provide said configuration in JSON formatting. The provided configuration can be a subset or the full configuration, as long as it follows the proper JSON hierarchy.
+To set a new configuration, you need to provide said configuration in JSON formatting. The provided configuration can be a subset or the full configuration, as long as it follows the proper JSON hierarchy.
+:::{note}
+For the O3R platform, you can refer to [the JSON schema documentation](https://ifm3d.com/latest/Technology/configuration.html#json-schema)
+:::
:::::{tabs}
::::{group-tab} Python
@@ -88,13 +91,13 @@ Note: we use [string literals](https://en.cppreference.com/w/cpp/language/string
## The full example
:::::{tabs}
::::{group-tab} Python
-:::{literalinclude} configuration.py
+:::{literalinclude} ../../ifm3d-examples/ovp8xx/python/ovp8xxexamples/core/configuration.py
:language: python
:::
::::
::::{group-tab} C++
-:::{literalinclude} configuration.cpp
+:::{literalinclude} ../../ifm3d-examples/ovp8xx/cpp/core/configuration/configuration.cpp
:language: cpp
:::
::::
diff --git a/latest/_sources/examples/o3r/deserialize/deserialize.md.txt b/latest/_sources/content/basic_lib_usage/deserialize/deserialize.md.txt
similarity index 89%
rename from latest/_sources/examples/o3r/deserialize/deserialize.md.txt
rename to latest/_sources/content/basic_lib_usage/deserialize/deserialize.md.txt
index c1045a58..e3c4959e 100644
--- a/latest/_sources/examples/o3r/deserialize/deserialize.md.txt
+++ b/latest/_sources/content/basic_lib_usage/deserialize/deserialize.md.txt
@@ -13,12 +13,12 @@ The usage of the deserializer is the same for all the buffers mentioned above: c
:::::{tabs}
:::: {group-tab} Python
-:::{literalinclude} deserialize.py
+:::{literalinclude} ../../ifm3d-examples/ovp8xx/python/ovp8xxexamples/core/deserialize.py
:language: python
:::
::::
:::: {group-tab} C++
-:::{literalinclude} deserialize.cpp
+:::{literalinclude} ../../ifm3d-examples/ovp8xx/cpp/core/deserialize/deserialize.cpp
:language: cpp
:::
::::
diff --git a/latest/_sources/examples/o3r/getting_data/getting_data.md.txt b/latest/_sources/content/basic_lib_usage/getting_data/getting_data.md.txt
similarity index 94%
rename from latest/_sources/examples/o3r/getting_data/getting_data.md.txt
rename to latest/_sources/content/basic_lib_usage/getting_data/getting_data.md.txt
index d52b3ff3..7753ff6a 100644
--- a/latest/_sources/examples/o3r/getting_data/getting_data.md.txt
+++ b/latest/_sources/content/basic_lib_usage/getting_data/getting_data.md.txt
@@ -160,13 +160,13 @@ auto rgb = frame->get_buffer(ifm3d::buffer_id::JPEG_IMAGE);
### Using a callback
:::::{tabs}
::::{group-tab} Python
-:::{literalinclude} getting_data_callback.py
+:::{literalinclude} ../../ifm3d-examples/ovp8xx/python/ovp8xxexamples/core/getting_data_callback.py
:language: python
:::
::::
::::{group-tab} C++
-:::{literalinclude} getting_data_callback.cpp
+:::{literalinclude} ../../ifm3d-examples/ovp8xx/cpp/core/getting_data/getting_data_callback.cpp
:language: cpp
:::
::::
@@ -175,13 +175,13 @@ auto rgb = frame->get_buffer(ifm3d::buffer_id::JPEG_IMAGE);
### Using the polling mode
:::::{tabs}
::::{group-tab} Python
-:::{literalinclude} getting_data.py
+:::{literalinclude} ../../ifm3d-examples/ovp8xx/python/ovp8xxexamples/core/getting_data.py
:language: python
:::
::::
::::{group-tab} C++
-:::{literalinclude} getting_data.cpp
+:::{literalinclude} ../../ifm3d-examples/ovp8xx/cpp/core/getting_data/getting_data.cpp
:language: cpp
:::
::::
diff --git a/latest/_sources/content/basic_lib_usage/index.md.txt b/latest/_sources/content/basic_lib_usage/index.md.txt
new file mode 100644
index 00000000..21fed904
--- /dev/null
+++ b/latest/_sources/content/basic_lib_usage/index.md.txt
@@ -0,0 +1,10 @@
+# Basic Library Usage
+
+:::{toctree}
+How to: configure the camera
+How to: receive an image
+How to: view the images
+How to: handle rgb data <2d_data/2d_data>
+How to: use the deserializer module
+How to: configure the logging
+:::
\ No newline at end of file
diff --git a/latest/_sources/examples/logging/logging.md.txt b/latest/_sources/content/basic_lib_usage/logging/logging.md.txt
similarity index 100%
rename from latest/_sources/examples/logging/logging.md.txt
rename to latest/_sources/content/basic_lib_usage/logging/logging.md.txt
diff --git a/latest/_sources/content/basic_lib_usage/viewer/README.md.txt b/latest/_sources/content/basic_lib_usage/viewer/README.md.txt
new file mode 100644
index 00000000..f59b6459
--- /dev/null
+++ b/latest/_sources/content/basic_lib_usage/viewer/README.md.txt
@@ -0,0 +1,5 @@
+# ifm3dpy Viewer
+
+:::{note}
+This example was moved to [the ifm3d-examples repository](https://github.com/ifm/ifm3d-examples).
+:::
\ No newline at end of file
diff --git a/latest/_sources/content/cmdline_configuring.md.txt b/latest/_sources/content/cmdline_configuring.md.txt
index 67840368..5bc67595 100644
--- a/latest/_sources/content/cmdline_configuring.md.txt
+++ b/latest/_sources/content/cmdline_configuring.md.txt
@@ -4,7 +4,7 @@
Configuring the parameters of an ifm 3D camera is accomplished in ifm3d in one
of two ways: 1) via the `ifm3d` command line tool; 2) via the `ifm3d`
library's `camera` module API. We show below how to do so with the command line tool.
-Please refer to [this doc](ifm3d/doc/sphinx/content/examples/o3r/configuration/configuration:How%20to%3A%20configure%20the%20camera) for instructions on configuring the camera through `ifm3d` library.
+Please refer to [this doc](content/basic_lib_usage/configuration/configuration.md) for instructions on configuring the camera through `ifm3d` library.
The primary mechanism for using the `ifm3d` command line tool to configure an
ifm 3D camera is to utilize the `dump` and `config` subcommands to `ifm3d`. The
diff --git a/latest/_sources/content/examples_list.md.txt b/latest/_sources/content/examples_list.md.txt
new file mode 100644
index 00000000..f824b5f0
--- /dev/null
+++ b/latest/_sources/content/examples_list.md.txt
@@ -0,0 +1,3 @@
+# Available examples
+
+Examples in Python and C++ are available in the [ifm3d-examples](https://github.com/ifm/ifm3d-examples) repository. We recommend that you clone the complete repository and read the instructions in the READMEs for details on each example.
\ No newline at end of file
diff --git a/latest/_sources/content/ifm3d-examples/CHANGELOG.md.txt b/latest/_sources/content/ifm3d-examples/CHANGELOG.md.txt
new file mode 100644
index 00000000..f1848605
--- /dev/null
+++ b/latest/_sources/content/ifm3d-examples/CHANGELOG.md.txt
@@ -0,0 +1,20 @@
+# Changelog for ifm3d-examples
+
+## UNRELEASED
+- Update the examples for O3D3xx and O3X1xx for ifm3d 1.4.3.
+
+## 1.1.0
+- Add a Dockerfile example for CAN usage on the OVP,
+- Add a Python example on how to activate the CAN interface on the OVP,
+- Add a Python example to get data from the IMU and another to deserialize it,
+- Add a Python example to generate SSH keys,
+- Add a Python example to update a JSON configuration from one firmware version to another,
+- Improve the Python firmware utils example (code cleanup, bootup monitoring, ...),
+- Update the versions of the required Python packages.
+
+
+## 1.0.0
+- Initial release: added examples in Python and C++ for the OVP8xx devices,
+
+## 0.1.0
+- This is a release for examples for O3D and O3X devices using ifm3d < 1.0.0 (tested with ifm3d v0.11.0)
\ No newline at end of file
diff --git a/latest/_sources/content/ifm3d-examples/README.md.txt b/latest/_sources/content/ifm3d-examples/README.md.txt
new file mode 100644
index 00000000..895b8606
--- /dev/null
+++ b/latest/_sources/content/ifm3d-examples/README.md.txt
@@ -0,0 +1,62 @@
+# ifm3d examples
+
+This project provides a collection of code examples for the O3 camera series of [ifm](https://www.ifm.com/), O3D3xx, O3X1xx and the O3R platform (OVP8xx along with the O3R22x cameras).
+
+These examples showcase the use of the [ifm3d library](https://api.ifm3d.com/stable/).
+
+## Supported languages
+
+Currently, we support the following languages:
+| Name | Versions |
+| ------ | -------------------------- |
+| Python | 3.8, 3.9, 3.10, 3.11, 3.12 |
+| C++ | GCC 7.5+, MSVC 2019+ |
+
+## Compatibility
+
+The examples have been tested in the following combination of versions:
+
+| ifm3d-examples version | O3R firmware | O3D firmware | O3X firmware | ifm3d library | Comment |
+| ---------------------- | ------------- | -------------------- | ------------- | ------------- | ----------------------------------------------------------------------------------------------------------- |
+| 0.1.0 | NA | 1.80.8656, 1.71.9079 | 1.1.190 | 0.11.0 | |
+| 1.0.0 | 1.1.30 | Not supported | Not supported | 1.4.3 | |
+| 1.1.0 | 1.1.30, 1.4.X | Not supported | Not supported | 1.4.3, 1.5.3 | Some of the examples are only applicable to the O3R firmware version 1.4.X (for example, the IMU and CAN examples). |
+
+Any other version might work but has not been explicitly tested.
+
+## Prerequisites
+To use these examples, you need to install the ifm3d library.
+
+For the c++ library, follow the installation instructions on [api.ifm3d.com](https://api.ifm3d.com/stable/content/installation_instructions/index.html).
+
+For the Python library, install using pip: `pip install ifm3dpy`.
+
+For more details refer to [the ifm3d documentation on api.ifm3d.com](https://api.ifm3d.com/stable/index.html).
+
+
+## o3d3xx-o3x1xx
+
+This folder contains examples for the O3D3XX and the O3X1XX camera series.
+
+## ovp8xx
+
+This folder contains examples for the O3R platform, which is composed of an OVP8xx compute unit and one or more O3R22x cameras.
+
+## Getting Started
+
+To get started with this project, follow the instructions below:
+
+1. Clone the repository.
+2. Navigate to o3d3xx-o3x1xx or ovp8xx, depending on the device you are interested in.
+3. Choose a programming language and the example that aligns with your requirements.
+4. Follow the instructions provided in the example's README file to set up and run the example, or open up the example file to read through the relevant setup.
+
+Most of the examples are amply commented, and sections that should be adapted to the user's specific setup are marked in the code.
+
+## Contributing
+
+Contributions are welcome! If you have any improvements or additional examples to share, please submit a pull request.
+
+## License
+
+This project is licensed under the Apache version 2.0 license. See the [LICENSE FILE](./LICENSE) for more details.
diff --git a/latest/_sources/content/ifm3d-examples/o3d3xx-o3x1xx/README.md.txt b/latest/_sources/content/ifm3d-examples/o3d3xx-o3x1xx/README.md.txt
new file mode 100644
index 00000000..4077cb16
--- /dev/null
+++ b/latest/_sources/content/ifm3d-examples/o3d3xx-o3x1xx/README.md.txt
@@ -0,0 +1,83 @@
+
+ifm3d Examples
+==============
+This project is formerly the `examples` sub-module of the
+[ifm3d](https://github.com/ifm/ifm3d) project. It has been moved to a
+standalone project to increase its efficacy as a teaching tool. Specifically,
+beyond providing concrete code examples for interfacing to `ifm3d` it also
+shows how to integrate `ifm3d` into an external project via `cmake`. This
+project relies upon `ifm3d` version 0.9.0 or better. The remainder of the old
+`README` now follows -- with minor edits.
+
+This directory contains example programs that utilize `ifm3d`. The
+intention is to create standalone programs that illustrate one very specific
+concept in order to serve the purpose of letting developers ramp up quickly
+with using the library. The build infrastructure in this directory is minimal
+and the programs are intended to be run in place. Additionally, unless
+specifically stated otherwise, things like performance and robust error
+handling are not demonstrated. The purpose is to clearly illustrate the task
+without clouding it with the details of real-world software engineering --
+unless, of course, that was the point of the example.
+
+It is expected that this library of examples will grow over time in response to
+common themes we see on the issue tracker.
+
+Building the examples
+----------------------
+
+Assuming you are starting from the top-level directory of this source
+distribution:
+
+ $ mkdir build
+ $ cd build
+ $ cmake ..
+ $ make
+
+### Windows examples
+For Windows-based target, with Visual Studio 2017, assuming you are starting from the top-level directory of this source
+distribution:
+
+ $ mkdir build
+ $ cd build
+ $ cmake -Ax64 ..
+ $ cmake --build . --clean-first --config Release
+
+At this stage, projects are built and you will find *IFM3D_EXAMPLES.sln* in build folder.
+Use Release / RelWithDebInfo configuration to run and investigate application examples.
+Please add the PATH variable to the projects:
+
+ PATH=%IFM3D_BUILD_DIR%\install\bin;%IFM3D_BUILD_DIR%\install\x64\vc%MSVC_MAJOR_VERSION%.%MSVC_MINOR_VERSION%\bin;%PATH%
+
+For instance, you can fill directly in VS *Project Properties* / *Debugging* / *Environment* with `PATH=C:\ifm3d\install\bin;C:\ifm3d\install\x64\vc14.1\bin;%PATH%`
+
+What is included?
+-----------------
+
+* [ex-file_io](file_io/ex-file_io.cpp) Shows how to capture data from the camera and
+ write the images to disk. In this example, the amplitude and radial distance
+ image are written out as PNG files. We have removed the PCL-related example
+ as we are in the process of deprecating our support for PCL from the `ifm3d`
+ core.
+* [ex-getmac](getmac/ex-getmac.cpp)
+ Request the MAC address from the camera. The MAC address can be used as
+ a unique identifier.
+* [ex-timestamp](timestamp/ex-timestamp.cpp)
+ Request some frames from the camera and write the timestamps to stdout
+* [ex-exposure_times](exposure_time/ex-exposure_times.cpp) Shows how to change imager
+ exposure times on the fly while streaming in pixel data and validating the
+ setting of the exposure times registered to the frame data.
+* [ex-fast_app_switch](fast_app_switch/ex-fast_app_switch.cpp) Shows how to switch between two
+ applications on the camera using PCIC
+* [ex-pcicclient_async_messages](pcicclient_async_messages/ex-pcicclient_async_messages.cpp) Shows how to
+ use the PCICClient module to receive asynchronous notification (and error)
+ messages from the camera.
+* [ex-pcicclient_set_io](pcicclient_set_io/ex-pcicclient_set_io.cpp) Shows how to mutate the digital IO pins
+  on the O3D camera by the PCIC interface.
+* [ex-simpleImage_ppm_io](simpleimage/example/ex-simpleImage_ppm_io.cpp) Shows how to write your own
+ image container which does not depend on PCL nor OpenCV.
+* [ex-multi_camera_grabber](multi_camera_grabber/ex-multi_camera_grabber.cpp) demonstrates how to acquire frames from multiple ifm 3D cameras,
+ see the example [documentation](doc/ex-multi_camera_grabber.md) for more details.
+
+LICENSE
+-------
+Please see the file called [LICENSE](LICENSE).
diff --git a/latest/_sources/content/ifm3d-examples/o3d3xx-o3x1xx/doc/ex-multi_camera_grabber.md.txt b/latest/_sources/content/ifm3d-examples/o3d3xx-o3x1xx/doc/ex-multi_camera_grabber.md.txt
new file mode 100644
index 00000000..f22a8a8f
--- /dev/null
+++ b/latest/_sources/content/ifm3d-examples/o3d3xx-o3x1xx/doc/ex-multi_camera_grabber.md.txt
@@ -0,0 +1,67 @@
+ex-multi_camera_grabber
+=========================
+
+The primary objective of this example is to demonstrate how to acquire frames from multiple ifm 3D cameras.
+
+The code in this example captures frames from multiple 3D cameras which are configured to be triggered through software,
+and displays the time stamp of the frame received. It also measures the time taken to receive the set of frames.
+
+The below diagram is representational image of the setup used for this example consisting of a PC and three 3D-camera all on the same network.
+This can be used as a reference to setup your PC/3D-camera network.
+
+In order to try out this example, the following are the prerequisites:
+
++ Each camera used should be configured to use "Process Interface" for trigger.
++ Each of your cameras needs to have a unique IP address.
++ You should be able to ping each of the camera from the PC on which this code executes.
++ In case your network uses a proxy, you will need to configure your system to bypass the proxy for the used IPs.
+
+Setting unique IP Address to your ifm 3D camera
+--------------------------------------------------
+The following set of IP addresses is utilized in the example code; these can be changed as per your setup.
+
++ camera 1: 192.168.0.70
++ camera 2: 192.168.0.71
++ camera 3: 192.168.0.72
+
+In case your O3X/O3D cameras are having conflicts due to the default IP 192.168.0.69 being set,
+you can set a unique IP address for each camera using the ifm Vision Assistant or by just following the step-by-step guide below.
+
+Connect the first camera to the network and execute the following command:
+
+```commands
+ifm3d --ip=192.168.0.69 dump > camera1_config.json
+```
+Edit the JSON file with an editor of your choice to set the field **ifm3d.Net.StaticIPv4Address** to **192.168.0.70**.
+Save the file.
+
+Configure the new IP for your camera with the following command:
+
+```commands
+ifm3d --ip=192.168.0.69 config < camera1_config.json
+```
+After this, add the second camera to the network and repeat the above steps to set the next IP to 192.168.0.71 and so on for the rest of the cameras.
+
+**Using jq**
+
+If you prefer to use jq, a single step command can be used to set IP address of the camera,
+Connect the first camera to the network and execute the following command:
+
+```commands
+ifm3d --ip=192.168.0.69 dump | jq ".ifm3d.Net.StaticIPv4Address=\"192.168.0.70\"" | ifm3d --ip=192.168.0.69 config
+```
+This will set the IP of the first camera to 192.168.0.70.
+
+After this add the second camera to the network, set the next IP with the below command and so on:
+
+
+```commands
+ifm3d --ip=192.168.0.69 dump | jq ".ifm3d.Net.StaticIPv4Address=\"192.168.0.71\"" | ifm3d --ip=192.168.0.69 config
+```
+
+
+
+
+
+
+
diff --git a/latest/_sources/content/ifm3d-examples/o3d3xx-o3x1xx/image_rectification/python/README.md.txt b/latest/_sources/content/ifm3d-examples/o3d3xx-o3x1xx/image_rectification/python/README.md.txt
new file mode 100644
index 00000000..a5f99a76
--- /dev/null
+++ b/latest/_sources/content/ifm3d-examples/o3d3xx-o3x1xx/image_rectification/python/README.md.txt
@@ -0,0 +1,8 @@
+# Image Rectification
+
+This Jupyter notebook demonstrates how to perform image rectification on an
+O3D303's amplitude image, using the camera's inverse intrinsics and the camera
+model of the O3D303.
+
+The O3D303 camera model is based on the model described in this document:
+http://www.vision.caltech.edu/bouguetj/calib_doc/htmls/parameters.html
diff --git a/latest/_sources/content/ifm3d-examples/o3d3xx-o3x1xx/intrinsics_to_cartesian/python/README.md.txt b/latest/_sources/content/ifm3d-examples/o3d3xx-o3x1xx/intrinsics_to_cartesian/python/README.md.txt
new file mode 100644
index 00000000..e73ad631
--- /dev/null
+++ b/latest/_sources/content/ifm3d-examples/o3d3xx-o3x1xx/intrinsics_to_cartesian/python/README.md.txt
@@ -0,0 +1,11 @@
+# Using Intrinsics and Unit Vectors
+
+This Jupyter notebook demonstrates how to use the intrinsic calibration of the
+camera to calculate the unit vectors.
+
+It also demonstrates how to use the unit vectors (whether they were calculated
+or obtained directly from the camera) to convert radial distance to cartesian
+information in the camera coordinate frame.
+
+The O3D303 camera model is based on the model described in this document:
+http://www.vision.caltech.edu/bouguetj/calib_doc/htmls/parameters.html
diff --git a/latest/_sources/content/ifm3d-examples/o3d3xx-o3x1xx/simpleimage/README.md.txt b/latest/_sources/content/ifm3d-examples/o3d3xx-o3x1xx/simpleimage/README.md.txt
new file mode 100644
index 00000000..50d800d2
--- /dev/null
+++ b/latest/_sources/content/ifm3d-examples/o3d3xx-o3x1xx/simpleimage/README.md.txt
@@ -0,0 +1,47 @@
+
+ifm3d - Simple Image Container
+===============================
+
+`ifm3d`, through its modularity, also encourages the
+creation of new image containers to be utilized within the overall `ifm3d`
+ecosystem. The interface is explained very well in
+[ifm3d image containers](https://github.com/ifm/ifm3d/blob/master/doc/img_container.md)
+
+The `simpleimage` container is a header-only interface for the ifm3d library which is independent of any third-party library.
+Following are the structures used in this module for storing images, points, and point clouds.
+**Image**
+```c++
+struct Img
+ {
+ std::vector data;
+ int width;
+ int height;
+ pixel_format format;
+ };
+```
+**point**
+```c++
+ struct Point
+ {
+ float x;
+ float y;
+ float z;
+ };
+ ```
+**pointCloud**
+```c++
+ struct PointCloud
+ {
+ std::vector points;
+ int width;
+ int height;
+ };
+```
+[pixel_format](https://github.com/ifm/ifm3d/blob/master/modules/framegrabber/include/ifm3d/fg/byte_buffer.h)
+
+The example in this module explains how to grab the data from the ifm 3D devices and save a .ppm image using the [ppm-io](https://github.com/thinks/ppm-io) module. To save the images the data is scaled to uint8 format. Amplitude image data is scaled by the minimum and maximum value in the grabbed amplitude data, whereas for the Distance image the data is scaled in the distance range from 0.0m to 2.5m, which means the data values after 2.5m will be shown as 255.
+
+The simpleimage module is configured in such a way that it can be built outside the ifm3d-example repository, which can be helpful for building applications which do not want to add OpenCV or PCL as a dependency.
+
+If you have questions, feel free to ask on our
+[issue tracker](https://github.com/ifm/ifm3d/issues).
diff --git a/latest/_sources/content/ifm3d-examples/o3d3xx-o3x1xx/simpleimage/ppm-io-master/README.md.txt b/latest/_sources/content/ifm3d-examples/o3d3xx-o3x1xx/simpleimage/ppm-io-master/README.md.txt
new file mode 100644
index 00000000..5047ba9c
--- /dev/null
+++ b/latest/_sources/content/ifm3d-examples/o3d3xx-o3x1xx/simpleimage/ppm-io-master/README.md.txt
@@ -0,0 +1,54 @@
+# PPM IO
+This repository implements reading and writing of images in the [PPM format](http://netpbm.sourceforge.net/doc/ppm.html). The PPM image format is extremely simple, making it ideal for small exploratory projects. The major benefit of this simplicity is that it is possible to implement the PPM file format without using external dependencies, such as compression libraries. All production code in this repository is implemented in a single [header file](https://github.com/thinks/ppm-io/blob/master/include/thinks/ppm.hpp), making it very simple to add to any existing project without having to set up additional linker rules. Also, the implementation uses only standard types and holds no state, meaning it should be fairly straight-forward to use the read and write functions. Detailed documentation is available in the source code.
+
+All code in this repository is released under the [MIT license](https://en.wikipedia.org/wiki/MIT_License).
+
+## Usage
+The implementation supports both reading and writing of PPM images. We provide some brief usage examples here, also refer to the [tests](https://github.com/thinks/ppm-io/blob/master/test/include/thinks/testPpm.hpp) for further examples.
+
+Reading an image is done as follows.
+```cpp
+using namespace std;
+
+auto width = size_t{0};
+auto height = size_t{0};
+auto pixel_data = vector();
+auto ifs = ifstream("my_file.ppm", ios::binary);
+thinks::ppm::readRgbImage(ifs, &width, &height, &pixel_data);
+ifs.close();
+```
+The above version uses the stream interface. This interface is the most flexible, since it does not assume that the image is stored on disk. Also, this version is useful for testing since it allows the tests to run in memory avoiding file permission issues. However, since the image being stored on disk is probably the most likely scenario a convenience version is also available.
+```cpp
+using namespace std;
+
+auto width = size_t{0};
+auto height = size_t{0};
+auto pixel_data = vector();
+thinks::ppm::readRgbImage("my_file.ppm", &width, &height, &pixel_data);
+```
+
+Writing image files is done in a similar fashion.
+```cpp
+using namespace std;
+
+// Write a 10x10 image where all pixels have the value (128, 128, 128).
+auto const width = size_t{10};
+auto const height = size_t{10};
+auto pixel_data = vector(width * height * 3, 128);
+auto ofs = ofstream("my_file.ppm", ios::binary);
+thinks::ppm::writeRgbImage(ofs, width, height, pixel_data);
+ofs.close();
+```
+Again, there is a convenience version for writing to disk.
+```cpp
+using namespace std;
+
+// Write a 10x10 image where all pixels have the value (128, 128, 128).
+auto const width = size_t{10};
+auto const height = size_t{10};
+auto pixel_data = vector(width * height * 3, 128);
+thinks::ppm::writeRgbImage("my_file.ppm", width, height, pixel_data);
+```
+
+## Tests
+This repository includes a simple [CMake project](https://github.com/thinks/ppm-io/blob/master/test/CMakeLists.txt) for running a small test suite. The test can be found in [this](https://github.com/thinks/ppm-io/blob/master/test/include/thinks/testPpm.hpp) header file. At present the test project builds and runs without errors.
diff --git a/latest/_sources/content/ifm3d-examples/ovp8xx/README.md.txt b/latest/_sources/content/ifm3d-examples/ovp8xx/README.md.txt
new file mode 100644
index 00000000..38fa3666
--- /dev/null
+++ b/latest/_sources/content/ifm3d-examples/ovp8xx/README.md.txt
@@ -0,0 +1,21 @@
+# ifm3d examples for OVP8xx
+
+This directory contains example codes in Python3 and C++ to help you start working with the O3R system.
+
+The available example codes are divided into three subdirectories:
+
+1. The core examples show you how to use the `ifm3d` API to obtain image data (2D, 3D, distance image, etc.), configure camera parameters, update the embedded firmware, and more.
+
+2. The ODS examples demonstrate how to work with the Obstacle Detection System (ODS). This includes streaming data, analyzing the data, visualizing results, configuring the ODS, and using diagnostic scripts.
+
+3. Within the Toolbox, you find various helper scripts that showcase how to use the data for specific applications (only Python examples available at the moment).
+
+## Getting started
+Each example folder contains a README.md which provides an overview of the available examples and some more detailed explanation of the concepts shown, if necessary. We recommend reading through the examples' READMEs and the example code and comments to fully comprehend the concepts.
+
+A recommended order when getting started with the examples would be as follows:
+- Start with the core examples, understanding how to collect data (`getting_data*` and `2d_data`) and how to configure the camera (`configuration`),
+- Continue with the diagnostic example (`diagnostic`) to understand how to inspect the current state of the device and react to potential errors,
+- Look through the deserialization examples (`deserialize*`) to understand how the non-image data is structured (calibration, camera information, etc).
+
+Once you have a good grasp of the core concepts and tools provided by the ifm3d API, you can move to the application specific examples: explore the ODS examples, the convenient tools in the toolbox or continue with the rest of the core examples.
\ No newline at end of file
diff --git a/latest/_sources/content/ifm3d-examples/ovp8xx/cpp/README.md.txt b/latest/_sources/content/ifm3d-examples/ovp8xx/cpp/README.md.txt
new file mode 100644
index 00000000..76a20c75
--- /dev/null
+++ b/latest/_sources/content/ifm3d-examples/ovp8xx/cpp/README.md.txt
@@ -0,0 +1,67 @@
+# C++ examples
+
+## Pre-requisites
+
+- The [ifm3d library >= v1.2.6](https://api.ifm3d.com/stable/content/installation_instructions/install_binary_package_index.html),
+- Optional:
+ - The [`json-schema-validator`](https://github.com/pboettch/json-schema-validator) library, which depends on [`nlohmann-json` >= 3.8.x](https://github.com/nlohmann/json). We use this library to validate configurations before attempting to set them. The `json-schema-validator` library provides a more verbose error handling than ifm3d, which allows to identify precisely where the error is in the provided configuration.
+ - OpenCV is used to display data and convert data formats. Follow the instructions [here for Linux](https://docs.opencv.org/4.x/d7/d9f/tutorial_linux_install.html) and [here for Windows](https://docs.opencv.org/4.x/d3/d52/tutorial_windows_install.html) to install it.
+
+## Build the examples
+
+Follow the instructions below to build the examples. The commands might need to be updated to run on Windows.
+
+```bash
+$ cd cpp
+$ mkdir build
+$ cd build
+$ cmake ..
+$ cmake --build .
+```
+
+This will create executables for all the examples in the sub-folders:
+```bash
+$ ls
+CMakeCache.txt CMakeFiles cmake_install.cmake Configs core Makefile ods
+$ ls core
+2d_data cmake_install.cmake deserialize getting_data_callback Makefile
+CMakeFiles configuration getting_data ifm3d_playground multi_head
+$ ls ods
+bootup_monitor cmake_install.cmake libods_config_lib.a ods_config ods_get_data
+CMakeFiles diagnostic Makefile ods_demo
+```
+
+## Configuration
+The examples are set up to "try their best" to run with the current configuration of the device. This means that when a port number is needed, the current configuration will be queried and the first available port used. In absence of configuration, the default IP address will be used.
+
+To change the IP address of the device, you can use an environment variable that will be retrieved in the code:
+
+```bash
+# Edit with the IP address of your OVP8xx
+# On Linux
+$ export IFM3D_IP="192.168.0.69"
+# On Windows (standard command prompt)
+$ set IFM3D_IP=192.168.0.69
+# On Windows (PowerShell)
+$ $env:IFM3D_IP = "192.168.0.69"
+```
+
+To change the port used, you need to open up the code and edit the port number manually. The structure is already there to be able to use a hardcoded port number, and you just need to uncomment it and edit with your setup.
+For example, in the `getting_data_callback.cpp` example, you will see the following lines:
+```cpp
+/////////////////////////////////////////////////////////
+// Alternatively, manually pick the port corresponding
+// to your 3D camera (uncomment the line below and comment
+// the block above)
+/////////////////////////////////////////////////////////
+// std::string port_nb = "port2";
+// if (o3r->Port(port_nb).type != "3D") {
+// std::cerr << "Please provide a 3D port number." << std::endl;
+// return -1;
+// }
+// uint16_t pcic_port = o3r->Port(port_nb).pcic_port;
+// std::cout << "Using 3D port: " << port_nb << std::endl;
+```
+
+Uncomment the lines of code and replace the port number variable string `port_nb`.
+Don't forget to compile the code again after making these changes.
\ No newline at end of file
diff --git a/latest/_sources/examples/o3r/ifm3d_playground/README.md.txt b/latest/_sources/content/ifm3d-examples/ovp8xx/cpp/core/ifm3d_playground/README.md.txt
similarity index 93%
rename from latest/_sources/examples/o3r/ifm3d_playground/README.md.txt
rename to latest/_sources/content/ifm3d-examples/ovp8xx/cpp/core/ifm3d_playground/README.md.txt
index 88f509fb..eade3620 100644
--- a/latest/_sources/examples/o3r/ifm3d_playground/README.md.txt
+++ b/latest/_sources/content/ifm3d-examples/ovp8xx/cpp/core/ifm3d_playground/README.md.txt
@@ -1,6 +1,6 @@
# ifm3d playground project
-```ifm3d_playground``` provides the out of ifm3d source example with cmake configurations.
+```ifm3d_playground``` provides the out of ifm3d source example with CMake configurations.
User can copy ifm3d_playground folder out of the ifm3d source and can use following instructions
to build the example and develop it further.
diff --git a/latest/_sources/content/ifm3d-examples/ovp8xx/cpp/ods/README.md.txt b/latest/_sources/content/ifm3d-examples/ovp8xx/cpp/ods/README.md.txt
new file mode 100644
index 00000000..c239c7c2
--- /dev/null
+++ b/latest/_sources/content/ifm3d-examples/ovp8xx/cpp/ods/README.md.txt
@@ -0,0 +1,18 @@
+# ODS examples
+
+## Overview
+
+The example files provided in this mini-library are intended to be used as teaching resources and as building blocks in more complex applications. The user is free to disassemble, extend, or do whatever they please with this code. All the functions needed to work with an ODS application are part of the ifm3d library. The rest of the code is added to simplify readability, usage, and error handling.
+- `ods_config.h` and `ods_config_main.cpp` show how to get and set configurations on the O3R platform. The header files can be reused in other applications that need configurations functionalities. This example showcases the use of a JSON validator that provides verbose errors when wrong configurations are provided.
+- `ods_get_data.h` and `ods_get_data_main.cpp` show how to properly start the data stream, implement a callback that will fill a queue with data, and retrieve data from the queue. Use the header file to make use of the data queue or of the data streamer in your application.
+
+In `ods_demo.cpp`, we show how all these pieces can be used together to form a complete ODS application:
+- We configure two applications, one for the “front” view and one for the “back” view,
+- We start streaming data from the front view and display it,
+- After some seconds, we switch view to use the “back” view, and display the data,
+- In parallel, we display the diagnostic messages as they are received from the O3R.
+
+>Note: The scripts mentioned above do not take into account all that is necessary for a production application to function long term. We do not handle deployment details, for instance using Docker, or specific error handling strategies, like turning off cameras if overheating or restarting the data stream if it was interrupted.
+
+## Configuration
+The examples use example JSON configuration files which contain dummy camera calibration and application parameters. These example files expect specific camera ports (3D cameras in ports 2 and 3). If your setup is different, edit the configuration files in `/ods/configs`. Make sure to recompile the code after changing the configuration files, because they are copied as part of the compilation process.
\ No newline at end of file
diff --git a/latest/_sources/content/ifm3d-examples/ovp8xx/docker/README.md.txt b/latest/_sources/content/ifm3d-examples/ovp8xx/docker/README.md.txt
new file mode 100644
index 00000000..c947bb55
--- /dev/null
+++ b/latest/_sources/content/ifm3d-examples/ovp8xx/docker/README.md.txt
@@ -0,0 +1,20 @@
+# Docker examples
+
+## Available examples
+
+The following example Dockerfiles are provided:
+- `o3r-l4t-base` provides a base Dockerfile to build an image based on Ubuntu 18.04 containing the necessary libraries to enable GPU acceleration with CUDA. The `o3r-l4t-base` container is intended to be run on board the OVP8xx.
+- `o3r-l4t-tensorrt` builds on top of the base L4T image and includes the TensorRT examples provided by NVIDIA ready to be run. The `o3r-l4t-tensorrt` container is intended to be run on board the OVP8xx.
+
+## EXCLUSION OF LIABILITY
+
+**DISCLAIMER**:
+
+This software and the accompanying files are provided "as is" and without warranties as to performance, merchantability, or any other warranties whether expressed or implied. The user must assume the entire risk of using the software.
+
+**LIABILITY**:
+
+In no event shall the author or contributors be liable for any special, incidental, indirect, or consequential damages whatsoever (including, without limitation, damages for loss of business profits, business interruption, loss of business information, or any other pecuniary loss) arising out of the use of or inability to use this software, even if the author or contributors have been advised of the possibility of such damages.
+
+## Further documentation
+See the respective documentation on the [ifm3d developer portal](https://ifm3d.com/latest/SoftwareInterfaces/Docker/index_docker.html).
\ No newline at end of file
diff --git a/latest/_sources/content/ifm3d-examples/ovp8xx/docker/o3r-l4t-base/README.md.txt b/latest/_sources/content/ifm3d-examples/ovp8xx/docker/o3r-l4t-base/README.md.txt
new file mode 100644
index 00000000..16b773a1
--- /dev/null
+++ b/latest/_sources/content/ifm3d-examples/ovp8xx/docker/o3r-l4t-base/README.md.txt
@@ -0,0 +1,17 @@
+# `o3r-l4t-base`
+
+## EXCLUSION OF LIABILITY
+
+**DISCLAIMER**:
+
+This software and the accompanying files are provided "as is" and without warranties as to performance, merchantability, or any other warranties whether expressed or implied. The user must assume the entire risk of using the software.
+
+**LIABILITY**:
+
+In no event shall the author or contributors be liable for any special, incidental, indirect, or consequential damages whatsoever (including, without limitation, damages for loss of business profits, business interruption, loss of business information, or any other pecuniary loss) arising out of the use of or inability to use this software, even if the author or contributors have been advised of the possibility of such damages.
+
+## Dockerfile content and usage objective
+The Dockerfiles provided here serve as a sample code project for using (base) Docker containers on the OVP80x VPU hardware architecture.
+
+Please use the provided `build.sh`, `config.sh` and `run.sh` helper scripts to build and run the images and containers based on the Dockerfiles.
+The helper files are not intended to be used as a deployment tool, but rather as a centralized text-based documentation tool of versions and dependencies.
\ No newline at end of file
diff --git a/latest/_sources/content/ifm3d-examples/ovp8xx/docker/o3r-l4t-tensorrt/README.md.txt b/latest/_sources/content/ifm3d-examples/ovp8xx/docker/o3r-l4t-tensorrt/README.md.txt
new file mode 100644
index 00000000..84018ddf
--- /dev/null
+++ b/latest/_sources/content/ifm3d-examples/ovp8xx/docker/o3r-l4t-tensorrt/README.md.txt
@@ -0,0 +1,16 @@
+# `o3r-l4t-tensorrt`
+## EXCLUSION OF LIABILITY
+
+**DISCLAIMER**:
+
+This software and the accompanying files are provided "as is" and without warranties as to performance, merchantability, or any other warranties whether expressed or implied. The user must assume the entire risk of using the software.
+
+**LIABILITY**:
+
+In no event shall the author or contributors be liable for any special, incidental, indirect, or consequential damages whatsoever (including, without limitation, damages for loss of business profits, business interruption, loss of business information, or any other pecuniary loss) arising out of the use of or inability to use this software, even if the author or contributors have been advised of the possibility of such damages.
+
+## Dockerfile content and usage objective
+The Dockerfiles provided here serve as a sample code project for using (base) Docker containers on the OVP80x VPU hardware architecture.
+
+Please use the provided `build.sh`, `config.sh` and `run.sh` helper scripts to build and run the images and containers based on the Dockerfiles.
+The helper files are not intended to be used as a deployment tool, but rather as a centralized text-based documentation tool of versions and dependencies.
\ No newline at end of file
diff --git a/latest/_sources/content/ifm3d-examples/ovp8xx/docker/o3r-l4t-tensorrt/Readme.md.txt b/latest/_sources/content/ifm3d-examples/ovp8xx/docker/o3r-l4t-tensorrt/Readme.md.txt
new file mode 100644
index 00000000..c509bd23
--- /dev/null
+++ b/latest/_sources/content/ifm3d-examples/ovp8xx/docker/o3r-l4t-tensorrt/Readme.md.txt
@@ -0,0 +1,11 @@
+
+This Docker image provides some of the TensorRT examples provided by NVIDIA. The Dockerfile uses a multistage build which provides a way of separating the build and deploy stages. In the deployed image there are no build tools installed to keep the footprint small.
+
+## Testing the TensorRT capabilities
+
+During the build, the examples are copied to `/opt/ifm/tensorrt/` instead of the default location `/usr/src/tensorrt`, due to the fact that the NVIDIA Docker runtime overloads this folder with the data installed on the VPU, but the OS does not come with the examples preinstalled.
+
+```bash
+./trtexec --deploy=/opt/ifm/tensorrt/data/mnist/mnist.prototxt --model=/opt/ifm/tensorrt/data/mnist/mnist.caffemodel --output=prob --batch=16 --saveEngine=/tmp/mnist16.trt
+./trtexec --loadEngine=/tmp/mnist16.trt --batch=16
+```
diff --git a/latest/_sources/content/ifm3d-examples/ovp8xx/python/README.md.txt b/latest/_sources/content/ifm3d-examples/ovp8xx/python/README.md.txt
new file mode 100644
index 00000000..4d134c72
--- /dev/null
+++ b/latest/_sources/content/ifm3d-examples/ovp8xx/python/README.md.txt
@@ -0,0 +1,24 @@
+# Python examples
+
+## Dependencies
+Our examples rely on a number of other Python packages listed in the `requirements.txt` file. Before running the examples, install the dependencies with (from the `/python` folder):
+```sh
+$ pip install -r requirements.txt
+```
+
+## Package installation (optional)
+You can skip this step if you are *only* going to use examples in the `core` or `toolbox` folders.
+
+The examples in this repository are available as a Python package that can be locally installed.
+Examples in the ODS folder depend on each other and on core examples. Therefore, to simplify importing and reusing code, you will need to install the package.
+
+From the `/python` folder, run the following command:
+```sh
+$ pip install -e .
+```
+This will install a package called `ovp8xxexamples`.
+
+You can now run the ODS examples.
+
+## Configuration
+The examples are set up with some default values for variables like the IP address or the camera ports. If the example Python package was installed (see [section above](#package-installation-optional)), default values are defined in the `config.py` file. Otherwise, hardcoded default values are defined in each example, typically with the default IP address 192.168.0.69 and a 2D camera on port 0 and a 3D camera on port 2. To use a different setup than the default one, you will need to edit the `config.py` file or edit the individual examples with your configuration.
\ No newline at end of file
diff --git a/latest/_sources/content/ifm3d-examples/ovp8xx/python/ovp8xxexamples/core/README.md.txt b/latest/_sources/content/ifm3d-examples/ovp8xx/python/ovp8xxexamples/core/README.md.txt
new file mode 100644
index 00000000..55e289e1
--- /dev/null
+++ b/latest/_sources/content/ifm3d-examples/ovp8xx/python/ovp8xxexamples/core/README.md.txt
@@ -0,0 +1,57 @@
+# Core
+In this directory you find multiple general O3R scripts that are explained below.
+
+## `2d_data.py`
+Receiving RGB data with `ifm3dpy` is done similarly as 3D data: the core objects have to be instantiated, and a frame has to be retrieved.
+The important part is how to access the RGB image and how to decode it for further use.
+Once decoded, the image can be displayed using tools such as OpenCV. The example code in `2d_data.py` illustrates the explained process.
+
+
+## `bootup_monitor.py`
+The script `bootup_monitor.py` checks that the VPU completes its boot sequence before attempting to initialize an application.
+
+## `can_activate.py`
+
+The CAN interface can only be activated through the JSON configuration with firmware version 1.4.X or higher.
+
+This example shows how to activate or deactivate the `can0` interface.
+
+## `configuration.py`
+
+The O3R has multiple parameters that have an influence on the point cloud. Some of them affect the raw measurement and others modify how the data is converted into x,y,z, etc values. These parameters can be changed to better fit your applications and the script `configuration.py` presents how. You can refer to [this page](https://ifm3d.com/latest/Technology/3D/index_3d.html) for a detailed description of each parameter.
+
+The ifm3d API provides functions to read and set the configuration of the device. Note that JSON formatting is used for all the configurations.
+
+## `deserialize_rgb.py`
+
+Some of the data provided by the O3R platform needs to be deserialized to be used.
+For more information on the data structures of each buffer please refer to the [Python API documentation](https://api.ifm3d.com/latest/_autosummary/ifm3dpy.deserialize.html).
+
+The usage of the deserializer is the same for all the deserializable buffers: create the object, and call the deserialize function. Follow the example code, `deserialize_rgb.py` for an example on deserializing the `RGBInfoV1` buffer.
+
+## `deserialize_imu.py` and `imu_data.py`
+
+The IMU data can only be accessed with firmware versions 1.4.X or higher, and ifm3d version 1.5.X or higher.
+
+These two examples show how to retrieve IMU data from the device and how to deserialize it.
+
+## `diagnostic.py`
+The script `diagnostic.py` contains helper functions for retrieving diagnostics when requested or asynchronously.
+
+## `fw_update_utils.py`
+
+The script `fw_update_utils.py` demonstrates how to perform a firmware update for your O3R system. Additionally, the script includes several utility functions that provide information, such as determining the current firmware version.
+
+## `getting_data*.py`
+
+The recommended way to receive data is to use the callback function, as shown in the `getting_data_callback.py` script. You can register a callback function that will be executed for every received frame, until the program exits. Alternatively, wait for a frame: you just need to call the `WaitForFrame` function, as shown in the `getting_data.py` script.
+
+## `ifm3dpy_viewer.py`
+In the `ifm3dpy_viewer.py` Python script, a full demonstration of how to view the different images is done. For more details refer to the [viewer documentation](viewer.md).
+
+## `multi_head.py`
+The `multi_head.py` script demonstrates how to retrieve the list of camera heads connected to the VPU and their types.
+
+## `timestamps.py`
+
+The script `timestamps.py` demonstrates how to get the timestamps and the effect of `sNTP` on the timestamps.
diff --git a/latest/_sources/examples/o3r/viewer/README.md.txt b/latest/_sources/content/ifm3d-examples/ovp8xx/python/ovp8xxexamples/core/viewer.md.txt
similarity index 99%
rename from latest/_sources/examples/o3r/viewer/README.md.txt
rename to latest/_sources/content/ifm3d-examples/ovp8xx/python/ovp8xxexamples/core/viewer.md.txt
index 10fbe93d..51a91bb1 100644
--- a/latest/_sources/examples/o3r/viewer/README.md.txt
+++ b/latest/_sources/content/ifm3d-examples/ovp8xx/python/ovp8xxexamples/core/viewer.md.txt
@@ -5,7 +5,6 @@ This is an example application for retrieving different kinds of images from an
## Download the code
If you built ifm3d from source, you already have the code, in `ifm3d/examples/o3r/viewer`.
If not, you can find the script [here](https://github.com/ifm/ifm3d/tree/main/examples/o3r/viewer).
-
## Installation
The recommended way is to use a virtual environment.
@@ -75,6 +74,6 @@ python examples/python/viewer/ifm3dpy_viewer.py --pcic-port 50012 --image xyz
```
### Display the JPEG image
-```
+```sh
python examples/python/viewer/ifm3dpy_viewer.py --pcic-port 50010 --image jpeg
```
diff --git a/latest/_sources/content/ifm3d-examples/ovp8xx/python/ovp8xxexamples/ods/README.md.txt b/latest/_sources/content/ifm3d-examples/ovp8xx/python/ovp8xxexamples/ods/README.md.txt
new file mode 100644
index 00000000..8a63d26b
--- /dev/null
+++ b/latest/_sources/content/ifm3d-examples/ovp8xx/python/ovp8xxexamples/ods/README.md.txt
@@ -0,0 +1,15 @@
+# ODS
+
+## Overview
+The ODS Python scripts will be briefly described below:
+
+- `ods_config.py` demonstrates how to set JSON configurations to the O3R system following the O3R schema.
+- `ods_queue.py` handles the data queues of an ODS application.
+- `ods_stream.py` provides functions showcasing how to receive data from the O3R platform.
+- `ods_visualization.py` is a script used for ODS visualization.
+- `ods_demo.py` is using the other scripts to do a full demonstration of the ODS application.
+- `transform_cell_to_user.py` showcases how to transform the occupancy grid cell index to coordinates in the user frame.
+
+## Configuration
+
+The examples use example JSON configuration files which contain dummy camera calibration and application parameters. These example files expect specific camera ports (3D cameras in ports 2 and 3). If your setup is different, edit the configuration files in `/ods/configs`.
\ No newline at end of file
diff --git a/latest/_sources/content/ifm3d-examples/ovp8xx/python/ovp8xxexamples/toolbox/README.md.txt b/latest/_sources/content/ifm3d-examples/ovp8xx/python/ovp8xxexamples/toolbox/README.md.txt
new file mode 100644
index 00000000..8ee4bc19
--- /dev/null
+++ b/latest/_sources/content/ifm3d-examples/ovp8xx/python/ovp8xxexamples/toolbox/README.md.txt
@@ -0,0 +1,25 @@
+# Toolbox
+
+Within the Toolbox, you find helper scripts, including:
+- `collect_calibrations.py`: this is a helper script that gathers calibration information for all the connected heads.
+- `h5_to_pcd_converter.py`: converts point cloud data from H5 files recorded with the ifmVisionAssistant to open3D PCD format.
+- `registration_2d_3d.py`: shows how to find the color pixel corresponding to a distance pixel. See more details on the process below.
+- `rot_human_read.py`: this script showcases two functions from the `o3r_algo_utilities` Python package that convert angles from Euler angles in radians to (roll, pitch, yaw) angles in degrees that are easier to interpret.
+- `extrinsic_calibration/static_camera_calibration/calib_cam.py`: this is a script to use to perform the static calibration process using a checkerboard. Make sure to closely follow the instructions in the accompanying README.
+- `update_settings_to_new_fw_schema.py`: this script can be used to update a configuration file from one firmware version to another, in the case where the schema was updated including breaking changes. The script will list out all the settings that were deleted and the user should check if these should be reapplied. This script expects a system with the same hardware configuration as the JSON configuration to be replicated.
+
+## `registration_2d_3d.py`
+
+This example shows how to find the closest pixel in the RGB image corresponding to each pixel in the distance image, in order to generate a colored point cloud.
+
+The process is done in several steps:
+
+1. Calculate the point cloud from the unit vectors of the 3D camera and the distance value for each pixel. Unit vectors are vectors of length 1 which point in the direction in which the pixel senses light. They are calculated using the intrinsic parameters of the optical system. This point cloud represents the surfaces visible to the TOF imager with (0, 0, 0) representing the center of the 3D optical frame.
+2. Transform the point cloud to the user's coordinate system using the extrinsic calibration parameters of the camera.
+3. Transform the point cloud to the 2D camera optical frame using the extrinsic calibration parameters of the 2D camera. Projecting the point cloud using the 2D camera's inverse intrinsic parameters will allow us to associate color information to the point cloud.
+
+
+
+
+
+The same process would be used to associate an object identified in the 2D camera frame to specific points in the 3D point cloud. This is useful for example to associate a mask derived from human segmentation algorithms to the 3D point cloud for an accurate measure of proximity.
diff --git a/latest/_sources/content/ifm3d-examples/ovp8xx/python/ovp8xxexamples/toolbox/extrinsic_calibration/static_camera_calibration/README.md.txt b/latest/_sources/content/ifm3d-examples/ovp8xx/python/ovp8xxexamples/toolbox/extrinsic_calibration/static_camera_calibration/README.md.txt
new file mode 100644
index 00000000..e2ef2ad7
--- /dev/null
+++ b/latest/_sources/content/ifm3d-examples/ovp8xx/python/ovp8xxexamples/toolbox/extrinsic_calibration/static_camera_calibration/README.md.txt
@@ -0,0 +1,3 @@
+# Static camera calibration
+
+This script is intended to be used along [the static calibration instructions on ifm3d.com](https://ifm3d.com/latest/SoftwareInterfaces/Toolbox/ExtrinsicCalibration/StaticCameraCalibration/README.html).
\ No newline at end of file
diff --git a/latest/_sources/content/installation_instructions/install_from_source_windows.md.txt b/latest/_sources/content/installation_instructions/install_from_source_windows.md.txt
index d440d3f7..cb10a991 100644
--- a/latest/_sources/content/installation_instructions/install_from_source_windows.md.txt
+++ b/latest/_sources/content/installation_instructions/install_from_source_windows.md.txt
@@ -65,11 +65,6 @@ cmake --build . --config %CONFIG% --target install
On successful execution of install step, user can disable the `BUILD_IN_DEPS` flag by appending
``` -DBUILD_IN_DEPS=OFF``` to CMake configure step, this will avoid building dependencies on every clean build.
-### Building the Examples
-
-To build the [examples](https://ifm3d.com/sphinx-doc/build/html/ifm3d/doc/sphinx/content/examples/index.html), provide the path `IFM3D_BINARY_DIR` to `CMAKE_PREFIX_PATH` when running CMake configure stage.
-To build the examples from source alongside the ifm3d library, enable the build with the `-DBUILD_EXAMPLES=ON`.
-
### Running ifm3d command line tools
After Building `ifm3d`, the binary files will be installed at
``%IFM3D_BUILD_DIR%\install\bin``. To run the ifm3d tool you need to add this
@@ -87,5 +82,4 @@ $ ifm3d
### Using ifm3d-playground projects
-After installing the ifm3d, one can use ifm3d-playground example, which shows basic CMake configuration required for using
-installed ifm3d libraries [ifm3d playground example](https://github.com/ifm/ifm3d/tree/main/examples/o3r/ifm3d_playground)
\ No newline at end of file
+After installing the ifm3d, one can use [ifm3d-playground example available on GitHub](https://github.com/ifm/ifm3d-examples/tree/main/ovp8xx/cpp/core/ifm3d_playground), which shows basic CMake configuration required for using installed ifm3d libraries.
\ No newline at end of file
diff --git a/latest/_sources/content/migration_guide/v1_3_0.md.txt b/latest/_sources/content/migration_guide/v1_3_0.md.txt
index 902c2dc6..8b6de733 100644
--- a/latest/_sources/content/migration_guide/v1_3_0.md.txt
+++ b/latest/_sources/content/migration_guide/v1_3_0.md.txt
@@ -1,5 +1,7 @@
# Migration Guide V1.3.0
+**There are no code changes required for customer code, however the default for logging changed, log messages will now be printed by default on the command line instead of a log file.**
+
## Change of installed Python version in docker containers
The default docker containers used to all contain the same Python version (3.9) regardless of the base image. Starting with this ifm3d version the installed Python will be replaced with the default of the corresponding Ubuntu version:
@@ -10,7 +12,6 @@ The default docker containers used to all contain the same Python version (3.9)
| Ubuntu 22.04 | Python 3.10.x |
## Replacement of glog with ifm3d::Logger
-Version 1.3.0 marks the introduction of the ifm3d::Logger as a replacement for glog.
-
-There are no code changes required for customer code, however the default for logging changed, log messages will now be printed by default on the command line instead of a log file. Please see [](../../examples/logging/logging) for details on how to configure the logging behaviour.
+Version 1.3.0 marks the introduction of the ifm3d::Logger as a replacement for glog.
+There are no code changes required for customer code, however the default for logging changed, log messages will now be printed by default on the command line instead of a log file. Please see the [logger configuration docs](../../examples/logging/logging) for details on how to configure the logging behaviour.
diff --git a/latest/_sources/content/stlimage.md.txt b/latest/_sources/content/stlimage.md.txt
index b0da5f5c..d0d0b8ad 100644
--- a/latest/_sources/content/stlimage.md.txt
+++ b/latest/_sources/content/stlimage.md.txt
@@ -3,7 +3,7 @@
```ifm3d::Buffer``` is designed to provide a C++ STL container that can hold
buffer data of different types provided by ifm3d vision devices. Data is stored
in sequential memory layout and ```ifm3d::Buffer``` provides a function template to access
-the pixel. The pixel data type is defined in an enum [```ifm3d::pixel_format```](../../../modules/device/include/ifm3d/device/device.h#L69).
+the pixel. The pixel data type is defined in an enumeration [```ifm3d::pixel_format```](../../../modules/device/include/ifm3d/device/device.h#L69).
```ifm3d::Buffer``` class does memory management and the user is free from the memory allocation
and deallocation. The assignment operator and the copy constructor
only copy the attributes and data is shared across the object.
diff --git a/latest/_sources/examples/examples_list.md.txt b/latest/_sources/examples/examples_list.md.txt
deleted file mode 100644
index 0fd567a8..00000000
--- a/latest/_sources/examples/examples_list.md.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-## List of available examples for the 3D vision platforms
-
-The examples listed below show various applications using one of the ifm 3D perceptions platforms. Each example is intended to be used as inspiration and might not be up to date with the latest library updates.
-
-| File | Comments | Last updated on |
-| ---- | -------- | --------------- |
-| [time_sync.cpp](o3r/time_sync/time_sync.cpp) | DEPRECATED: camera heads are now automatically synchronized. Usage of timestamps and CONF/RUN modes for multi head application | March 2022 |
\ No newline at end of file
diff --git a/latest/_sources/examples/index.md.txt b/latest/_sources/examples/index.md.txt
deleted file mode 100644
index 0a751542..00000000
--- a/latest/_sources/examples/index.md.txt
+++ /dev/null
@@ -1,16 +0,0 @@
-# Basic Library Usage
-```{include} ../news/news_release.md
-```
-
-All ifm3d example scripts are, as their name implies, example scripts.
-The customer may use these for testing, development, and deployment as permitted in their respective licenses.
-
-:::{toctree}
-How to: configure the camera
-How to: receive an image
-How to: receive data from multiple heads
-How to: view the images
-How to: handle rgb data
-How to: use the deserializer module
-How to: configure the logging
-:::
\ No newline at end of file
diff --git a/latest/_sources/examples/o3r/multi_head/multi_head.md.txt b/latest/_sources/examples/o3r/multi_head/multi_head.md.txt
deleted file mode 100644
index 8ccaa5cb..00000000
--- a/latest/_sources/examples/o3r/multi_head/multi_head.md.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-# How to: receive data from multiple heads
-```{literalinclude} multi_head.cpp
-:language: cpp
-```
\ No newline at end of file
diff --git a/latest/_sources/examples/o3r/oem/ssh/README.md.txt b/latest/_sources/examples/o3r/oem/ssh/README.md.txt
deleted file mode 100644
index 3f59df13..00000000
--- a/latest/_sources/examples/o3r/oem/ssh/README.md.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-# Generating and uploading SSH keys
-
-This documentation was moved to [ifm3d.com](https://ifm3d.com/latest/Technology/VPU/ssh.html).
\ No newline at end of file
diff --git a/latest/_sources/examples/oem/ifm3d-on-VPU/README.md.txt b/latest/_sources/examples/oem/ifm3d-on-VPU/README.md.txt
deleted file mode 100644
index 4817d6aa..00000000
--- a/latest/_sources/examples/oem/ifm3d-on-VPU/README.md.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-# Using the ifm3d locally on the VPU
-
-This documentation was moved to [ifm3d.com](https://ifm3d.com/latest/SoftwareInterfaces/Docker/deployVPU.html).
\ No newline at end of file
diff --git a/latest/_sources/index.md.txt b/latest/_sources/index.md.txt
index ca6ea5e1..b19a91d2 100644
--- a/latest/_sources/index.md.txt
+++ b/latest/_sources/index.md.txt
@@ -7,12 +7,12 @@
Overview
Installation instructions
Migration Guide
-Basic library usage
+Basic library usage
Command Line tool
Manipulating images
Python API
C++ API
-Examples
+Examples
Changelog
<<< Back to O3R docs
:::
diff --git a/latest/cli_link.html b/latest/cli_link.html
index 7ae40eba..365de98c 100644
--- a/latest/cli_link.html
+++ b/latest/cli_link.html
@@ -23,7 +23,7 @@
-
+
@@ -48,12 +48,12 @@
The primary mechanism for using the ifm3d command line tool to configure an
ifm 3D camera is to utilize the dump and config subcommands to ifm3d. The
dump command serializes the camera state to JSON and prints it to stdout,
diff --git a/latest/content/ChangeLog.html b/latest/content/ChangeLog.html
index 3bd4db2c..7bd158f4 100644
--- a/latest/content/ChangeLog.html
+++ b/latest/content/ChangeLog.html
@@ -21,7 +21,7 @@
-
+
importtime
-importcv2
-fromifm3dpy.deviceimportO3R
-fromifm3dpy.framegrabberimportFrameGrabber,buffer_id
-
-
-defcallback(self):
- rgb=cv2.imdecode(self.get_buffer(buffer_id.JPEG_IMAGE),cv2.IMREAD_UNCHANGED)
- cv2.imshow("2D image",rgb)
- cv2.waitKey(1)
-
-
-# Initialize the objects
-o3r=O3R()
-port="port0"
-fg=FrameGrabber(o3r,pcic_port=50010)
-
-# Change port to RUN state
-config=o3r.get()
-config["ports"][port]["state"]="RUN"
-o3r.set(config)
-
-# Register a callback and start streaming frames
-fg.on_new_frame(callback)
-fg.start([buffer_id.JPEG_IMAGE])
-
-time.sleep(10)
-# Stop the streaming
-fg.stop()
-
-
-
/*
+
/* * Copyright 2022-present ifm electronic, gmbh * SPDX-License-Identifier: Apache-2.0 */
-#include<iostream>#include<chrono>
-#include<thread>#include<ifm3d/device/o3r.h>#include<ifm3d/fg.h>#include<ifm3d/fg/buffer.h>#include<ifm3d/fg/distance_image_info.h>
+#include<iostream>#include<opencv2/core/core.hpp>
-#include<opencv2/core/core.hpp>
-#include<opencv2/opencv.hpp>
+#include<opencv2/core/mat.hpp>#include<opencv2/highgui.hpp>
+#include<opencv2/opencv.hpp>
+#include<queue>
+#include<string>
+#include<thread>// LUT for image format conversionstaticstd::unordered_map<ifm3d::pixel_format,int>LUT_TYPE{
-{ifm3d::pixel_format::FORMAT_8U,CV_8U},
-{ifm3d::pixel_format::FORMAT_8S,CV_8S},
-{ifm3d::pixel_format::FORMAT_16U,CV_16U},
-{ifm3d::pixel_format::FORMAT_16S,CV_16S},
-{ifm3d::pixel_format::FORMAT_32S,CV_32S},
-{ifm3d::pixel_format::FORMAT_32F,CV_32F},
-{ifm3d::pixel_format::FORMAT_32F3,CV_32F},
-{ifm3d::pixel_format::FORMAT_64F,CV_64F}};
+{ifm3d::pixel_format::FORMAT_8U,CV_8U},
+{ifm3d::pixel_format::FORMAT_8S,CV_8S},
+{ifm3d::pixel_format::FORMAT_16U,CV_16U},
+{ifm3d::pixel_format::FORMAT_16S,CV_16S},
+{ifm3d::pixel_format::FORMAT_32S,CV_32S},
+{ifm3d::pixel_format::FORMAT_32F,CV_32F},
+{ifm3d::pixel_format::FORMAT_32F3,CV_32F},
+{ifm3d::pixel_format::FORMAT_64F,CV_64F}};// LUT for image format sizestaticstd::unordered_map<ifm3d::pixel_format,int>LUT_SIZE{
-{ifm3d::pixel_format::FORMAT_8U,1},
-{ifm3d::pixel_format::FORMAT_8S,1},
-{ifm3d::pixel_format::FORMAT_16U,2},
-{ifm3d::pixel_format::FORMAT_16S,2},
-{ifm3d::pixel_format::FORMAT_32S,4},
-{ifm3d::pixel_format::FORMAT_32F,4},
-{ifm3d::pixel_format::FORMAT_32F3,4},
-{ifm3d::pixel_format::FORMAT_64F,8}};
+{ifm3d::pixel_format::FORMAT_8U,1},
+{ifm3d::pixel_format::FORMAT_8S,1},
+{ifm3d::pixel_format::FORMAT_16U,2},
+{ifm3d::pixel_format::FORMAT_16S,2},
+{ifm3d::pixel_format::FORMAT_32S,4},
+{ifm3d::pixel_format::FORMAT_32F,4},
+{ifm3d::pixel_format::FORMAT_32F3,4},
+{ifm3d::pixel_format::FORMAT_64F,8}};// Converts ifm3d::Buffer to cv:Mat.// cv::Mat will not take ownership of the data.// Make sure ifm3d::Buffer is not destroyed while the cv::Mat is still around.
-cv::MatConvertImageToMatNoCopy(ifm3d::Buffer&img)
-{
-returncv::Mat(img.height(),img.width(),LUT_TYPE[img.dataFormat()],img.ptr(0));
+cv::MatConvertImageToMatNoCopy(ifm3d::Buffer&img){
+returncv::Mat(img.height(),img.width(),LUT_TYPE[img.dataFormat()],
+img.ptr(0));}// Converts ifm3d::Buffer to cv:Mat.// This function copies the data so that// you can safely dispose of the ifm3d::Buffer.
-cv::MatConvertImageToMatCopy(ifm3d::Buffer&img)
-{
+cv::MatConvertImageToMatCopy(ifm3d::Buffer&img){automat=cv::Mat(img.height(),img.width(),LUT_TYPE[img.dataFormat()]);
-std::memcpy(mat.data,img.ptr(0),img.width()*img.height()*LUT_SIZE[img.dataFormat()]);
+std::memcpy(mat.data,img.ptr(0),
+img.width()*img.height()*LUT_SIZE[img.dataFormat()]);returnmat;}
-voidCallback(ifm3d::Frame::Ptrframe){
+std::queue<cv::Mat>img_queue;
-autorgb_img=frame->GetBuffer(ifm3d::buffer_id::JPEG_IMAGE);
-// No copy conversion of the image to cv::Mat:
-autorgb_cv=ConvertImageToMatNoCopy(rgb_img);
-// Alternatively, use:
-// auto rgb_cv = ConvertImageToMatCopy(rgb_img);
-// Display the image
-cv::startWindowThread();
-cv::imshow("RGB Image",cv::imdecode(rgb_cv,cv::IMREAD_UNCHANGED));
-cv::waitKey(1);
-
+voidDisplay(){
+cv::startWindowThread();
+while(true){
+if(!img_queue.empty()){
+cv::imshow("RGB Image",cv::imdecode(img_queue.front(),cv::IMREAD_UNCHANGED));
+img_queue.pop();
+cv::waitKey(1);
+}
+std::this_thread::sleep_for(std::chrono::milliseconds(10));
+}}
+voidCallback(ifm3d::Frame::Ptrframe){
+autorgb_img=frame->GetBuffer(ifm3d::buffer_id::JPEG_IMAGE);
+// For displaying the data, make sure to use to copy method.
+// This ensure the data is still available for display after the callback has returned.
+autorgb_cv=ConvertImageToMatCopy(rgb_img);
+// No copy conversion of the image to cv::Mat:
+// auto rgb_cv = ConvertImageToMatNoCopy(rgb_img);
+// Push image to queue for display
+img_queue.push(rgb_cv);
+}
+
+intmain(){
+// Get the IP from the environment if defined
+constchar*IP=std::getenv("IFM3D_IP")?std::getenv("IFM3D_IP"):ifm3d::DEFAULT_IP.c_str();
+std::clog<<"IP: "<<IP<<std::endl;
+
+//////////////////////////
+// Declare the O3R object
+//////////////////////////
+// Declare the device object (one object only, corresponding to the VPU)
+autoo3r=std::make_shared<ifm3d::O3R>(IP);
-intmain(){
+//////////////////////////
+// Pick a 2D port to use
+//////////////////////////
+// Pick the first available 2D port.
+uint16_tpcic_port=0;
+for(constauto&port:o3r->Ports()){
+if(port.type=="2D"){
+std::cout<<"Using first available 2D port: "<<port.port<<std::endl;
+pcic_port=port.pcic_port;
+break;
+}
+}
+
+// Alternatively, manually pick the port
+// corresponding to your 2D camera
+// std::string port_nb = "port0";
+// if (o3r->Port(port_nb).type != "2D") {
+// std::cerr << "Please provide a 2D port number." << std::endl;
+// return -1;
+// }
+// uint16_t pcic_port = o3r->Port(port_nb).pcic_port;
-//////////////////////////
-// Declare the objects:
-//////////////////////////
-// Declare the device object (one object only, corresponding to the VPU)
-autoo3r=std::make_shared<ifm3d::O3R>();
-// Declare the FrameGrabber object.
-constautoPCIC_PORT=o3r->Port("port0").pcic_port;
-autofg=std::make_shared<ifm3d::FrameGrabber>(o3r,PCIC_PORT);
+// Verify that a correct port number was provided
+if(pcic_port==0){
+std::cerr<<"No 2D port found in the configuration,"<<std::endl;
+return-1;
+}
+//////////////////////////
+// Declare the FrameGrabber object
+//////////////////////////
+autofg=std::make_shared<ifm3d::FrameGrabber>(o3r,pcic_port);
-//////////////////////////
-// Get a frame:
-//////////////////////////
-fg->OnNewFrame(&Callback);
-fg->Start({ifm3d::buffer_id::JPEG_IMAGE});
+//////////////////////////
+// Get a frame:
+//////////////////////////
+fg->OnNewFrame(&Callback);
+fg->Start({ifm3d::buffer_id::JPEG_IMAGE});
-std::this_thread::sleep_for(std::chrono::seconds(10));
-fg->Stop();
-return0;
-}
+Display();
+
+fg->Stop();
+return0;
+}
diff --git a/latest/examples/o3r/configuration/configuration.html b/latest/content/basic_lib_usage/configuration/configuration.html
similarity index 63%
rename from latest/examples/o3r/configuration/configuration.html
rename to latest/content/basic_lib_usage/configuration/configuration.html
index 52ceb868..c91e93fb 100644
--- a/latest/examples/o3r/configuration/configuration.html
+++ b/latest/content/basic_lib_usage/configuration/configuration.html
@@ -23,7 +23,7 @@
-
+
@@ -45,25 +45,24 @@
To set a new configuration, you need to provide said configuration in JSON formatting. The provided configuration can be a subset or the full configuration, as long as it follows the proper JSON hierarchy.
#############################################
+# Copyright 2021-present ifm electronic, gmbh
+# SPDX-License-Identifier: Apache-2.0
+#############################################
-# Define the ifm3d objects for the communication
+importjsonfromifm3dpy.deviceimportO3R
-o3r=O3R()
-# Get the current configuration
-config=o3r.get()
+defmain(ip,port):
+ # Initialize the O3R object
+ o3r=O3R(ip=ip)
+
+ # Get the current configuration
+ config=o3r.get()
+
+ # Print a little part from the config to verify the configuration
+ print(
+ f'Firmware version: {json.dumps(config["device"]["swVersion"]["firmware"],indent=4)}'
+ )
+
+ print(f'State of port {port}: {config["ports"][port]["state"]}')
+
+ # Let's change the name of the device
+ o3r.set({"device":{"info":{"name":"great_o3r"}}})
+
+ # Double check the configuration
+ config=o3r.get()
+ print(f'Device name: {config["device"]["info"]["name"]}')
-# Print a little part from the config to verify the configuration
-print(json.dumps(config["device"]["swVersion"],indent=4))
-# Note: this assumes that a camera is plugged into port 1
-print(config["ports"]["port1"]["state"])
-# Let's change the name of the device
-o3r.set({"device":{"info":{"name":"great_o3r"}}})
+if__name__=="__main__":
+ try:
+ # If the example python package was build, import the configuration
+ fromovp8xxexamplesimportconfig
-# Double check the configuration
-config=o3r.get()
-print(config["device"]["info"]["name"])
+ IP=config.IP
+ PORT=config.PORT_2D
+ exceptImportError:
+ # Otherwise, use default values
+ print(
+ "Unable to import the configuration.\nPlease run 'pip install -e .' from the python root directory"
+ )
+ print("Defaulting to the default configuration.")
+ IP="192.168.0.69"
+ PORT="port0"
+ main(ip=IP,port=PORT)
Some of the data provided by the O3R platform needs to be deserialized to be used. This is the case for:
+
+
the intrinsic calibration parameters (ifm3dpy.deserialize.Calibration), which provides details like which optical model is used (Fisheye, pinhole) and the values for each of the model’s parameters,
+
the extrinsic calibration (optics to user) parameters (ifm3dpy.deserialize.ExtrinsicOpticToUser), which provides the transformations between the optical system and the reference point on the camera housing,
+
the ODS zone information (ifm3dpy.deserialize.ODSInfoV1), which contains the zone id being used and the occupancy of the zones,
+
the ODS occupancy grid information (ifm3dpy.deserialize.ODSOccupancyGridV1), which contains occupancy grid data and the transformation matrix,
+
the RGB information (ifm3dpy.deserialize.RGBInfoV1), which provides exposure times and calibration parameters for the O3R RGB cameras.
+
+
For more information on the data structures of each buffer please refer to the Python API documentation or the [C++ API documentation].
+
The usage of the deserializer is the same for all the buffers mentioned above: create the object, and call the deserialize function. Follow the example below for an example on deserializing the RGBInfoV1 buffer.
+
+
/*
+ * Copyright 2021-present ifm electronic, gmbh
+ * SPDX-License-Identifier: Apache-2.0
+ */
+#include<chrono>
+#include<ifm3d/deserialize.h>
+#include<ifm3d/device/o3r.h>
+#include<ifm3d/fg.h>
+#include<iostream>
+#include<thread>
+// Namespace used for writing time "3s"
+usingnamespacestd::chrono_literals;
+// Namespace used for json pointers
+usingnamespaceifm3d::literals;
+
+intmain(){
+//////////////////////////
+// Create the O3R object
+//////////////////////////
+// Get the IP from the environment if defined
+constchar*IP=std::getenv("IFM3D_IP")?std::getenv("IFM3D_IP"):ifm3d::DEFAULT_IP.c_str();
+std::clog<<"IP: "<<IP<<std::endl;
+
+autoo3r=std::make_shared<ifm3d::O3R>(IP);
+
+//////////////////////////
+// Select the first available
+// 2D port from the configuration
+//////////////////////////
+uint16_tpcic_port=0;
+for(constauto&port:o3r->Ports()){
+if(port.type=="2D"){
+std::cout<<"Using first available 2D port: "<<port.port<<std::endl;
+pcic_port=port.pcic_port;
+break;
+}
+}
+
+/////////////////////////////////////////////////////////
+// Alternatively, manually pick the port corresponding
+// to your 2D camera (uncomment the line below and comment
+// the block above)
+/////////////////////////////////////////////////////////
+// std::string port_nb = "port0";
+// if (o3r->Port(port_nb).type != "2D") {
+// std::cerr << "Please provide a 2D port number." << std::endl;
+// return -1;
+// }
+// uint16_t pcic_port = o3r->Port(port_nb).pcic_port;
+// std::cout << "Using 2D port: " << port_nb << std::endl;
+
+//////////////////////////////////////////////////
+// Verify that a correct port number was provided
+// and create the framegrabber object
+//////////////////////////////////////////////////
+if(pcic_port==0){
+std::cerr<<"No 2D port found in the configuration,"<<std::endl;
+return-1;
+}
+
+////////////////////////////
+// Create the FrameGrabber object
+////////////////////////////
+autofg=std::make_shared<ifm3d::FrameGrabber>(o3r,pcic_port);
+
+// Define which buffer to retrieve and start the data stream
+fg->Start({ifm3d::buffer_id::RGB_INFO});
+
+//////////////////////////
+// Receive a frame:
+//////////////////////////
+autofuture=fg->WaitForFrame();
+if(future.wait_for(3s)!=std::future_status::ready){
+std::cerr<<"Timeout waiting for camera!"<<std::endl;
+return-1;
+}
+autoframe=future.get();
+// Get the data from the relevant buffer
+autorgb_info_buffer=frame->GetBuffer(ifm3d::buffer_id::RGB_INFO);
+fg->Stop();
+
+//////////////////////////
+// Extract data from the buffer
+// Using the deserializer module
+//////////////////////////
+autorgb_info=ifm3d::RGBInfoV1::Deserialize(rgb_info_buffer);
+std::cout<<"Sample of data available in the RGBInfoV1 buffer:"<<std::endl;
+std::cout<<"RGB info timestamp: "<<rgb_info.timestamp_ns<<std::endl;
+std::cout<<"Exposure time: "<<rgb_info.exposure_time<<std::endl;
+std::cout<<"Intrinsic calibration model id: "
+<<rgb_info.intrinsic_calibration.model_id<<std::endl;
+std::cout<<"Intrinsic calibration parameter [0]: "
+<<rgb_info.intrinsic_calibration.model_parameters[0]<<std::endl;
+return0;
+}
+
/* * Copyright 2021-present ifm electronic, gmbh * SPDX-License-Identifier: Apache-2.0 */
-#include<iostream>#include<chrono>
-#include<thread>
+#include<ifm3d/common/json_impl.hpp>
+#include<ifm3d/device/err.h>#include<ifm3d/device/o3r.h>#include<ifm3d/fg.h>
+#include<iostream>
+#include<string>
+#include<thread>usingnamespacestd::chrono_literals;usingnamespaceifm3d::literals;
-voidCallback(ifm3d::Frame::Ptrframe){
+voidCallback(ifm3d::Frame::Ptrframe){autodist=frame->GetBuffer(ifm3d::buffer_id::RADIAL_DISTANCE_IMAGE);
+std::cout<<"Distance image dimensions:"<<std::endl;std::cout<<dist.height()<<" "<<dist.width()<<std::endl;}
-int
-main()
-{
-
+intmain(){
+// Get the IP from the environment if defined
+constchar*IP=std::getenv("IFM3D_IP")?std::getenv("IFM3D_IP"):ifm3d::DEFAULT_IP.c_str();
+std::clog<<"IP: "<<IP<<std::endl;
+//////////////////////////
-// Declare the objects:
+// Declare the objects//////////////////////////// Declare the device object (one object only, corresponding to the VPU)
-autoo3r=std::make_shared<ifm3d::O3R>();
-// Declare the FrameGrabber and ImageBuffer objects.
-// One FrameGrabber per camera head (define the port number).
-constautoFG_PCIC_PORT=
-o3r->Get()["/ports/port2/data/pcicTCPPort"_json_pointer];
-autofg=std::make_shared<ifm3d::FrameGrabber>(o3r,FG_PCIC_PORT);
+autoo3r=std::make_shared<ifm3d::O3R>(IP);
+
+//////////////////////////
+// Select the first available
+// 3D port from the configuration
+//////////////////////////
+uint16_tpcic_port=0;
+for(constauto&port:o3r->Ports()){
+if(port.type=="3D"){
+std::cout<<"Using first available 3D port: "<<port.port<<std::endl;
+pcic_port=port.pcic_port;
+break;
+}
+}
+
+/////////////////////////////////////////////////////////
+// Alternatively, manually pick the port corresponding
+// to your 3D camera (uncomment the line below and comment
+// the block above)
+/////////////////////////////////////////////////////////
+// std::string port_nb = "port2";
+// if (o3r->Port(port_nb).type != "3D") {
+// std::cerr << "Please provide a 3D port number." << std::endl;
+// return -1;
+// }
+// uint16_t pcic_port = o3r->Port(port_nb).pcic_port;
+// std::cout << "Using 3D port: " << port_nb << std::endl;
+
+//////////////////////////////////////////////////
+// Verify that a correct port number was provided
+// and create the framegrabber object
+//////////////////////////////////////////////////
+if(pcic_port==0){
+std::cerr<<"No 3D port found in the configuration,"<<std::endl;
+return-1;
+}
+
+autofg=std::make_shared<ifm3d::FrameGrabber>(o3r,pcic_port);
+
+//////////////////////////
+// Start the framegrabber
+// and register the callback
+//////////////////////////
-//Set Schema and start the grabber
-fg->Start({ifm3d::buffer_id::AMPLITUDE_IMAGE,ifm3d::buffer_id::RADIAL_DISTANCE_IMAGE,ifm3d::buffer_id::XYZ});
+// Set Schema and start the grabber
+fg->Start({ifm3d::buffer_id::AMPLITUDE_IMAGE,
+ifm3d::buffer_id::RADIAL_DISTANCE_IMAGE,ifm3d::buffer_id::XYZ});
-//Register callback function
+// Register callback functionfg->OnNewFrame(&Callback);// This sleep is to prevent the program from before the
@@ -320,14 +386,19 @@
/* * Copyright 2021-present ifm electronic, gmbh * SPDX-License-Identifier: Apache-2.0 */
-#include<iostream>#include<chrono>#include<ifm3d/device/o3r.h>#include<ifm3d/fg.h>
+#include<iostream>usingnamespacestd::chrono_literals;usingnamespaceifm3d::literals;
-int
-main()
-{
+intmain(){//////////////////////////
-// Declare the objects:
+// Declare the objects//////////////////////////// Declare the device object (one object only, corresponding to the VPU)
-autocam=std::make_shared<ifm3d::O3R>();
-// Declare the FrameGrabber and ImageBuffer objects.
-// One FrameGrabber per camera head (define the port number).
-constautoFG_PCIC_PORT=
-cam->Get()["/ports/port2/data/pcicTCPPort"_json_pointer];
-autofg=std::make_shared<ifm3d::FrameGrabber>(cam,FG_PCIC_PORT);
+autoo3r=std::make_shared<ifm3d::O3R>();
+
+//////////////////////////
+// Select the first available
+// 3D port from the configuration
+//////////////////////////
+uint16_tpcic_port=0;
+for(constauto&port:o3r->Ports()){
+if(port.type=="3D"){
+std::cout<<"Using first available 3D port: "<<port.port<<std::endl;
+pcic_port=port.pcic_port;
+break;
+}
+}
+
+/////////////////////////////////////////////////////////
+// Alternatively, manually pick the port corresponding
+// to your 3D camera (uncomment the line below and comment
+// the block above)
+/////////////////////////////////////////////////////////
+// std::string port_nb = "port2";
+// if (o3r->Port(port_nb).type != "3D") {
+// std::cerr << "Please provide a 3D port number." << std::endl;
+// return -1;
+// }
+// uint16_t pcic_port = o3r->Port(port_nb).pcic_port;
+// std::cout << "Using 3D port: " << port_nb << std::endl;
+
+//////////////////////////////////////////////////
+// Verify that a correct port number was provided
+// and create the framegrabber object
+//////////////////////////////////////////////////
+if(pcic_port==0){
+std::cerr<<"No 3D port found in the configuration,"<<std::endl;
+return-1;
+}
+
+autofg=std::make_shared<ifm3d::FrameGrabber>(o3r,pcic_port);
-//Set Schema and start the grabber
-fg->Start({ifm3d::buffer_id::AMPLITUDE_IMAGE,ifm3d::buffer_id::RADIAL_DISTANCE_IMAGE,ifm3d::buffer_id::XYZ});
+//////////////////////////
+// Start the framegrabber
+// and run the callback
+//////////////////////////
+
+// Set Schema and start the grabber
+fg->Start({ifm3d::buffer_id::AMPLITUDE_IMAGE,
+ifm3d::buffer_id::RADIAL_DISTANCE_IMAGE,ifm3d::buffer_id::XYZ});//////////////////////////// Get a frame://////////////////////////autofuture=fg->WaitForFrame();
-if(future.wait_for(3s)!=std::future_status::ready)
-{
-std::cerr<<"Timeout waiting for camera!"<<std::endl;
-return-1;
-}
+if(future.wait_for(3s)!=std::future_status::ready){
+std::cerr<<"Timeout waiting for camera!"<<std::endl;
+return-1;
+}autoframe=future.get();//////////////////////////
diff --git a/latest/content/basic_lib_usage/index.html b/latest/content/basic_lib_usage/index.html
new file mode 100644
index 00000000..67429b63
--- /dev/null
+++ b/latest/content/basic_lib_usage/index.html
@@ -0,0 +1,189 @@
+
+
+
+
+
+ Basic Library Usage — ifm3d v1.5.3 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+