From 301ac04ac421c60810ada3922d6cc0606e5482ae Mon Sep 17 00:00:00 2001 From: Aymeric Dujardin Date: Thu, 4 Apr 2024 10:57:47 +0200 Subject: [PATCH] 4.1.0 (#628) --- README.md | 18 +- .../body tracking/cpp/include/GLViewer.hpp | 4 +- .../body tracking/cpp/src/GLViewer.cpp | 12 +- body tracking/body tracking/cpp/src/main.cpp | 11 +- .../body tracking/csharp/MainWindow.cs | 2 +- .../csharp/Properties/AssemblyInfo.cs | 2 +- .../JSON export/cpp/include/GLViewer.hpp | 4 +- .../export/JSON export/cpp/src/GLViewer.cpp | 12 +- .../export/JSON export/cpp/src/main.cpp | 2 +- .../export/fbx export/cpp/include/utils.hpp | 2 +- .../export/fbx export/cpp/src/main.cpp | 2 +- .../multi-camera/cpp/include/GLViewer.hpp | 4 +- .../multi-camera/cpp/src/GLViewer.cpp | 20 +- body tracking/multi-camera/cpp/src/main.cpp | 11 +- camera control/cpp/src/main.cpp | 11 +- camera control/csharp/Program.cs | 2 +- .../csharp/Properties/AssemblyInfo.cs | 2 +- camera streaming/receiver/cpp/src/main.cpp | 2 +- camera streaming/sender/cpp/include/utils.hpp | 2 +- camera streaming/sender/cpp/src/main.cpp | 2 +- .../cpp/src/main.cpp | 9 +- .../depth sensing/cpp/include/GLViewer.hpp | 4 +- .../depth sensing/cpp/src/GLViewer.cpp | 12 +- depth sensing/depth sensing/cpp/src/main.cpp | 47 +- .../depth sensing/csharp/MainWindow.cs | 2 +- .../csharp/Properties/AssemblyInfo.cs | 2 +- depth sensing/export/cpp/main.cpp | 2 +- depth sensing/image refocus/cpp/src/main.cpp | 2 +- .../cpp/src/main.cpp | 261 +- depth sensing/multi camera/cpp/src/main.cpp | 2 +- fusion/README.md | 27 - geotracking/README.md | 20 - geotracking/live geotracking/README.md | 29 - .../cpp/src/display/GenericDisplay.cpp | 69 - .../python/display/generic_display.py | 61 - .../python/gnss_reader/gpsd_reader.py | 90 - geotracking/playback/README.md | 28 - .../cpp/include/display/GenericDisplay.h | 83 - geotracking/playback/cpp/src/GNSSReplay.cpp | 155 - .../cpp/src/display/GenericDisplay.cpp | 71 - .../python/display/generic_display.py | 61 - 
geotracking/playback/python/gnss_replay.py | 136 - geotracking/recording/README.md | 29 - .../cpp/src/display/GenericDisplay.cpp | 69 - .../recording/cpp/src/exporter/GNSSSaver.cpp | 68 - .../python/display/generic_display.py | 61 - .../recording/python/exporter/gnss_saver.py | 41 - .../python/ogl_viewer/tracking_viewer.py | 557 ---- .../recording/python/ogl_viewer/zed_model.py | 2734 ----------------- global localization/README.md | 20 + global localization/live/README.md | 41 + .../live}/cpp/CMakeLists.txt | 18 +- .../live}/cpp/cmake/FindGPS.cmake | 0 .../live}/cpp/include/display/GLViewer.hpp | 9 +- .../cpp/include/display/GenericDisplay.h | 10 +- .../live}/cpp/include/display/ZEDModel.hpp | 0 .../live}/cpp/include/exporter/KMLExporter.h | 0 .../cpp/include/gnss_reader/GPSDReader.hpp | 0 .../cpp/include/gnss_reader/IGNSSReader.h | 0 .../live}/cpp/include/json.hpp | 0 .../live}/cpp/src/display/GLViewer.cpp | 72 +- .../live/cpp/src/display/GenericDisplay.cpp | 62 + .../live}/cpp/src/exporter/KMLExporter.cpp | 0 .../live}/cpp/src/gnss_reader/GPSDReader.cpp | 122 +- .../live}/cpp/src/main.cpp | 28 +- .../live/python/display/generic_display.py | 62 + .../live}/python/display/gl_viewer.py | 2 +- .../live}/python/display/zed_model.py | 0 .../live}/python/exporter/KMLExporter.py | 6 +- .../live}/python/gnss_reader/gpsd_reader.py | 16 +- .../live/python/live.py | 46 +- .../live/python/requirements.txt | 2 + global localization/map server/README.md | 19 + .../map server/assets/css/index.css | 133 + .../map server/assets/favicon.ico | Bin 0 -> 15086 bytes .../assets/leaflet/images/layers.png | Bin 0 -> 696 bytes .../map server/assets/leaflet/leaflet.css | 661 ++++ .../map server/assets/leaflet/leaflet.js | 6 + global localization/map server/index.html | 327 ++ global localization/playback/README.md | 30 + .../playback/cpp/CMakeLists.txt | 19 +- .../playback/cpp/cmake/FindGPS.cmake | 0 .../playback/cpp/include/GNSSReplay.hpp | 9 +- .../cpp/include/display/GLViewer.hpp | 9 
+- .../cpp/include/display/GenericDisplay.h | 12 +- .../playback/cpp/include/display/ZEDModel.hpp | 0 .../cpp/include/exporter/KMLExporter.h | 0 .../playback/cpp/include/json.hpp | 0 .../playback/cpp/src/GNSSReplay.cpp | 406 +++ .../playback/cpp/src/display/GLViewer.cpp | 82 +- .../cpp/src/display/GenericDisplay.cpp | 62 + .../playback/cpp/src/exporter/KMLExporter.cpp | 0 .../playback/cpp/src/main.cpp | 138 +- .../python/display/generic_display.py | 64 + .../playback/python/display/gl_viewer.py | 8 +- .../playback/python/display}/zed_model.py | 0 .../playback/python/exporter/KMLExporter.py | 9 +- .../playback/python/gnss_replay.py | 235 ++ .../playback/python/playback.py | 128 +- .../playback/python/requirements.txt | 2 + global localization/recording/README.md | 42 + .../recording/cpp/CMakeLists.txt | 2 +- .../recording/cpp/cmake/FindGPS.cmake | 0 .../cpp/include/display/GLViewer.hpp | 18 +- .../cpp/include/display/GenericDisplay.h | 60 + .../cpp/include/display/ZEDModel.hpp | 0 .../cpp/include/exporter/GNSSSaver.h | 7 +- .../cpp/include/exporter/KMLExporter.h | 0 .../cpp/include/exporter/TimestampUtils.h | 0 .../cpp/include/gnss_reader/GPSDReader.hpp | 0 .../cpp/include/gnss_reader/IGNSSReader.h | 0 .../recording/cpp/include/json.hpp | 0 .../recording/cpp/src/display/GLViewer.cpp | 82 +- .../cpp/src/display/GenericDisplay.cpp | 62 + .../recording/cpp/src/exporter/GNSSSaver.cpp | 83 + .../cpp/src/exporter/KMLExporter.cpp | 0 .../cpp/src/exporter/TimestampUtils.cpp | 0 .../cpp/src/gnss_reader/GPSDReader.cpp | 115 +- .../recording/cpp/src/main.cpp | 73 +- .../python/display/generic_display.py | 56 + .../recording/python/display/gl_viewer.py | 2 +- .../recording}/python/display/zed_model.py | 0 .../recording}/python/exporter/KMLExporter.py | 6 +- .../recording/python/exporter/gnss_saver.py | 62 + .../python/gnss_reader/gpsd_reader.py | 96 + .../python/ogl_viewer/tracking_viewer.py | 0 .../recording}/python/ogl_viewer/zed_model.py | 0 .../recording/python/recording.py 
| 40 +- .../recording/python/requirements.txt | 2 + .../birds eye viewer/cpp/include/GLViewer.hpp | 4 +- .../birds eye viewer/cpp/src/GLViewer.cpp | 12 +- .../birds eye viewer/cpp/src/main.cpp | 2 +- .../birds eye viewer/csharp/MainWindow.cs | 2 +- .../csharp/Properties/AssemblyInfo.cs | 2 +- .../birds eye viewer/csharp/TrackingViewer.cs | 22 +- .../cpp/include/GLViewer.hpp | 4 +- .../cpp/src/GLViewer.cpp | 12 +- .../cpp/src/TrackingViewer.cpp | 2 +- .../concurrent detections/cpp/src/main.cpp | 4 +- .../concurrent detections/python/README.md | 26 + ...ncurrent_object_detection_body_tracking.py | 190 ++ .../python/cv_viewer/tracking_viewer.py | 278 ++ .../python/cv_viewer/utils.py | 35 + .../python/ogl_viewer/viewer.py | 473 ++- .../python/ogl_viewer}/zed_model.py | 0 .../opencv_dnn_yolov4/include/GLViewer.hpp | 4 +- .../cpp/opencv_dnn_yolov4/src/GLViewer.cpp | 12 +- .../include/GLViewer.hpp | 4 +- .../src/GLViewer.cpp | 12 +- .../tensorrt_yolov5_v5.0/include/GLViewer.hpp | 4 +- .../cpp/tensorrt_yolov5_v5.0/src/GLViewer.cpp | 12 +- .../tensorrt_yolov5_v6.0/include/GLViewer.hpp | 4 +- .../cpp/tensorrt_yolov5_v6.0/src/GLViewer.cpp | 12 +- .../image viewer/cpp/include/GLViewer.hpp | 4 +- .../image viewer/cpp/src/GLViewer.cpp | 16 +- .../image viewer/cpp/src/main.cpp | 2 +- .../image viewer/csharp/MainWindow.cs | 2 +- .../csharp/Properties/AssemblyInfo.cs | 2 +- .../multi-camera/cpp/CMakeLists.txt | 56 + .../cpp/include/ClientPublisher.hpp | 35 + .../multi-camera/cpp/include/GLViewer.hpp | 330 ++ .../multi-camera/cpp/include/utils.hpp | 56 + .../multi-camera/cpp/src/ClientPublisher.cpp | 119 + .../multi-camera/cpp/src/GLViewer.cpp | 1142 +++++++ .../multi-camera/cpp/src/main.cpp | 195 ++ .../floor plane/cpp/include/GLViewer.hpp | 4 +- .../floor plane/cpp/src/GLViewer.cpp | 16 +- plane detection/floor plane/cpp/src/main.cpp | 2 +- .../plane detection/cpp/include/GLViewer.hpp | 4 +- .../plane detection/cpp/src/GLViewer.cpp | 12 +- .../plane detection/cpp/src/main.cpp | 2 
+- .../csharp/Properties/AssemblyInfo.cs | 2 +- .../export/fbx/cpp/include/utils.hpp | 2 +- .../export/fbx/cpp/src/main.cpp | 2 +- .../positional tracking/README.md | 4 +- .../positional tracking/cpp/CMakeLists.txt | 17 + .../cpp/include/GLViewer.hpp | 9 +- .../positional tracking/cpp/src/GLViewer.cpp | 32 +- .../positional tracking/cpp/src/main.cpp | 120 +- .../positional tracking/csharp/MainWindow.cs | 360 ++- .../csharp/Properties/AssemblyInfo.cs | 2 +- .../python/ogl_viewer/tracking_viewer.py | 68 +- .../python/positional_tracking.py | 78 +- recording/export/README.md | 2 +- recording/export/svo/cpp/include/utils.hpp | 2 +- recording/export/svo/cpp/src/main.cpp | 2 +- recording/export/svo/csharp/Program.cs | 2 +- .../svo/csharp/Properties/AssemblyInfo.cs | 2 +- recording/export/svo/python/svo_export.py | 2 +- .../playback/external_data/cpp/CMakeLists.txt | 40 + .../external_data/cpp/include/utils.hpp | 81 + .../playback/external_data/cpp/src/main.cpp | 127 + .../playback/external_data/csharp/App.config | 6 + .../external_data/csharp/CMakeLists.txt | 33 + .../playback/external_data/csharp/Program.cs | 170 + .../csharp/Properties/AssemblyInfo.cs | 36 + .../external_data/csharp/packages.config | 4 + .../external_data/python/svo_playback.py | 94 + recording/playback/mono/cpp/include/utils.hpp | 2 +- recording/playback/mono/cpp/src/main.cpp | 8 +- recording/playback/mono/csharp/Program.cs | 2 +- .../mono/csharp/Properties/AssemblyInfo.cs | 2 +- .../playback/mono/python/svo_playback.py | 2 +- .../multi camera/cpp/include/utils.hpp | 2 +- .../playback/multi camera/cpp/src/main.cpp | 2 +- .../external_data/cpp/CMakeLists.txt | 51 + .../external_data/cpp/include/utils.hpp | 81 + .../recording/external_data/cpp/src/main.cpp | 166 + .../recording/external_data/csharp/App.config | 6 + .../external_data/csharp/CMakeLists.txt | 34 + .../recording/external_data/csharp/Program.cs | 114 + .../csharp/Properties/AssemblyInfo.cs | 36 + .../external_data/csharp/packages.config | 4 
+ .../external_data/python/svo_recording.py | 74 + recording/recording/mono/cpp/CMakeLists.txt | 2 +- .../recording/mono/cpp/include/utils.hpp | 2 +- recording/recording/mono/cpp/src/main.cpp | 2 +- recording/recording/mono/csharp/Program.cs | 2 +- .../mono/csharp/Properties/AssemblyInfo.cs | 2 +- .../recording/mono/python/svo_recording.py | 2 +- .../multi camera/cpp/include/utils.hpp | 2 +- .../multi camera/cpp/include/GLViewer.hpp | 4 +- .../multi camera/cpp/src/GLViewer.cpp | 16 +- spatial mapping/multi camera/cpp/src/main.cpp | 2 +- .../spatial mapping/cpp/include/GLViewer.hpp | 4 +- .../spatial mapping/cpp/src/GLViewer.cpp | 16 +- .../spatial mapping/cpp/src/main.cpp | 38 +- .../spatial mapping/csharp/MainWindow.cs | 3 +- .../csharp/Properties/AssemblyInfo.cs | 2 +- tutorials/CMakeLists.txt | 5 + tutorials/tutorial 1 - hello ZED/c/main.c | 4 +- tutorials/tutorial 1 - hello ZED/cpp/main.cpp | 2 +- .../csharp/Properties/AssemblyInfo.cs | 2 +- .../python/hello_zed.py | 2 +- tutorials/tutorial 2 - image capture/c/main.c | 3 +- .../tutorial 2 - image capture/cpp/main.cpp | 2 +- .../csharp/Properties/AssemblyInfo.cs | 2 +- tutorials/tutorial 3 - depth sensing/c/main.c | 3 +- .../tutorial 3 - depth sensing/cpp/main.cpp | 2 +- .../csharp/Properties/AssemblyInfo.cs | 2 +- .../tutorial 4 - positional tracking/c/main.c | 4 +- .../cpp/main.cpp | 2 +- .../csharp/Properties/AssemblyInfo.cs | 2 +- .../tutorial 5 - spatial mapping/c/main.c | 5 +- .../tutorial 5 - spatial mapping/cpp/main.cpp | 2 +- .../csharp/Properties/AssemblyInfo.cs | 2 +- .../tutorial 6 - object detection/README.md | 2 +- .../tutorial 6 - object detection/c/main.c | 5 +- .../cpp/README.md | 3 +- .../cpp/main.cpp | 3 +- .../csharp/Properties/AssemblyInfo.cs | 2 +- .../python/README.md | 3 +- tutorials/tutorial 7 - sensor data/c/main.c | 4 +- .../tutorial 7 - sensor data/cpp/main.cpp | 2 +- .../csharp/Properties/AssemblyInfo.cs | 2 +- .../tutorial 8 - body tracking/README.md | 2 +- tutorials/tutorial 8 - 
body tracking/c/main.c | 6 +- .../tutorial 8 - body tracking/cpp/README.md | 3 +- .../tutorial 8 - body tracking/cpp/main.cpp | 4 +- .../csharp/Properties/AssemblyInfo.cs | 2 +- .../csharp/README.md | 2 +- .../python/README.md | 3 +- .../python/body_tracking.py | 1 - .../tutorial 9 - geotracking/cpp/README.md | 11 - .../python/ogl_viewer/tracking_viewer.py | 557 ---- .../python/ogl_viewer/zed_model.py | 2734 ----------------- .../cpp/CMakeLists.txt | 2 +- .../cpp/README.md | 7 + .../cpp/main.cpp | 35 +- .../python/README.md | 17 +- .../python/geotracking.py | 17 +- 271 files changed, 8142 insertions(+), 8990 deletions(-) delete mode 100644 fusion/README.md delete mode 100644 geotracking/README.md delete mode 100644 geotracking/live geotracking/README.md delete mode 100644 geotracking/live geotracking/cpp/src/display/GenericDisplay.cpp delete mode 100644 geotracking/live geotracking/python/display/generic_display.py delete mode 100644 geotracking/live geotracking/python/gnss_reader/gpsd_reader.py delete mode 100644 geotracking/playback/README.md delete mode 100644 geotracking/playback/cpp/include/display/GenericDisplay.h delete mode 100644 geotracking/playback/cpp/src/GNSSReplay.cpp delete mode 100644 geotracking/playback/cpp/src/display/GenericDisplay.cpp delete mode 100644 geotracking/playback/python/display/generic_display.py delete mode 100644 geotracking/playback/python/gnss_replay.py delete mode 100644 geotracking/recording/README.md delete mode 100644 geotracking/recording/cpp/src/display/GenericDisplay.cpp delete mode 100644 geotracking/recording/cpp/src/exporter/GNSSSaver.cpp delete mode 100644 geotracking/recording/python/display/generic_display.py delete mode 100644 geotracking/recording/python/exporter/gnss_saver.py delete mode 100644 geotracking/recording/python/ogl_viewer/tracking_viewer.py delete mode 100644 geotracking/recording/python/ogl_viewer/zed_model.py create mode 100644 global localization/README.md create mode 100644 global 
localization/live/README.md rename {geotracking/live geotracking => global localization/live}/cpp/CMakeLists.txt (81%) rename {geotracking/live geotracking => global localization/live}/cpp/cmake/FindGPS.cmake (100%) rename {geotracking/live geotracking => global localization/live}/cpp/include/display/GLViewer.hpp (95%) rename {geotracking/live geotracking => global localization/live}/cpp/include/display/GenericDisplay.h (78%) rename {geotracking/live geotracking => global localization/live}/cpp/include/display/ZEDModel.hpp (100%) rename {geotracking/live geotracking => global localization/live}/cpp/include/exporter/KMLExporter.h (100%) rename {geotracking/live geotracking => global localization/live}/cpp/include/gnss_reader/GPSDReader.hpp (100%) rename {geotracking/live geotracking => global localization/live}/cpp/include/gnss_reader/IGNSSReader.h (100%) rename {geotracking/live geotracking => global localization/live}/cpp/include/json.hpp (100%) rename {geotracking/live geotracking => global localization/live}/cpp/src/display/GLViewer.cpp (88%) create mode 100644 global localization/live/cpp/src/display/GenericDisplay.cpp rename {geotracking/live geotracking => global localization/live}/cpp/src/exporter/KMLExporter.cpp (100%) rename {geotracking/live geotracking => global localization/live}/cpp/src/gnss_reader/GPSDReader.cpp (51%) rename {geotracking/live geotracking => global localization/live}/cpp/src/main.cpp (82%) create mode 100644 global localization/live/python/display/generic_display.py rename {geotracking/live geotracking => global localization/live}/python/display/gl_viewer.py (99%) rename {geotracking/live geotracking => global localization/live}/python/display/zed_model.py (100%) rename {geotracking/recording => global localization/live}/python/exporter/KMLExporter.py (95%) rename {geotracking/recording => global localization/live}/python/gnss_reader/gpsd_reader.py (84%) rename geotracking/live geotracking/python/geotracking.py => global 
localization/live/python/live.py (85%) create mode 100644 global localization/live/python/requirements.txt create mode 100644 global localization/map server/README.md create mode 100644 global localization/map server/assets/css/index.css create mode 100644 global localization/map server/assets/favicon.ico create mode 100644 global localization/map server/assets/leaflet/images/layers.png create mode 100644 global localization/map server/assets/leaflet/leaflet.css create mode 100644 global localization/map server/assets/leaflet/leaflet.js create mode 100644 global localization/map server/index.html create mode 100644 global localization/playback/README.md rename {geotracking => global localization}/playback/cpp/CMakeLists.txt (79%) rename {geotracking => global localization}/playback/cpp/cmake/FindGPS.cmake (100%) rename {geotracking => global localization}/playback/cpp/include/GNSSReplay.hpp (80%) rename {geotracking/recording => global localization/playback}/cpp/include/display/GLViewer.hpp (95%) rename {geotracking/recording => global localization/playback}/cpp/include/display/GenericDisplay.h (77%) rename {geotracking => global localization}/playback/cpp/include/display/ZEDModel.hpp (100%) rename {geotracking => global localization}/playback/cpp/include/exporter/KMLExporter.h (100%) rename {geotracking => global localization}/playback/cpp/include/json.hpp (100%) create mode 100644 global localization/playback/cpp/src/GNSSReplay.cpp rename {geotracking => global localization}/playback/cpp/src/display/GLViewer.cpp (87%) create mode 100644 global localization/playback/cpp/src/display/GenericDisplay.cpp rename {geotracking => global localization}/playback/cpp/src/exporter/KMLExporter.cpp (100%) rename {geotracking => global localization}/playback/cpp/src/main.cpp (56%) create mode 100644 global localization/playback/python/display/generic_display.py rename {geotracking => global localization}/playback/python/display/gl_viewer.py (99%) rename {geotracking/live 
geotracking/python/ogl_viewer => global localization/playback/python/display}/zed_model.py (100%) rename {geotracking => global localization}/playback/python/exporter/KMLExporter.py (92%) create mode 100644 global localization/playback/python/gnss_replay.py rename {geotracking => global localization}/playback/python/playback.py (71%) create mode 100644 global localization/playback/python/requirements.txt create mode 100644 global localization/recording/README.md rename {geotracking => global localization}/recording/cpp/CMakeLists.txt (98%) rename {geotracking => global localization}/recording/cpp/cmake/FindGPS.cmake (100%) rename {geotracking/playback => global localization/recording}/cpp/include/display/GLViewer.hpp (92%) create mode 100644 global localization/recording/cpp/include/display/GenericDisplay.h rename {geotracking => global localization}/recording/cpp/include/display/ZEDModel.hpp (100%) rename {geotracking => global localization}/recording/cpp/include/exporter/GNSSSaver.h (81%) rename {geotracking => global localization}/recording/cpp/include/exporter/KMLExporter.h (100%) rename {geotracking => global localization}/recording/cpp/include/exporter/TimestampUtils.h (100%) rename {geotracking => global localization}/recording/cpp/include/gnss_reader/GPSDReader.hpp (100%) rename {geotracking => global localization}/recording/cpp/include/gnss_reader/IGNSSReader.h (100%) rename {geotracking => global localization}/recording/cpp/include/json.hpp (100%) rename {geotracking => global localization}/recording/cpp/src/display/GLViewer.cpp (87%) create mode 100644 global localization/recording/cpp/src/display/GenericDisplay.cpp create mode 100644 global localization/recording/cpp/src/exporter/GNSSSaver.cpp rename {geotracking => global localization}/recording/cpp/src/exporter/KMLExporter.cpp (100%) rename {geotracking => global localization}/recording/cpp/src/exporter/TimestampUtils.cpp (100%) rename {geotracking => global 
localization}/recording/cpp/src/gnss_reader/GPSDReader.cpp (56%) rename {geotracking => global localization}/recording/cpp/src/main.cpp (79%) create mode 100644 global localization/recording/python/display/generic_display.py rename {geotracking => global localization}/recording/python/display/gl_viewer.py (99%) rename {geotracking/playback => global localization/recording}/python/display/zed_model.py (100%) rename {geotracking/live geotracking => global localization/recording}/python/exporter/KMLExporter.py (95%) create mode 100644 global localization/recording/python/exporter/gnss_saver.py create mode 100644 global localization/recording/python/gnss_reader/gpsd_reader.py rename {geotracking/live geotracking => global localization/recording}/python/ogl_viewer/tracking_viewer.py (100%) rename {geotracking/playback => global localization/recording}/python/ogl_viewer/zed_model.py (100%) rename {geotracking => global localization}/recording/python/recording.py (86%) create mode 100644 global localization/recording/python/requirements.txt create mode 100644 object detection/concurrent detections/python/README.md create mode 100644 object detection/concurrent detections/python/concurrent_object_detection_body_tracking.py create mode 100644 object detection/concurrent detections/python/cv_viewer/tracking_viewer.py create mode 100644 object detection/concurrent detections/python/cv_viewer/utils.py rename geotracking/playback/python/ogl_viewer/tracking_viewer.py => object detection/concurrent detections/python/ogl_viewer/viewer.py (52%) rename {geotracking/recording/python/display => object detection/concurrent detections/python/ogl_viewer}/zed_model.py (100%) create mode 100644 object detection/multi-camera/cpp/CMakeLists.txt create mode 100644 object detection/multi-camera/cpp/include/ClientPublisher.hpp create mode 100644 object detection/multi-camera/cpp/include/GLViewer.hpp create mode 100644 object detection/multi-camera/cpp/include/utils.hpp create mode 100644 object 
detection/multi-camera/cpp/src/ClientPublisher.cpp create mode 100644 object detection/multi-camera/cpp/src/GLViewer.cpp create mode 100644 object detection/multi-camera/cpp/src/main.cpp create mode 100644 recording/playback/external_data/cpp/CMakeLists.txt create mode 100644 recording/playback/external_data/cpp/include/utils.hpp create mode 100644 recording/playback/external_data/cpp/src/main.cpp create mode 100644 recording/playback/external_data/csharp/App.config create mode 100644 recording/playback/external_data/csharp/CMakeLists.txt create mode 100644 recording/playback/external_data/csharp/Program.cs create mode 100644 recording/playback/external_data/csharp/Properties/AssemblyInfo.cs create mode 100644 recording/playback/external_data/csharp/packages.config create mode 100644 recording/playback/external_data/python/svo_playback.py create mode 100644 recording/recording/external_data/cpp/CMakeLists.txt create mode 100644 recording/recording/external_data/cpp/include/utils.hpp create mode 100644 recording/recording/external_data/cpp/src/main.cpp create mode 100644 recording/recording/external_data/csharp/App.config create mode 100644 recording/recording/external_data/csharp/CMakeLists.txt create mode 100644 recording/recording/external_data/csharp/Program.cs create mode 100644 recording/recording/external_data/csharp/Properties/AssemblyInfo.cs create mode 100644 recording/recording/external_data/csharp/packages.config create mode 100644 recording/recording/external_data/python/svo_recording.py delete mode 100644 tutorials/tutorial 9 - geotracking/cpp/README.md delete mode 100644 tutorials/tutorial 9 - geotracking/python/ogl_viewer/tracking_viewer.py delete mode 100644 tutorials/tutorial 9 - geotracking/python/ogl_viewer/zed_model.py rename tutorials/{tutorial 9 - geotracking => tutorial 9 - global localization}/cpp/CMakeLists.txt (93%) create mode 100644 tutorials/tutorial 9 - global localization/cpp/README.md rename tutorials/{tutorial 9 - geotracking => 
tutorial 9 - global localization}/cpp/main.cpp (89%) rename tutorials/{tutorial 9 - geotracking => tutorial 9 - global localization}/python/README.md (90%) rename tutorials/{tutorial 9 - geotracking => tutorial 9 - global localization}/python/geotracking.py (90%) diff --git a/README.md b/README.md index fd906641..2f6626ec 100644 --- a/README.md +++ b/README.md @@ -32,7 +32,7 @@ --- -:tada: The **ZED SDK 4.0** is released! We support the [**ZED X**](https://www.stereolabs.com/zed-x/) and [**ZED X Mini**](https://www.stereolabs.com/zed-x/) cameras, added the **Fusion API** for multi-camera Body Tracking, and more! Please check the [Release Notes](https://www.stereolabs.com/developers/release/) of the latest version for more details. +:tada: The **ZED SDK 4.1** is released! We support the [**ZED X**](https://www.stereolabs.com/zed-x/) and [**ZED X Mini**](https://www.stereolabs.com/zed-x/) cameras, added the **Fusion API** for multi-camera Body Tracking, and more! Please check the [Release Notes](https://www.stereolabs.com/developers/release/) of the latest version for more details. 
## Overview @@ -40,9 +40,9 @@ Depth Sensing | Object Detection | Body Tracking | :------------: | :----------: | :-------------: | [![Depth Sensing](https://user-images.githubusercontent.com/32394882/230639409-356b8dfa-df66-4bc2-84d8-a25fd0229779.gif)](https://www.stereolabs.com/docs/depth-sensing) | [![Object Detection](https://user-images.githubusercontent.com/32394882/230630901-9d53502a-f3f9-45b6-bf57-027148bb18ad.gif)](https://www.stereolabs.com/docs/object-detection) | [![Body Tracking](https://user-images.githubusercontent.com/32394882/230631989-24dd2b58-2c85-451b-a4ed-558d74d1b922.gif)](https://www.stereolabs.com/docs/body-tracking) | -Positional Tracking | Geo Tracking | Spatial Mapping | +Positional Tracking | Global Localization | Spatial Mapping | :------------: | :----------: | :-------------: | -[![Positional Tracking](https://user-images.githubusercontent.com/32394882/229093429-a445e8ae-7109-4995-bc1d-6a27a61bdb60.gif)](https://www.stereolabs.com/docs/positional-tracking/) | [![GeoTracking](https://user-images.githubusercontent.com/32394882/230602944-ed61e6dd-e485-4911-8a4c-d6c9e4fab0fd.gif)](/geotracking) | [![Spatial Mapping](https://user-images.githubusercontent.com/32394882/229099549-63ca7832-b7a2-42eb-9971-c1635d205b0c.gif)](https://www.stereolabs.com/docs/spatial-mapping) | +[![Positional Tracking](https://user-images.githubusercontent.com/32394882/229093429-a445e8ae-7109-4995-bc1d-6a27a61bdb60.gif)](https://www.stereolabs.com/docs/positional-tracking/) | [![Global Localization](https://user-images.githubusercontent.com/32394882/230602944-ed61e6dd-e485-4911-8a4c-d6c9e4fab0fd.gif)](/global%20localization) | [![Spatial Mapping](https://user-images.githubusercontent.com/32394882/229099549-63ca7832-b7a2-42eb-9971-c1635d205b0c.gif)](https://www.stereolabs.com/docs/spatial-mapping) | Camera Control | Plane Detection | Multi Camera Fusion | :------------: | :----------: | :-------------: | @@ -83,7 +83,7 @@ This repository contains ready-to-use and 
samples to start using the ZED SDK wit * [**Positional Tracking**](/positional%20tracking) - This sample shows how to use **positional tracking** and display the result with *OpenGL*. -* [**GeoTracking**](/geotracking) - This sample shows how to fuse the ZED SDK's **positional tracking with GNSS data** for global positioning. +* [**Global Localization**](/global%20localization) - This sample shows how to fuse the ZED SDK's **positional tracking with GNSS data** for global positioning. * [**Spatial Mapping**](/spatial%20mapping) - This sample shows how to capture **3D meshes** with the ZED and display it with *OpenGL*. Classic Mesh and Point Cloud fusion are available. @@ -127,16 +127,6 @@ Thanks to its comprehensive API, ZED cameras can be interfaced with **multiple t |
|
|
|
|
-## ZED Hub - -Experience ZED like never before with [ZED Hub](https://hub.stereolabs.com), Stereolabs' IoT platform allowing **easy management of camera fleets** and **data aggregation**, fully managed on the cloud. Unlike competitors, ZED Hub offers **plug-and-play** functionality and dedicated features for your applications developed with the ZED SDK. ZED Hub simplifies the **integration of 3D perception**, offering **remote access** and **monitoring** to your cameras . Start for **Free** now! - -
- -
- -
-
## Community diff --git a/body tracking/body tracking/cpp/include/GLViewer.hpp b/body tracking/body tracking/cpp/include/GLViewer.hpp index cd29bf97..d8ab06ab 100644 --- a/body tracking/body tracking/cpp/include/GLViewer.hpp +++ b/body tracking/body tracking/cpp/include/GLViewer.hpp @@ -129,7 +129,7 @@ class Shader { Shader() { } - Shader(GLchar* vs, GLchar* fs); + Shader(const GLchar* vs, const GLchar* fs); ~Shader(); GLuint getProgramId(); @@ -137,7 +137,7 @@ class Shader { static const GLint ATTRIB_COLOR_POS = 1; static const GLint ATTRIB_NORMAL = 2; private: - bool compile(GLuint &shaderId, GLenum type, GLchar* src); + bool compile(GLuint &shaderId, GLenum type, const GLchar* src); GLuint verterxId_; GLuint fragmentId_; GLuint programId_; diff --git a/body tracking/body tracking/cpp/src/GLViewer.cpp b/body tracking/body tracking/cpp/src/GLViewer.cpp index 56a5cdee..c470ddea 100644 --- a/body tracking/body tracking/cpp/src/GLViewer.cpp +++ b/body tracking/body tracking/cpp/src/GLViewer.cpp @@ -5,7 +5,7 @@ #error "This sample should not be built in Debug mode, use RelWithDebInfo if you want to do step by step." 
#endif -GLchar* VERTEX_SHADER = +const GLchar* VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec4 in_Color;\n" @@ -16,7 +16,7 @@ GLchar* VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" "}"; -GLchar* FRAGMENT_SHADER = +const GLchar* FRAGMENT_SHADER = "#version 330 core\n" "in vec4 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" @@ -24,7 +24,7 @@ GLchar* FRAGMENT_SHADER = " out_Color = b_color;\n" "}"; -GLchar* SK_VERTEX_SHADER = +const GLchar* SK_VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec4 in_Color;\n" @@ -41,7 +41,7 @@ GLchar* SK_VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" "}"; -GLchar* SK_FRAGMENT_SHADER = +const GLchar* SK_FRAGMENT_SHADER = "#version 330 core\n" "in vec4 b_color;\n" "in vec3 b_position;\n" @@ -731,7 +731,7 @@ sl::Transform Simple3DObject::getModelMatrix() const { return tmp; } -Shader::Shader(GLchar* vs, GLchar* fs) { +Shader::Shader(const GLchar* vs, const GLchar* fs) { if (!compile(verterxId_, GL_VERTEX_SHADER, vs)) { std::cout << "ERROR: while compiling vertex shader" << std::endl; } @@ -779,7 +779,7 @@ GLuint Shader::getProgramId() { return programId_; } -bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { +bool Shader::compile(GLuint &shaderId, GLenum type, const GLchar* src) { shaderId = glCreateShader(type); if (shaderId == 0) { std::cout << "ERROR: shader type (" << type << ") does not exist" << std::endl; diff --git a/body tracking/body tracking/cpp/src/main.cpp b/body tracking/body tracking/cpp/src/main.cpp index 8642d100..cb681779 100644 --- a/body tracking/body tracking/cpp/src/main.cpp +++ b/body tracking/body tracking/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. 
// @@ -50,7 +50,8 @@ int main(int argc, char **argv) { Camera zed; InitParameters init_parameters; init_parameters.camera_resolution = RESOLUTION::AUTO; - init_parameters.depth_mode = DEPTH_MODE::ULTRA; + init_parameters.depth_mode = isJetson ? DEPTH_MODE::PERFORMANCE : DEPTH_MODE::ULTRA; + //init_parameters.depth_mode = DEPTH_MODE::ULTRA; init_parameters.coordinate_system = COORDINATE_SYSTEM::RIGHT_HANDED_Y_UP; parseArgs(argc, argv, init_parameters); @@ -78,7 +79,7 @@ int main(int argc, char **argv) { // Enable the Body tracking module BodyTrackingParameters body_tracker_params; body_tracker_params.enable_tracking = true; // track people across images flow - body_tracker_params.enable_body_fitting = true; // smooth skeletons moves + body_tracker_params.enable_body_fitting = false; // smooth skeletons moves body_tracker_params.body_format = sl::BODY_FORMAT::BODY_34; body_tracker_params.detection_model = isJetson ? BODY_TRACKING_MODEL::HUMAN_BODY_FAST : BODY_TRACKING_MODEL::HUMAN_BODY_ACCURATE; //body_tracker_params.allow_reduced_precision_inference = true; @@ -111,7 +112,7 @@ int main(int argc, char **argv) { // Configure object detection runtime parameters BodyTrackingRuntimeParameters body_tracker_parameters_rt; - body_tracker_parameters_rt.detection_confidence_threshold = 60; + body_tracker_parameters_rt.detection_confidence_threshold = 40; body_tracker_parameters_rt.skeleton_smoothing = 0.7; // Create ZED Bodies filled in the main loop @@ -221,4 +222,4 @@ void print(string msg_prefix, ERROR_CODE err_code, string msg_suffix) { if (!msg_suffix.empty()) cout << " " << msg_suffix; cout << endl; -} +} \ No newline at end of file diff --git a/body tracking/body tracking/csharp/MainWindow.cs b/body tracking/body tracking/csharp/MainWindow.cs index 26bd3ba4..06ad24ea 100644 --- a/body tracking/body tracking/csharp/MainWindow.cs +++ b/body tracking/body tracking/csharp/MainWindow.cs @@ -1,6 +1,6 @@ 
/////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // diff --git a/body tracking/body tracking/csharp/Properties/AssemblyInfo.cs b/body tracking/body tracking/csharp/Properties/AssemblyInfo.cs index 580aea52..19d0c1a1 100644 --- a/body tracking/body tracking/csharp/Properties/AssemblyInfo.cs +++ b/body tracking/body tracking/csharp/Properties/AssemblyInfo.cs @@ -10,7 +10,7 @@ [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("Tutorials")] -[assembly: AssemblyCopyright("Copyright © 2021")] +[assembly: AssemblyCopyright("Copyright © 2024")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] diff --git a/body tracking/export/JSON export/cpp/include/GLViewer.hpp b/body tracking/export/JSON export/cpp/include/GLViewer.hpp index d1ea4d39..1d6413e5 100644 --- a/body tracking/export/JSON export/cpp/include/GLViewer.hpp +++ b/body tracking/export/JSON export/cpp/include/GLViewer.hpp @@ -129,7 +129,7 @@ class Shader { Shader() { } - Shader(GLchar* vs, GLchar* fs); + Shader(const GLchar* vs, const GLchar* fs); ~Shader(); GLuint getProgramId(); @@ -137,7 +137,7 @@ class Shader { static const GLint ATTRIB_COLOR_POS = 1; static const GLint ATTRIB_NORMAL = 2; private: - bool compile(GLuint &shaderId, GLenum type, GLchar* src); + bool compile(GLuint &shaderId, GLenum type, const GLchar* src); GLuint verterxId_; GLuint fragmentId_; GLuint programId_; diff --git a/body tracking/export/JSON export/cpp/src/GLViewer.cpp b/body tracking/export/JSON export/cpp/src/GLViewer.cpp index 88cfd7ec..74807224 100644 --- a/body tracking/export/JSON export/cpp/src/GLViewer.cpp +++ b/body tracking/export/JSON export/cpp/src/GLViewer.cpp @@ -5,7 +5,7 @@ #error "This sample should not be built in Debug mode, use RelWithDebInfo if you want to do step by step." 
#endif -GLchar* VERTEX_SHADER = +const GLchar* VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec4 in_Color;\n" @@ -16,7 +16,7 @@ GLchar* VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" "}"; -GLchar* FRAGMENT_SHADER = +const GLchar* FRAGMENT_SHADER = "#version 330 core\n" "in vec4 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" @@ -24,7 +24,7 @@ GLchar* FRAGMENT_SHADER = " out_Color = b_color;\n" "}"; -GLchar* SK_VERTEX_SHADER = +const GLchar* SK_VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec4 in_Color;\n" @@ -41,7 +41,7 @@ GLchar* SK_VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" "}"; -GLchar* SK_FRAGMENT_SHADER = +const GLchar* SK_FRAGMENT_SHADER = "#version 330 core\n" "in vec4 b_color;\n" "in vec3 b_position;\n" @@ -720,7 +720,7 @@ sl::Transform Simple3DObject::getModelMatrix() const { return tmp; } -Shader::Shader(GLchar* vs, GLchar* fs) { +Shader::Shader(const GLchar* vs, const GLchar* fs) { if (!compile(verterxId_, GL_VERTEX_SHADER, vs)) { std::cout << "ERROR: while compiling vertex shader" << std::endl; } @@ -768,7 +768,7 @@ GLuint Shader::getProgramId() { return programId_; } -bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { +bool Shader::compile(GLuint &shaderId, GLenum type, const GLchar* src) { shaderId = glCreateShader(type); if (shaderId == 0) { std::cout << "ERROR: shader type (" << type << ") does not exist" << std::endl; diff --git a/body tracking/export/JSON export/cpp/src/main.cpp b/body tracking/export/JSON export/cpp/src/main.cpp index 3000f9ca..442a9cfb 100644 --- a/body tracking/export/JSON export/cpp/src/main.cpp +++ b/body tracking/export/JSON export/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. 
// // All rights reserved. // diff --git a/body tracking/export/fbx export/cpp/include/utils.hpp b/body tracking/export/fbx export/cpp/include/utils.hpp index c3503915..ec878dc4 100644 --- a/body tracking/export/fbx export/cpp/include/utils.hpp +++ b/body tracking/export/fbx export/cpp/include/utils.hpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // diff --git a/body tracking/export/fbx export/cpp/src/main.cpp b/body tracking/export/fbx export/cpp/src/main.cpp index dbac1730..609f29e6 100644 --- a/body tracking/export/fbx export/cpp/src/main.cpp +++ b/body tracking/export/fbx export/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // diff --git a/body tracking/multi-camera/cpp/include/GLViewer.hpp b/body tracking/multi-camera/cpp/include/GLViewer.hpp index 0a366d89..7713f654 100644 --- a/body tracking/multi-camera/cpp/include/GLViewer.hpp +++ b/body tracking/multi-camera/cpp/include/GLViewer.hpp @@ -30,7 +30,7 @@ class Shader { Shader() { } - Shader(GLchar* vs, GLchar* fs); + Shader(const GLchar* vs, const GLchar* fs); ~Shader(); GLuint getProgramId(); @@ -38,7 +38,7 @@ class Shader { static const GLint ATTRIB_COLOR_POS = 1; static const GLint ATTRIB_NORMAL = 2; private: - bool compile(GLuint &shaderId, GLenum type, GLchar* src); + bool compile(GLuint &shaderId, GLenum type, const GLchar* src); GLuint verterxId_; GLuint fragmentId_; GLuint programId_; diff --git a/body tracking/multi-camera/cpp/src/GLViewer.cpp b/body tracking/multi-camera/cpp/src/GLViewer.cpp index 4e6dfc4f..74e0455d 100644 --- a/body tracking/multi-camera/cpp/src/GLViewer.cpp +++ b/body tracking/multi-camera/cpp/src/GLViewer.cpp @@ -1,6 +1,6 @@ #include "GLViewer.hpp" -GLchar* VERTEX_SHADER 
= +const GLchar* VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec3 in_Color;\n" @@ -11,7 +11,7 @@ GLchar* VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" "}"; -GLchar* FRAGMENT_SHADER = +const GLchar* FRAGMENT_SHADER = "#version 330 core\n" "in vec3 b_color;\n" "layout(location = 0) out vec4 color;\n" @@ -20,7 +20,7 @@ GLchar* FRAGMENT_SHADER = "}"; -GLchar* POINTCLOUD_VERTEX_SHADER = +const GLchar* POINTCLOUD_VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec4 in_VertexRGBA;\n" "out vec4 b_color;\n" @@ -39,7 +39,7 @@ GLchar* POINTCLOUD_VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_VertexRGBA.xyz, 1);\n" "}"; -GLchar* POINTCLOUD_FRAGMENT_SHADER = +const GLchar* POINTCLOUD_FRAGMENT_SHADER = "#version 330 core\n" "in vec4 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" @@ -47,7 +47,7 @@ GLchar* POINTCLOUD_FRAGMENT_SHADER = " out_Color = b_color;\n" "}"; -GLchar* VERTEX_SHADER_TEXTURE = +const GLchar* VERTEX_SHADER_TEXTURE = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec2 in_UVs;\n" @@ -58,7 +58,7 @@ GLchar* VERTEX_SHADER_TEXTURE = " UV = in_UVs;\n" "}\n"; -GLchar* FRAGMENT_SHADER_TEXTURE = +const GLchar* FRAGMENT_SHADER_TEXTURE = "#version 330 core\n" "in vec2 UV;\n" "uniform sampler2D texture_sampler;\n" @@ -651,7 +651,7 @@ void Simple3DObject::draw() { glBindVertexArray(0); } -Shader::Shader(GLchar* vs, GLchar* fs) { +Shader::Shader(const GLchar* vs, const GLchar* fs) { if (!compile(verterxId_, GL_VERTEX_SHADER, vs)) { std::cout << "ERROR: while compiling vertex shader" << std::endl; } @@ -699,7 +699,7 @@ GLuint Shader::getProgramId() { return programId_; } -bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { +bool Shader::compile(GLuint &shaderId, GLenum type, const GLchar* src) { shaderId = glCreateShader(type); if (shaderId == 0) { std::cout << "ERROR: shader type (" << type << ") does 
not exist" << std::endl; @@ -727,7 +727,7 @@ bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { return true; } -GLchar* IMAGE_FRAGMENT_SHADER = +const GLchar* IMAGE_FRAGMENT_SHADER = "#version 330 core\n" " in vec2 UV;\n" " out vec4 color;\n" @@ -739,7 +739,7 @@ GLchar* IMAGE_FRAGMENT_SHADER = " color = vec4(color_rgb,1);\n" "}"; -GLchar* IMAGE_VERTEX_SHADER = +const GLchar* IMAGE_VERTEX_SHADER = "#version 330\n" "layout(location = 0) in vec3 vert;\n" "out vec2 UV;" diff --git a/body tracking/multi-camera/cpp/src/main.cpp b/body tracking/multi-camera/cpp/src/main.cpp index af28c530..8e09135d 100644 --- a/body tracking/multi-camera/cpp/src/main.cpp +++ b/body tracking/multi-camera/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // @@ -25,6 +25,13 @@ int main(int argc, char **argv) { + +#ifdef _SL_JETSON_ + const bool isJetson = true; +#else + const bool isJetson = false; +#endif + if (argc != 2) { // this file should be generated by using the tool ZED360 std::cout << "Need a Configuration file in input" << std::endl; @@ -115,7 +122,7 @@ int main(int argc, char **argv) { // we enable the Body Tracking module with its options sl::BodyTrackingFusionParameters body_fusion_init_params; body_fusion_init_params.enable_tracking = true; - body_fusion_init_params.enable_body_fitting = false; // skeletons will looks more natural but requires more computations + body_fusion_init_params.enable_body_fitting = !isJetson; // skeletons will looks more natural but requires more computations fusion.enableBodyTracking(body_fusion_init_params); // define fusion behavior diff --git a/camera control/cpp/src/main.cpp b/camera control/cpp/src/main.cpp index cb393434..4ad6ce56 100644 --- a/camera control/cpp/src/main.cpp +++ b/camera control/cpp/src/main.cpp @@ -1,6 +1,6 @@ 
/////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // @@ -100,6 +100,7 @@ int main(int argc, char **argv) { init_parameters.camera_resolution= sl::RESOLUTION::AUTO; init_parameters.depth_mode = sl::DEPTH_MODE::NONE; // no depth computation required here init_parameters.async_grab_camera_recovery = true; + init_parameters.enable_image_validity_check = true; parseArgs(argc,argv, init_parameters); // Open the camera @@ -137,9 +138,13 @@ int main(int argc, char **argv) { while (key != 'q') { // Check that a new image is successfully acquired returned_state = zed.grab(); - if (returned_state == ERROR_CODE::SUCCESS) { + if (returned_state != ERROR_CODE::SUCCESS) + std::cout << "returned_state " << returned_state << std::endl; + int current_value=10; + zed.getCameraSettings(VIDEO_SETTINGS::EXPOSURE, current_value); + if (1/*returned_state == ERROR_CODE::SUCCESS)*/) { // Retrieve left image - zed.retrieveImage(zed_image, VIEW::LEFT); + zed.retrieveImage(zed_image, VIEW::SIDE_BY_SIDE); // Convert sl::Mat to cv::Mat (share buffer) cv::Mat cvImage = cv::Mat((int) zed_image.getHeight(), (int) zed_image.getWidth(), CV_8UC4, zed_image.getPtr(sl::MEM::CPU)); diff --git a/camera control/csharp/Program.cs b/camera control/csharp/Program.cs index ddea6427..280c671a 100644 --- a/camera control/csharp/Program.cs +++ b/camera control/csharp/Program.cs @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. 
// diff --git a/camera control/csharp/Properties/AssemblyInfo.cs b/camera control/csharp/Properties/AssemblyInfo.cs index 580aea52..19d0c1a1 100644 --- a/camera control/csharp/Properties/AssemblyInfo.cs +++ b/camera control/csharp/Properties/AssemblyInfo.cs @@ -10,7 +10,7 @@ [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("Tutorials")] -[assembly: AssemblyCopyright("Copyright © 2021")] +[assembly: AssemblyCopyright("Copyright © 2024")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] diff --git a/camera streaming/receiver/cpp/src/main.cpp b/camera streaming/receiver/cpp/src/main.cpp index d606d078..f59c90eb 100644 --- a/camera streaming/receiver/cpp/src/main.cpp +++ b/camera streaming/receiver/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // diff --git a/camera streaming/sender/cpp/include/utils.hpp b/camera streaming/sender/cpp/include/utils.hpp index 9a3b7ba5..3dcbb93e 100644 --- a/camera streaming/sender/cpp/include/utils.hpp +++ b/camera streaming/sender/cpp/include/utils.hpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // diff --git a/camera streaming/sender/cpp/src/main.cpp b/camera streaming/sender/cpp/src/main.cpp index 1f6cf302..7ebe5314 100644 --- a/camera streaming/sender/cpp/src/main.cpp +++ b/camera streaming/sender/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. 
// diff --git a/depth sensing/automatic region of interest/cpp/src/main.cpp b/depth sensing/automatic region of interest/cpp/src/main.cpp index 00d7482d..1e866e32 100644 --- a/depth sensing/automatic region of interest/cpp/src/main.cpp +++ b/depth sensing/automatic region of interest/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // @@ -58,6 +58,10 @@ int main(int argc, char **argv) { return EXIT_FAILURE; } + PositionalTrackingParameters tracking_parameters; + tracking_parameters.mode = sl::POSITIONAL_TRACKING_MODE::GEN_2; + zed.enablePositionalTracking(tracking_parameters); + cv::String imWndName = "Image"; cv::String depthWndName = "Depth"; cv::String ROIWndName = "ROI"; @@ -79,13 +83,12 @@ int main(int argc, char **argv) { cv::Mat cvDepthImage(resolution.height, resolution.width, CV_8UC4, zed_depth_image.getPtr(MEM::CPU)); std::string mask_name = "Mask.png"; - Mat mask_roi(resolution, MAT_TYPE::U8_C1); cv::Mat cvMaskROI(resolution.height, resolution.width, CV_8UC1, mask_roi.getPtr(MEM::CPU)); bool roi_running = false; sl::RegionOfInterestParameters roi_param; - roi_param.auto_apply = true; + //roi_param.auto_apply_module = {sl::MODULE::ALL}; roi_param.depth_far_threshold_meters = 2.5; roi_param.image_height_ratio_cutoff = 0.5; zed.startRegionOfInterestAutoDetection(roi_param); diff --git a/depth sensing/depth sensing/cpp/include/GLViewer.hpp b/depth sensing/depth sensing/cpp/include/GLViewer.hpp index af149a79..e2099b91 100644 --- a/depth sensing/depth sensing/cpp/include/GLViewer.hpp +++ b/depth sensing/depth sensing/cpp/include/GLViewer.hpp @@ -99,14 +99,14 @@ class Shader { public: Shader() {} - Shader(GLchar* vs, GLchar* fs); + Shader(const GLchar* vs, const GLchar* fs); ~Shader(); GLuint getProgramId(); static const GLint ATTRIB_VERTICES_POS = 0; static const GLint ATTRIB_COLOR_POS = 1; private: 
- bool compile(GLuint &shaderId, GLenum type, GLchar* src); + bool compile(GLuint &shaderId, GLenum type, const GLchar* src); GLuint verterxId_; GLuint fragmentId_; GLuint programId_; diff --git a/depth sensing/depth sensing/cpp/src/GLViewer.cpp b/depth sensing/depth sensing/cpp/src/GLViewer.cpp index ee676008..ce46d16a 100644 --- a/depth sensing/depth sensing/cpp/src/GLViewer.cpp +++ b/depth sensing/depth sensing/cpp/src/GLViewer.cpp @@ -21,7 +21,7 @@ void print(std::string msg_prefix, sl::ERROR_CODE err_code, std::string msg_suff -GLchar* VERTEX_SHADER = +const GLchar* VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec3 in_Color;\n" @@ -32,7 +32,7 @@ GLchar* VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" "}"; -GLchar* FRAGMENT_SHADER = +const GLchar* FRAGMENT_SHADER = "#version 330 core\n" "in vec3 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" @@ -428,7 +428,7 @@ sl::Transform Simple3DObject::getModelMatrix() const { return tmp; } -Shader::Shader(GLchar* vs, GLchar* fs) { +Shader::Shader(const GLchar* vs, const GLchar* fs) { if (!compile(verterxId_, GL_VERTEX_SHADER, vs)) { print("ERROR: while compiling vertex shader"); } @@ -476,7 +476,7 @@ GLuint Shader::getProgramId() { return programId_; } -bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { +bool Shader::compile(GLuint &shaderId, GLenum type, const GLchar* src) { shaderId = glCreateShader(type); if (shaderId == 0) { return false; @@ -503,7 +503,7 @@ bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { return true; } -GLchar* POINTCLOUD_VERTEX_SHADER = +const GLchar* POINTCLOUD_VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec4 in_VertexRGBA;\n" "uniform mat4 u_mvpMatrix;\n" @@ -516,7 +516,7 @@ GLchar* POINTCLOUD_VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_VertexRGBA.xyz, 1);\n" "}"; -GLchar* POINTCLOUD_FRAGMENT_SHADER = +const GLchar* POINTCLOUD_FRAGMENT_SHADER 
= "#version 330 core\n" "in vec4 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" diff --git a/depth sensing/depth sensing/cpp/src/main.cpp b/depth sensing/depth sensing/cpp/src/main.cpp index 593d5138..723514ea 100644 --- a/depth sensing/depth sensing/cpp/src/main.cpp +++ b/depth sensing/depth sensing/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // @@ -33,16 +33,16 @@ using namespace std; using namespace sl; -void parseArgs(int argc, char **argv, sl::InitParameters& param); +std::string parseArgs(int argc, char **argv, sl::InitParameters& param); int main(int argc, char **argv) { Camera zed; // Set configuration parameters for the ZED InitParameters init_parameters; - init_parameters.depth_mode = DEPTH_MODE::ULTRA; + init_parameters.depth_mode = DEPTH_MODE::NEURAL; init_parameters.coordinate_system = COORDINATE_SYSTEM::RIGHT_HANDED_Y_UP; // OpenGL's coordinate system is right_handed init_parameters.sdk_verbose = 1; - parseArgs(argc, argv, init_parameters); + auto mask_path = parseArgs(argc, argv, init_parameters); // Open the camera auto returned_state = zed.open(init_parameters); @@ -51,6 +51,16 @@ int main(int argc, char **argv) { return EXIT_FAILURE; } + // Load optional region of interest to exclude irrelevant area of the image + if(!mask_path.empty()) { + sl::Mat mask_roi; + auto err = mask_roi.read(mask_path.c_str()); + if(err == sl::ERROR_CODE::SUCCESS) + zed.setRegionOfInterest(mask_roi, {MODULE::ALL}); + else + std::cout << "Error loading Region of Interest file: " << err << std::endl; + } + auto camera_config = zed.getCameraInformation().camera_configuration; float image_aspect_ratio = camera_config.resolution.width / (1.f * camera_config.resolution.height); int requested_low_res_w = min(720, (int)camera_config.resolution.width); @@ -69,7 +79,7 @@ int main(int argc, char **argv) { 
RuntimeParameters runParameters; // Setting the depth confidence parameters - runParameters.confidence_threshold = 50; + runParameters.confidence_threshold = 100; runParameters.texture_confidence_threshold = 100; // Allocation of 4 channels of float on GPU @@ -103,7 +113,23 @@ int main(int argc, char **argv) { return EXIT_SUCCESS; } -void parseArgs(int argc, char **argv, sl::InitParameters& param) { +inline int findImageExtension(int argc, char **argv) { + int arg_idx=-1; + int arg_idx_search = 0; + if (argc > 2) arg_idx_search=2; + else if(argc > 1) arg_idx_search=1; + + if(arg_idx_search > 0 && (string(argv[arg_idx_search]).find(".png") != string::npos || + string(argv[arg_idx_search]).find(".jpg") != string::npos)) + arg_idx = arg_idx_search; + return arg_idx; +} + + +std::string parseArgs(int argc, char **argv, sl::InitParameters& param) { + int mask_arg = findImageExtension(argc, argv); + std::string mask_path; + if (argc > 1 && string(argv[1]).find(".svo") != string::npos) { // SVO input mode param.input.setFromSVOFile(argv[1]); @@ -139,7 +165,12 @@ void parseArgs(int argc, char **argv, sl::InitParameters& param) { param.camera_resolution = RESOLUTION::VGA; cout << "[Sample] Using Camera in resolution VGA" << endl; } - } else { - // Default + } + + if (mask_arg > 0) { + mask_path = string(argv[mask_arg]); + cout << "[Sample] Using Region of Interest from file : " << mask_path << endl; } + + return mask_path; } diff --git a/depth sensing/depth sensing/csharp/MainWindow.cs b/depth sensing/depth sensing/csharp/MainWindow.cs index 6bc19085..7632a7c1 100644 --- a/depth sensing/depth sensing/csharp/MainWindow.cs +++ b/depth sensing/depth sensing/csharp/MainWindow.cs @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. 
// diff --git a/depth sensing/depth sensing/csharp/Properties/AssemblyInfo.cs b/depth sensing/depth sensing/csharp/Properties/AssemblyInfo.cs index 580aea52..19d0c1a1 100644 --- a/depth sensing/depth sensing/csharp/Properties/AssemblyInfo.cs +++ b/depth sensing/depth sensing/csharp/Properties/AssemblyInfo.cs @@ -10,7 +10,7 @@ [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("Tutorials")] -[assembly: AssemblyCopyright("Copyright © 2021")] +[assembly: AssemblyCopyright("Copyright © 2024")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] diff --git a/depth sensing/export/cpp/main.cpp b/depth sensing/export/cpp/main.cpp index 4daeb306..8013ff40 100644 --- a/depth sensing/export/cpp/main.cpp +++ b/depth sensing/export/cpp/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // diff --git a/depth sensing/image refocus/cpp/src/main.cpp b/depth sensing/image refocus/cpp/src/main.cpp index 59f9a627..2e52e7c2 100644 --- a/depth sensing/image refocus/cpp/src/main.cpp +++ b/depth sensing/image refocus/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // diff --git a/depth sensing/manual region of interest/cpp/src/main.cpp b/depth sensing/manual region of interest/cpp/src/main.cpp index 820d3c39..a6e4b5f2 100644 --- a/depth sensing/manual region of interest/cpp/src/main.cpp +++ b/depth sensing/manual region of interest/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. 
// @@ -41,25 +41,49 @@ using namespace sl; void print(string msg_prefix, ERROR_CODE err_code, string msg_suffix); void parseArgs(int argc, char **argv, InitParameters& param); -#define SELECT_RECT 1 +struct ROIdata +{ + const int radius = 50; + cv::Point2i last_pt; + cv::Mat mask, seeds, image; + bool selectInProgress_frgrnd = false; + bool selectInProgress_backgrnd = false; + bool isInit = false; + cv::Mat im_bgr, frgrnd, bckgrnd; + + void init(sl::Resolution resolution){ + mask = cv::Mat(resolution.height, resolution.width, CV_8UC1); + mask.setTo(0); + seeds = cv::Mat(resolution.height, resolution.width, CV_8UC1); + seeds.setTo(cv::GrabCutClasses::GC_PR_BGD); + image = cv::Mat(resolution.height, resolution.width, CV_8UC4); + image.setTo(127); + isInit = false; + frgrnd.release(); + bckgrnd.release(); + } + + void set(bool background, cv::Point current_pt){ + cv::line(seeds, current_pt, last_pt, cv::Scalar(background ? cv::GrabCutClasses::GC_BGD: cv::GrabCutClasses::GC_PR_FGD), radius); + cv::line(image, current_pt, last_pt, cv::Scalar(background ? cv::Scalar::all(0) : cv::Scalar::all(255)), radius); + last_pt = current_pt; + } -#if SELECT_RECT + void updateImage(cv::Mat &im){ + cv::addWeighted(image, 0.5, im, 0.5, 0, im); + } -struct ROIdata { - // Current ROI, 0: means discard, other value will keep the pixel - cv::Mat ROI; - cv::Rect selection_rect; - cv::Point origin_rect; - bool selectInProgress = false; - bool selection = false; + void compute(cv::Mat &cvImage){ + cv::cvtColor(cvImage, im_bgr, cv::COLOR_BGRA2BGR); + cv::Mat seeds_cpy; + seeds.copyTo(seeds_cpy); + cv::grabCut(im_bgr, seeds_cpy, cv::Rect(0,0,im_bgr.cols, im_bgr.rows), frgrnd, bckgrnd, 1,isInit ? 
cv::GrabCutModes::GC_EVAL : cv::GrabCutModes::GC_INIT_WITH_MASK); - void reset(bool full = true) { - selectInProgress = false; - selection_rect = cv::Rect(0, 0, 0, 0); - if (full) { - ROI.setTo(0); - selection = false; - } + mask.setTo(255); + mask.setTo(0, seeds_cpy & 1); + cv::erode(mask, mask,cv::Mat(5,5,CV_8UC1)); + + isInit = true; } }; @@ -68,75 +92,38 @@ static void onMouse(int event, int x, int y, int, void* data) { switch (event) { case cv::EVENT_LBUTTONDOWN: { - pdata->origin_rect = cv::Point(x, y); - pdata->selectInProgress = true; + pdata->last_pt = cv::Point(x, y); + pdata->selectInProgress_frgrnd = true; break; } case cv::EVENT_LBUTTONUP: { - pdata->selectInProgress = false; - // set ROI to valid for the given rectangle - cv::rectangle(pdata->ROI, pdata->selection_rect, cv::Scalar(250), -1); - pdata->selection = true; + pdata->selectInProgress_frgrnd = false; break; } + case cv::EVENT_RBUTTONDOWN: { - pdata->reset(false); + pdata->last_pt = cv::Point(x, y); + pdata->selectInProgress_backgrnd = true; + break; + } + case cv::EVENT_RBUTTONUP: + { + pdata->selectInProgress_backgrnd = false; break; } - } - - if (pdata->selectInProgress) { - pdata->selection_rect.x = MIN(x, pdata->origin_rect.x); - pdata->selection_rect.y = MIN(y, pdata->origin_rect.y); - pdata->selection_rect.width = abs(x - pdata->origin_rect.x) + 1; - pdata->selection_rect.height = abs(y - pdata->origin_rect.y) + 1; - } -} -#else - -struct ROIdata { - // Current ROI, 0: means discard, other value will keep the pixel - cv::Mat ROI; - std::vector> polygons; - std::vector current_select; - bool selection = false; - bool selectInProgress = false; - - void reset() { - polygons.clear(); - ROI.setTo(0); - selection = true; - selectInProgress = false; - } -}; -static void onMouse(int event, int x, int y, int, void* data) { - auto pdata = reinterpret_cast (data); - switch (event) { - case cv::EVENT_LBUTTONDOWN : - pdata->selectInProgress = true; - break; - case cv::EVENT_MOUSEMOVE: - if 
(pdata->selectInProgress) - pdata->current_select.push_back(cv::Point(x, y)); - break; - case cv::EVENT_LBUTTONUP: - if (pdata->current_select.size() > 2) { - pdata->polygons.push_back(pdata->current_select); - pdata->current_select.clear(); + case cv::EVENT_MOUSEMOVE: + { + if(pdata->selectInProgress_backgrnd) + pdata->set(true, cv::Point(x, y)); + + if(pdata->selectInProgress_frgrnd) + pdata->set(false, cv::Point(x, y)); } - pdata->selectInProgress = false; - break; - case cv::EVENT_RBUTTONDOWN: - pdata->reset(); - break; } } -#endif - -void applyMask(cv::Mat& cvImage, ROIdata& data); int main(int argc, char **argv) { @@ -144,7 +131,7 @@ int main(int argc, char **argv) { Camera zed; InitParameters init_parameters; - init_parameters.camera_resolution = RESOLUTION::AUTO; + init_parameters.depth_mode = sl::DEPTH_MODE::NEURAL; parseArgs(argc, argv, init_parameters); // Open the camera @@ -161,27 +148,23 @@ int main(int argc, char **argv) { cv::namedWindow(ROIWndName, cv::WINDOW_NORMAL); cv::namedWindow(depthWndName, cv::WINDOW_NORMAL); -#if SELECT_RECT - std::cout << "Draw some rectangles on the left image with a left click\n"; -#else - std::cout << "Draw some shapes on the left image with a left click\n"; -#endif - std::cout << "Press 'a' to apply the ROI\n" + std::cout << + "Press LeftButton (and keep it pressed) to select foreground seeds\n" + "Press RightButton (and keep it pressed) to select background seeds\n" + "Press 'a' to apply the ROI\n" "Press 'r' to reset the ROI\n" "Press 's' to save the ROI as image file to reload it later\n" - "Press 'l' to load the ROI from an image file" << std::endl; + << std::endl; auto resolution = zed.getCameraInformation().camera_configuration.resolution; // Create a Mat to store images Mat zed_image(resolution, MAT_TYPE::U8_C4); cv::Mat cvImage(resolution.height, resolution.width, CV_8UC4, zed_image.getPtr(MEM::CPU)); - Mat zed_depth_image(resolution, MAT_TYPE::U8_C4); cv::Mat cvDepthImage(resolution.height, resolution.width,
CV_8UC4, zed_depth_image.getPtr(MEM::CPU)); ROIdata roi_data; - roi_data.ROI = cv::Mat(resolution.height, resolution.width, CV_8UC1); - roi_data.reset(); + roi_data.init(resolution); // set Mouse Callback to handle User inputs cv::setMouseCallback(imWndName, onMouse, &roi_data); @@ -198,39 +181,32 @@ int main(int argc, char **argv) { zed.retrieveImage(zed_image, VIEW::LEFT); zed.retrieveImage(zed_depth_image, VIEW::DEPTH); - // Draw rectangle on the image - if (roi_data.selection) - applyMask(cvImage, roi_data); + roi_data.updateImage(cvImage); cv::imshow(imWndName, cvImage); //Display the image and the current global ROI cv::imshow(depthWndName, cvDepthImage); - cv::imshow(ROIWndName, roi_data.ROI); + cv::imshow(ROIWndName, roi_data.mask); } - key = cv::waitKey(15); + key = cv::waitKey(10); // Apply Current ROI if (key == 'a') { - Mat slROI(resolution, MAT_TYPE::U8_C1, roi_data.ROI.data, roi_data.ROI.step); + zed.retrieveImage(zed_image, VIEW::LEFT); + roi_data.compute(cvImage); + + Mat slROI(resolution, MAT_TYPE::U8_C1, roi_data.mask.data, roi_data.mask.step); zed.setRegionOfInterest(slROI); } else if (key == 'r') { //Reset ROI Mat emptyROI; zed.setRegionOfInterest(emptyROI); // clear user data - roi_data.reset(); } else if (key == 's') { // Save the current Mask to be loaded in another app - cv::imwrite(mask_name, roi_data.ROI); - } else if (key == 'l') { - // Load the mask from a previously saved file - cv::Mat tmp = cv::imread(mask_name); - if (!tmp.empty()) { - roi_data.ROI = tmp; - Mat slROI(resolution, MAT_TYPE::U8_C1, roi_data.ROI.data, roi_data.ROI.step); - zed.setRegionOfInterest(slROI); - } else std::cout << mask_name << " could not be found" << std::endl; - } + cv::imwrite(mask_name, roi_data.mask); + }else if (key == 'r') + roi_data.init(resolution); } // Exit @@ -293,88 +269,3 @@ void parseArgs(int argc, char **argv, InitParameters& param) { // Default } } - - -#if SELECT_RECT -void applyMask(cv::Mat& cvImage, ROIdata& data) { - auto res = 
cvImage.size(); - const float darker = 0.8f; // make the image darker - - for (int y = 0; y < res.height; y++) { - // line pointer - uchar * ptr_mask = (uchar *) ((data.ROI.data) + y * data.ROI.step); - sl::uchar4* ptr_image = (sl::uchar4*) (cvImage.data + y * cvImage.step); - - for (int x = 0; x < res.width; x++) { - if (ptr_mask[x] == 0) { - auto &px = ptr_image[x]; - // make the pixel darker without overflow - px.x = px.x * darker; - px.y = px.y * darker; - px.z = px.z * darker; - } - } - } - - // DIsplay current selection - cv::rectangle(cvImage, data.selection_rect, cv::Scalar(255, 90, 0, 255), 3); -} -#else - -inline bool contains(std::vector& poly, cv::Point2f test) { - int i, j; - bool c = false; - const int nvert = poly.size(); - for (i = 0, j = nvert - 1; i < nvert; j = i++) { - if (((poly[i].y > test.y) != (poly[j].y > test.y)) && - (test.x < (poly[j].x - poly[i].x) * (test.y - poly[i].y) / (poly[j].y - poly[i].y) + poly[i].x)) - c = !c; - } - return c; -} - -inline bool contains(std::vector>& polygons, cv::Point2f test) { - bool c = false; - for (auto& it : polygons) { - c = contains(it, test); - if (c) break; - } - return c; -} - -void applyMask(cv::Mat& cvImage, ROIdata &data) { - // left_sl and mask must be at the same size - auto res = cvImage.size(); - const float darker = 0.8f; // make the image darker - - // Convert P�lygons into real Mask -#if 1 // manual check - for (int y = 0; y < res.height; y++) { - uchar* ptr_mask = (uchar*)((data.ROI.data) + y * data.ROI.step); - sl::uchar4* ptr_image = (sl::uchar4*)(cvImage.data+ y * cvImage.step); - for (int x = 0; x < res.width; x++) { - if (contains(data.polygons, cv::Point2f(x,y))) - ptr_mask[x] = 255; - else { - auto& px = ptr_image[x]; - // make the pixel darker without overflow - px.x = px.x * darker; - px.y = px.y * darker; - px.z = px.z * darker; - } - } - } -#else // same with open Function - cv::fillPoly(data.ROI, data.polygons, 255); -#endif - - // Display current selection - if 
(data.current_select.size() > 2) { - auto last = data.current_select.back(); - for (auto& it : data.current_select) { - cv::line(cvImage, last, it, cv::Scalar(30, 130, 240), 1); - last = it; - } - } -} -#endif diff --git a/depth sensing/multi camera/cpp/src/main.cpp b/depth sensing/multi camera/cpp/src/main.cpp index 764fb310..b071f4f9 100644 --- a/depth sensing/multi camera/cpp/src/main.cpp +++ b/depth sensing/multi camera/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // diff --git a/fusion/README.md b/fusion/README.md deleted file mode 100644 index f62b569e..00000000 --- a/fusion/README.md +++ /dev/null @@ -1,27 +0,0 @@ -# Fusion Samples - -The ZED SDK's Fusion API is designed to combine data from multiple cameras, resulting in higher quality data. The API can fuse data from several cameras to improve the accuracy and robustness of tracking systems. - -For instance, the Fusion API can be used in outdoor robot tracking with GNSS to provide real-time fusion of the 3D position and orientation of the robot, even in challenging environments. Additionally, the API can be used with the ZED Camera's body tracking feature to fuse data from multiple cameras to track an entire space with much higher quality. This capability enables a range of applications that require accurate spatial tracking, such as robotics, autonomous vehicles, augmented reality, and virtual reality. - - -## Overview - -This section lists the available modules available in the **Fusion API**. It provides a convenient way to discover and access additional resources related to the Fusion API, including examples, tutorials, and integrations with other software platforms. 
These resources can be used to further explore the capabilities of the Fusion API and to build more sophisticated applications that leverage the data fusion capabilities of the ZED Camera. - -## Body tracking - -

- -

- - -The [Multi camera Body Tracking sample](/body%20tracking/multi-camera/) demonstrates how to combine multiple body detections from an array of cameras to create a more accurate and robust representation of the detected bodies. By fusing data from multiple cameras, the sample can improve the accuracy and robustness of the body tracking system, especially in challenging environments with occlusions or complex motions. The sample showcases the capabilities of the ZED SDK's Fusion API and provides a starting point for building more sophisticated applications that require multi-camera body tracking. - -## GeoTracking - -

- -

- -The [GeoTracking sample](/geotracking/) demonstrates how to combine data from the ZED Camera and a Global Navigation Satellite System (GNSS) receiver for outdoor tracking applications. The sample showcases the Fusion API of the ZED SDK and provides an example of how to use it to integrate data from multiple sources, such as the camera and GNSS receiver. By fusing data from these sources, the sample can improve the accuracy and robustness of the tracking system, especially in challenging outdoor environments. The sample provides a starting point for building more sophisticated applications that require outdoor tracking with the ZED Camera and GNSS. diff --git a/geotracking/README.md b/geotracking/README.md deleted file mode 100644 index 0e8c74a4..00000000 --- a/geotracking/README.md +++ /dev/null @@ -1,20 +0,0 @@ -# Geotracking - -These samples show how to use the ZED SDK Geotracking module for **global scale localization on a real-world map**. - -

- -

- -## Overview - -The samples provided using the Geotracking API are organized as follows: - -- [Live Geotracking](./live%20geotracking/) The Live Geotracking sample demonstrates how to use the Geotracking API using both the ZED camera and an external GNSS sensor. It displays the corrected positional tracking in the ZED reference frame on an OpenGL window and the geo-position on a real-world map in ZEDHub. - -- [Geotracking Data Recording](./recording/): The Recording sample demonstrates how to **record data** from both a ZED camera and an external GNSS sensor. The recorded data is saved in an SVO file and a JSON file, respectively. This sample provides the necessary data to be used by the Playback sample. - -- [Geotracking Data Playback](./playback/): The Playback sample shows how to use the geotracking API for global scale localization on a real-world map. It takes the data generated by the Recording sample and uses it to display geo-positions on a real-world map. - - - diff --git a/geotracking/live geotracking/README.md b/geotracking/live geotracking/README.md deleted file mode 100644 index 25ce846f..00000000 --- a/geotracking/live geotracking/README.md +++ /dev/null @@ -1,29 +0,0 @@ -# Live Geotracking Sample - -## Overview - -This sample demonstrates how to use the ZED SDK Geotracking module to achieve **global scale localization** on a real-world map using the ZED camera. The ZED SDK Live Geotracking sample fuses visual odometry from the ZED SDK with external GNSS data in real-time, making it a valuable resource for applications such as autonomous robotics and drone navigation. 
- -## Features - -- Displays the camera's path in an OpenGL window in 3D -- Displays path data, including translation and rotation -- Displays the fused path on a map on ZED Hub -- Exports KML files for the fused trajectory and raw GNSS data - -## Dependencies - -Before using this sample, ensure that you have the following dependencies installed on your system: -- ZED Hub Edge Agent: to be able to display the computed trajectory on a real-world map, connect your device to [ZED Hub](https://hub.stereolabs.com/). Detailed tutorials can be found [here](https://www.stereolabs.com/docs/cloud/overview/setup-device/). -- libgps-dev: required to use an external GNSS sensor. - -## Installation and Usage - -To use the ZED SDK Geotracking sample, follow these steps: -1. Download and install the ZED SDK on your system from the official [Stereolabs website](https://www.stereolabs.com/developers/release/). -2. Install Edge Agent from [ZED Hub](https://hub.stereolabs.com/) and the libgps-dev dependency using your operating system's package manager. -3. Connect your ZED camera and GNSS sensor to your computer. -4. Open a terminal and navigate to the zed-geotracking sample directory. -5. Compile the sample. -6. Run the zed-geotracking executable. -7. The sample will display the camera's path and path data in a 3D window. The fused path will be displayed on ZED Hub's maps page, and KML files will be generated for the fused trajectory and raw GNSS data. 
\ No newline at end of file diff --git a/geotracking/live geotracking/cpp/src/display/GenericDisplay.cpp b/geotracking/live geotracking/cpp/src/display/GenericDisplay.cpp deleted file mode 100644 index 6f1e74f2..00000000 --- a/geotracking/live geotracking/cpp/src/display/GenericDisplay.cpp +++ /dev/null @@ -1,69 +0,0 @@ -#include "display/GenericDisplay.h" -#include "exporter/KMLExporter.h" - -#ifdef COMPILE_WITH_ZEDHUB -#include -#endif - -GenericDisplay::GenericDisplay() -{ -} - -GenericDisplay::~GenericDisplay() -{ -#ifdef COMPILE_WITH_ZEDHUB - sl_hub::STATUS_CODE exit_sl_hub_status = sl_hub::HubClient::disconnect(); - if (exit_sl_hub_status != sl_hub::STATUS_CODE::SUCCESS) - { - std::cout << "[ZedHub][ERROR] Terminate with error " << exit_sl_hub_status << std::endl; - exit(EXIT_FAILURE); - } -#endif -} - -void GenericDisplay::init(int argc, char **argv) -{ - opengl_viewer.init(argc, argv); - -#ifdef COMPILE_WITH_ZEDHUB - sl_hub::STATUS_CODE status_iot = sl_hub::HubClient::connect("geotracking"); - if (status_iot != sl_hub::STATUS_CODE::SUCCESS) - { - std::cout << "[ZedHub][ERROR] Initialization error " << status_iot << std::endl; - exit(EXIT_FAILURE); - } -#endif -} - -void GenericDisplay::updatePoseData(sl::Transform zed_rt, sl::POSITIONAL_TRACKING_STATE state) -{ - opengl_viewer.updateData(zed_rt, state); -} - -bool GenericDisplay::isAvailable(){ - return opengl_viewer.isAvailable(); -} - -void GenericDisplay::updateGeoPoseData(sl::GeoPose geo_pose, sl::Timestamp current_timestamp) -{ -#ifdef COMPILE_WITH_ZEDHUB - sl_hub::json zedhub_message; - zedhub_message["layer_type"] = "geolocation"; - zedhub_message["label"] = "Fused_position"; - zedhub_message["position"] = { - {"latitude", geo_pose.latlng_coordinates.getLatitude(false)}, - {"longitude", geo_pose.latlng_coordinates.getLongitude(false)}, - {"altitude", geo_pose.latlng_coordinates.getAltitude()}}; - zedhub_message["epoch_timestamp"] = static_cast(current_timestamp); - 
sl_hub::HubClient::sendDataToPeers("geolocation", zedhub_message.dump()); - std::this_thread::sleep_for(std::chrono::milliseconds(5)); -#else - static bool already_display_warning_message = false; - if(!already_display_warning_message){ - already_display_warning_message = true; - std::cerr << std::endl << "ZEDHub was not found ... the computed Geopose will be saved as KML file." << std::endl; - std::cerr << "Results will be saved in \"fused_position.kml\" file. You could use google myMaps (https://www.google.com/maps/about/mymaps/) to visualize it." << std::endl; - } - saveKMLData("fused_position.kml", geo_pose); -#endif -} diff --git a/geotracking/live geotracking/python/display/generic_display.py b/geotracking/live geotracking/python/display/generic_display.py deleted file mode 100644 index a48d3ef0..00000000 --- a/geotracking/live geotracking/python/display/generic_display.py +++ /dev/null @@ -1,61 +0,0 @@ -from display.gl_viewer import GLViewer -import pyzed.sl as sl -import time -import json - - -class GenericDisplay: - def __init__(self): - pass - - def __del__(self): - pass - - def init(self,camera_model): - self.glviewer = GLViewer() - self.glviewer.init(camera_model) - # Remplacez cette partie par la connexion appropriée à votre système IoT - - def updatePoseData(self, zed_rt,str_t,str_r, state): - self.glviewer.updateData(zed_rt,str_t,str_r, state) - - def isAvailable(self): - return self.glviewer.is_available() - - def updateGeoPoseData(self, geo_pose, current_timestamp): - try: - # Remplacez cette partie par l'envoi approprié des données à votre système IoT - zedhub_message = { - "layer_type": "geolocation", - "label": "Fused_position", - "position": { - "latitude": geo_pose.latlng_coordinates.get_latitude(False), - "longitude": geo_pose.latlng_coordinates.get_latitude(False), - "altitude": geo_pose.latlng_coordinates.get_altitude() - }, - "epoch_timestamp": int(current_timestamp) - } - time.sleep(0.005) - except ImportError: - 
already_display_warning_message = False - if not already_display_warning_message: - already_display_warning_message = True - print("\nZEDHub n'a pas été trouvé ... la GeoPose calculée sera sauvegardée sous forme de fichier KML.") - print("Les résultats seront enregistrés dans le fichier \"fused_position.kml\".") - print("Vous pouvez utiliser Google MyMaps (https://www.google.com/maps/about/mymaps/) pour le visualiser.") - self.saveKMLData("fused_position.kml", geo_pose) - - def saveKMLData(self, filename, geo_pose): - # Implémentez la sauvegarde de données KML appropriée ici - pass - -if __name__ == "__main__": - generic_display = GenericDisplay() - generic_display.init(0, []) - - try: - while True: - # Votre logique ici... - pass - except KeyboardInterrupt: - pass \ No newline at end of file diff --git a/geotracking/live geotracking/python/gnss_reader/gpsd_reader.py b/geotracking/live geotracking/python/gnss_reader/gpsd_reader.py deleted file mode 100644 index d3a18734..00000000 --- a/geotracking/live geotracking/python/gnss_reader/gpsd_reader.py +++ /dev/null @@ -1,90 +0,0 @@ -import threading -import time -import pyzed.sl as sl -from gpsdclient import GPSDClient -import random -import datetime - -class GPSDReader: - def __init__(self): - self.continue_to_grab = True - self.new_data = False - self.is_initialized = False - self.current_gnss_data = None - self.is_initialized_mtx = threading.Lock() - self.client = None - self.gnss_getter = None - def initialize(self): - try : - self.client = GPSDClient(host="127.0.0.1") - except : - print("No GPSD running .. 
exit") - return -1 - self.grab_gnss_data = threading.Thread(target=self.grabGNSSData) - self.grab_gnss_data.start() - print("Successfully connected to GPSD") - print("Waiting for GNSS fix") - received_fix = False - while not received_fix: - self.gnss_getter = self.client.dict_stream(convert_datetime=True, filter=["TPV"]) - gpsd_data = next(self.gnss_getter) - if "class" in gpsd_data and gpsd_data["class"] == "TPV" and "mode" in gpsd_data and gpsd_data["mode"] >=2: - received_fix = True - print("Fix found !!!") - with self.is_initialized_mtx: - self.is_initialized = True - return 0 - - def getNextGNSSValue(self): - while self.continue_to_grab : - gpsd_data = None - while gpsd_data is None: - gpsd_data = next(self.gnss_getter) - if "class" in gpsd_data and gpsd_data["class"] == "TPV" and "mode" in gpsd_data and gpsd_data["mode"] >=2: - current_gnss_data = sl.GNSSData() - current_gnss_data.set_coordinates(gpsd_data["lat"], gpsd_data["lon"], gpsd_data["altMSL"], False) - current_gnss_data.longitude_std = 0.001 - current_gnss_data.latitude_std = 0.001 - current_gnss_data.altitude_std = 1.0 - - position_covariance = [ - gpsd_data["eph"] * gpsd_data["eph"], - 0.0, - 0.0, - 0.0, - gpsd_data["eph"] * gpsd_data["eph"], - 0.0, - 0.0, - 0.0, - gpsd_data["epv"] * gpsd_data["epv"] - ] - current_gnss_data.position_covariances = position_covariance - timestamp_microseconds = int(gpsd_data["time"].timestamp() * 1000000) - ts = sl.Timestamp() - ts.set_microseconds(timestamp_microseconds) - current_gnss_data.ts = ts - return current_gnss_data - else: - print("Fix perdu : réinitialisation du GNSS") - self.initialize() - - def grab(self): - if self.new_data: - self.new_data = False - return sl.ERROR_CODE.SUCCESS, self.current_gnss_data - return sl.ERROR_CODE.FAILURE, None - - def grabGNSSData(self): - while self.continue_to_grab: - with self.is_initialized_mtx: - if self.is_initialized: - break - time.sleep(0.001) - - while self.continue_to_grab : - self.current_gnss_data = 
self.getNextGNSSValue() - self.new_data = True - - def stop_thread(self): - self.continue_to_grab = False - \ No newline at end of file diff --git a/geotracking/playback/README.md b/geotracking/playback/README.md deleted file mode 100644 index 761bf5fb..00000000 --- a/geotracking/playback/README.md +++ /dev/null @@ -1,28 +0,0 @@ -# Geotracking Data Playback - -## Overview - -The ZED SDK Geotracking Playback sample demonstrates how to fuse pre-recorded GNSS data (saved in a JSON file) and pre-recorded camera data (saved into an SVO file) for achieving global scale localization on a real-world map. This sample is useful for applications such as offline analysis of sensor data or simulation / testing. - -## Features - -- Displays the camera's path in an OpenGL window. -- Displays path data, including translation and rotation. -- Displays the fused path on a map on ZedHub. -- Exports KML files for the fused trajectory and raw GNSS data. - -## Dependencies - -Before using this sample, ensure that you have the following dependencies installed on your system: -- ZED SDK: download and install from the official Stereolabs website (https://www.stereolabs.com/developers/release/). -- ZED Hub Edge Agent: to be able to display the computed trajectory on a real-world map, connect your device to [ZED Hub](https://hub.stereolabs.com/). Detailed tutorials can be found [here](https://www.stereolabs.com/docs/cloud/overview/setup-device/). - -## Installation and Usage - -To use the ZED SDK Geotracking Playback sample, follow these steps: -1. Download and install the ZED SDK on your system from the official Stereolabs website (https://www.stereolabs.com/developers/release/). -2. Install the ZEDub edge-cli from the ZEDHub website. -3. Open a terminal and navigate to the zed-geotracking-playback sample directory. -4. Compile it. -5. 
Run the zed-geotracking-playback executable, passing the path to the SVO file as the first input argument of the command line and the path to gnss file as second argument. -6. The sample will playback the SVO file and display the camera's path and path data in a 3D window. The fused path will be displayed on a map on ZedHub, and KML files will be generated for the fused trajectory and raw GNSS data. \ No newline at end of file diff --git a/geotracking/playback/cpp/include/display/GenericDisplay.h b/geotracking/playback/cpp/include/display/GenericDisplay.h deleted file mode 100644 index 89f239e5..00000000 --- a/geotracking/playback/cpp/include/display/GenericDisplay.h +++ /dev/null @@ -1,83 +0,0 @@ -#ifndef GENERIC_DISPLAY_H -#define GENERIC_DISPLAY_H - -#include -#include "GLViewer.hpp" -#include - -inline cv::Mat slMat2cvMat(sl::Mat& input) { - // Mapping between MAT_TYPE and CV_TYPE - int cv_type = -1; - switch (input.getDataType()) { - case sl::MAT_TYPE::F32_C1: cv_type = CV_32FC1; - break; - case sl::MAT_TYPE::F32_C2: cv_type = CV_32FC2; - break; - case sl::MAT_TYPE::F32_C3: cv_type = CV_32FC3; - break; - case sl::MAT_TYPE::F32_C4: cv_type = CV_32FC4; - break; - case sl::MAT_TYPE::U8_C1: cv_type = CV_8UC1; - break; - case sl::MAT_TYPE::U8_C2: cv_type = CV_8UC2; - break; - case sl::MAT_TYPE::U8_C3: cv_type = CV_8UC3; - break; - case sl::MAT_TYPE::U8_C4: cv_type = CV_8UC4; - break; - default: break; - } - - return cv::Mat(input.getHeight(), input.getWidth(), cv_type, input.getPtr(sl::MEM::CPU)); -} - -class GenericDisplay -{ -public: -/** - * @brief Construct a new Generic Display object - * - */ - GenericDisplay(); - /** - * @brief Destroy the Generic Display object - * - */ - ~GenericDisplay(); - /** - * @brief Init OpenGL display with the requested camera_model (used as moving element in OpenGL view) - * - * @param argc default main argc - * @param argv default main argv - * @param camera_model zed camera model to use - */ - void init(int argc, char **argv); - 
/** - * @brief Return if the OpenGL viewer is still open - * - * @return true the OpenGL viewer is still open - * @return false the OpenGL viewer was closed - */ - bool isAvailable(); - /** - * @brief Update the OpenGL view with last pose data - * - * @param zed_rt last pose data - * @param str_t std::string that represents current translations - * @param str_r std::string that represents current rotations - * @param state current tracking state - */ - void updatePoseData(sl::Transform zed_rt, std::string str_t, std::string str_r, sl::POSITIONAL_TRACKING_STATE state); - /** - * @brief Display current fused pose either in KML file or in ZEDHub depending compilation options - * - * @param geo_pose geopose to display - * @param current_timestamp timestamp of the geopose to display - */ - void updateGeoPoseData(sl::GeoPose geo_pose, sl::Timestamp current_timestamp); - -protected: - GLViewer opengl_viewer; -}; - -#endif \ No newline at end of file diff --git a/geotracking/playback/cpp/src/GNSSReplay.cpp b/geotracking/playback/cpp/src/GNSSReplay.cpp deleted file mode 100644 index f8060bed..00000000 --- a/geotracking/playback/cpp/src/GNSSReplay.cpp +++ /dev/null @@ -1,155 +0,0 @@ -#include "GNSSReplay.hpp" - -using json = nlohmann::json; - -inline bool is_microseconds(uint64_t timestamp) { - // Check if the timestamp is in microseconds - return (1'000'000'000'000'000 <= timestamp && timestamp < 10'000'000'000'000'000ULL); -} - -inline bool is_nanoseconds(uint64_t timestamp) { - // Check if the timestamp is in microseconds - return (1'000'000'000'000'000'000 <= timestamp && timestamp < 10'000'000'000'000'000'000ULL); -} - -GNSSReplay::GNSSReplay(std::string file_name) -{ - _file_name = file_name; - initialize(); -} - -GNSSReplay::~GNSSReplay() -{ -} - -void GNSSReplay::initialize() -{ - std::ifstream gnss_file_data; - gnss_file_data.open(_file_name); - if (!gnss_file_data.is_open()) - { - std::cerr << "Unable to open " << _file_name << std::endl; - exit(EXIT_FAILURE); - } 
- try { - gnss_data = json::parse(gnss_file_data); - } catch (const std::runtime_error &e) { - std::cerr << "Error while reading GNSS data: " << e.what() << std::endl; - } - current_gnss_idx = 0; - previous_ts = 0; -} - -void GNSSReplay::close(){ - gnss_data.clear(); - current_gnss_idx = 0; -} - - -sl::GNSSData getGNSSData(json &gnss_data, int gnss_idx){ - sl::GNSSData current_gnss_data; - current_gnss_data.ts = 0; - - // If we are at the end of GNSS data, exit - if (gnss_idx >= gnss_data["GNSS"].size()){ - std::cout << "Reached the end of the GNSS playback data." << std::endl; - return current_gnss_data; - } - - json current_gnss_data_json = gnss_data["GNSS"][gnss_idx]; - // Check inputs: - if ( - current_gnss_data_json["coordinates"].is_null() - || current_gnss_data_json["coordinates"]["latitude"].is_null() - || current_gnss_data_json["coordinates"]["longitude"].is_null() - || current_gnss_data_json["coordinates"]["altitude"].is_null() - || current_gnss_data_json["ts"].is_null() - ) - { - std::cout << "Null GNSS playback data." 
<< std::endl; - return current_gnss_data; - } - - auto gnss_timestamp = current_gnss_data_json["ts"].get(); - // Fill out timestamp: - if (is_microseconds(gnss_timestamp)) - current_gnss_data.ts.setMicroseconds(gnss_timestamp); - else if (is_nanoseconds(gnss_timestamp)) - current_gnss_data.ts.setNanoseconds(gnss_timestamp); - else - std::cerr << "Warning: Invalid timestamp format from GNSS file" << std::endl; - - // Fill out coordinates: - current_gnss_data.setCoordinates(current_gnss_data_json["coordinates"]["latitude"].get(), - current_gnss_data_json["coordinates"]["longitude"].get(), - current_gnss_data_json["coordinates"]["altitude"].get(), - false); - - // Fill out default standard deviation: - current_gnss_data.longitude_std = current_gnss_data_json["longitude_std"]; - current_gnss_data.latitude_std = current_gnss_data_json["latitude_std"]; - current_gnss_data.altitude_std = current_gnss_data_json["altitude_std"]; - // Fill out covariance [must be not null] - std::array position_covariance; - for (unsigned i = 0; i < 9; i++) - position_covariance[i] = 0.0; // initialize empty covariance - - // set covariance diagonal - position_covariance[0] = current_gnss_data.longitude_std * current_gnss_data.longitude_std; - position_covariance[1 * 3 + 1] = current_gnss_data.latitude_std * current_gnss_data.latitude_std; - position_covariance[2 * 3 + 2] = current_gnss_data.altitude_std * current_gnss_data.altitude_std; - current_gnss_data.position_covariance = position_covariance; - - return current_gnss_data; -} - -sl::GNSSData GNSSReplay::getNextGNSSValue(uint64_t current_timestamp) -{ - sl::GNSSData current_gnss_data = getGNSSData(gnss_data, current_gnss_idx); - - if(current_gnss_data.ts.data_ns == 0) - return current_gnss_data; - - if(current_gnss_data.ts.data_ns > current_timestamp){ - current_gnss_data.ts.data_ns = 0; - return current_gnss_data; - } - - sl::GNSSData last_data; - int step = 1; - while(1){ - last_data = current_gnss_data; - int diff_last = 
current_timestamp - current_gnss_data.ts.data_ns; - current_gnss_data = getGNSSData(gnss_data, current_gnss_idx + step++); - if(current_gnss_data.ts.data_ns==0) //error / end of file - break; - - if(current_gnss_data.ts.data_ns > current_timestamp){ - if((current_gnss_data.ts.data_ns - current_timestamp) > diff_last) // keep last - current_gnss_data = last_data; - break; - } - current_gnss_idx++; - } - - return current_gnss_data; -} - -sl::FUSION_ERROR_CODE GNSSReplay::grab(sl::GNSSData ¤t_data, uint64_t current_timestamp) -{ - current_data.ts.data_ns = 0; - - if(current_timestamp>0 && (current_timestamp > last_cam_ts) ) - current_data = getNextGNSSValue(current_timestamp); - - if(current_data.ts.data_ns == previous_ts) - current_data.ts.data_ns = 0; - - last_cam_ts = current_timestamp; - - if (current_data.ts.data_ns == 0) // Invalid data - return sl::FUSION_ERROR_CODE::FAILURE; - - previous_ts = current_data.ts.data_ns; - return sl::FUSION_ERROR_CODE::SUCCESS; -} \ No newline at end of file diff --git a/geotracking/playback/cpp/src/display/GenericDisplay.cpp b/geotracking/playback/cpp/src/display/GenericDisplay.cpp deleted file mode 100644 index 61df48cc..00000000 --- a/geotracking/playback/cpp/src/display/GenericDisplay.cpp +++ /dev/null @@ -1,71 +0,0 @@ -#include "display/GenericDisplay.h" -#include "exporter/KMLExporter.h" - -#ifdef COMPILE_WITH_ZEDHUB -#include -#endif - -GenericDisplay::GenericDisplay() -{ -} - -GenericDisplay::~GenericDisplay() -{ -#ifdef COMPILE_WITH_ZEDHUB - sl_hub::STATUS_CODE exit_status = sl_hub::HubClient::disconnect(); - if (exit_status != sl_hub::STATUS_CODE::SUCCESS) - { - std::cout << "[ZedHub][ERROR] Terminate with error " << exit_status << std::endl; - exit(EXIT_FAILURE); - } -#else - closeAllKMLWriter(); -#endif -} - -void GenericDisplay::init(int argc, char **argv) -{ - opengl_viewer.init(argc, argv); - -#ifdef COMPILE_WITH_ZEDHUB - sl_hub::STATUS_CODE status_iot = sl_hub::HubClient::connect("geotracking"); - if (status_iot != 
sl_hub::STATUS_CODE::SUCCESS) - { - std::cout << "[ZedHub][ERROR] Initialization error " << status_iot << std::endl; - exit(EXIT_FAILURE); - } -#endif -} - -void GenericDisplay::updatePoseData(sl::Transform zed_rt, std::string str_t, std::string str_r, sl::POSITIONAL_TRACKING_STATE state) -{ - opengl_viewer.updateData(zed_rt, str_t, str_r, state); -} - -bool GenericDisplay::isAvailable(){ - return opengl_viewer.isAvailable(); -} - -void GenericDisplay::updateGeoPoseData(sl::GeoPose geo_pose, sl::Timestamp current_timestamp) -{ -#ifdef COMPILE_WITH_ZEDHUB - sl_hub::json zedhub_message; - zedhub_message["layer_type"] = "geolocation"; - zedhub_message["label"] = "Fused_position"; - zedhub_message["position"] = { - {"latitude", geo_pose.latlng_coordinates.getLatitude(false)}, - {"longitude", geo_pose.latlng_coordinates.getLongitude(false)}, - {"altitude", geo_pose.latlng_coordinates.getAltitude()}}; - zedhub_message["epoch_timestamp"] = static_cast(current_timestamp); - sl_hub::HubClient::sendDataToPeers("geolocation", zedhub_message.dump()); - std::this_thread::sleep_for(std::chrono::milliseconds(5)); -#else - static bool already_display_warning_message = false; - if(!already_display_warning_message){ - already_display_warning_message = true; - std::cerr << std::endl << "ZEDHub was not found ... the computed Geopose will be saved as KML file." << std::endl; - std::cerr << "Results will be saved in \"fused_position.kml\" file. You could use google myMaps (https://www.google.com/maps/about/mymaps/) to visualize it." 
<< std::endl; - } - saveKMLData("fused_position.kml", geo_pose); -#endif -} diff --git a/geotracking/playback/python/display/generic_display.py b/geotracking/playback/python/display/generic_display.py deleted file mode 100644 index a48d3ef0..00000000 --- a/geotracking/playback/python/display/generic_display.py +++ /dev/null @@ -1,61 +0,0 @@ -from display.gl_viewer import GLViewer -import pyzed.sl as sl -import time -import json - - -class GenericDisplay: - def __init__(self): - pass - - def __del__(self): - pass - - def init(self,camera_model): - self.glviewer = GLViewer() - self.glviewer.init(camera_model) - # Remplacez cette partie par la connexion appropriée à votre système IoT - - def updatePoseData(self, zed_rt,str_t,str_r, state): - self.glviewer.updateData(zed_rt,str_t,str_r, state) - - def isAvailable(self): - return self.glviewer.is_available() - - def updateGeoPoseData(self, geo_pose, current_timestamp): - try: - # Remplacez cette partie par l'envoi approprié des données à votre système IoT - zedhub_message = { - "layer_type": "geolocation", - "label": "Fused_position", - "position": { - "latitude": geo_pose.latlng_coordinates.get_latitude(False), - "longitude": geo_pose.latlng_coordinates.get_latitude(False), - "altitude": geo_pose.latlng_coordinates.get_altitude() - }, - "epoch_timestamp": int(current_timestamp) - } - time.sleep(0.005) - except ImportError: - already_display_warning_message = False - if not already_display_warning_message: - already_display_warning_message = True - print("\nZEDHub n'a pas été trouvé ... 
la GeoPose calculée sera sauvegardée sous forme de fichier KML.") - print("Les résultats seront enregistrés dans le fichier \"fused_position.kml\".") - print("Vous pouvez utiliser Google MyMaps (https://www.google.com/maps/about/mymaps/) pour le visualiser.") - self.saveKMLData("fused_position.kml", geo_pose) - - def saveKMLData(self, filename, geo_pose): - # Implémentez la sauvegarde de données KML appropriée ici - pass - -if __name__ == "__main__": - generic_display = GenericDisplay() - generic_display.init(0, []) - - try: - while True: - # Votre logique ici... - pass - except KeyboardInterrupt: - pass \ No newline at end of file diff --git a/geotracking/playback/python/gnss_replay.py b/geotracking/playback/python/gnss_replay.py deleted file mode 100644 index d687a015..00000000 --- a/geotracking/playback/python/gnss_replay.py +++ /dev/null @@ -1,136 +0,0 @@ -import json -import pyzed.sl as sl -import numpy as np - - -class GNSSReplay: - def __init__(self, file_name): - self._file_name = file_name - self.current_gnss_idx = 0 - self.previous_ts = 0 - self.last_cam_ts = 0 - self.gnss_data = None - self.initialize() - - def initialize(self): - try: - with open(self._file_name, 'r') as gnss_file_data: - self.gnss_data = json.load(gnss_file_data) - except FileNotFoundError: - print(f"Unable to open {self._file_name}") - exit(1) - except json.JSONDecodeError as e: - print(f"Error while reading GNSS data: {e}") - - def is_microseconds(self, timestamp): - return 1_000_000_000_000_000 <= timestamp < 10_000_000_000_000_000 - - def is_nanoseconds(self, timestamp): - return 1_000_000_000_000_000_000 <= timestamp < 10_000_000_000_000_000_000 - - def getGNSSData(self, gnss_data,gnss_idx): - current_gnss_data = sl.GNSSData() - - #if we are at the end of GNSS data, exit - if gnss_idx>=len(gnss_data["GNSS"]): - print("Reached the end of the GNSS playback data.") - return current_gnss_data - current_gnss_data_json = gnss_data["GNSS"][gnss_idx] - - if ( - 
current_gnss_data_json["coordinates"] is None - or current_gnss_data_json["coordinates"]["latitude"] is None - or current_gnss_data_json["coordinates"]["longitude"] is None - or current_gnss_data_json["coordinates"]["altitude"] is None - or current_gnss_data_json["ts"] is None - ): - print("Null GNSS playback data.") - return current_gnss_data_json - - gnss_timestamp = current_gnss_data_json["ts"] - ts = sl.Timestamp() - if self.is_microseconds(gnss_timestamp): - ts.set_microseconds(gnss_timestamp) - elif self.is_nanoseconds(gnss_timestamp): - ts.set_nanoseconds(gnss_timestamp) - else: - print("Warning: Invalid timestamp format from GNSS file") - current_gnss_data.ts = ts - # Fill out coordinates: - current_gnss_data.set_coordinates( - current_gnss_data_json["coordinates"]["latitude"], - current_gnss_data_json["coordinates"]["longitude"], - current_gnss_data_json["coordinates"]["altitude"], - False - ) - - # Fill out default standard deviation: - current_gnss_data.longitude_std = current_gnss_data_json["longitude_std"] - current_gnss_data.latitude_std = current_gnss_data_json["latitude_std"] - current_gnss_data.altitude_std = current_gnss_data_json["altitude_std"] - - # Fill out covariance [must not be null] - position_covariance = [ - current_gnss_data.longitude_std **2, - 0.0, - 0.0, - 0.0, - current_gnss_data.latitude_std **2, - 0.0, - 0.0, - 0.0, - current_gnss_data.altitude_std **2 - ] - - current_gnss_data.position_covariances = position_covariance - - return current_gnss_data - - def getNextGNSSValue(self, current_timestamp): - current_gnss_data = self.getGNSSData(self.gnss_data,self.current_gnss_idx) - - if current_gnss_data is None or current_gnss_data.ts.data_ns == 0: - return current_gnss_data - - if current_gnss_data.ts.data_ns > current_timestamp: - current_gnss_data.ts.data_ns = 0 - return current_gnss_data - - last_data = current_gnss_data - step = 1 - while True: - last_data = current_gnss_data - diff_last = current_timestamp - 
current_gnss_data.ts.data_ns - current_gnss_data = self.getGNSSData(self.gnss_data, - self.current_gnss_idx + step - ) - - if current_gnss_data is None or current_gnss_data.ts.data_ns == 0: - break - - if current_gnss_data.ts.data_ns > current_timestamp: - if ( - current_gnss_data.ts.data_ns - current_timestamp - > diff_last - ): - current_gnss_data = last_data - break - self.current_gnss_idx += 1 - return current_gnss_data - - def grab(self, current_timestamp): - current_data = sl.GNSSData() - current_data.ts.data_ns = 0 - - if current_timestamp > 0 and current_timestamp > self.last_cam_ts: - current_data = self.getNextGNSSValue(current_timestamp) - if current_data.ts.data_ns == self.previous_ts: - current_data.ts.data_ns = 0 - - self.last_cam_ts = current_timestamp - - if current_data.ts.data_ns == 0: - return sl.FUSION_ERROR_CODE.FAILURE, None - - self.previous_ts = current_data.ts.data_ns - return sl.FUSION_ERROR_CODE.SUCCESS, current_data \ No newline at end of file diff --git a/geotracking/recording/README.md b/geotracking/recording/README.md deleted file mode 100644 index 34c50e45..00000000 --- a/geotracking/recording/README.md +++ /dev/null @@ -1,29 +0,0 @@ -# Geotracking Data Recording Sample - -## Overview -The Geotracking Data Recording sample demonstrates how to record data for geotracking localization on real-world maps using the ZED camera. The sample generates data in the form of an SVO file, which contains camera data, and a JSON file, which contains pre-recorded GNSS data for use in the playback sample. This sample is a useful resource for developers working on autonomous driving, robotics, and drone navigation applications. - -## Features - -- Displays the camera's path in an OpenGL window in 3D. -- Displays path data, including translation and rotation. -- Generates KML files for displaying raw GNSS data and fused position on google maps after capture. -- Generates an SVO file corresponding to camera data. 
-- Generates a JSON file corresponding to recorded GNSS data. - -## Dependencies - -Before using this sample, ensure that you have the following dependencies installed on your system: -- libgps: required to use an external GNSS sensor. - -## Usage - -To use the Geotracking Data Recording sample, follow these steps: - -1. Download and install the ZED SDK on your system from the official Stereolabs website (https://www.stereolabs.com/developers/release/). -2. Install the libgps dependency using your operating system's package manager. -3. Connect your ZED camera and GNSS sensor to your computer. -4. Open a terminal and navigate to the Geotracking Data Recording sample directory. -5. Compile the sample. -6. Run the Geotracking Data Recording executable. -7. The sample will display the camera's path and path data in a 3D window. KML files will be generated for displaying the raw GNSS data and fused position on a real-world map like google maps after capture. Additionally, an SVO file corresponding to camera data and a JSON file corresponding to recorded GNSS data will be generated. 
\ No newline at end of file diff --git a/geotracking/recording/cpp/src/display/GenericDisplay.cpp b/geotracking/recording/cpp/src/display/GenericDisplay.cpp deleted file mode 100644 index b3d8199f..00000000 --- a/geotracking/recording/cpp/src/display/GenericDisplay.cpp +++ /dev/null @@ -1,69 +0,0 @@ -#include "display/GenericDisplay.h" -#include "exporter/KMLExporter.h" - -#ifdef COMPILE_WITH_ZEDHUB -#include -#endif - -GenericDisplay::GenericDisplay() -{ -} - -GenericDisplay::~GenericDisplay() -{ -#ifdef COMPILE_WITH_ZEDHUB - sl_iot::STATUS_CODE exit_sl_iot_status = sl_iot::HubClient::disconnect(); - if (exit_sl_iot_status != sl_iot::STATUS_CODE::SUCCESS) - { - std::cout << "[ZedHub][ERROR] Terminate with error " << exit_sl_iot_status << std::endl; - exit(EXIT_FAILURE); - } -#endif -} - -void GenericDisplay::init(int argc, char **argv) -{ - opengl_viewer.init(argc, argv); - -#ifdef COMPILE_WITH_ZEDHUB - sl_iot::STATUS_CODE status_iot = sl_iot::HubClient::connect("geotracking"); - if (status_iot != sl_iot::STATUS_CODE::SUCCESS) - { - std::cout << "[ZedHub][ERROR] Initialization error " << status_iot << std::endl; - exit(EXIT_FAILURE); - } -#endif -} - -void GenericDisplay::updatePoseData(sl::Transform zed_rt, std::string str_t, std::string str_r, sl::POSITIONAL_TRACKING_STATE state) -{ - opengl_viewer.updateData(zed_rt, str_t, str_r, state); -} - -bool GenericDisplay::isAvailable(){ - return opengl_viewer.isAvailable(); -} - -void GenericDisplay::updateGeoPoseData(sl::GeoPose geo_pose, sl::Timestamp current_timestamp) -{ -#ifdef COMPILE_WITH_ZEDHUB - sl_iot::json zedhub_message; - zedhub_message["layer_type"] = "geolocation"; - zedhub_message["label"] = "Fused_position"; - zedhub_message["position"] = { - {"latitude", geo_pose.latlng_coordinates.getLatitude(false)}, - {"longitude", geo_pose.latlng_coordinates.getLongitude(false)}, - {"altitude", geo_pose.latlng_coordinates.getAltitude()}}; - zedhub_message["epoch_timestamp"] = static_cast(current_timestamp); - 
sl_iot::HubClient::sendDataToPeers("geolocation", zedhub_message.dump()); - std::this_thread::sleep_for(std::chrono::milliseconds(5)); -#else - static bool already_display_warning_message = false; - if(!already_display_warning_message){ - already_display_warning_message = true; - std::cerr << std::endl << "ZEDHub was not found ... the computed Geopose will be saved as KML file." << std::endl; - std::cerr << "Results will be saved in \"fused_position.kml\" file. You could use google myMaps (https://www.google.com/maps/about/mymaps/) to visualize it." << std::endl; - } - saveKMLData("fused_position.kml", geo_pose); -#endif -} diff --git a/geotracking/recording/cpp/src/exporter/GNSSSaver.cpp b/geotracking/recording/cpp/src/exporter/GNSSSaver.cpp deleted file mode 100644 index c59e3a52..00000000 --- a/geotracking/recording/cpp/src/exporter/GNSSSaver.cpp +++ /dev/null @@ -1,68 +0,0 @@ -#include "exporter/GNSSSaver.h" -#include "json.hpp" - -/** - * @brief Construct a new GNSSSaver object - * - */ -GNSSSaver::GNSSSaver() -{ - std::string current_date = getCurrentDatetime(); - this->file_path = "GNSS_" + current_date + ".json"; -} - -/** - * @brief Destroy the GNSSSaver object - * - */ -GNSSSaver::~GNSSSaver() -{ - saveAllData(); -} - -/** - * @brief Add the input gnss_data into the exported GNSS json file - * - * @param gnss_data gnss data to add - */ -void GNSSSaver::addGNSSData(sl::GNSSData gnss_data) -{ - all_gnss_data.push_back(gnss_data); -} - -/** - * @brief Save all added data into the exported json file - * - */ -void GNSSSaver::saveAllData() -{ - std::vector all_gnss_measurements; - for (unsigned i = 0; i < all_gnss_data.size(); i++) - { - double latitude, longitude, altitude; - all_gnss_data[i].getCoordinates(latitude, longitude, altitude, false); - nlohmann::json gnss_measure; - gnss_measure["ts"] = all_gnss_data[i].ts.getNanoseconds(); - gnss_measure["coordinates"] = { - {"latitude", latitude}, - {"longitude", longitude}, - {"altitude", altitude}}; - 
std::array position_covariance; - for (unsigned j = 0; j < 9; j++) - { - position_covariance[j] = all_gnss_data[i].position_covariance[j]; - } - gnss_measure["position_covariance"] = position_covariance; - gnss_measure["longitude_std"] = sqrt(position_covariance[0 * 3 + 0]); - gnss_measure["latitude_std"] = sqrt(position_covariance[1 * 3 + 1]); - gnss_measure["altitude_std"] = sqrt(position_covariance[2 * 3 + 2]); - all_gnss_measurements.push_back(gnss_measure); - } - - nlohmann::json final_json; - final_json["GNSS"] = all_gnss_measurements; - std::ofstream gnss_file(file_path); - gnss_file << final_json.dump(); - gnss_file.close(); - std::cout << "All GNSS data saved" << std::endl; -} diff --git a/geotracking/recording/python/display/generic_display.py b/geotracking/recording/python/display/generic_display.py deleted file mode 100644 index a48d3ef0..00000000 --- a/geotracking/recording/python/display/generic_display.py +++ /dev/null @@ -1,61 +0,0 @@ -from display.gl_viewer import GLViewer -import pyzed.sl as sl -import time -import json - - -class GenericDisplay: - def __init__(self): - pass - - def __del__(self): - pass - - def init(self,camera_model): - self.glviewer = GLViewer() - self.glviewer.init(camera_model) - # Remplacez cette partie par la connexion appropriée à votre système IoT - - def updatePoseData(self, zed_rt,str_t,str_r, state): - self.glviewer.updateData(zed_rt,str_t,str_r, state) - - def isAvailable(self): - return self.glviewer.is_available() - - def updateGeoPoseData(self, geo_pose, current_timestamp): - try: - # Remplacez cette partie par l'envoi approprié des données à votre système IoT - zedhub_message = { - "layer_type": "geolocation", - "label": "Fused_position", - "position": { - "latitude": geo_pose.latlng_coordinates.get_latitude(False), - "longitude": geo_pose.latlng_coordinates.get_latitude(False), - "altitude": geo_pose.latlng_coordinates.get_altitude() - }, - "epoch_timestamp": int(current_timestamp) - } - time.sleep(0.005) - 
except ImportError: - already_display_warning_message = False - if not already_display_warning_message: - already_display_warning_message = True - print("\nZEDHub n'a pas été trouvé ... la GeoPose calculée sera sauvegardée sous forme de fichier KML.") - print("Les résultats seront enregistrés dans le fichier \"fused_position.kml\".") - print("Vous pouvez utiliser Google MyMaps (https://www.google.com/maps/about/mymaps/) pour le visualiser.") - self.saveKMLData("fused_position.kml", geo_pose) - - def saveKMLData(self, filename, geo_pose): - # Implémentez la sauvegarde de données KML appropriée ici - pass - -if __name__ == "__main__": - generic_display = GenericDisplay() - generic_display.init(0, []) - - try: - while True: - # Votre logique ici... - pass - except KeyboardInterrupt: - pass \ No newline at end of file diff --git a/geotracking/recording/python/exporter/gnss_saver.py b/geotracking/recording/python/exporter/gnss_saver.py deleted file mode 100644 index 19d60a5f..00000000 --- a/geotracking/recording/python/exporter/gnss_saver.py +++ /dev/null @@ -1,41 +0,0 @@ -import numpy as np -import json -from datetime import datetime - -def get_current_datetime(): - now = datetime.now() - return now.strftime("%d-%m-%Y_%H-%M-%S") - - -class GNSSSaver: - def __init__(self): - self.current_date = get_current_datetime() - self.file_path = "GNSS_"+self.current_date+".json" - self.all_gnss_data = [] - - def addGNSSData(self,gnss_data): - self.all_gnss_data.append(gnss_data) - - def saveAllData(self): - print("Start saving GNSS data...") - all_gnss_measurements = [] - for i in range(len(self.all_gnss_data)): - latitude, longitude, altitude = self.all_gnss_data[i].get_coordinates(False) - gnss_measure = {} - gnss_measure["ts"] = self.all_gnss_data[i].ts.get_nanoseconds() - coordinates_dict = {} - coordinates_dict["latitude"] = latitude - coordinates_dict["longitude"] = longitude - coordinates_dict["altitude"] = altitude - gnss_measure["coordinates"] = coordinates_dict - 
position_covariance = [self.all_gnss_data[i].position_covariances[j] for j in range(9)] - gnss_measure["position_covariance"] = position_covariance - gnss_measure["longitude_std"] = np.sqrt(position_covariance[0 * 3 + 0]) - gnss_measure["latitude_std"] = np.sqrt(position_covariance[1 * 3 + 1]) - gnss_measure["altitude_std"] = np.sqrt(position_covariance[2 * 3 + 2]) - all_gnss_measurements.append(gnss_measure) - final_dict = {"GNSS" : all_gnss_measurements} - with open(self.file_path, "w") as outfile: - # json_data refers to the above JSON - json.dump(final_dict, outfile) - print("All GNSS data saved") diff --git a/geotracking/recording/python/ogl_viewer/tracking_viewer.py b/geotracking/recording/python/ogl_viewer/tracking_viewer.py deleted file mode 100644 index 21b695d5..00000000 --- a/geotracking/recording/python/ogl_viewer/tracking_viewer.py +++ /dev/null @@ -1,557 +0,0 @@ -from OpenGL.GL import * -from OpenGL.GLUT import * -from OpenGL.GLU import * - -import ctypes -import sys -import math -from threading import Lock -import numpy as np -import array - -import ogl_viewer.zed_model as zm -import pyzed.sl as sl - -VERTEX_SHADER = """ -# version 330 core -layout(location = 0) in vec3 in_Vertex; -layout(location = 1) in vec4 in_Color; -uniform mat4 u_mvpMatrix; -out vec4 b_color; -void main() { - b_color = in_Color; - gl_Position = u_mvpMatrix * vec4(in_Vertex, 1); -} -""" - -FRAGMENT_SHADER = """ -# version 330 core -in vec4 b_color; -layout(location = 0) out vec4 out_Color; -void main() { - out_Color = b_color; -} -""" - -def safe_glutBitmapString(font, str_): - for i in range(len(str_)): - glutBitmapCharacter(GLUT_BITMAP_HELVETICA_18, ord(str_[i])) - -class Shader: - def __init__(self, _vs, _fs): - - self.program_id = glCreateProgram() - vertex_id = self.compile(GL_VERTEX_SHADER, _vs) - fragment_id = self.compile(GL_FRAGMENT_SHADER, _fs) - - glAttachShader(self.program_id, vertex_id) - glAttachShader(self.program_id, fragment_id) - glBindAttribLocation( 
self.program_id, 0, "in_vertex") - glBindAttribLocation( self.program_id, 1, "in_texCoord") - glLinkProgram(self.program_id) - - if glGetProgramiv(self.program_id, GL_LINK_STATUS) != GL_TRUE: - info = glGetProgramInfoLog(self.program_id) - glDeleteProgram(self.program_id) - glDeleteShader(vertex_id) - glDeleteShader(fragment_id) - raise RuntimeError('Error linking program: %s' % (info)) - glDeleteShader(vertex_id) - glDeleteShader(fragment_id) - - def compile(self, _type, _src): - try: - shader_id = glCreateShader(_type) - if shader_id == 0: - print("ERROR: shader type {0} does not exist".format(_type)) - exit() - - glShaderSource(shader_id, _src) - glCompileShader(shader_id) - if glGetShaderiv(shader_id, GL_COMPILE_STATUS) != GL_TRUE: - info = glGetShaderInfoLog(shader_id) - glDeleteShader(shader_id) - raise RuntimeError('Shader compilation failed: %s' % (info)) - return shader_id - except: - glDeleteShader(shader_id) - raise - - def get_program_id(self): - return self.program_id - - -class Simple3DObject: - def __init__(self, _is_static): - self.vaoID = 0 - self.drawing_type = GL_TRIANGLES - self.is_static = _is_static - self.elementbufferSize = 0 - - self.vertices = array.array('f') - self.colors = array.array('f') - self.indices = array.array('I') - - def add_pt(self, _pts): # _pts [x,y,z] - for pt in _pts: - self.vertices.append(pt) - - def add_clr(self, _clrs): # _clr [r,g,b] - for clr in _clrs: - self.colors.append(clr) - - def add_point_clr(self, _pt, _clr): - self.add_pt(_pt) - self.add_clr(_clr) - self.indices.append(len(self.indices)) - - def add_line(self, _p1, _p2, _clr): - self.add_point_clr(_p1, _clr) - self.add_point_clr(_p2, _clr) - - def push_to_GPU(self): - self.vboID = glGenBuffers(4) - - if len(self.vertices): - glBindBuffer(GL_ARRAY_BUFFER, self.vboID[0]) - glBufferData(GL_ARRAY_BUFFER, len(self.vertices) * self.vertices.itemsize, (GLfloat * len(self.vertices))(*self.vertices), GL_STATIC_DRAW) - - if len(self.colors): - 
glBindBuffer(GL_ARRAY_BUFFER, self.vboID[1]) - glBufferData(GL_ARRAY_BUFFER, len(self.colors) * self.colors.itemsize, (GLfloat * len(self.colors))(*self.colors), GL_STATIC_DRAW) - - if len(self.indices): - glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, self.vboID[2]) - glBufferData(GL_ELEMENT_ARRAY_BUFFER,len(self.indices) * self.indices.itemsize,(GLuint * len(self.indices))(*self.indices), GL_STATIC_DRAW) - - self.elementbufferSize = len(self.indices) - - def clear(self): - self.vertices = array.array('f') - self.colors = array.array('f') - self.indices = array.array('I') - self.elementbufferSize = 0 - - def set_drawing_type(self, _type): - self.drawing_type = _type - - def draw(self): - if (self.elementbufferSize): - glEnableVertexAttribArray(0) - glBindBuffer(GL_ARRAY_BUFFER, self.vboID[0]) - glVertexAttribPointer(0,3,GL_FLOAT,GL_FALSE,0,None) - - glEnableVertexAttribArray(1) - glBindBuffer(GL_ARRAY_BUFFER, self.vboID[1]) - glVertexAttribPointer(1,3,GL_FLOAT,GL_FALSE,0,None) - - glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, self.vboID[2]) - glDrawElements(self.drawing_type, self.elementbufferSize, GL_UNSIGNED_INT, None) - - glDisableVertexAttribArray(0) - glDisableVertexAttribArray(1) - -def addVert(obj, i_f, limit, clr) : - obj.add_line([i_f, 0, -limit], [i_f, 0, limit], clr) - obj.add_line([-limit, 0, i_f],[limit, 0, i_f], clr) - -class GLViewer: - def __init__(self): - self.available = False - self.mutex = Lock() - self.camera = CameraGL() - self.wheelPosition = 0. - self.mouse_button = [False, False] - self.mouseCurrentPosition = [0., 0.] - self.previousMouseMotion = [0., 0.] - self.mouseMotion = [0., 0.] 
- self.pose = sl.Transform() - self.trackState = sl.POSITIONAL_TRACKING_STATE - self.txtT = "" - self.txtR = "" - - def init(self, camera_model): # _params = sl.CameraParameters - glutInit() - wnd_w = int(glutGet(GLUT_SCREEN_WIDTH)*0.9) - wnd_h = int(glutGet(GLUT_SCREEN_HEIGHT) *0.9) - glutInitWindowSize(wnd_w, wnd_h) - glutInitWindowPosition(int(wnd_w*0.05), int(wnd_h*0.05)) - - glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH) - glutCreateWindow("ZED Positional Tracking") - glViewport(0, 0, wnd_w, wnd_h) - - glutSetOption(GLUT_ACTION_ON_WINDOW_CLOSE, - GLUT_ACTION_CONTINUE_EXECUTION) - - glEnable(GL_DEPTH_TEST) - - glEnable(GL_BLEND) - glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA) - - glEnable(GL_LINE_SMOOTH) - glHint(GL_LINE_SMOOTH_HINT, GL_NICEST) - - # Compile and create the shader for 3D objects - self.shader_image = Shader(VERTEX_SHADER, FRAGMENT_SHADER) - self.shader_MVP = glGetUniformLocation(self.shader_image.get_program_id(), "u_mvpMatrix") - - self.bckgrnd_clr = np.array([223/255., 230/255., 233/255.]) - - # Create the bounding box object - self.floor_grid = Simple3DObject(False) - self.floor_grid.set_drawing_type(GL_LINES) - - limit = 20 - clr1 = np.array([218/255., 223/255., 225/255.]) - clr2 = np.array([108/255., 122/255., 137/255.]) - - for i in range (limit * -5, limit * 5): - i_f = i / 5. 
- if((i % 5) == 0): - addVert(self.floor_grid, i_f, limit, clr2) - else: - addVert(self.floor_grid, i_f, limit, clr1) - self.floor_grid.push_to_GPU() - - self.zedPath = Simple3DObject(False) - self.zedPath.set_drawing_type(GL_LINE_STRIP) - - self.zedModel = Simple3DObject(False) - if(camera_model == sl.MODEL.ZED): - for i in range(0, zm.NB_ALLUMINIUM_TRIANGLES * 3, 3): - for j in range(3): - index = int(zm.alluminium_triangles[i + j] - 1) - self.zedModel.add_point_clr([zm.vertices[index * 3], zm.vertices[index * 3 + 1], zm.vertices[index * 3 + 2]], [zm.ALLUMINIUM_COLOR.r, zm.ALLUMINIUM_COLOR.g, zm.ALLUMINIUM_COLOR.b] ) - - for i in range(0, zm.NB_DARK_TRIANGLES * 3, 3): - for j in range(3): - index = int(zm.dark_triangles[i + j] - 1) - self.zedModel.add_point_clr([zm.vertices[index * 3], zm.vertices[index * 3 + 1], zm.vertices[index * 3 + 2]], [zm.DARK_COLOR.r, zm.DARK_COLOR.g, zm.DARK_COLOR.b] ) - elif(camera_model == sl.MODEL.ZED_M): - for i in range(0, zm.NB_AL_ZEDM_TRI * 3, 3): - for j in range(3): - index = int(zm.al_triangles_m[i + j] - 1) - self.zedModel.add_point_clr([zm.vertices_m[index * 3], zm.vertices_m[index * 3 + 1], zm.vertices_m[index * 3 + 2]], [zm.ALLUMINIUM_COLOR.r, zm.ALLUMINIUM_COLOR.g, zm.ALLUMINIUM_COLOR.b] ) - - for i in range(0, zm.NB_DARK_ZEDM_TRI * 3, 3): - for j in range(3): - index = int(zm.dark_triangles_m[i + j] - 1) - self.zedModel.add_point_clr([zm.vertices_m[index * 3], zm.vertices_m[index * 3 + 1], zm.vertices_m[index * 3 + 2]], [zm.DARK_COLOR.r, zm.DARK_COLOR.g, zm.DARK_COLOR.b] ) - - for i in range(0, zm.NB_GRAY_ZEDM_TRI * 3, 3): - for j in range(3): - index = int(zm.gray_triangles_m[i + j] - 1) - self.zedModel.add_point_clr([zm.vertices_m[index * 3], zm.vertices_m[index * 3 + 1], zm.vertices_m[index * 3 + 2]], [zm.GRAY_COLOR.r, zm.GRAY_COLOR.g, zm.GRAY_COLOR.b] ) - - for i in range(0, zm.NB_YELLOW_ZEDM_TRI * 3, 3): - for j in range(3): - index = int(zm.yellow_triangles_m[i + j] - 1) - 
self.zedModel.add_point_clr([zm.vertices_m[index * 3], zm.vertices_m[index * 3 + 1], zm.vertices_m[index * 3 + 2]], [zm.YELLOW_COLOR.r, zm.YELLOW_COLOR.g, zm.YELLOW_COLOR.b] ) - - elif((camera_model == sl.MODEL.ZED2) or (camera_model == sl.MODEL.ZED2i)): - for i in range(0, zm.NB_ALLUMINIUM_TRIANGLES * 3, 3): - for j in range(3): - index = int(zm.alluminium_triangles[i + j] - 1) - self.zedModel.add_point_clr([zm.vertices[index * 3], zm.vertices[index * 3 + 1], zm.vertices[index * 3 + 2]], [zm.DARK_COLOR.r, zm.DARK_COLOR.g, zm.DARK_COLOR.b] ) - - for i in range(0, zm.NB_DARK_TRIANGLES * 3, 3): - for j in range(3): - index = int(zm.dark_triangles[i + j] - 1) - self.zedModel.add_point_clr([zm.vertices[index * 3], zm.vertices[index * 3 + 1], zm.vertices[index * 3 + 2]], [zm.GRAY_COLOR.r, zm.GRAY_COLOR.g, zm.GRAY_COLOR.b] ) - self.zedModel.set_drawing_type(GL_TRIANGLES) - self.zedModel.push_to_GPU() - - # Register GLUT callback functions - glutDisplayFunc(self.draw_callback) - glutIdleFunc(self.idle) - glutKeyboardFunc(self.keyPressedCallback) - glutCloseFunc(self.close_func) - glutMouseFunc(self.on_mouse) - glutMotionFunc(self.on_mousemove) - glutReshapeFunc(self.on_resize) - - self.available = True - - def is_available(self): - if self.available: - glutMainLoopEvent() - return self.available - - def updateData(self, zed_rt, str_t, str_r, state): - self.mutex.acquire() - self.pose = zed_rt - self.zedPath.add_point_clr(zed_rt.get_translation().get(), [0.1,0.36,0.84]) - self.trackState = state - self.txtT = str_t - self.txtR = str_r - self.mutex.release() - - def idle(self): - if self.available: - glutPostRedisplay() - - def exit(self): - if self.available: - self.available = False - - def close_func(self): - if self.available: - self.available = False - - def keyPressedCallback(self, key, x, y): - if ord(key) == 27: - self.close_func() - - def on_mouse(self,*args,**kwargs): - (key,Up,x,y) = args - if key==0: - self.mouse_button[0] = (Up == 0) - elif key==2 : - 
self.mouse_button[1] = (Up == 0) - elif(key == 3): - self.wheelPosition = self.wheelPosition + 1 - elif(key == 4): - self.wheelPosition = self.wheelPosition - 1 - - self.mouseCurrentPosition = [x, y] - self.previousMouseMotion = [x, y] - - def on_mousemove(self,*args,**kwargs): - (x,y) = args - self.mouseMotion[0] = x - self.previousMouseMotion[0] - self.mouseMotion[1] = y - self.previousMouseMotion[1] - self.previousMouseMotion = [x, y] - glutPostRedisplay() - - def on_resize(self,Width,Height): - glViewport(0, 0, Width, Height) - self.camera.setProjection(Height / Width) - - def draw_callback(self): - if self.available: - glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT) - glClearColor(self.bckgrnd_clr[0], self.bckgrnd_clr[1], self.bckgrnd_clr[2], 1.) - - self.mutex.acquire() - self.update() - self.draw() - self.print_text() - self.mutex.release() - - glutSwapBuffers() - glutPostRedisplay() - - def update(self): - self.zedPath.push_to_GPU() - - if(self.mouse_button[0]): - r = sl.Rotation() - vert=self.camera.vertical_ - tmp = vert.get() - vert.init_vector(tmp[0] * -1.,tmp[1] * -1., tmp[2] * -1.) - r.init_angle_translation(self.mouseMotion[0] * 0.002, vert) - self.camera.rotate(r) - - r.init_angle_translation(self.mouseMotion[1] * 0.002, self.camera.right_) - self.camera.rotate(r) - - if(self.mouse_button[1]): - t = sl.Translation() - tmp = self.camera.right_.get() - scale = self.mouseMotion[0] * -0.01 - t.init_vector(tmp[0] * scale, tmp[1] * scale, tmp[2] * scale) - self.camera.translate(t) - - tmp = self.camera.up_.get() - scale = self.mouseMotion[1] * 0.01 - t.init_vector(tmp[0] * scale, tmp[1] * scale, tmp[2] * scale) - self.camera.translate(t) - - if (self.wheelPosition != 0): - t = sl.Translation() - tmp = self.camera.forward_.get() - scale = self.wheelPosition * -0.065 - t.init_vector(tmp[0] * scale, tmp[1] * scale, tmp[2] * scale) - self.camera.translate(t) - - - self.camera.update() - - self.mouseMotion = [0., 0.] 
- self.wheelPosition = 0 - - def draw(self): - glPointSize(1.) - glUseProgram(self.shader_image.get_program_id()) - - vpMatrix = self.camera.getViewProjectionMatrix() - glUniformMatrix4fv(self.shader_MVP, 1, GL_TRUE, (GLfloat * len(vpMatrix))(*vpMatrix)) - - glPolygonMode(GL_FRONT_AND_BACK, GL_FILL) - glLineWidth(2) - self.zedPath.draw() - self.floor_grid.draw() - - vpMatrix = self.camera.getViewProjectionMatrixRT(self.pose) - glUniformMatrix4fv(self.shader_MVP, 1, GL_FALSE, (GLfloat * len(vpMatrix))(*vpMatrix)) - - self.zedModel.draw() - glUseProgram(0) - - def print_text(self): - glMatrixMode(GL_PROJECTION) - glPushMatrix() - glLoadIdentity() - w_wnd = glutGet(GLUT_WINDOW_WIDTH) - h_wnd = glutGet(GLUT_WINDOW_HEIGHT) - glOrtho(0, w_wnd, 0, h_wnd, -1., 1.) - - glMatrixMode(GL_MODELVIEW) - glPushMatrix() - glLoadIdentity() - - start_w = 20 - start_h = h_wnd - 40 - - if(self.trackState == sl.POSITIONAL_TRACKING_STATE.OK): - glColor3f(0.2, 0.65, 0.2) - else: - glColor3f(0.85, 0.2, 0.2) - - glRasterPos2i(start_w, start_h) - - safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, "POSITIONAL TRACKING : " + str(self.trackState)) - - dark_clr = 0.12 - glColor3f(dark_clr, dark_clr, dark_clr) - glRasterPos2i(start_w, start_h - 25) - safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, "Translation (m) :") - - glColor3f(0.4980, 0.5490, 0.5529) - glRasterPos2i(155, start_h - 25) - - safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, self.txtT) - - glColor3f(dark_clr, dark_clr, dark_clr) - glRasterPos2i(start_w, start_h - 50) - safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, "Rotation (rad) :") - - glColor3f(0.4980, 0.5490, 0.5529) - glRasterPos2i(155, start_h - 50) - safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, self.txtR) - - glMatrixMode(GL_PROJECTION) - glPopMatrix() - glMatrixMode(GL_MODELVIEW) - glPopMatrix() - -class CameraGL: - def __init__(self): - self.ORIGINAL_FORWARD = sl.Translation() - self.ORIGINAL_FORWARD.init_vector(0,0,1) - self.ORIGINAL_UP = sl.Translation() - 
self.ORIGINAL_UP.init_vector(0,1,0) - self.ORIGINAL_RIGHT = sl.Translation() - self.ORIGINAL_RIGHT.init_vector(1,0,0) - self.znear = 0.5 - self.zfar = 100. - self.horizontalFOV = 70. - self.orientation_ = sl.Orientation() - self.position_ = sl.Translation() - self.forward_ = sl.Translation() - self.up_ = sl.Translation() - self.right_ = sl.Translation() - self.vertical_ = sl.Translation() - self.vpMatrix_ = sl.Matrix4f() - self.projection_ = sl.Matrix4f() - self.projection_.set_identity() - self.setProjection(1.78) - - self.position_.init_vector(0., 5., -3.) - tmp = sl.Translation() - tmp.init_vector(0, 0, -4) - tmp2 = sl.Translation() - tmp2.init_vector(0, 1, 0) - self.setDirection(tmp, tmp2) - cam_rot = sl.Rotation() - cam_rot.set_euler_angles(-50., 180., 0., False) - self.setRotation(cam_rot) - - def update(self): - dot_ = sl.Translation.dot_translation(self.vertical_, self.up_) - if(dot_ < 0.): - tmp = self.vertical_.get() - self.vertical_.init_vector(tmp[0] * -1.,tmp[1] * -1., tmp[2] * -1.) - transformation = sl.Transform() - transformation.init_orientation_translation(self.orientation_, self.position_) - transformation.inverse() - self.vpMatrix_ = self.projection_ * transformation - - def setProjection(self, im_ratio): - fov_x = self.horizontalFOV * 3.1416 / 180. - fov_y = self.horizontalFOV * im_ratio * 3.1416 / 180. - - self.projection_[(0,0)] = 1. / math.tan(fov_x * .5) - self.projection_[(1,1)] = 1. / math.tan(fov_y * .5) - self.projection_[(2,2)] = -(self.zfar + self.znear) / (self.zfar - self.znear) - self.projection_[(3,2)] = -1. - self.projection_[(2,3)] = -(2. * self.zfar * self.znear) / (self.zfar - self.znear) - self.projection_[(3,3)] = 0. 
- - def getViewProjectionMatrix(self): - tmp = self.vpMatrix_.m - vpMat = array.array('f') - for row in tmp: - for v in row: - vpMat.append(v) - return vpMat - - def getViewProjectionMatrixRT(self, tr): - tmp = self.vpMatrix_ - tmp.transpose() - tr.transpose() - tmp = (tr * tmp).m - vpMat = array.array('f') - for row in tmp: - for v in row: - vpMat.append(v) - return vpMat - - def setDirection(self, dir, vert): - dir.normalize() - tmp = dir.get() - dir.init_vector(tmp[0] * -1.,tmp[1] * -1., tmp[2] * -1.) - self.orientation_.init_translation(self.ORIGINAL_FORWARD, dir) - self.updateVectors() - self.vertical_ = vert - if(sl.Translation.dot_translation(self.vertical_, self.up_) < 0.): - tmp = sl.Rotation() - tmp.init_angle_translation(3.14, self.ORIGINAL_FORWARD) - self.rotate(tmp) - - def translate(self, t): - ref = self.position_.get() - tmp = t.get() - self.position_.init_vector(ref[0] + tmp[0], ref[1] + tmp[1], ref[2] + tmp[2]) - - def setPosition(self, p): - self.position_ = p - - def rotate(self, r): - tmp = sl.Orientation() - tmp.init_rotation(r) - self.orientation_ = tmp * self.orientation_ - self.updateVectors() - - def setRotation(self, r): - self.orientation_.init_rotation(r) - self.updateVectors() - - def updateVectors(self): - self.forward_ = self.ORIGINAL_FORWARD * self.orientation_ - self.up_ = self.ORIGINAL_UP * self.orientation_ - right = self.ORIGINAL_RIGHT - tmp = right.get() - right.init_vector(tmp[0] * -1.,tmp[1] * -1., tmp[2] * -1.) 
- self.right_ = right * self.orientation_ diff --git a/geotracking/recording/python/ogl_viewer/zed_model.py b/geotracking/recording/python/ogl_viewer/zed_model.py deleted file mode 100644 index 7a4f9211..00000000 --- a/geotracking/recording/python/ogl_viewer/zed_model.py +++ /dev/null @@ -1,2734 +0,0 @@ -import numpy as np - -class Color: - def __init__(self, pr, pg, pb): - self.r = pr - self.g = pg - self.b = pb - -NB_ALLUMINIUM_TRIANGLES = 54 -NB_DARK_TRIANGLES = 54 -ALLUMINIUM_COLOR = Color(0.79, 0.82, 0.93) -DARK_COLOR = Color(0.07, 0.07, 0.07) - -vertices = np.array([ - -0.068456, -0.016299, 0.016299 - , -0.068456, 0.016299, 0.016299 - , -0.068456, 0.016299, -0.016299 - , -0.068456, -0.016299, -0.016299 - , -0.076606, 0.014115, 0.016299 - , -0.082572, 0.008150, 0.016299 - , -0.084755, -0.000000, 0.016299 - , -0.082572, -0.008150, 0.016299 - , -0.076606, -0.014115, 0.016299 - , -0.076606, -0.014115, -0.016299 - , -0.082572, -0.008150, -0.016299 - , -0.084755, -0.000000, -0.016299 - , -0.082572, 0.008150, -0.016299 - , -0.076606, 0.014115, -0.016299 - , -0.053494, -0.009779, -0.016299 - , -0.048604, -0.008469, -0.016299 - , -0.045024, -0.004890, -0.016299 - , -0.043714, 0.000000, -0.016299 - , -0.045024, 0.004890, -0.016299 - , -0.048604, 0.008469, -0.016299 - , -0.053494, 0.009779, -0.016299 - , -0.058383, 0.008469, -0.016299 - , -0.061963, 0.004890, -0.016299 - , -0.063273, 0.000000, -0.016299 - , -0.061963, -0.004890, -0.016299 - , -0.058383, -0.008469, -0.016299 - , 0.000000, -0.016299, -0.016299 - , 0.068456, -0.016299, 0.016299 - , 0.000000, 0.016299, -0.016299 - , 0.068456, 0.016299, 0.016299 - , 0.068456, 0.016299, -0.016299 - , 0.068456, -0.016299, -0.016299 - , 0.076606, 0.014115, 0.016299 - , 0.082572, 0.008150, 0.016299 - , 0.084755, -0.000000, 0.016299 - , 0.082572, -0.008150, 0.016299 - , 0.076606, -0.014115, 0.016299 - , 0.076606, -0.014115, -0.016299 - , 0.082572, -0.008150, -0.016299 - , 0.084755, -0.000000, -0.016299 - , 0.082572, 0.008150, 
-0.016299 - , 0.076606, 0.014115, -0.016299 - , 0.053494, -0.009779, -0.016299 - , 0.048604, -0.008469, -0.016299 - , 0.045024, -0.004890, -0.016299 - , 0.043714, 0.000000, -0.016299 - , 0.045024, 0.004890, -0.016299 - , 0.048604, 0.008469, -0.016299 - , 0.053494, 0.009779, -0.016299 - , 0.058383, 0.008469, -0.016299 - , 0.061963, 0.004890, -0.016299 - , 0.063273, 0.000000, -0.016299 - , 0.061963, -0.004890, -0.016299 - , 0.058383, -0.008469, -0.016299 - , 0.053494, 0.000000, -0.016299 - , -0.053494, 0.000000, -0.016299 -]) - -alluminium_triangles = np.array([ - 1, 10, 4 - , 6, 14, 13 - , 7, 13, 12 - , 8, 12, 11 - , 9, 11, 10 - , 5, 3, 14 - , 44, 45, 55 - , 47, 48, 55 - , 43, 44, 55 - , 46, 47, 55 - , 52, 53, 55 - , 48, 49, 55 - , 54, 43, 55 - , 50, 51, 55 - , 53, 54, 55 - , 49, 50, 55 - , 45, 46, 55 - , 51, 52, 55 - , 27, 32, 28 - , 38, 28, 32 - , 42, 34, 41 - , 41, 35, 40 - , 40, 36, 39 - , 39, 37, 38 - , 31, 33, 42 - , 27, 1, 4 - , 20, 19, 56 - , 22, 21, 56 - , 23, 22, 56 - , 24, 23, 56 - , 19, 18, 56 - , 21, 20, 56 - , 17, 16, 56 - , 26, 25, 56 - , 15, 26, 56 - , 18, 17, 56 - , 16, 15, 56 - , 25, 24, 56 - , 2, 29, 3 - , 31, 29, 30 - , 1, 9, 10 - , 6, 5, 14 - , 7, 6, 13 - , 8, 7, 12 - , 9, 8, 11 - , 5, 2, 3 - , 38, 37, 28 - , 42, 33, 34 - , 41, 34, 35 - , 40, 35, 36 - , 39, 36, 37 - , 31, 30, 33 - , 27, 28, 1 - , 2, 30, 29 -]) - -dark_triangles = np.array([ - 23, 3, 22 - , 13, 10, 11 - , 4, 14, 3 - , 11, 12, 13 - , 9, 6, 8 - , 1, 5, 9 - , 8, 6, 7 - , 1, 30, 2 - , 21, 22, 3 - , 23, 24, 3 - , 24, 25, 4 - , 3, 24, 4 - , 25, 26, 4 - , 26, 15, 4 - , 16, 17, 27 - , 17, 18, 27 - , 18, 19, 29 - , 27, 18, 29 - , 19, 20, 29 - , 20, 21, 29 - , 3, 29, 21 - , 16, 27, 15 - , 27, 4, 15 - , 51, 50, 31 - , 38, 41, 39 - , 32, 42, 38 - , 39, 41, 40 - , 34, 37, 36 - , 28, 33, 30 - , 36, 35, 34 - , 49, 31, 50 - , 51, 31, 52 - , 52, 32, 53 - , 31, 32, 52 - , 53, 32, 54 - , 54, 32, 43 - , 44, 27, 45 - , 45, 27, 46 - , 46, 29, 47 - , 27, 29, 46 - , 47, 29, 48 - , 48, 29, 49 - , 31, 49, 
29 - , 44, 43, 27 - , 27, 43, 32 - , 13, 14, 10 - , 4, 10, 14 - , 9, 5, 6 - , 1, 2, 5 - , 1, 28, 30 - , 38, 42, 41 - , 32, 31, 42 - , 34, 33, 37 - , 28, 37, 33 -]) - -vertices_m = np.array([ - 0.030800, 0.013300, 0.000001 - ,0.058785, 0.013300, -0.002250 - ,0.058785, 0.013300, 0.002251 - ,0.059839, 0.013300, -0.001999 - ,0.060770, 0.013300, -0.001351 - ,0.059839, 0.013300, 0.002000 - ,0.060770, 0.013300, 0.001352 - ,0.002815, 0.013300, -0.002250 - ,0.002815, 0.013300, 0.002251 - ,0.001761, 0.013300, -0.001999 - ,0.000830, 0.013300, -0.001351 - ,0.001761, 0.013300, 0.002000 - ,0.000830, 0.013300, 0.001352 - ,0.061449, 0.013300, -0.000563 - ,0.061449, 0.013300, 0.000564 - ,0.000152, 0.013300, 0.000564 - ,0.000152, 0.013300, -0.000563 - ,0.030800, -0.013333, 0.000001 - ,0.058785, -0.013333, -0.002250 - ,0.058785, -0.013333, 0.002251 - ,0.059839, -0.013333, -0.001999 - ,0.060770, -0.013333, -0.001351 - ,0.059839, -0.013333, 0.002000 - ,0.060770, -0.013333, 0.001352 - ,0.002815, -0.013333, -0.002250 - ,0.002815, -0.013333, 0.002251 - ,0.001761, -0.013333, -0.001999 - ,0.000830, -0.013333, -0.001351 - ,0.001761, -0.013333, 0.002000 - ,0.000830, -0.013333, 0.001352 - ,0.061449, -0.013333, 0.000564 - ,0.061449, -0.013333, -0.000563 - ,0.000152, -0.013333, -0.000563 - ,0.000152, -0.013333, 0.000564 - ,-0.031684, 0.009412, 0.000501 - ,-0.031809, 0.008300, 0.000501 - ,-0.028977, 0.012805, 0.000501 - ,-0.029926, 0.012209, 0.000501 - ,-0.026809, 0.013300, 0.000501 - ,-0.027920, 0.013175, 0.000501 - ,-0.030718, 0.011417, 0.000501 - ,-0.031314, 0.010469, 0.000501 - ,-0.031809, -0.008310, 0.000501 - ,-0.031684, -0.009431, 0.000501 - ,-0.028977, -0.012824, 0.000501 - ,-0.029926, -0.012228, 0.000501 - ,-0.026847, -0.013300, 0.000500 - ,-0.027920, -0.013194, 0.000501 - ,-0.030718, -0.011437, 0.000501 - ,-0.031314, -0.010488, 0.000501 - ,-0.031684, 0.009412, -0.000500 - ,-0.031809, 0.008300, -0.000500 - ,-0.028977, 0.012805, -0.000500 - ,-0.029926, 0.012209, -0.000500 - ,-0.026809, 
0.013300, -0.000500 - ,-0.027920, 0.013175, -0.000500 - ,-0.030718, 0.011417, -0.000500 - ,-0.031314, 0.010469, -0.000500 - ,-0.031809, -0.008310, -0.000500 - ,-0.031684, -0.009431, -0.000500 - ,-0.029926, -0.012228, -0.000500 - ,-0.028977, -0.012824, -0.000500 - ,-0.027920, -0.013194, -0.000500 - ,-0.026847, -0.013300, -0.000500 - ,-0.031314, -0.010488, -0.000500 - ,-0.030718, -0.011437, -0.000500 - ,-0.031809, 0.006354, -0.000500 - ,-0.031809, 0.006354, 0.000501 - ,-0.031809, -0.006364, -0.000500 - ,-0.031809, -0.006364, 0.000501 - ,-0.031809, 0.005707, -0.000700 - ,-0.031809, 0.005707, 0.000701 - ,-0.031809, -0.005716, -0.000700 - ,-0.031809, -0.005716, 0.000701 - ,-0.031809, 0.005128, -0.001423 - ,-0.031809, 0.005128, 0.001424 - ,-0.031809, -0.005138, -0.001423 - ,-0.031809, -0.005138, 0.001424 - ,-0.031809, 0.004146, -0.002297 - ,-0.031809, 0.004146, 0.002299 - ,-0.031809, -0.004156, -0.002297 - ,-0.031809, -0.004156, 0.002299 - ,-0.031809, 0.003313, -0.002495 - ,-0.031809, 0.003313, 0.002497 - ,-0.031809, -0.003322, -0.002495 - ,-0.031809, -0.003322, 0.002497 - ,-0.031809, -0.000005, 0.000001 - ,-0.026800, 0.013300, -0.000500 - ,-0.026800, 0.013300, 0.000501 - ,0.088376, 0.013300, -0.000500 - ,0.088376, 0.013300, 0.000501 - ,0.093228, 0.009412, 0.000501 - ,0.093353, 0.008300, 0.000501 - ,0.090522, 0.012805, 0.000501 - ,0.091470, 0.012209, 0.000501 - ,0.089464, 0.013175, 0.000501 - ,0.092262, 0.011417, 0.000501 - ,0.092858, 0.010469, 0.000501 - ,0.093353, -0.008310, 0.000501 - ,0.093228, -0.009431, 0.000501 - ,0.090522, -0.012824, 0.000501 - ,0.091470, -0.012228, 0.000501 - ,0.088376, -0.013321, 0.000501 - ,0.089464, -0.013194, 0.000501 - ,0.092262, -0.011437, 0.000501 - ,0.092858, -0.010488, 0.000501 - ,0.093228, 0.009412, -0.000500 - ,0.093353, 0.008300, -0.000500 - ,0.090522, 0.012805, -0.000500 - ,0.091470, 0.012209, -0.000500 - ,0.089464, 0.013175, -0.000500 - ,0.092262, 0.011417, -0.000500 - ,0.092858, 0.010469, -0.000500 - ,0.093353, -0.008310, 
-0.000500 - ,0.093228, -0.009431, -0.000500 - ,0.091470, -0.012228, -0.000500 - ,0.090522, -0.012824, -0.000500 - ,0.089464, -0.013194, -0.000500 - ,0.088376, -0.013321, -0.000500 - ,0.092858, -0.010488, -0.000500 - ,0.092262, -0.011437, -0.000500 - ,-0.001600, 0.000000, -0.018000 - ,0.017592, 0.000000, -0.018000 - ,0.007996, -0.009596, -0.018000 - ,0.007996, -0.009763, -0.008431 - ,0.007996, 0.009763, -0.008431 - ,0.007996, 0.009596, -0.018000 - ,-0.001767, 0.000000, -0.008431 - ,0.002258, -0.007899, -0.008431 - ,0.002356, -0.007764, -0.018000 - ,0.005033, -0.009127, -0.018000 - ,0.004982, -0.009286, -0.008431 - ,0.000097, -0.005738, -0.008431 - ,0.000232, -0.005640, -0.018000 - ,-0.001131, -0.002963, -0.018000 - ,-0.001290, -0.003014, -0.008431 - ,0.000097, 0.005738, -0.008431 - ,0.000232, 0.005640, -0.018000 - ,-0.001131, 0.002963, -0.018000 - ,-0.001290, 0.003014, -0.008431 - ,0.002258, 0.007899, -0.008431 - ,0.002356, 0.007764, -0.018000 - ,0.005033, 0.009127, -0.018000 - ,0.004982, 0.009286, -0.008431 - ,0.017759, 0.000000, -0.008431 - ,0.013734, 0.007899, -0.008431 - ,0.013636, 0.007764, -0.018000 - ,0.010959, 0.009127, -0.018000 - ,0.011010, 0.009286, -0.008431 - ,0.015895, 0.005738, -0.008431 - ,0.015760, 0.005640, -0.018000 - ,0.017123, 0.002963, -0.018000 - ,0.017282, 0.003014, -0.008431 - ,0.015895, -0.005738, -0.008431 - ,0.015760, -0.005640, -0.018000 - ,0.017123, -0.002963, -0.018000 - ,0.017282, -0.003014, -0.008431 - ,0.013734, -0.007899, -0.008431 - ,0.013636, -0.007764, -0.018000 - ,0.010959, -0.009127, -0.018000 - ,0.011010, -0.009286, -0.008431 - ,0.004827, 0.009763, -0.007940 - ,0.007996, 0.010264, -0.007940 - ,-0.001767, -0.003169, -0.007940 - ,-0.002269, 0.000000, -0.007940 - ,0.004827, -0.009763, -0.007940 - ,0.001963, -0.008304, -0.007940 - ,0.007996, -0.010264, -0.007940 - ,-0.000308, -0.006033, -0.007940 - ,-0.001767, 0.003169, -0.007940 - ,-0.000308, 0.006033, -0.007940 - ,0.001963, 0.008304, -0.007940 - ,0.011165, -0.009763, -0.007940 
- ,0.017759, 0.003169, -0.007940 - ,0.018260, -0.000000, -0.007940 - ,0.011165, 0.009763, -0.007940 - ,0.014029, 0.008304, -0.007940 - ,0.016300, 0.006033, -0.007940 - ,0.017759, -0.003169, -0.007940 - ,0.016300, -0.006033, -0.007940 - ,0.014029, -0.008304, -0.007940 - ,0.002356, -0.007764, -0.019500 - ,0.005033, -0.009127, -0.019500 - ,0.007996, -0.009596, -0.019500 - ,0.000232, -0.005640, -0.019500 - ,-0.001600, 0.000000, -0.019500 - ,-0.001131, -0.002963, -0.019500 - ,0.000232, 0.005640, -0.019500 - ,-0.001131, 0.002963, -0.019500 - ,0.002356, 0.007764, -0.019500 - ,0.007996, 0.009596, -0.019500 - ,0.005033, 0.009127, -0.019500 - ,0.013636, 0.007764, -0.019500 - ,0.010959, 0.009127, -0.019500 - ,0.015760, 0.005640, -0.019500 - ,0.017592, 0.000000, -0.019500 - ,0.017123, 0.002963, -0.019500 - ,0.015760, -0.005640, -0.019500 - ,0.017123, -0.002963, -0.019500 - ,0.013636, -0.007764, -0.019500 - ,0.010959, -0.009127, -0.019500 - ,0.002356, -0.007764, -0.022997 - ,0.005033, -0.009127, -0.022997 - ,0.007996, -0.009596, -0.022997 - ,0.000232, -0.005640, -0.022997 - ,-0.001600, 0.000000, -0.022997 - ,-0.001131, -0.002963, -0.022997 - ,0.000232, 0.005640, -0.022997 - ,-0.001131, 0.002963, -0.022997 - ,0.002356, 0.007764, -0.022997 - ,0.007996, 0.009596, -0.022997 - ,0.005033, 0.009127, -0.022997 - ,0.013636, 0.007764, -0.022997 - ,0.010959, 0.009127, -0.022997 - ,0.015760, 0.005640, -0.022997 - ,0.017592, 0.000000, -0.022997 - ,0.017123, 0.002963, -0.022997 - ,0.015760, -0.005640, -0.022997 - ,0.017123, -0.002963, -0.022997 - ,0.013636, -0.007764, -0.022997 - ,0.010959, -0.009127, -0.022997 - ,0.002745, -0.007227, -0.022997 - ,0.005238, -0.008497, -0.022997 - ,0.007996, -0.008933, -0.022997 - ,0.000769, -0.005250, -0.022997 - ,-0.000937, 0.000000, -0.022997 - ,-0.000501, -0.002758, -0.022997 - ,0.000769, 0.005250, -0.022997 - ,-0.000501, 0.002758, -0.022997 - ,0.002745, 0.007227, -0.022997 - ,0.007996, 0.008933, -0.022997 - ,0.005238, 0.008497, -0.022997 - ,0.013246, 
0.007227, -0.022997 - ,0.010754, 0.008497, -0.022997 - ,0.015223, 0.005250, -0.022997 - ,0.016929, 0.000000, -0.022997 - ,0.016493, 0.002758, -0.022997 - ,0.015223, -0.005250, -0.022997 - ,0.016493, -0.002758, -0.022997 - ,0.013246, -0.007227, -0.022997 - ,0.010754, -0.008497, -0.022997 - ,0.004095, -0.005369, -0.022203 - ,0.005947, -0.006313, -0.022203 - ,0.007996, -0.006637, -0.022203 - ,0.002626, -0.003901, -0.022203 - ,0.001359, 0.000000, -0.022203 - ,0.001683, -0.002049, -0.022203 - ,0.002626, 0.003901, -0.022203 - ,0.001683, 0.002049, -0.022203 - ,0.004095, 0.005369, -0.022203 - ,0.007996, 0.006637, -0.022203 - ,0.005947, 0.006313, -0.022203 - ,0.011897, 0.005369, -0.022203 - ,0.010045, 0.006313, -0.022203 - ,0.013365, 0.003901, -0.022203 - ,0.014633, 0.000000, -0.022203 - ,0.014308, 0.002049, -0.022203 - ,0.013365, -0.003901, -0.022203 - ,0.014308, -0.002049, -0.022203 - ,0.011897, -0.005369, -0.022203 - ,0.010045, -0.006313, -0.022203 - ,0.004446, -0.004886, -0.021500 - ,0.006131, -0.005744, -0.021500 - ,0.007996, -0.006039, -0.021500 - ,0.003110, -0.003549, -0.021500 - ,0.001957, 0.000000, -0.021500 - ,0.002252, -0.001865, -0.021500 - ,0.003110, 0.003549, -0.021500 - ,0.002252, 0.001865, -0.021500 - ,0.004446, 0.004886, -0.021500 - ,0.007996, 0.006039, -0.021500 - ,0.006131, 0.005744, -0.021500 - ,0.011545, 0.004886, -0.021500 - ,0.009861, 0.005744, -0.021500 - ,0.012882, 0.003549, -0.021500 - ,0.014035, 0.000000, -0.021500 - ,0.013740, 0.001865, -0.021500 - ,0.012882, -0.003549, -0.021500 - ,0.013740, -0.001865, -0.021500 - ,0.011545, -0.004886, -0.021500 - ,0.009861, -0.005744, -0.021500 - ,0.004446, -0.004886, -0.020078 - ,0.006131, -0.005744, -0.020078 - ,0.007996, -0.006039, -0.020078 - ,0.003110, -0.003549, -0.020078 - ,0.001957, 0.000000, -0.020078 - ,0.002252, -0.001865, -0.020078 - ,0.003110, 0.003549, -0.020078 - ,0.002252, 0.001865, -0.020078 - ,0.004446, 0.004886, -0.020078 - ,0.007996, 0.006039, -0.020078 - ,0.006131, 0.005744, -0.020078 - 
,0.011545, 0.004886, -0.020078 - ,0.009861, 0.005744, -0.020078 - ,0.012882, 0.003549, -0.020078 - ,0.014035, 0.000000, -0.020078 - ,0.013740, 0.001865, -0.020078 - ,0.012882, -0.003549, -0.020078 - ,0.013740, -0.001865, -0.020078 - ,0.011545, -0.004886, -0.020078 - ,0.009861, -0.005744, -0.020078 - ,-0.026847, -0.013300, -0.006500 - ,-0.031847, -0.008300, -0.006500 - ,-0.029965, -0.012209, -0.006500 - ,-0.027959, -0.013175, -0.006500 - ,-0.029016, -0.012805, -0.006500 - ,-0.031352, -0.010469, -0.006500 - ,-0.030756, -0.011417, -0.006500 - ,-0.031722, -0.009412, -0.006500 - ,0.088353, -0.013310, -0.006500 - ,-0.031847, 0.008300, -0.006500 - ,-0.026847, 0.013300, -0.006500 - ,-0.030756, 0.011417, -0.006500 - ,-0.031722, 0.009412, -0.006500 - ,-0.031352, 0.010469, -0.006500 - ,-0.029016, 0.012805, -0.006500 - ,-0.029965, 0.012209, -0.006500 - ,-0.027959, 0.013175, -0.006500 - ,0.088353, 0.013300, -0.006500 - ,0.093353, 0.008300, -0.006500 - ,0.091470, 0.012209, -0.006500 - ,0.089464, 0.013175, -0.006500 - ,0.090522, 0.012805, -0.006500 - ,0.092858, 0.010469, -0.006500 - ,0.092262, 0.011417, -0.006500 - ,0.093228, 0.009412, -0.006500 - ,0.093353, -0.008310, -0.006500 - ,0.091470, -0.012228, -0.006500 - ,0.089464, -0.013194, -0.006500 - ,0.090522, -0.012824, -0.006500 - ,0.092858, -0.010488, -0.006500 - ,0.092262, -0.011437, -0.006500 - ,0.093228, -0.009431, -0.006500 - ,-0.031722, -0.009412, -0.002250 - ,-0.031809, -0.004156, -0.002297 - ,-0.029016, -0.012805, -0.002250 - ,-0.029965, -0.012209, -0.002250 - ,-0.026847, -0.013300, -0.002250 - ,-0.027959, -0.013175, -0.002250 - ,-0.030756, -0.011417, -0.002250 - ,-0.031352, -0.010469, -0.002250 - ,0.088353, -0.013310, -0.002250 - ,-0.031809, 0.004146, -0.002297 - ,-0.027959, 0.013175, -0.002250 - ,-0.026847, 0.013300, -0.002250 - ,-0.031352, 0.010469, -0.002250 - ,-0.030756, 0.011417, -0.002250 - ,-0.031722, 0.009412, -0.002250 - ,-0.029965, 0.012209, -0.002250 - ,-0.029016, 0.012805, -0.002250 - ,0.088353, 0.013300, 
-0.002250 - ,0.093228, 0.009412, -0.002250 - ,0.093353, 0.008300, -0.002250 - ,0.090522, 0.012805, -0.002250 - ,0.091470, 0.012209, -0.002250 - ,0.089464, 0.013175, -0.002250 - ,0.092262, 0.011417, -0.002250 - ,0.092858, 0.010469, -0.002250 - ,0.093353, -0.008310, -0.002250 - ,0.093228, -0.009431, -0.002250 - ,0.090522, -0.012824, -0.002250 - ,0.091470, -0.012228, -0.002250 - ,0.089464, -0.013194, -0.002250 - ,0.092262, -0.011437, -0.002250 - ,0.092858, -0.010488, -0.002250 - ,0.002815, -0.013333, -0.002250 - ,0.001761, 0.013300, -0.001999 - ,0.058785, -0.013333, -0.002250 - ,0.059839, 0.013300, -0.001999 - ,-0.031722, -0.009412, -0.000500 - ,-0.031847, -0.006340, -0.000500 - ,-0.029016, -0.012805, -0.000500 - ,-0.029965, -0.012209, -0.000500 - ,-0.026847, -0.013300, -0.000500 - ,-0.027959, -0.013175, -0.000500 - ,-0.030756, -0.011417, -0.000500 - ,-0.031352, -0.010469, -0.000500 - ,0.000152, -0.013333, -0.000563 - ,-0.031847, 0.006354, -0.000500 - ,-0.027959, 0.013175, -0.000500 - ,-0.026847, 0.013300, -0.000500 - ,-0.031352, 0.010469, -0.000500 - ,-0.030756, 0.011417, -0.000500 - ,-0.031722, 0.009412, -0.000500 - ,-0.029965, 0.012209, -0.000500 - ,-0.029016, 0.012805, -0.000500 - ,0.088353, 0.013300, -0.000500 - ,0.061448, 0.013300, -0.000563 - ,0.093228, 0.009412, -0.000500 - ,0.093353, 0.008300, -0.000500 - ,0.090522, 0.012805, -0.000500 - ,0.091470, 0.012209, -0.000500 - ,0.089464, 0.013175, -0.000500 - ,0.092262, 0.011417, -0.000500 - ,0.092858, 0.010469, -0.000500 - ,0.093353, -0.008310, -0.000500 - ,0.093228, -0.009431, -0.000500 - ,0.090522, -0.012824, -0.000500 - ,0.091470, -0.012228, -0.000500 - ,0.088353, -0.013310, -0.000500 - ,0.089464, -0.013194, -0.000500 - ,0.092262, -0.011437, -0.000500 - ,0.092858, -0.010488, -0.000500 - ,0.000151, 0.013300, -0.000563 - ,0.061448, -0.013333, -0.000563 - ,0.058800, 0.013300, -0.002250 - ,0.002815, 0.013300, -0.002250 - ,0.000830, 0.013300, -0.001351 - ,0.060770, 0.013300, -0.001351 - ,0.060770, -0.013333, 
-0.001351 - ,0.059839, -0.013333, -0.001999 - ,0.000830, -0.013333, -0.001351 - ,0.001761, -0.013333, -0.001999 - ,-0.026844, -0.011518, -0.007940 - ,-0.026847, -0.011634, -0.007898 - ,-0.027589, -0.011551, -0.007898 - ,-0.027563, -0.011437, -0.007940 - ,-0.028294, -0.011304, -0.007898 - ,-0.028243, -0.011199, -0.007940 - ,-0.028926, -0.010907, -0.007898 - ,-0.028854, -0.010816, -0.007940 - ,-0.029454, -0.010379, -0.007898 - ,-0.029363, -0.010306, -0.007940 - ,-0.029852, -0.009746, -0.007898 - ,-0.029747, -0.009696, -0.007940 - ,-0.030098, -0.009041, -0.007898 - ,-0.029985, -0.009016, -0.007940 - ,-0.030181, -0.008300, -0.007898 - ,-0.030066, -0.008297, -0.007940 - ,-0.030181, 0.008300, -0.007898 - ,-0.030065, 0.008296, -0.007940 - ,-0.030098, 0.009041, -0.007898 - ,-0.029985, 0.009015, -0.007940 - ,-0.029852, 0.009746, -0.007898 - ,-0.029747, 0.009696, -0.007940 - ,-0.029454, 0.010379, -0.007898 - ,-0.029363, 0.010306, -0.007940 - ,-0.028926, 0.010907, -0.007898 - ,-0.028854, 0.010816, -0.007940 - ,-0.028294, 0.011304, -0.007898 - ,-0.028243, 0.011199, -0.007940 - ,-0.027589, 0.011551, -0.007898 - ,-0.027563, 0.011437, -0.007940 - ,-0.026847, 0.011634, -0.007898 - ,-0.026844, 0.011518, -0.007940 - ,0.088349, 0.011518, -0.007940 - ,0.088353, 0.011634, -0.007898 - ,0.089094, 0.011551, -0.007898 - ,0.089068, 0.011437, -0.007940 - ,0.089799, 0.011304, -0.007898 - ,0.089749, 0.011199, -0.007940 - ,0.090432, 0.010907, -0.007898 - ,0.090359, 0.010816, -0.007940 - ,0.090960, 0.010379, -0.007898 - ,0.090869, 0.010306, -0.007940 - ,0.091357, 0.009746, -0.007898 - ,0.091252, 0.009696, -0.007940 - ,0.091604, 0.009041, -0.007898 - ,0.091490, 0.009016, -0.007940 - ,0.091637, 0.008187, -0.007940 - ,0.091724, 0.008300, -0.007867 - ,0.088353, -0.011644, -0.007898 - ,0.088349, -0.011528, -0.007940 - ,0.089094, -0.011570, -0.007898 - ,0.089069, -0.011456, -0.007940 - ,0.089799, -0.011323, -0.007898 - ,0.089749, -0.011219, -0.007940 - ,0.090432, -0.010926, -0.007898 - ,0.090359, 
-0.010835, -0.007940 - ,0.090960, -0.010398, -0.007898 - ,0.090869, -0.010325, -0.007940 - ,0.091357, -0.009766, -0.007898 - ,0.091252, -0.009715, -0.007940 - ,0.091604, -0.009061, -0.007898 - ,0.091490, -0.009035, -0.007940 - ,0.091724, -0.008310, -0.007867 - ,0.091637, -0.008196, -0.007940 - ,-0.031809, -0.003322, -0.002495 - ,-0.031809, 0.003313, -0.002495 - ,-0.031809, 0.005707, -0.000700 - ,-0.031809, 0.005128, -0.001423 - ,-0.031809, -0.005716, -0.000700 - ,-0.031809, -0.005138, -0.001423 - ,0.061397, 0.000000, -0.018000 - ,0.080589, 0.000000, -0.018000 - ,0.070993, -0.009596, -0.018000 - ,0.070993, -0.009763, -0.008431 - ,0.070993, 0.009763, -0.008431 - ,0.070993, 0.009596, -0.018000 - ,0.061230, 0.000000, -0.008431 - ,0.065255, -0.007899, -0.008431 - ,0.065353, -0.007764, -0.018000 - ,0.068030, -0.009127, -0.018000 - ,0.067979, -0.009286, -0.008431 - ,0.063094, -0.005738, -0.008431 - ,0.063229, -0.005640, -0.018000 - ,0.061866, -0.002963, -0.018000 - ,0.061707, -0.003014, -0.008431 - ,0.063094, 0.005738, -0.008431 - ,0.063229, 0.005640, -0.018000 - ,0.061866, 0.002963, -0.018000 - ,0.061707, 0.003014, -0.008431 - ,0.065255, 0.007899, -0.008431 - ,0.065353, 0.007764, -0.018000 - ,0.068030, 0.009127, -0.018000 - ,0.067979, 0.009286, -0.008431 - ,0.080756, 0.000000, -0.008431 - ,0.076731, 0.007899, -0.008431 - ,0.076633, 0.007764, -0.018000 - ,0.073956, 0.009127, -0.018000 - ,0.074007, 0.009286, -0.008431 - ,0.078892, 0.005738, -0.008431 - ,0.078757, 0.005640, -0.018000 - ,0.080120, 0.002963, -0.018000 - ,0.080279, 0.003014, -0.008431 - ,0.078892, -0.005738, -0.008431 - ,0.078757, -0.005640, -0.018000 - ,0.080120, -0.002963, -0.018000 - ,0.080279, -0.003014, -0.008431 - ,0.076731, -0.007899, -0.008431 - ,0.076633, -0.007764, -0.018000 - ,0.073956, -0.009127, -0.018000 - ,0.074007, -0.009286, -0.008431 - ,0.067824, 0.009763, -0.007940 - ,0.070993, 0.010264, -0.007940 - ,0.061230, -0.003169, -0.007940 - ,0.060728, 0.000000, -0.007940 - ,0.067824, -0.009763, 
-0.007940 - ,0.064960, -0.008304, -0.007940 - ,0.070993, -0.010264, -0.007940 - ,0.062688, -0.006033, -0.007940 - ,0.061230, 0.003169, -0.007940 - ,0.062688, 0.006033, -0.007940 - ,0.064960, 0.008304, -0.007940 - ,0.074162, -0.009763, -0.007940 - ,0.080756, 0.003169, -0.007940 - ,0.081257, -0.000000, -0.007940 - ,0.074162, 0.009763, -0.007940 - ,0.077026, 0.008304, -0.007940 - ,0.079297, 0.006033, -0.007940 - ,0.080756, -0.003169, -0.007940 - ,0.079297, -0.006033, -0.007940 - ,0.077026, -0.008304, -0.007940 - ,0.065353, -0.007764, -0.019500 - ,0.068030, -0.009127, -0.019500 - ,0.070993, -0.009596, -0.019500 - ,0.063229, -0.005640, -0.019500 - ,0.061397, 0.000000, -0.019500 - ,0.061866, -0.002963, -0.019500 - ,0.063229, 0.005640, -0.019500 - ,0.061866, 0.002963, -0.019500 - ,0.065353, 0.007764, -0.019500 - ,0.070993, 0.009596, -0.019500 - ,0.068030, 0.009127, -0.019500 - ,0.076633, 0.007764, -0.019500 - ,0.073956, 0.009127, -0.019500 - ,0.078757, 0.005640, -0.019500 - ,0.080589, 0.000000, -0.019500 - ,0.080120, 0.002963, -0.019500 - ,0.078757, -0.005640, -0.019500 - ,0.080120, -0.002963, -0.019500 - ,0.076633, -0.007764, -0.019500 - ,0.073956, -0.009127, -0.019500 - ,0.065353, -0.007764, -0.022997 - ,0.068030, -0.009127, -0.022997 - ,0.070993, -0.009596, -0.022997 - ,0.063229, -0.005640, -0.022997 - ,0.061397, 0.000000, -0.022997 - ,0.061866, -0.002963, -0.022997 - ,0.063229, 0.005640, -0.022997 - ,0.061866, 0.002963, -0.022997 - ,0.065353, 0.007764, -0.022997 - ,0.070993, 0.009596, -0.022997 - ,0.068030, 0.009127, -0.022997 - ,0.076633, 0.007764, -0.022997 - ,0.073956, 0.009127, -0.022997 - ,0.078757, 0.005640, -0.022997 - ,0.080589, 0.000000, -0.022997 - ,0.080120, 0.002963, -0.022997 - ,0.078757, -0.005640, -0.022997 - ,0.080120, -0.002963, -0.022997 - ,0.076633, -0.007764, -0.022997 - ,0.073956, -0.009127, -0.022997 - ,0.065742, -0.007227, -0.022997 - ,0.068235, -0.008497, -0.022997 - ,0.070993, -0.008933, -0.022997 - ,0.063766, -0.005250, -0.022997 - ,0.062060, 
0.000000, -0.022997 - ,0.062496, -0.002758, -0.022997 - ,0.063766, 0.005250, -0.022997 - ,0.062496, 0.002758, -0.022997 - ,0.065742, 0.007227, -0.022997 - ,0.070993, 0.008933, -0.022997 - ,0.068235, 0.008497, -0.022997 - ,0.076243, 0.007227, -0.022997 - ,0.073751, 0.008497, -0.022997 - ,0.078220, 0.005250, -0.022997 - ,0.079926, 0.000000, -0.022997 - ,0.079490, 0.002758, -0.022997 - ,0.078220, -0.005250, -0.022997 - ,0.079490, -0.002758, -0.022997 - ,0.076243, -0.007227, -0.022997 - ,0.073751, -0.008497, -0.022997 - ,0.067092, -0.005369, -0.022203 - ,0.068944, -0.006313, -0.022203 - ,0.070993, -0.006637, -0.022203 - ,0.065623, -0.003901, -0.022203 - ,0.064356, 0.000000, -0.022203 - ,0.064680, -0.002049, -0.022203 - ,0.065623, 0.003901, -0.022203 - ,0.064680, 0.002049, -0.022203 - ,0.067092, 0.005369, -0.022203 - ,0.070993, 0.006637, -0.022203 - ,0.068944, 0.006313, -0.022203 - ,0.074894, 0.005369, -0.022203 - ,0.073042, 0.006313, -0.022203 - ,0.076362, 0.003901, -0.022203 - ,0.077630, 0.000000, -0.022203 - ,0.077305, 0.002049, -0.022203 - ,0.076362, -0.003901, -0.022203 - ,0.077305, -0.002049, -0.022203 - ,0.074894, -0.005369, -0.022203 - ,0.073042, -0.006313, -0.022203 - ,0.067443, -0.004886, -0.021500 - ,0.069128, -0.005744, -0.021500 - ,0.070993, -0.006039, -0.021500 - ,0.066107, -0.003549, -0.021500 - ,0.064954, 0.000000, -0.021500 - ,0.065249, -0.001865, -0.021500 - ,0.066107, 0.003549, -0.021500 - ,0.065249, 0.001865, -0.021500 - ,0.067443, 0.004886, -0.021500 - ,0.070993, 0.006039, -0.021500 - ,0.069128, 0.005744, -0.021500 - ,0.074542, 0.004886, -0.021500 - ,0.072858, 0.005744, -0.021500 - ,0.075879, 0.003549, -0.021500 - ,0.077032, 0.000000, -0.021500 - ,0.076737, 0.001865, -0.021500 - ,0.075879, -0.003549, -0.021500 - ,0.076737, -0.001865, -0.021500 - ,0.074542, -0.004886, -0.021500 - ,0.072858, -0.005744, -0.021500 - ,0.067443, -0.004886, -0.020078 - ,0.069128, -0.005744, -0.020078 - ,0.070993, -0.006039, -0.020078 - ,0.066107, -0.003549, -0.020078 - 
,0.064954, 0.000000, -0.020078 - ,0.065249, -0.001865, -0.020078 - ,0.066107, 0.003549, -0.020078 - ,0.065249, 0.001865, -0.020078 - ,0.067443, 0.004886, -0.020078 - ,0.070993, 0.006039, -0.020078 - ,0.069128, 0.005744, -0.020078 - ,0.074542, 0.004886, -0.020078 - ,0.072858, 0.005744, -0.020078 - ,0.075879, 0.003549, -0.020078 - ,0.077032, 0.000000, -0.020078 - ,0.076737, 0.001865, -0.020078 - ,0.075879, -0.003549, -0.020078 - ,0.076737, -0.001865, -0.020078 - ,0.074542, -0.004886, -0.020078 - ,0.072858, -0.005744, -0.020078 - ,-0.026847, -0.013300, 0.006300 - ,0.088353, -0.013310, 0.006300 - ,0.002815, -0.013333, 0.002250 - ,-0.026847, 0.013300, 0.006300 - ,0.002815, 0.013300, 0.002250 - ,0.058800, 0.013300, 0.002250 - ,0.088353, 0.013300, 0.006300 - ,-0.026847, -0.013300, 0.002250 - ,-0.026847, -0.013300, 0.000500 - ,-0.027959, -0.013175, 0.006300 - ,-0.029016, -0.012805, 0.006300 - ,-0.026847, 0.013300, 0.000500 - ,0.000151, 0.013300, 0.000563 - ,0.000830, 0.013300, 0.001351 - ,-0.029965, -0.012209, 0.006300 - ,0.001761, 0.013300, 0.001999 - ,-0.030756, -0.011417, 0.006300 - ,-0.031352, -0.010469, 0.006300 - ,-0.031722, -0.009412, 0.006300 - ,-0.031847, -0.008300, 0.006300 - ,-0.026847, 0.013300, 0.002250 - ,-0.031847, 0.008300, 0.006300 - ,-0.027959, 0.013175, 0.006300 - ,-0.031722, 0.009412, 0.006300 - ,-0.031352, 0.010469, 0.006300 - ,-0.029016, 0.012805, 0.006300 - ,-0.030756, 0.011417, 0.006300 - ,-0.029965, 0.012209, 0.006300 - ,-0.027959, -0.013175, 0.000500 - ,-0.027959, -0.013175, 0.002250 - ,-0.029016, -0.012805, 0.000500 - ,-0.029016, -0.012805, 0.002250 - ,-0.029965, -0.012209, 0.000500 - ,-0.029965, -0.012209, 0.002250 - ,-0.030756, -0.011417, 0.000500 - ,-0.030756, -0.011417, 0.002250 - ,-0.031352, -0.010469, 0.000500 - ,-0.031352, -0.010469, 0.002250 - ,-0.031722, -0.009412, 0.000500 - ,-0.031722, -0.009412, 0.002250 - ,-0.031847, -0.006364, 0.000500 - ,-0.031809, -0.004156, 0.002299 - ,-0.031847, 0.006354, 0.000500 - ,-0.031809, 0.004146, 
0.002299 - ,-0.031722, 0.009412, 0.000500 - ,-0.031722, 0.009412, 0.002250 - ,-0.027959, 0.013175, 0.002250 - ,-0.027959, 0.013175, 0.000500 - ,-0.031352, 0.010469, 0.000500 - ,-0.031352, 0.010469, 0.002250 - ,-0.029016, 0.012805, 0.002250 - ,-0.029016, 0.012805, 0.000500 - ,-0.030756, 0.011417, 0.000500 - ,-0.030756, 0.011417, 0.002250 - ,-0.029965, 0.012209, 0.002250 - ,-0.029965, 0.012209, 0.000500 - ,0.059839, 0.013300, 0.001999 - ,0.060770, 0.013300, 0.001351 - ,0.088353, 0.013300, 0.002250 - ,0.058785, -0.013333, 0.002250 - ,0.089464, 0.013175, 0.006300 - ,0.090522, 0.012805, 0.006300 - ,0.091470, 0.012209, 0.006300 - ,0.092262, 0.011417, 0.006300 - ,0.092858, 0.010469, 0.006300 - ,0.093228, 0.009412, 0.006300 - ,0.093353, 0.008300, 0.006300 - ,0.088353, -0.013310, 0.002250 - ,0.059839, -0.013333, 0.001999 - ,0.060770, -0.013333, 0.001351 - ,0.061448, 0.013300, 0.000563 - ,0.088353, 0.013300, 0.000500 - ,0.088353, -0.013310, 0.000500 - ,0.061448, -0.013333, 0.000563 - ,0.093353, -0.008310, 0.006300 - ,0.089464, -0.013194, 0.006300 - ,0.093228, -0.009431, 0.006300 - ,0.092858, -0.010488, 0.006300 - ,0.090522, -0.012824, 0.006300 - ,0.092262, -0.011437, 0.006300 - ,0.091470, -0.012228, 0.006300 - ,0.089464, 0.013175, 0.002250 - ,0.090522, 0.012805, 0.002250 - ,0.091470, 0.012209, 0.002250 - ,0.089464, 0.013175, 0.000500 - ,0.090522, 0.012805, 0.000500 - ,0.092262, 0.011417, 0.002250 - ,0.091470, 0.012209, 0.000500 - ,0.092858, 0.010469, 0.002250 - ,0.092262, 0.011417, 0.000500 - ,0.093228, 0.009412, 0.002250 - ,0.093353, 0.008300, 0.002250 - ,0.092858, 0.010469, 0.000500 - ,0.093228, 0.009412, 0.000500 - ,0.093353, 0.008300, 0.000500 - ,0.093353, -0.008310, 0.002250 - ,0.093353, -0.008310, 0.000500 - ,0.093228, -0.009431, 0.002250 - ,0.089464, -0.013194, 0.002250 - ,0.092858, -0.010488, 0.002250 - ,0.090522, -0.012824, 0.002250 - ,0.092262, -0.011437, 0.002250 - ,0.091470, -0.012228, 0.002250 - ,0.093228, -0.009431, 0.000500 - ,0.089464, -0.013194, 0.000500 - 
,0.092858, -0.010488, 0.000500 - ,0.090522, -0.012824, 0.000500 - ,0.092262, -0.011437, 0.000500 - ,0.091470, -0.012228, 0.000500 - ,0.000152, -0.013333, 0.000564 - ,0.000830, -0.013333, 0.001352 - ,0.001761, -0.013333, 0.002000 - ,-0.026591, -0.013201, 0.006500 - ,0.088097, -0.013211, 0.006500 - ,-0.026591, 0.013201, 0.006500 - ,0.088097, 0.013201, 0.006500 - ,0.088477, 0.011982, 0.006500 - ,-0.026591, 0.011982, 0.006500 - ,-0.027698, -0.013077, 0.006500 - ,-0.028751, -0.012710, 0.006500 - ,-0.029695, -0.012119, 0.006500 - ,-0.030483, -0.011333, 0.006500 - ,-0.031076, -0.010391, 0.006500 - ,-0.031445, -0.009342, 0.006500 - ,-0.031569, -0.008238, 0.006500 - ,-0.027698, 0.013077, 0.006500 - ,-0.031445, 0.009342, 0.006500 - ,-0.031569, 0.008238, 0.006500 - ,-0.028751, 0.012710, 0.006500 - ,-0.031076, 0.010391, 0.006500 - ,-0.029695, 0.012118, 0.006500 - ,-0.030483, 0.011332, 0.006500 - ,-0.027703, -0.011897, 0.006500 - ,-0.026192, -0.011982, 0.006500 - ,-0.028399, -0.011645, 0.006500 - ,-0.029024, -0.011241, 0.006500 - ,-0.029545, -0.010703, 0.006500 - ,-0.029937, -0.010058, 0.006500 - ,-0.030180, -0.009340, 0.006500 - ,-0.030263, -0.008585, 0.006500 - ,-0.030263, 0.008585, 0.006500 - ,-0.030180, 0.009340, 0.006500 - ,-0.027703, 0.011897, 0.006500 - ,-0.029937, 0.010059, 0.006500 - ,-0.028399, 0.011646, 0.006500 - ,-0.029545, 0.010703, 0.006500 - ,-0.029024, 0.011241, 0.006500 - ,0.088477, -0.011992, 0.006500 - ,0.089204, 0.013077, 0.006500 - ,0.090256, 0.012710, 0.006500 - ,0.091200, 0.012118, 0.006500 - ,0.091989, 0.011332, 0.006500 - ,0.092582, 0.010391, 0.006500 - ,0.092950, 0.009342, 0.006500 - ,0.093075, 0.008238, 0.006500 - ,0.089204, -0.013096, 0.006500 - ,0.092950, -0.009361, 0.006500 - ,0.093075, -0.008248, 0.006500 - ,0.090256, -0.012729, 0.006500 - ,0.092582, -0.010410, 0.006500 - ,0.091200, -0.012138, 0.006500 - ,0.091989, -0.011352, 0.006500 - ,0.089209, 0.011897, 0.006500 - ,0.089905, 0.011646, 0.006500 - ,0.090529, 0.011241, 0.006500 - ,0.091050, 
0.010703, 0.006500 - ,0.091443, 0.010059, 0.006500 - ,0.091686, 0.009340, 0.006500 - ,0.091768, 0.008585, 0.006500 - ,0.091768, -0.008595, 0.006500 - ,0.091686, -0.009360, 0.006500 - ,0.089209, -0.011916, 0.006500 - ,0.091443, -0.010078, 0.006500 - ,0.089905, -0.011665, 0.006500 - ,0.091050, -0.010723, 0.006500 - ,0.090529, -0.011260, 0.006500 - ,-0.031809, -0.003322, 0.002497 - ,-0.031809, 0.003313, 0.002497 - ,-0.031809, 0.005707, 0.000701 - ,-0.031809, 0.005128, 0.001424 - ,-0.031809, -0.005138, 0.001424 - ,-0.031809, -0.005716, 0.000701 - ,0.070993, -0.000000, -0.021500 - ,0.076551, 0.004038, -0.021500 - ,0.075030, 0.005558, -0.021500 - ,0.073114, 0.006534, -0.021500 - ,0.077863, 0.000000, -0.021500 - ,0.077527, 0.002121, -0.021500 - ,0.065435, -0.004038, -0.021500 - ,0.066955, -0.005558, -0.021500 - ,0.068872, -0.006534, -0.021500 - ,0.076551, -0.004038, -0.021500 - ,0.077527, -0.002121, -0.021500 - ,0.065435, 0.004038, -0.021500 - ,0.064459, 0.002121, -0.021500 - ,0.064123, 0.000000, -0.021500 - ,0.068872, 0.006534, -0.021500 - ,0.066955, 0.005558, -0.021500 - ,0.070993, 0.006870, -0.021500 - ,0.073114, -0.006534, -0.021500 - ,0.075030, -0.005558, -0.021500 - ,0.070993, -0.006870, -0.021500 - ,0.064459, -0.002121, -0.021500 - ,0.065353, -0.007764, -0.018000 - ,0.068030, -0.009127, -0.018000 - ,0.068030, -0.009127, -0.019500 - ,0.065353, -0.007764, -0.019500 - ,0.070993, -0.009596, -0.018000 - ,0.070993, -0.009596, -0.019500 - ,0.063229, -0.005640, -0.018000 - ,0.063229, -0.005640, -0.019500 - ,0.061397, 0.000000, -0.018000 - ,0.061866, -0.002963, -0.018000 - ,0.061866, -0.002963, -0.019500 - ,0.061397, 0.000000, -0.019500 - ,0.063229, 0.005640, -0.018000 - ,0.061866, 0.002963, -0.018000 - ,0.061866, 0.002963, -0.019500 - ,0.063229, 0.005640, -0.019500 - ,0.065353, 0.007764, -0.018000 - ,0.065353, 0.007764, -0.019500 - ,0.070993, 0.009596, -0.018000 - ,0.068030, 0.009127, -0.018000 - ,0.068030, 0.009127, -0.019500 - ,0.070993, 0.009596, -0.019500 - ,0.076633, 
0.007764, -0.018000 - ,0.073956, 0.009127, -0.018000 - ,0.073956, 0.009127, -0.019500 - ,0.076633, 0.007764, -0.019500 - ,0.078757, 0.005640, -0.018000 - ,0.078757, 0.005640, -0.019500 - ,0.080589, 0.000000, -0.018000 - ,0.080120, 0.002963, -0.018000 - ,0.080120, 0.002963, -0.019500 - ,0.080589, 0.000000, -0.019500 - ,0.078757, -0.005640, -0.018000 - ,0.080120, -0.002963, -0.018000 - ,0.080120, -0.002963, -0.019500 - ,0.078757, -0.005640, -0.019500 - ,0.076633, -0.007764, -0.018000 - ,0.076633, -0.007764, -0.019500 - ,0.073956, -0.009127, -0.018000 - ,0.073956, -0.009127, -0.019500 - ,0.002356, -0.007764, -0.018000 - ,0.005033, -0.009127, -0.018000 - ,0.005033, -0.009127, -0.019500 - ,0.002356, -0.007764, -0.019500 - ,0.007996, -0.009596, -0.018000 - ,0.007996, -0.009596, -0.019500 - ,0.000232, -0.005640, -0.018000 - ,0.000232, -0.005640, -0.019500 - ,-0.001600, 0.000000, -0.018000 - ,-0.001131, -0.002963, -0.018000 - ,-0.001131, -0.002963, -0.019500 - ,-0.001600, 0.000000, -0.019500 - ,0.000232, 0.005640, -0.018000 - ,-0.001131, 0.002963, -0.018000 - ,-0.001131, 0.002963, -0.019500 - ,0.000232, 0.005640, -0.019500 - ,0.002356, 0.007764, -0.018000 - ,0.002356, 0.007764, -0.019500 - ,0.007996, 0.009596, -0.018000 - ,0.005033, 0.009127, -0.018000 - ,0.005033, 0.009127, -0.019500 - ,0.007996, 0.009596, -0.019500 - ,0.013636, 0.007764, -0.018000 - ,0.010959, 0.009127, -0.018000 - ,0.010959, 0.009127, -0.019500 - ,0.013636, 0.007764, -0.019500 - ,0.015760, 0.005640, -0.018000 - ,0.015760, 0.005640, -0.019500 - ,0.017592, 0.000000, -0.018000 - ,0.017123, 0.002963, -0.018000 - ,0.017123, 0.002963, -0.019500 - ,0.017592, 0.000000, -0.019500 - ,0.015760, -0.005640, -0.018000 - ,0.017123, -0.002963, -0.018000 - ,0.017123, -0.002963, -0.019500 - ,0.015760, -0.005640, -0.019500 - ,0.013636, -0.007764, -0.018000 - ,0.013636, -0.007764, -0.019500 - ,0.010959, -0.009127, -0.018000 - ,0.010959, -0.009127, -0.019500 - ,0.007996, -0.000000, -0.021500 - ,0.013554, 0.004038, -0.021500 
- ,0.012033, 0.005558, -0.021500 - ,0.010117, 0.006534, -0.021500 - ,0.014866, 0.000000, -0.021500 - ,0.014530, 0.002121, -0.021500 - ,0.002438, -0.004038, -0.021500 - ,0.003958, -0.005558, -0.021500 - ,0.005875, -0.006534, -0.021500 - ,0.013554, -0.004038, -0.021500 - ,0.014530, -0.002121, -0.021500 - ,0.002438, 0.004038, -0.021500 - ,0.001462, 0.002121, -0.021500 - ,0.001126, 0.000000, -0.021500 - ,0.005875, 0.006534, -0.021500 - ,0.003958, 0.005558, -0.021500 - ,0.007996, 0.006870, -0.021500 - ,0.010117, -0.006534, -0.021500 - ,0.012033, -0.005558, -0.021500 - ,0.007996, -0.006870, -0.021500 - ,0.001462, -0.002121, -0.021500 - ,0.090529, -0.008928, 0.006500 - ,0.089905, -0.009249, 0.006500 - ,0.089209, -0.009448, 0.006500 - ,0.088477, -0.009506, 0.006500 - ,-0.026234, -0.009497, 0.006500 - ,-0.027703, -0.009430, 0.006500 - ,-0.028399, -0.009231, 0.006500 - ,-0.029024, -0.008910, 0.006500 - ,0.090529, -0.009792, 0.006500 - ,0.089905, -0.010144, 0.006500 - ,0.089209, -0.010362, 0.006500 - ,0.088477, -0.010427, 0.006500 - ,-0.026219, -0.010417, 0.006500 - ,-0.027703, -0.010344, 0.006500 - ,-0.028399, -0.010125, 0.006500 - ,-0.029024, -0.009773, 0.006500 - ,0.090529, 0.008867, 0.006500 - ,0.089905, 0.009187, 0.006500 - ,0.089209, 0.009385, 0.006500 - ,0.088477, 0.009453, 0.006500 - ,-0.026549, 0.009454, 0.006500 - ,-0.027703, 0.009387, 0.006500 - ,-0.028399, 0.009189, 0.006500 - ,-0.029024, 0.008869, 0.006500 - ,0.090529, 0.009705, 0.006500 - ,0.089905, 0.010055, 0.006500 - ,0.089209, 0.010272, 0.006500 - ,0.088477, 0.010346, 0.006500 - ,-0.026564, 0.010347, 0.006500 - ,-0.027703, 0.010273, 0.006500 - ,-0.028399, 0.010056, 0.006500 - ,-0.029024, 0.009707, 0.006500 - ,0.090529, -0.010555, 0.006500 - ,0.089905, -0.010935, 0.006500 - ,0.089209, -0.011170, 0.006500 - ,0.088477, -0.011240, 0.006500 - ,-0.026205, -0.011231, 0.006500 - ,-0.027703, -0.011151, 0.006500 - ,-0.028399, -0.010915, 0.006500 - ,-0.029024, -0.010536, 0.006500 - ,0.090529, 0.010411, 0.006500 - 
,0.089905, 0.010786, 0.006500 - ,0.089209, 0.011019, 0.006500 - ,0.088477, 0.011098, 0.006500 - ,-0.026577, 0.011099, 0.006500 - ,-0.027703, 0.011020, 0.006500 - ,-0.028399, 0.010787, 0.006500 - ,-0.029024, 0.010412, 0.006500 -]) - -NB_AL_ZEDM_TRI = 125 -al_triangles_m = np.array([ - 2, 1, 8 - ,1, 2, 4 - ,1, 4, 5 - ,1, 5, 14 - ,1, 14, 15 - ,1, 6, 3 - ,1, 7, 6 - ,1, 15, 7 - ,1, 10, 8 - ,1, 11, 10 - ,1, 17, 11 - ,1, 16, 17 - ,1, 9, 12 - ,1, 12, 13 - ,1, 13, 16 - ,3, 9, 1 - ,3, 9, 1 - ,18, 21, 19 - ,18, 22, 21 - ,18, 32, 22 - ,18, 31, 32 - ,18, 31, 32 - ,18, 20, 23 - ,18, 23, 24 - ,18, 24, 31 - ,19, 25, 18 - ,20, 18, 26 - ,18, 25, 27 - ,18, 27, 28 - ,18, 28, 33 - ,18, 33, 34 - ,18, 33, 34 - ,18, 29, 26 - ,18, 30, 29 - ,18, 34, 30 - ,51, 35, 52 - ,52, 35, 36 - ,53, 37, 54 - ,54, 37, 38 - ,55, 39, 56 - ,56, 39, 40 - ,56, 40, 53 - ,53, 40, 37 - ,57, 41, 58 - ,58, 41, 42 - ,54, 38, 57 - ,57, 38, 41 - ,58, 42, 51 - ,51, 42, 35 - ,59, 43, 60 - ,60, 43, 44 - ,61, 46, 62 - ,62, 46, 45 - ,48, 47, 63 - ,63, 47, 64 - ,62, 45, 63 - ,63, 45, 48 - ,65, 50, 66 - ,66, 50, 49 - ,66, 49, 61 - ,61, 49, 46 - ,60, 44, 65 - ,65, 44, 50 - ,52, 36, 67 - ,67, 36, 68 - ,43, 59, 70 - ,70, 59, 69 - ,67, 68, 71 - ,71, 68, 72 - ,70, 69, 74 - ,74, 69, 73 - ,71, 72, 87 - ,74, 73, 87 - ,71, 87, 75 - ,87, 72, 76 - ,74, 87, 78 - ,87, 73, 77 - ,75, 87, 79 - ,87, 76, 80 - ,87, 77, 81 - ,78, 87, 82 - ,79, 87, 83 - ,87, 80, 84 - ,87, 81, 85 - ,82, 87, 86 - ,83, 87, 85 - ,87, 84, 86 - ,88, 17, 89 - ,89, 17, 16 - ,90, 91, 14 - ,14, 91, 15 - ,107, 108, 92 - ,92, 108, 93 - ,109, 110, 94 - ,94, 110, 95 - ,90, 111, 91 - ,91, 111, 96 - ,111, 109, 96 - ,96, 109, 94 - ,112, 113, 97 - ,97, 113, 98 - ,110, 112, 95 - ,95, 112, 97 - ,113, 107, 98 - ,98, 107, 92 - ,108, 114, 93 - ,93, 114, 99 - ,114, 115, 99 - ,99, 115, 100 - ,116, 117, 102 - ,102, 117, 101 - ,118, 119, 104 - ,104, 119, 103 - ,117, 118, 101 - ,101, 118, 104 - ,120, 121, 106 - ,106, 121, 105 - ,121, 116, 105 - ,105, 116, 102 - ,115, 120, 100 - ,100, 120, 
106 - ,64, 47, 33 - ,33, 47, 34 - ,119, 32, 103 - ,103, 32, 31 -]) - -NB_DARK_ZEDM_TRI = 1268 -dark_triangles_m = np.array([ - 126, 144, 127 - ,127, 144, 143 - ,128, 136, 122 - ,122, 136, 135 - ,129, 132, 130 - ,130, 132, 131 - ,132, 125, 131 - ,131, 125, 124 - ,133, 129, 134 - ,134, 129, 130 - ,136, 133, 135 - ,135, 133, 134 - ,137, 140, 138 - ,138, 140, 139 - ,140, 128, 139 - ,139, 128, 122 - ,141, 137, 142 - ,142, 137, 138 - ,144, 141, 143 - ,143, 141, 142 - ,124, 125, 160 - ,160, 125, 161 - ,145, 153, 123 - ,123, 153, 152 - ,146, 149, 147 - ,147, 149, 148 - ,149, 126, 148 - ,148, 126, 127 - ,150, 146, 151 - ,151, 146, 147 - ,153, 150, 152 - ,152, 150, 151 - ,154, 157, 155 - ,155, 157, 156 - ,157, 145, 156 - ,156, 145, 123 - ,158, 154, 159 - ,159, 154, 155 - ,161, 158, 160 - ,160, 158, 159 - ,126, 163, 144 - ,144, 163, 162 - ,128, 165, 136 - ,136, 165, 164 - ,129, 167, 132 - ,132, 167, 166 - ,132, 166, 125 - ,125, 166, 168 - ,133, 169, 129 - ,129, 169, 167 - ,136, 164, 133 - ,133, 164, 169 - ,137, 171, 140 - ,140, 171, 170 - ,140, 170, 128 - ,128, 170, 165 - ,141, 172, 137 - ,137, 172, 171 - ,144, 162, 141 - ,141, 162, 172 - ,125, 168, 161 - ,161, 168, 173 - ,145, 175, 153 - ,153, 175, 174 - ,146, 177, 149 - ,149, 177, 176 - ,149, 176, 126 - ,126, 176, 163 - ,150, 178, 146 - ,146, 178, 177 - ,153, 174, 150 - ,150, 174, 178 - ,154, 180, 157 - ,157, 180, 179 - ,157, 179, 145 - ,145, 179, 175 - ,158, 181, 154 - ,154, 181, 180 - ,161, 173, 158 - ,158, 173, 181 - ,182, 183, 202 - ,202, 183, 203 - ,183, 184, 203 - ,203, 184, 204 - ,185, 182, 205 - ,205, 182, 202 - ,186, 187, 206 - ,206, 187, 207 - ,187, 185, 207 - ,207, 185, 205 - ,188, 189, 208 - ,208, 189, 209 - ,189, 186, 209 - ,209, 186, 206 - ,190, 188, 210 - ,210, 188, 208 - ,191, 192, 211 - ,211, 192, 212 - ,192, 190, 212 - ,212, 190, 210 - ,193, 194, 213 - ,213, 194, 214 - ,194, 191, 214 - ,214, 191, 211 - ,195, 193, 215 - ,215, 193, 213 - ,196, 197, 216 - ,216, 197, 217 - ,197, 195, 217 - ,217, 195, 215 - 
,198, 199, 218 - ,218, 199, 219 - ,199, 196, 219 - ,219, 196, 216 - ,200, 198, 220 - ,220, 198, 218 - ,184, 201, 204 - ,204, 201, 221 - ,201, 200, 221 - ,221, 200, 220 - ,202, 203, 222 - ,222, 203, 223 - ,203, 204, 223 - ,223, 204, 224 - ,205, 202, 225 - ,225, 202, 222 - ,206, 207, 226 - ,226, 207, 227 - ,207, 205, 227 - ,227, 205, 225 - ,208, 209, 228 - ,228, 209, 229 - ,209, 206, 229 - ,229, 206, 226 - ,210, 208, 230 - ,230, 208, 228 - ,211, 212, 231 - ,231, 212, 232 - ,212, 210, 232 - ,232, 210, 230 - ,213, 214, 233 - ,233, 214, 234 - ,214, 211, 234 - ,234, 211, 231 - ,215, 213, 235 - ,235, 213, 233 - ,216, 217, 236 - ,236, 217, 237 - ,217, 215, 237 - ,237, 215, 235 - ,218, 219, 238 - ,238, 219, 239 - ,219, 216, 239 - ,239, 216, 236 - ,220, 218, 240 - ,240, 218, 238 - ,204, 221, 224 - ,224, 221, 241 - ,221, 220, 241 - ,241, 220, 240 - ,223, 243, 222 - ,222, 243, 242 - ,224, 244, 223 - ,223, 244, 243 - ,222, 242, 225 - ,225, 242, 245 - ,227, 247, 226 - ,226, 247, 246 - ,225, 245, 227 - ,227, 245, 247 - ,229, 249, 228 - ,228, 249, 248 - ,226, 246, 229 - ,229, 246, 249 - ,228, 248, 230 - ,230, 248, 250 - ,232, 252, 231 - ,231, 252, 251 - ,230, 250, 232 - ,232, 250, 252 - ,234, 254, 233 - ,233, 254, 253 - ,231, 251, 234 - ,234, 251, 254 - ,233, 253, 235 - ,235, 253, 255 - ,237, 257, 236 - ,236, 257, 256 - ,235, 255, 237 - ,237, 255, 257 - ,239, 259, 238 - ,238, 259, 258 - ,236, 256, 239 - ,239, 256, 259 - ,238, 258, 240 - ,240, 258, 260 - ,241, 261, 224 - ,224, 261, 244 - ,240, 260, 241 - ,241, 260, 261 - ,263, 262, 243 - ,243, 262, 242 - ,264, 263, 244 - ,244, 263, 243 - ,242, 262, 245 - ,245, 262, 265 - ,267, 266, 247 - ,247, 266, 246 - ,265, 267, 245 - ,245, 267, 247 - ,269, 268, 249 - ,249, 268, 248 - ,266, 269, 246 - ,246, 269, 249 - ,268, 270, 248 - ,248, 270, 250 - ,272, 271, 252 - ,252, 271, 251 - ,270, 272, 250 - ,250, 272, 252 - ,274, 273, 254 - ,254, 273, 253 - ,271, 274, 251 - ,251, 274, 254 - ,273, 275, 253 - ,253, 275, 255 - ,277, 276, 257 - ,257, 276, 
256 - ,275, 277, 255 - ,255, 277, 257 - ,279, 278, 259 - ,259, 278, 258 - ,276, 279, 256 - ,256, 279, 259 - ,278, 280, 258 - ,258, 280, 260 - ,281, 264, 261 - ,261, 264, 244 - ,280, 281, 260 - ,260, 281, 261 - ,262, 263, 282 - ,282, 263, 283 - ,263, 264, 283 - ,283, 264, 284 - ,265, 262, 285 - ,285, 262, 282 - ,266, 267, 286 - ,286, 267, 287 - ,267, 265, 287 - ,287, 265, 285 - ,268, 269, 288 - ,288, 269, 289 - ,269, 266, 289 - ,289, 266, 286 - ,270, 268, 290 - ,290, 268, 288 - ,271, 272, 291 - ,291, 272, 292 - ,272, 270, 292 - ,292, 270, 290 - ,273, 274, 293 - ,293, 274, 294 - ,274, 271, 294 - ,294, 271, 291 - ,275, 273, 295 - ,295, 273, 293 - ,276, 277, 296 - ,296, 277, 297 - ,277, 275, 297 - ,297, 275, 295 - ,278, 279, 298 - ,298, 279, 299 - ,279, 276, 299 - ,299, 276, 296 - ,280, 278, 300 - ,300, 278, 298 - ,264, 281, 284 - ,284, 281, 301 - ,281, 280, 301 - ,301, 280, 300 - ,320, 461, 327 - ,461, 460, 327 - ,460, 477, 327 - ,477, 476, 327 - ,309, 303, 334 - ,334, 303, 335 - ,306, 304, 336 - ,336, 304, 337 - ,302, 305, 338 - ,338, 305, 339 - ,305, 306, 339 - ,339, 306, 336 - ,308, 307, 340 - ,340, 307, 341 - ,304, 308, 337 - ,337, 308, 340 - ,307, 309, 341 - ,341, 309, 334 - ,366, 302, 338 - ,303, 478, 335 - ,318, 312, 344 - ,344, 312, 345 - ,315, 313, 346 - ,346, 313, 347 - ,314, 348, 311 - ,311, 348, 343 - ,314, 315, 348 - ,348, 315, 346 - ,317, 316, 349 - ,349, 316, 350 - ,313, 317, 347 - ,347, 317, 349 - ,316, 318, 350 - ,350, 318, 344 - ,312, 367, 345 - ,326, 320, 352 - ,352, 320, 353 - ,323, 321, 354 - ,354, 321, 355 - ,319, 322, 351 - ,351, 322, 356 - ,322, 323, 356 - ,356, 323, 354 - ,325, 324, 357 - ,357, 324, 358 - ,321, 325, 355 - ,355, 325, 357 - ,324, 326, 358 - ,358, 326, 352 - ,320, 327, 353 - ,353, 327, 359 - ,327, 333, 359 - ,359, 333, 360 - ,328, 330, 362 - ,362, 330, 361 - ,329, 310, 363 - ,363, 310, 342 - ,330, 329, 361 - ,361, 329, 363 - ,331, 332, 365 - ,365, 332, 364 - ,332, 328, 364 - ,364, 328, 362 - ,333, 331, 360 - ,360, 331, 365 - 
,368, 310, 366 - ,366, 310, 302 - ,367, 312, 407 - ,310, 368, 342 - ,369, 319, 351 - ,370, 482, 371 - ,336, 337, 372 - ,372, 337, 373 - ,338, 339, 374 - ,374, 339, 375 - ,339, 336, 375 - ,375, 336, 372 - ,340, 341, 376 - ,376, 341, 377 - ,337, 340, 373 - ,373, 340, 376 - ,341, 334, 377 - ,377, 334, 370 - ,366, 338, 413 - ,413, 338, 412 - ,412, 338, 378 - ,338, 374, 378 - ,344, 345, 380 - ,380, 345, 381 - ,346, 347, 382 - ,382, 347, 383 - ,481, 348, 480 - ,480, 348, 384 - ,348, 346, 384 - ,384, 346, 382 - ,349, 350, 385 - ,385, 350, 386 - ,347, 349, 383 - ,383, 349, 385 - ,350, 344, 386 - ,386, 344, 380 - ,409, 351, 388 - ,388, 351, 387 - ,352, 353, 389 - ,389, 353, 390 - ,354, 355, 391 - ,391, 355, 392 - ,351, 356, 387 - ,387, 356, 393 - ,356, 354, 393 - ,393, 354, 391 - ,357, 358, 394 - ,394, 358, 395 - ,355, 357, 392 - ,392, 357, 394 - ,358, 352, 395 - ,395, 352, 389 - ,353, 359, 390 - ,390, 359, 396 - ,359, 360, 396 - ,396, 360, 397 - ,362, 361, 399 - ,399, 361, 398 - ,363, 342, 401 - ,401, 342, 400 - ,361, 363, 398 - ,398, 363, 401 - ,365, 364, 403 - ,403, 364, 402 - ,364, 362, 402 - ,402, 362, 399 - ,360, 365, 397 - ,397, 365, 403 - ,367, 408, 345 - ,345, 408, 381 - ,342, 368, 411 - ,406, 319, 369 - ,312, 319, 407 - ,407, 319, 406 - ,369, 351, 409 - ,381, 408, 404 - ,400, 410, 405 - ,411, 410, 342 - ,342, 410, 400 - ,414, 415, 463 - ,463, 415, 462 - ,414, 417, 415 - ,415, 417, 416 - ,416, 417, 418 - ,418, 417, 419 - ,418, 419, 420 - ,420, 419, 421 - ,420, 421, 422 - ,422, 421, 423 - ,422, 423, 424 - ,424, 423, 425 - ,424, 425, 426 - ,426, 425, 427 - ,426, 427, 428 - ,428, 427, 429 - ,428, 429, 430 - ,430, 429, 431 - ,431, 433, 430 - ,430, 433, 432 - ,432, 433, 434 - ,434, 433, 435 - ,434, 435, 436 - ,436, 435, 437 - ,436, 437, 438 - ,438, 437, 439 - ,438, 439, 440 - ,440, 439, 441 - ,440, 441, 442 - ,442, 441, 443 - ,442, 443, 444 - ,444, 443, 445 - ,444, 445, 447 - ,447, 445, 446 - ,446, 449, 447 - ,447, 449, 448 - ,448, 449, 450 - ,450, 449, 451 - ,450, 451, 
452 - ,452, 451, 453 - ,452, 453, 454 - ,454, 453, 455 - ,454, 455, 456 - ,456, 455, 457 - ,456, 457, 458 - ,458, 457, 459 - ,458, 459, 461 - ,461, 459, 460 - ,463, 462, 465 - ,465, 462, 464 - ,464, 466, 465 - ,465, 466, 467 - ,467, 466, 469 - ,469, 466, 468 - ,469, 468, 471 - ,471, 468, 470 - ,471, 470, 473 - ,473, 470, 472 - ,473, 472, 475 - ,475, 472, 474 - ,474, 476, 475 - ,475, 476, 477 - ,303, 309, 428 - ,428, 309, 426 - ,304, 306, 420 - ,420, 306, 418 - ,305, 302, 416 - ,416, 302, 415 - ,306, 305, 418 - ,418, 305, 416 - ,307, 308, 424 - ,424, 308, 422 - ,308, 304, 422 - ,422, 304, 420 - ,309, 307, 426 - ,426, 307, 424 - ,428, 430, 303 - ,303, 430, 311 - ,312, 318, 444 - ,444, 318, 442 - ,313, 315, 436 - ,436, 315, 434 - ,314, 311, 432 - ,432, 311, 430 - ,315, 314, 434 - ,434, 314, 432 - ,316, 317, 440 - ,440, 317, 438 - ,317, 313, 438 - ,438, 313, 436 - ,318, 316, 442 - ,442, 316, 440 - ,320, 326, 461 - ,461, 326, 458 - ,321, 323, 452 - ,452, 323, 450 - ,322, 319, 448 - ,448, 319, 447 - ,323, 322, 450 - ,450, 322, 448 - ,324, 325, 456 - ,456, 325, 454 - ,325, 321, 454 - ,454, 321, 452 - ,326, 324, 458 - ,458, 324, 456 - ,462, 310, 464 - ,464, 310, 329 - ,466, 464, 330 - ,330, 464, 329 - ,468, 466, 328 - ,328, 466, 330 - ,470, 468, 332 - ,332, 468, 328 - ,472, 470, 331 - ,331, 470, 332 - ,474, 472, 333 - ,333, 472, 331 - ,327, 476, 333 - ,333, 476, 474 - ,427, 425, 433 - ,433, 425, 435 - ,425, 423, 435 - ,435, 423, 437 - ,423, 421, 437 - ,437, 421, 439 - ,421, 419, 439 - ,439, 419, 441 - ,419, 417, 441 - ,441, 417, 443 - ,417, 414, 443 - ,443, 414, 445 - ,463, 465, 446 - ,446, 465, 449 - ,465, 467, 449 - ,449, 467, 451 - ,467, 469, 451 - ,451, 469, 453 - ,469, 471, 453 - ,453, 471, 455 - ,471, 473, 455 - ,455, 473, 457 - ,473, 475, 457 - ,457, 475, 459 - ,475, 477, 459 - ,459, 477, 460 - ,431, 429, 433 - ,433, 429, 427 - ,415, 302, 462 - ,462, 302, 310 - ,463, 446, 414 - ,414, 446, 445 - ,312, 444, 319 - ,319, 444, 447 - ,311, 479, 303 - ,303, 479, 478 - 
,479, 311, 343 - ,480, 384, 379 - ,343, 348, 481 - ,483, 482, 334 - ,334, 482, 370 - ,334, 335, 483 - ,488, 506, 489 - ,489, 506, 505 - ,490, 498, 484 - ,484, 498, 497 - ,491, 494, 492 - ,492, 494, 493 - ,494, 487, 493 - ,493, 487, 486 - ,495, 491, 496 - ,496, 491, 492 - ,498, 495, 497 - ,497, 495, 496 - ,499, 502, 500 - ,500, 502, 501 - ,502, 490, 501 - ,501, 490, 484 - ,503, 499, 504 - ,504, 499, 500 - ,506, 503, 505 - ,505, 503, 504 - ,486, 487, 522 - ,522, 487, 523 - ,507, 515, 485 - ,485, 515, 514 - ,508, 511, 509 - ,509, 511, 510 - ,511, 488, 510 - ,510, 488, 489 - ,512, 508, 513 - ,513, 508, 509 - ,515, 512, 514 - ,514, 512, 513 - ,516, 519, 517 - ,517, 519, 518 - ,519, 507, 518 - ,518, 507, 485 - ,520, 516, 521 - ,521, 516, 517 - ,523, 520, 522 - ,522, 520, 521 - ,488, 525, 506 - ,506, 525, 524 - ,490, 527, 498 - ,498, 527, 526 - ,491, 529, 494 - ,494, 529, 528 - ,494, 528, 487 - ,487, 528, 530 - ,495, 531, 491 - ,491, 531, 529 - ,498, 526, 495 - ,495, 526, 531 - ,499, 533, 502 - ,502, 533, 532 - ,502, 532, 490 - ,490, 532, 527 - ,503, 534, 499 - ,499, 534, 533 - ,506, 524, 503 - ,503, 524, 534 - ,487, 530, 523 - ,523, 530, 535 - ,507, 537, 515 - ,515, 537, 536 - ,508, 539, 511 - ,511, 539, 538 - ,511, 538, 488 - ,488, 538, 525 - ,512, 540, 508 - ,508, 540, 539 - ,515, 536, 512 - ,512, 536, 540 - ,516, 542, 519 - ,519, 542, 541 - ,519, 541, 507 - ,507, 541, 537 - ,520, 543, 516 - ,516, 543, 542 - ,523, 535, 520 - ,520, 535, 543 - ,544, 545, 564 - ,564, 545, 565 - ,545, 546, 565 - ,565, 546, 566 - ,547, 544, 567 - ,567, 544, 564 - ,548, 549, 568 - ,568, 549, 569 - ,549, 547, 569 - ,569, 547, 567 - ,550, 551, 570 - ,570, 551, 571 - ,551, 548, 571 - ,571, 548, 568 - ,552, 550, 572 - ,572, 550, 570 - ,553, 554, 573 - ,573, 554, 574 - ,554, 552, 574 - ,574, 552, 572 - ,555, 556, 575 - ,575, 556, 576 - ,556, 553, 576 - ,576, 553, 573 - ,557, 555, 577 - ,577, 555, 575 - ,558, 559, 578 - ,578, 559, 579 - ,559, 557, 579 - ,579, 557, 577 - ,560, 561, 580 - ,580, 561, 
581 - ,561, 558, 581 - ,581, 558, 578 - ,562, 560, 582 - ,582, 560, 580 - ,546, 563, 566 - ,566, 563, 583 - ,563, 562, 583 - ,583, 562, 582 - ,564, 565, 584 - ,584, 565, 585 - ,565, 566, 585 - ,585, 566, 586 - ,567, 564, 587 - ,587, 564, 584 - ,568, 569, 588 - ,588, 569, 589 - ,569, 567, 589 - ,589, 567, 587 - ,570, 571, 590 - ,590, 571, 591 - ,571, 568, 591 - ,591, 568, 588 - ,572, 570, 592 - ,592, 570, 590 - ,573, 574, 593 - ,593, 574, 594 - ,574, 572, 594 - ,594, 572, 592 - ,575, 576, 595 - ,595, 576, 596 - ,576, 573, 596 - ,596, 573, 593 - ,577, 575, 597 - ,597, 575, 595 - ,578, 579, 598 - ,598, 579, 599 - ,579, 577, 599 - ,599, 577, 597 - ,580, 581, 600 - ,600, 581, 601 - ,581, 578, 601 - ,601, 578, 598 - ,582, 580, 602 - ,602, 580, 600 - ,566, 583, 586 - ,586, 583, 603 - ,583, 582, 603 - ,603, 582, 602 - ,585, 605, 584 - ,584, 605, 604 - ,586, 606, 585 - ,585, 606, 605 - ,584, 604, 587 - ,587, 604, 607 - ,589, 609, 588 - ,588, 609, 608 - ,587, 607, 589 - ,589, 607, 609 - ,591, 611, 590 - ,590, 611, 610 - ,588, 608, 591 - ,591, 608, 611 - ,590, 610, 592 - ,592, 610, 612 - ,594, 614, 593 - ,593, 614, 613 - ,592, 612, 594 - ,594, 612, 614 - ,596, 616, 595 - ,595, 616, 615 - ,593, 613, 596 - ,596, 613, 616 - ,595, 615, 597 - ,597, 615, 617 - ,599, 619, 598 - ,598, 619, 618 - ,597, 617, 599 - ,599, 617, 619 - ,601, 621, 600 - ,600, 621, 620 - ,598, 618, 601 - ,601, 618, 621 - ,600, 620, 602 - ,602, 620, 622 - ,603, 623, 586 - ,586, 623, 606 - ,602, 622, 603 - ,603, 622, 623 - ,625, 624, 605 - ,605, 624, 604 - ,626, 625, 606 - ,606, 625, 605 - ,604, 624, 607 - ,607, 624, 627 - ,629, 628, 609 - ,609, 628, 608 - ,627, 629, 607 - ,607, 629, 609 - ,631, 630, 611 - ,611, 630, 610 - ,628, 631, 608 - ,608, 631, 611 - ,630, 632, 610 - ,610, 632, 612 - ,634, 633, 614 - ,614, 633, 613 - ,632, 634, 612 - ,612, 634, 614 - ,636, 635, 616 - ,616, 635, 615 - ,633, 636, 613 - ,613, 636, 616 - ,635, 637, 615 - ,615, 637, 617 - ,639, 638, 619 - ,619, 638, 618 - ,637, 639, 617 - 
,617, 639, 619 - ,641, 640, 621 - ,621, 640, 620 - ,638, 641, 618 - ,618, 641, 621 - ,640, 642, 620 - ,620, 642, 622 - ,643, 626, 623 - ,623, 626, 606 - ,642, 643, 622 - ,622, 643, 623 - ,624, 625, 644 - ,644, 625, 645 - ,625, 626, 645 - ,645, 626, 646 - ,627, 624, 647 - ,647, 624, 644 - ,628, 629, 648 - ,648, 629, 649 - ,629, 627, 649 - ,649, 627, 647 - ,630, 631, 650 - ,650, 631, 651 - ,631, 628, 651 - ,651, 628, 648 - ,632, 630, 652 - ,652, 630, 650 - ,633, 634, 653 - ,653, 634, 654 - ,634, 632, 654 - ,654, 632, 652 - ,635, 636, 655 - ,655, 636, 656 - ,636, 633, 656 - ,656, 633, 653 - ,637, 635, 657 - ,657, 635, 655 - ,638, 639, 658 - ,658, 639, 659 - ,639, 637, 659 - ,659, 637, 657 - ,640, 641, 660 - ,660, 641, 661 - ,641, 638, 661 - ,661, 638, 658 - ,642, 640, 662 - ,662, 640, 660 - ,626, 643, 646 - ,646, 643, 663 - ,643, 642, 663 - ,663, 642, 662 - ,664, 665, 666 - ,666, 665, 723 - ,665, 664, 777 - ,777, 664, 776 - ,667, 668, 670 - ,670, 668, 669 - ,778, 779, 781 - ,781, 779, 780 - ,671, 666, 775 - ,667, 670, 778 - ,778, 670, 779 - ,666, 671, 664 - ,776, 664, 782 - ,782, 664, 673 - ,782, 673, 783 - ,783, 673, 674 - ,675, 676, 677 - ,783, 674, 784 - ,784, 674, 678 - ,679, 668, 667 - ,784, 678, 785 - ,785, 678, 680 - ,785, 680, 786 - ,786, 680, 681 - ,786, 681, 787 - ,787, 681, 682 - ,682, 683, 787 - ,787, 683, 788 - ,667, 684, 679 - ,778, 789, 667 - ,667, 789, 686 - ,791, 685, 790 - ,790, 685, 687 - ,789, 792, 686 - ,686, 792, 689 - ,687, 688, 790 - ,790, 688, 793 - ,792, 794, 689 - ,689, 794, 691 - ,688, 690, 793 - ,793, 690, 795 - ,794, 795, 691 - ,691, 795, 690 - ,671, 672, 693 - ,693, 672, 692 - ,693, 692, 695 - ,695, 692, 694 - ,695, 694, 697 - ,697, 694, 696 - ,664, 671, 673 - ,673, 671, 693 - ,673, 693, 674 - ,674, 693, 695 - ,697, 696, 699 - ,699, 696, 698 - ,782, 796, 776 - ,776, 796, 797 - ,674, 695, 678 - ,678, 695, 697 - ,699, 698, 701 - ,701, 698, 700 - ,798, 796, 783 - ,783, 796, 782 - ,678, 697, 680 - ,680, 697, 699 - ,701, 700, 703 - ,703, 700, 
702 - ,799, 798, 784 - ,784, 798, 783 - ,703, 844, 705 - ,680, 699, 681 - ,681, 699, 701 - ,785, 800, 784 - ,784, 800, 799 - ,681, 701, 682 - ,682, 701, 703 - ,786, 801, 785 - ,785, 801, 800 - ,682, 703, 683 - ,683, 703, 705 - ,787, 802, 786 - ,786, 802, 801 - ,675, 677, 684 - ,684, 677, 679 - ,788, 803, 787 - ,787, 803, 802 - ,685, 841, 707 - ,803, 788, 804 - ,804, 788, 791 - ,683, 685, 788 - ,788, 685, 791 - ,708, 842, 706 - ,710, 711, 684 - ,684, 711, 675 - ,709, 708, 713 - ,713, 708, 712 - ,714, 715, 710 - ,710, 715, 711 - ,713, 712, 717 - ,717, 712, 716 - ,718, 719, 714 - ,714, 719, 715 - ,717, 716, 718 - ,718, 716, 719 - ,707, 709, 685 - ,685, 709, 687 - ,686, 710, 667 - ,667, 710, 684 - ,687, 709, 688 - ,688, 709, 713 - ,689, 714, 686 - ,686, 714, 710 - ,805, 804, 790 - ,790, 804, 791 - ,688, 713, 690 - ,690, 713, 717 - ,691, 718, 689 - ,689, 718, 714 - ,778, 781, 789 - ,789, 781, 806 - ,690, 717, 691 - ,691, 717, 718 - ,807, 805, 793 - ,793, 805, 790 - ,789, 806, 792 - ,792, 806, 808 - ,809, 807, 795 - ,795, 807, 793 - ,792, 808, 794 - ,794, 808, 810 - ,810, 809, 794 - ,794, 809, 795 - ,811, 777, 797 - ,797, 777, 776 - ,669, 720, 670 - ,720, 721, 722 - ,720, 722, 670 - ,779, 670, 812 - ,812, 670, 724 - ,812, 724, 813 - ,813, 724, 725 - ,813, 725, 814 - ,814, 725, 726 - ,814, 726, 815 - ,815, 726, 727 - ,815, 727, 816 - ,816, 727, 728 - ,816, 728, 817 - ,817, 728, 729 - ,729, 730, 817 - ,817, 730, 818 - ,731, 732, 723 - ,732, 731, 733 - ,733, 731, 736 - ,721, 734, 722 - ,722, 734, 735 - ,736, 737, 733 - ,665, 731, 723 - ,777, 819, 665 - ,665, 819, 739 - ,821, 738, 820 - ,820, 738, 740 - ,819, 822, 739 - ,739, 822, 742 - ,740, 741, 820 - ,820, 741, 823 - ,822, 824, 742 - ,742, 824, 744 - ,825, 823, 743 - ,743, 823, 741 - ,824, 825, 744 - ,744, 825, 743 - ,826, 780, 812 - ,812, 780, 779 - ,827, 826, 813 - ,813, 826, 812 - ,670, 722, 724 - ,724, 722, 745 - ,828, 827, 814 - ,814, 827, 813 - ,724, 745, 725 - ,725, 745, 746 - ,815, 829, 814 - ,814, 829, 828 - 
,725, 746, 726 - ,726, 746, 747 - ,722, 735, 745 - ,745, 735, 748 - ,816, 830, 815 - ,815, 830, 829 - ,745, 748, 746 - ,746, 748, 749 - ,726, 747, 727 - ,727, 747, 750 - ,817, 831, 816 - ,816, 831, 830 - ,818, 832, 817 - ,817, 832, 831 - ,746, 749, 747 - ,747, 749, 751 - ,727, 750, 728 - ,728, 750, 752 - ,747, 751, 750 - ,750, 751, 753 - ,728, 752, 729 - ,729, 752, 754 - ,729, 754, 730 - ,730, 754, 755 - ,750, 753, 752 - ,752, 753, 756 - ,752, 756, 754 - ,754, 756, 757 - ,754, 757, 755 - ,755, 757, 758 - ,818, 821, 832 - ,832, 821, 833 - ,730, 738, 818 - ,818, 738, 821 - ,730, 755, 738 - ,738, 755, 759 - ,755, 758, 759 - ,759, 758, 760 - ,821, 820, 833 - ,833, 820, 834 - ,835, 819, 811 - ,811, 819, 777 - ,820, 823, 834 - ,834, 823, 836 - ,837, 822, 835 - ,835, 822, 819 - ,823, 825, 836 - ,836, 825, 838 - ,839, 824, 837 - ,837, 824, 822 - ,825, 824, 838 - ,838, 824, 839 - ,738, 759, 740 - ,740, 759, 761 - ,739, 762, 665 - ,665, 762, 731 - ,740, 761, 741 - ,741, 761, 763 - ,742, 764, 739 - ,739, 764, 762 - ,741, 763, 743 - ,743, 763, 765 - ,744, 766, 742 - ,742, 766, 764 - ,743, 765, 744 - ,744, 765, 766 - ,759, 760, 761 - ,761, 760, 767 - ,762, 768, 731 - ,731, 768, 736 - ,761, 767, 763 - ,763, 767, 769 - ,764, 770, 762 - ,762, 770, 768 - ,763, 769, 765 - ,765, 769, 771 - ,766, 772, 764 - ,764, 772, 770 - ,765, 771, 766 - ,766, 771, 772 - ,775, 774, 671 - ,671, 774, 672 - ,672, 774, 773 - ,683, 705, 840 - ,840, 841, 683 - ,683, 841, 685 - ,843, 842, 709 - ,709, 842, 708 - ,707, 843, 709 - ,702, 845, 703 - ,703, 845, 844 - ,845, 702, 704 - ,808, 1014, 810 - ,810, 1014, 1015 - ,806, 1013, 808 - ,808, 1013, 1014 - ,781, 1012, 806 - ,806, 1012, 1013 - ,781, 780, 1012 - ,1012, 780, 1011 - ,780, 826, 1011 - ,1011, 826, 1010 - ,826, 827, 1010 - ,1010, 827, 1009 - ,827, 828, 1009 - ,1009, 828, 1008 - ,968, 976, 969 - ,969, 976, 977 - ,969, 977, 970 - ,970, 977, 978 - ,970, 978, 971 - ,971, 978, 979 - ,972, 971, 980 - ,980, 971, 979 - ,973, 972, 981 - ,981, 972, 980 - ,974, 
973, 982 - ,982, 973, 981 - ,975, 974, 983 - ,983, 974, 982 - ,976, 1000, 977 - ,977, 1000, 1001 - ,977, 1001, 978 - ,978, 1001, 1002 - ,978, 1002, 979 - ,979, 1002, 1003 - ,980, 979, 1004 - ,1004, 979, 1003 - ,981, 980, 1005 - ,1005, 980, 1004 - ,982, 981, 1006 - ,1006, 981, 1005 - ,983, 982, 1007 - ,1007, 982, 1006 - ,985, 984, 969 - ,969, 984, 968 - ,986, 985, 970 - ,970, 985, 969 - ,987, 986, 971 - ,971, 986, 970 - ,988, 987, 972 - ,972, 987, 971 - ,989, 988, 973 - ,973, 988, 972 - ,990, 989, 974 - ,974, 989, 973 - ,991, 990, 975 - ,975, 990, 974 - ,993, 992, 985 - ,985, 992, 984 - ,994, 993, 986 - ,986, 993, 985 - ,995, 994, 987 - ,987, 994, 986 - ,996, 995, 988 - ,988, 995, 987 - ,996, 988, 997 - ,997, 988, 989 - ,997, 989, 998 - ,998, 989, 990 - ,998, 990, 999 - ,999, 990, 991 - ,1000, 839, 1001 - ,1001, 839, 837 - ,1001, 837, 1002 - ,1002, 837, 835 - ,1002, 835, 1003 - ,1003, 835, 811 - ,1004, 1003, 797 - ,797, 1003, 811 - ,1005, 1004, 796 - ,796, 1004, 797 - ,1006, 1005, 798 - ,798, 1005, 796 - ,1007, 1006, 799 - ,799, 1006, 798 - ,1009, 1008, 993 - ,993, 1008, 992 - ,1010, 1009, 994 - ,994, 1009, 993 - ,1011, 1010, 995 - ,995, 1010, 994 - ,1012, 1011, 996 - ,996, 1011, 995 - ,1012, 996, 1013 - ,1013, 996, 997 - ,1013, 997, 1014 - ,1014, 997, 998 - ,1014, 998, 1015 - ,1015, 998, 999 - ,831, 992, 830 - ,830, 992, 1008 - ,832, 984, 831 - ,831, 984, 992 - ,832, 833, 984 - ,984, 833, 968 - ,833, 834, 968 - ,968, 834, 976 - ,834, 836, 976 - ,976, 836, 1000 - ,802, 983, 801 - ,801, 983, 1007 - ,803, 975, 802 - ,802, 975, 983 - ,804, 991, 803 - ,803, 991, 975 - ,804, 805, 991 - ,991, 805, 999 - ,805, 807, 999 - ,999, 807, 1015 - ,801, 1007, 800 - ,800, 1007, 799 - ,807, 809, 1015 - ,1015, 809, 810 - ,830, 1008, 829 - ,829, 1008, 828 - ,839, 1000, 838 - ,838, 1000, 836 -]) - -NB_GRAY_ZEDM_TRI = 40 -GRAY_COLOR = Color(0.22, 0.22, 0.22) -gray_triangles_m = np.array([ - 849, 846, 848 - ,846, 847, 848 - ,847, 846, 851 - ,846, 850, 851 - ,852, 853, 846 - ,846, 853, 854 
- ,850, 846, 856 - ,846, 855, 856 - ,859, 846, 858 - ,846, 857, 858 - ,860, 861, 846 - ,846, 861, 857 - ,860, 846, 862 - ,862, 846, 849 - ,863, 864, 846 - ,846, 864, 855 - ,854, 865, 846 - ,846, 865, 863 - ,859, 866, 846 - ,846, 866, 852 - ,950, 947, 949 - ,947, 948, 949 - ,948, 947, 952 - ,947, 951, 952 - ,953, 954, 947 - ,947, 954, 955 - ,951, 947, 957 - ,947, 956, 957 - ,960, 947, 959 - ,947, 958, 959 - ,961, 962, 947 - ,947, 962, 958 - ,961, 947, 963 - ,963, 947, 950 - ,964, 965, 947 - ,947, 965, 956 - ,955, 966, 947 - ,947, 966, 964 - ,960, 967, 947 - ,947, 967, 953 -]) - -NB_YELLOW_ZEDM_TRI = 80 -YELLOW_COLOR = Color(1., 1., 0.) -yellow_triangles_m = np.array([ - 867, 868, 870 - ,870, 868, 869 - ,868, 871, 869 - ,869, 871, 872 - ,873, 867, 874 - ,874, 867, 870 - ,875, 876, 878 - ,878, 876, 877 - ,876, 873, 877 - ,877, 873, 874 - ,879, 880, 882 - ,882, 880, 881 - ,880, 875, 881 - ,881, 875, 878 - ,883, 879, 884 - ,884, 879, 882 - ,885, 886, 888 - ,888, 886, 887 - ,886, 883, 887 - ,887, 883, 884 - ,889, 890, 892 - ,892, 890, 891 - ,890, 885, 891 - ,891, 885, 888 - ,893, 889, 894 - ,894, 889, 892 - ,895, 896, 898 - ,898, 896, 897 - ,896, 893, 897 - ,897, 893, 894 - ,899, 900, 902 - ,902, 900, 901 - ,900, 895, 901 - ,901, 895, 898 - ,903, 899, 904 - ,904, 899, 902 - ,871, 905, 872 - ,872, 905, 906 - ,905, 903, 906 - ,906, 903, 904 - ,907, 908, 910 - ,910, 908, 909 - ,908, 911, 909 - ,909, 911, 912 - ,913, 907, 914 - ,914, 907, 910 - ,915, 916, 918 - ,918, 916, 917 - ,916, 913, 917 - ,917, 913, 914 - ,919, 920, 922 - ,922, 920, 921 - ,920, 915, 921 - ,921, 915, 918 - ,923, 919, 924 - ,924, 919, 922 - ,925, 926, 928 - ,928, 926, 927 - ,926, 923, 927 - ,927, 923, 924 - ,929, 930, 932 - ,932, 930, 931 - ,930, 925, 931 - ,931, 925, 928 - ,933, 929, 934 - ,934, 929, 932 - ,935, 936, 938 - ,938, 936, 937 - ,936, 933, 937 - ,937, 933, 934 - ,939, 940, 942 - ,942, 940, 941 - ,940, 935, 941 - ,941, 935, 938 - ,943, 939, 944 - ,944, 939, 942 - ,911, 945, 912 - ,912, 945, 
946 - ,945, 943, 946 - ,946, 943, 944 -]) diff --git a/global localization/README.md b/global localization/README.md new file mode 100644 index 00000000..3f45884b --- /dev/null +++ b/global localization/README.md @@ -0,0 +1,20 @@ +# Global Localization + +These samples show how to use the ZED SDK Global Localization module for **global scale localization on a real-world map**. + +

+ +

+ +## Overview + +The samples provided using the Global Localization API are organized as follows: + +- [Live](./live/) The Live sample demonstrates how to use the Global Localization API using both the ZED camera and an external GNSS sensor. It displays the corrected positional tracking in the ZED reference frame on an OpenGL window and the geo-position on a real-world map in a browser. + +- [Recording](./recording/): The Recording sample demonstrates how to **record data** from both a ZED camera and an external GNSS sensor. The recorded data is saved in an SVO file and a JSON file, respectively. This sample provides the necessary data to be used by the Playback sample. + +- [Playback](./playback/): The Playback sample shows how to use the Global Localization API for global scale localization on a real-world map. It takes the data generated by the Recording sample and uses it to display geo-positions on a real-world map. + +- [Map Server](./map%20server/): The Map Server Sample is utilized by other samples to display location data in a web browser. + diff --git a/global localization/live/README.md b/global localization/live/README.md new file mode 100644 index 00000000..8883ad50 --- /dev/null +++ b/global localization/live/README.md @@ -0,0 +1,41 @@ +# Live Global Localization Sample + +## Overview + +This sample demonstrates how to use the ZED SDK Global Localization module to achieve **global scale localization** on a real-world map using the ZED camera. The ZED SDK Live Global Localization sample fuses visual odometry from the ZED SDK with external GNSS data in real-time, making it a valuable resource for applications such as autonomous robotics and drone navigation. 
+ +## Features + +- Displays the camera's path in an OpenGL window in 3D +- Displays path data, including translation and rotation +- Displays the fused path on a map in a web browser +- Exports KML files for the fused trajectory and raw GNSS data + +## Dependencies + +Before using this sample, ensure that you have the following dependencies installed on your system: + +- ZED SDK: download and install from the official [Stereolabs website](https://www.stereolabs.com/developers/release/). +- `gpsd`: required to use an external GNSS sensor. + > **Note**: Since [`gpsd`](https://gpsd.gitlab.io/gpsd/index.html) does not support Windows, this sample is not supported on Windows. + +### C++ + +- `libgps-dev`: used to read data from `gpsd`. + +### Python + +- `gpsdclient`: used to read data from `gpsd`. + +## Installation and Usage + +To use the ZED SDK Global Localization sample, follow these steps: + +1. Download and install the ZED SDK on your system from the official [Stereolabs website](https://www.stereolabs.com/developers/release/). +2. Install dependencies using your operating system's package manager. +3. Connect your ZED camera and GNSS sensor to your computer. +4. Open a terminal and navigate to the live sample directory. +5. Compile the sample for C++ in a *build* directory. +6. Run the `ZED_Live_Global_Localization` executable for C++ and `live.py` for Python. +7. The sample will display the camera's path and path data in a 3D window. +8. Go to the [map server sample](../map%20server) and run a simple server. 
diff --git a/geotracking/live geotracking/cpp/CMakeLists.txt b/global localization/live/cpp/CMakeLists.txt similarity index 81% rename from geotracking/live geotracking/cpp/CMakeLists.txt rename to global localization/live/cpp/CMakeLists.txt index 7c39d98d..1bec988b 100644 --- a/geotracking/live geotracking/cpp/CMakeLists.txt +++ b/global localization/live/cpp/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 3.5) -PROJECT(ZED_GeoTacking) +PROJECT(ZED_Live_Global_Localization) set(CMAKE_CXX_STANDARD 17) set(CMAKE_CXX_STANDARD_REQUIRED ON) @@ -23,16 +23,6 @@ else() message("GPSD was not FOUND. the sample will not be able to read live GNSS data") endif() -# ZEDHub - if found -find_package(SL_HUB) - -if(SL_HUB_FOUND) - message("Found ZEDHub library") - add_definitions(-DCOMPILE_WITH_ZEDHUB=TRUE) - link_directories(${SL_HUB_LIB_DIR}) - include_directories(${SL_HUB_INCLUDE_DIR}) -endif() - IF(NOT MSVC) SET(SPECIAL_OS_LIBS "pthread") ENDIF() @@ -75,11 +65,7 @@ FILE(GLOB_RECURSE HDR_FILES include/*.h*) add_executable(${PROJECT_NAME} ${HDR_FILES} ${SRC_FILES}) -if(SL_HUB_FOUND) - set(LIBS sl_hub util ${ZED_LIBRARIES} ${OpenCV_LIBRARIES} ${OPENGL_LIBRARIES} ${GLUT_LIBRARIES} ${GLEW_LIBRARIES}) -else() - set(LIBS ${ZED_LIBRARIES} ${OpenCV_LIBRARIES} ${OPENGL_LIBRARIES} ${GLUT_LIBRARIES} ${GLEW_LIBRARIES}) -endif() +set(LIBS ${ZED_LIBRARIES} ${OpenCV_LIBRARIES} ${OPENGL_LIBRARIES} ${GLUT_LIBRARIES} ${GLEW_LIBRARIES}) if(GPS_FOUND) list(APPEND LIBS ${GPS_LIBRARIES}) diff --git a/geotracking/live geotracking/cpp/cmake/FindGPS.cmake b/global localization/live/cpp/cmake/FindGPS.cmake similarity index 100% rename from geotracking/live geotracking/cpp/cmake/FindGPS.cmake rename to global localization/live/cpp/cmake/FindGPS.cmake diff --git a/geotracking/live geotracking/cpp/include/display/GLViewer.hpp b/global localization/live/cpp/include/display/GLViewer.hpp similarity index 95% rename from geotracking/live geotracking/cpp/include/display/GLViewer.hpp rename to 
global localization/live/cpp/include/display/GLViewer.hpp index 6067666f..ba5d26e7 100644 --- a/geotracking/live geotracking/cpp/include/display/GLViewer.hpp +++ b/global localization/live/cpp/include/display/GLViewer.hpp @@ -12,6 +12,7 @@ #include "ZEDModel.hpp" /* OpenGL Utility Toolkit header */ #include +#include #ifndef M_PI #define M_PI 3.1416f @@ -78,14 +79,14 @@ class Shader { public: Shader() {} - Shader(GLchar* vs, GLchar* fs); + Shader(const GLchar* vs, const GLchar* fs); ~Shader(); GLuint getProgramId(); static const GLint ATTRIB_VERTICES_POS = 0; static const GLint ATTRIB_COLOR_POS = 1; private: - bool compile(GLuint &shaderId, GLenum type, GLchar* src); + bool compile(GLuint &shaderId, GLenum type, const GLchar* src); GLuint verterxId_; GLuint fragmentId_; GLuint programId_; @@ -157,7 +158,7 @@ class GLViewer { void exit(); bool isAvailable(); void init(int argc, char **argv); - void updateData(sl::Transform zed_rt, sl::POSITIONAL_TRACKING_STATE state); + void updateData(sl::Transform zed_rt, sl::FusedPositionalTrackingStatus state); private: // Rendering loop method called each frame by glutDisplayFunc @@ -213,7 +214,7 @@ class GLViewer { std::string txtR; std::string txtT; - sl::POSITIONAL_TRACKING_STATE trackState; + sl::FusedPositionalTrackingStatus trackState; const std::string str_tracking = "POSITIONAL TRACKING : "; sl::float3 bckgrnd_clr; diff --git a/geotracking/live geotracking/cpp/include/display/GenericDisplay.h b/global localization/live/cpp/include/display/GenericDisplay.h similarity index 78% rename from geotracking/live geotracking/cpp/include/display/GenericDisplay.h rename to global localization/live/cpp/include/display/GenericDisplay.h index 1ee28962..89de6ad5 100644 --- a/geotracking/live geotracking/cpp/include/display/GenericDisplay.h +++ b/global localization/live/cpp/include/display/GenericDisplay.h @@ -38,9 +38,15 @@ class GenericDisplay * @param zed_rt last pose data * @param state current tracking state */ - void 
updatePoseData(sl::Transform zed_rt, sl::POSITIONAL_TRACKING_STATE state); + void updatePoseData(sl::Transform zed_rt, sl::FusedPositionalTrackingStatus state); /** - * @brief Display current fused pose either in KML file or in ZEDHub depending compilation options + * @brief Display current pose on the Live Server + * + * @param geo_pose geopose to display + */ + void updateRawGeoPoseData(sl::GNSSData geo_data); + /** + * @brief Display current fused pose on the Live Server & in a KML file * * @param geo_pose geopose to display * @param current_timestamp timestamp of the geopose to display diff --git a/geotracking/live geotracking/cpp/include/display/ZEDModel.hpp b/global localization/live/cpp/include/display/ZEDModel.hpp similarity index 100% rename from geotracking/live geotracking/cpp/include/display/ZEDModel.hpp rename to global localization/live/cpp/include/display/ZEDModel.hpp diff --git a/geotracking/live geotracking/cpp/include/exporter/KMLExporter.h b/global localization/live/cpp/include/exporter/KMLExporter.h similarity index 100% rename from geotracking/live geotracking/cpp/include/exporter/KMLExporter.h rename to global localization/live/cpp/include/exporter/KMLExporter.h diff --git a/geotracking/live geotracking/cpp/include/gnss_reader/GPSDReader.hpp b/global localization/live/cpp/include/gnss_reader/GPSDReader.hpp similarity index 100% rename from geotracking/live geotracking/cpp/include/gnss_reader/GPSDReader.hpp rename to global localization/live/cpp/include/gnss_reader/GPSDReader.hpp diff --git a/geotracking/live geotracking/cpp/include/gnss_reader/IGNSSReader.h b/global localization/live/cpp/include/gnss_reader/IGNSSReader.h similarity index 100% rename from geotracking/live geotracking/cpp/include/gnss_reader/IGNSSReader.h rename to global localization/live/cpp/include/gnss_reader/IGNSSReader.h diff --git a/geotracking/live geotracking/cpp/include/json.hpp b/global localization/live/cpp/include/json.hpp similarity index 100% rename from 
geotracking/live geotracking/cpp/include/json.hpp rename to global localization/live/cpp/include/json.hpp diff --git a/geotracking/live geotracking/cpp/src/display/GLViewer.cpp b/global localization/live/cpp/src/display/GLViewer.cpp similarity index 88% rename from geotracking/live geotracking/cpp/src/display/GLViewer.cpp rename to global localization/live/cpp/src/display/GLViewer.cpp index 02a8b07a..d2bfa105 100644 --- a/geotracking/live geotracking/cpp/src/display/GLViewer.cpp +++ b/global localization/live/cpp/src/display/GLViewer.cpp @@ -19,7 +19,7 @@ void print(std::string msg_prefix, sl::ERROR_CODE err_code, std::string msg_suff -GLchar* VERTEX_SHADER = +const GLchar* VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec3 in_Color;\n" @@ -30,7 +30,7 @@ GLchar* VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" "}"; -GLchar* FRAGMENT_SHADER = +const GLchar* FRAGMENT_SHADER = "#version 330 core\n" "in vec3 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" @@ -256,7 +256,7 @@ void GLViewer::draw() { glUseProgram(0); } -void GLViewer::updateData(sl::Transform zed_rt, sl::POSITIONAL_TRACKING_STATE state) { +void GLViewer::updateData(sl::Transform zed_rt, sl::FusedPositionalTrackingStatus state) { mtx.lock(); vecPath.push_back(zed_rt.getTranslation()); zedModel.setRT(zed_rt); @@ -293,27 +293,75 @@ void GLViewer::printText() { int start_w = 20; int start_h = h_wnd - 40; - (trackState == sl::POSITIONAL_TRACKING_STATE::OK) ? glColor3f(0.2f, 0.65f, 0.2f) : glColor3f(0.85f, 0.2f, 0.2f); + float dark_clr = 0.12f; + std::string odom_status = "POSITIONAL TRACKING STATUS: "; + + glColor3f(dark_clr, dark_clr, dark_clr); glRasterPos2i(start_w, start_h); - std::string track_str = (str_tracking + sl::toString(trackState).c_str()); + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, odom_status.c_str()); + + (trackState.tracking_fusion_status != sl::POSITIONAL_TRACKING_FUSION_STATUS::UNAVAILABLE) ? 
glColor3f(0.2f, 0.65f, 0.2f) : glColor3f(0.85f, 0.2f, 0.2f); + std::string track_str = (sl::toString(trackState.tracking_fusion_status).c_str()); + glRasterPos2i(start_w + 300, start_h); safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, track_str.c_str()); - float dark_clr = 0.12f; + + glColor3f(dark_clr, dark_clr, dark_clr); + glRasterPos2i(start_w, start_h - 40); + std::string imu_status = "GNSS MODE: "; + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, imu_status.c_str()); + + if (trackState.gnss_mode == sl::GNSS_MODE::FIX_3D) + glColor3f(0.2f, 0.65f, 0.2f); + else + glColor3f(0.85f, 0.2f, 0.2f); + glRasterPos2i(start_w + 300, start_h - 40); + track_str = (sl::toString(trackState.gnss_mode).c_str()); + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, track_str.c_str()); + + glColor3f(dark_clr, dark_clr, dark_clr); + glRasterPos2i(start_w, start_h - 60); + std::string gnss_status = "GNSS STATUS: "; + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, gnss_status.c_str()); + + if (trackState.gnss_status == sl::GNSS_STATUS::RTK_FIX || trackState.gnss_status == sl::GNSS_STATUS::RTK_FLOAT) + glColor3f(0.2f, 0.65f, 0.2f); + else + glColor3f(0.85f, 0.2f, 0.2f); + glRasterPos2i(start_w + 300, start_h - 60); + track_str = (sl::toString(trackState.gnss_status).c_str()); + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, track_str.c_str()); + + glColor3f(dark_clr, dark_clr, dark_clr); + glRasterPos2i(start_w, start_h - 80); + std::string gnss_fusion_status = "GNSS FUSION STATUS: "; + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, gnss_fusion_status.c_str()); + + if (trackState.gnss_fusion_status == sl::GNSS_FUSION_STATUS::OK) + glColor3f(0.2f, 0.65f, 0.2f); + else + glColor3f(0.85f, 0.2f, 0.2f); + glRasterPos2i(start_w + 300, start_h - 80); + track_str = (sl::toString(trackState.gnss_fusion_status).c_str()); + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, track_str.c_str()); + + + glColor3f(dark_clr, dark_clr, dark_clr); - glRasterPos2i(start_w, start_h - 25); + 
glRasterPos2i(start_w, start_h - 105); safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, "Translation (m) :"); glColor3f(0.4980f, 0.5490f, 0.5529f); - glRasterPos2i(155, start_h - 25); + glRasterPos2i(155, start_h - 105); safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, txtT.c_str()); glColor3f(dark_clr, dark_clr, dark_clr); - glRasterPos2i(start_w, start_h - 50); + glRasterPos2i(start_w, start_h - 130); safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, "Rotation (rad) :"); glColor3f(0.4980f, 0.5490f, 0.5529f); - glRasterPos2i(155, start_h - 50); + glRasterPos2i(155, start_h - 130); safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, txtR.c_str()); glMatrixMode(GL_PROJECTION); @@ -520,7 +568,7 @@ Transform Simple3DObject::getModelMatrix() const { return tmp; } -Shader::Shader(GLchar* vs, GLchar* fs) { +Shader::Shader(const GLchar* vs, const GLchar* fs) { if (!compile(verterxId_, GL_VERTEX_SHADER, vs)) { std::cout << "ERROR: while compiling vertex shader" << std::endl; } @@ -568,7 +616,7 @@ GLuint Shader::getProgramId() { return programId_; } -bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { +bool Shader::compile(GLuint &shaderId, GLenum type, const GLchar* src) { shaderId = glCreateShader(type); if (shaderId == 0) { std::cout << "ERROR: shader type (" << type << ") does not exist" << std::endl; diff --git a/global localization/live/cpp/src/display/GenericDisplay.cpp b/global localization/live/cpp/src/display/GenericDisplay.cpp new file mode 100644 index 00000000..7375d124 --- /dev/null +++ b/global localization/live/cpp/src/display/GenericDisplay.cpp @@ -0,0 +1,62 @@ +#include "display/GenericDisplay.h" +#include "exporter/KMLExporter.h" + + +GenericDisplay::GenericDisplay() +{ +} + +GenericDisplay::~GenericDisplay() +{ + closeAllKMLWriter(); +} + +void GenericDisplay::init(int argc, char **argv) +{ + opengl_viewer.init(argc, argv); +} + +void GenericDisplay::updatePoseData(sl::Transform zed_rt, sl::FusedPositionalTrackingStatus state) +{ + 
opengl_viewer.updateData(zed_rt, state); +} + +bool GenericDisplay::isAvailable(){ + return opengl_viewer.isAvailable(); +} + +void GenericDisplay::updateRawGeoPoseData(sl::GNSSData geo_data) +{ + double latitude, longitude, altitude; + geo_data.getCoordinates(latitude, longitude, altitude, false); + + // Make the pose available for the Live Server + ofstream data; + data.open ("../../../map server/raw_data.txt"); + data << std::fixed << std::setprecision(17); + data << latitude; + data << ","; + data << longitude; + data << ","; + data << geo_data.ts.getMilliseconds(); + data << "\n"; + data.close(); +} + +void GenericDisplay::updateGeoPoseData(sl::GeoPose geo_pose, sl::Timestamp current_timestamp) +{ + // Make the pose available for the Live Server + ofstream data; + data.open ("../../../map server/data.txt"); + data << std::fixed << std::setprecision(17); + data << geo_pose.latlng_coordinates.getLatitude(false); + data << ","; + data << geo_pose.latlng_coordinates.getLongitude(false); + data << ","; + data << current_timestamp.getMilliseconds(); + data << "\n"; + data.close(); + + // Save the pose in a .kml file + saveKMLData("fused_position.kml", geo_pose); +} diff --git a/geotracking/live geotracking/cpp/src/exporter/KMLExporter.cpp b/global localization/live/cpp/src/exporter/KMLExporter.cpp similarity index 100% rename from geotracking/live geotracking/cpp/src/exporter/KMLExporter.cpp rename to global localization/live/cpp/src/exporter/KMLExporter.cpp diff --git a/geotracking/live geotracking/cpp/src/gnss_reader/GPSDReader.cpp b/global localization/live/cpp/src/gnss_reader/GPSDReader.cpp similarity index 51% rename from geotracking/live geotracking/cpp/src/gnss_reader/GPSDReader.cpp rename to global localization/live/cpp/src/gnss_reader/GPSDReader.cpp index e2cba521..61c3f490 100644 --- a/geotracking/live geotracking/cpp/src/gnss_reader/GPSDReader.cpp +++ b/global localization/live/cpp/src/gnss_reader/GPSDReader.cpp @@ -1,26 +1,24 @@ #include 
"gnss_reader/GPSDReader.hpp" -GPSDReader::GPSDReader(){ - +GPSDReader::GPSDReader() { + } -GPSDReader::~GPSDReader() -{ +GPSDReader::~GPSDReader() { continue_to_grab = false; grab_gnss_data.join(); #ifdef GPSD_FOUND - + #else std::cerr << "[library not found] GPSD library was not found ... please install it before using this sample" << std::endl; #endif } -void GPSDReader::initialize() -{ + +void GPSDReader::initialize() { grab_gnss_data = std::thread(&GPSDReader::grabGNSSData, this); #ifdef GPSD_FOUND gnss_getter.reset(new gpsmm("localhost", DEFAULT_GPSD_PORT)); - if (gnss_getter->stream(WATCH_ENABLE | WATCH_JSON) == nullptr) - { + if (gnss_getter->stream(WATCH_ENABLE | WATCH_JSON) == nullptr) { std::cerr << "No GPSD running .. exit" << std::endl; exit(EXIT_FAILURE); } @@ -29,12 +27,10 @@ void GPSDReader::initialize() bool received_fix = false; struct gps_data_t *gpsd_data; - while (!received_fix) - { + while (!received_fix) { if (!gnss_getter->waiting(0)) continue; - if ((gpsd_data = gnss_getter->read()) == NULL) - { + if ((gpsd_data = gnss_getter->read()) == NULL) { std::cerr << "[GNSS] read error ... exit program" << std::endl; exit(EXIT_FAILURE); } @@ -50,16 +46,21 @@ void GPSDReader::initialize() #endif } -sl::GNSSData GPSDReader::getNextGNSSValue() -{ +sl::GNSSData GPSDReader::getNextGNSSValue() { #ifdef GPSD_FOUND // 0. Check if GNSS is initialized: // 1. 
Get GNSS datas: struct gps_data_t *gpsd_data; while ((gpsd_data = gnss_getter->read()) == NULL) ; - if (gpsd_data->fix.mode >= MODE_2D) - { + if (gpsd_data->fix.mode >= MODE_2D) { + int nb_low_snr = 0; + for (int i = 0; i < gpsd_data->satellites_visible; i++) { + satellite_t &satellite = gpsd_data->skyview[i]; + if (satellite.used && satellite.ss < 16) nb_low_snr++; + } + if (nb_low_snr > 0) std::cout << "[Warning] Low SNR (<16) on " << nb_low_snr << " satellite(s) (using " << gpsd_data->satellites_used << " out of " << gpsd_data->satellites_visible << " visible)" << std::endl; + sl::GNSSData current_gnss_data; // Fill out coordinates: current_gnss_data.setCoordinates(gpsd_data->fix.latitude, gpsd_data->fix.longitude, gpsd_data->fix.altMSL, false); @@ -78,11 +79,71 @@ sl::GNSSData GPSDReader::getNextGNSSValue() auto current_gnss_timestamp = current_ts_gps + current_tns_gps; current_gnss_data.ts.setMicroseconds(current_gnss_timestamp); + int gpsd_mode = gpsd_data->fix.mode; + sl::GNSS_MODE sl_mode = sl::GNSS_MODE::UNKNOWN; + + switch (gpsd_mode) { + case 0: // MODE_NOT_SEEN + sl_mode = sl::GNSS_MODE::UNKNOWN; + break; + case 1: // MODE_NO_FIX + sl_mode = sl::GNSS_MODE::NO_FIX; + break; + case 2: // MODE_2D + sl_mode = sl::GNSS_MODE::FIX_2D; + break; + case 3: // MODE_3D + sl_mode = sl::GNSS_MODE::FIX_3D; + break; + default: + sl_mode = sl::GNSS_MODE::UNKNOWN; + break; + } + + int gpsd_status = gpsd_data->fix.status; + sl::GNSS_STATUS sl_status = sl::GNSS_STATUS::UNKNOWN; + + switch (gpsd_status) { + case 0: // STATUS_UNK + sl_status = sl::GNSS_STATUS::UNKNOWN; + break; + case 1: // STATUS_GPS + sl_status = sl::GNSS_STATUS::SINGLE; + break; + case 2: // STATUS_DGPS + sl_status = sl::GNSS_STATUS::DGNSS; + break; + case 3: // STATUS_RTK_FIX + sl_status = sl::GNSS_STATUS::RTK_FIX; + break; + case 4: // STATUS_RTK_FLT + sl_status = sl::GNSS_STATUS::RTK_FLOAT; + break; + case 5: // STATUS_DR + sl_status = sl::GNSS_STATUS::SINGLE; + break; + case 6: // STATUS_GNSSDR + 
sl_status = sl::GNSS_STATUS::DGNSS; + break; + case 7: // STATUS_TIME + sl_status = sl::GNSS_STATUS::UNKNOWN; + break; + case 8: // STATUS_SIM + sl_status = sl::GNSS_STATUS::UNKNOWN; + break; + case 9: // STATUS_PPS_FIX + sl_status = sl::GNSS_STATUS::SINGLE; + break; + default: + sl_status = sl::GNSS_STATUS::UNKNOWN; + break; + } + + current_gnss_data.gnss_status = sl_status; + current_gnss_data.gnss_mode = sl_mode; return current_gnss_data; - } - else - { + } else { std::cout << "Fix lost: reinit GNSS" << std::endl; initialize(); return getNextGNSSValue(); @@ -94,31 +155,30 @@ sl::GNSSData GPSDReader::getNextGNSSValue() return sl::GNSSData(); } -sl::ERROR_CODE GPSDReader::grab(sl::GNSSData & current_data){ - if(new_data){ - new_data=false; +sl::ERROR_CODE GPSDReader::grab(sl::GNSSData & current_data) { + if (new_data) { + new_data = false; current_data = current_gnss_data; return sl::ERROR_CODE::SUCCESS; } return sl::ERROR_CODE::FAILURE; } -void GPSDReader::grabGNSSData(){ - while(1){ +void GPSDReader::grabGNSSData() { + while (1) { is_initialized_mtx.lock(); - if(is_initialized){ + if (is_initialized) { is_initialized_mtx.unlock(); break; } is_initialized_mtx.unlock(); std::this_thread::sleep_for(std::chrono::milliseconds(1)); } - while (continue_to_grab) - { - #ifdef GPSD_FOUND + while (continue_to_grab) { +#ifdef GPSD_FOUND current_gnss_data = getNextGNSSValue(); new_data = true; - #endif +#endif } - + } \ No newline at end of file diff --git a/geotracking/live geotracking/cpp/src/main.cpp b/global localization/live/cpp/src/main.cpp similarity index 82% rename from geotracking/live geotracking/cpp/src/main.cpp rename to global localization/live/cpp/src/main.cpp index db236f21..3a3c1a58 100644 --- a/geotracking/live geotracking/cpp/src/main.cpp +++ b/global localization/live/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. 
// // All rights reserved. // @@ -19,8 +19,8 @@ /////////////////////////////////////////////////////////////////////////// /*************************************************************************** - ** This sample shows how to use geotracking for global scale ** - ** localization on real-world map with ZED camera ** + ** This sample shows how to use global localization on real-world map ** + ** with ZED camera ** **************************************************************************/ #include @@ -72,12 +72,17 @@ int main(int argc, char **argv) // Enable positional tracking for Fusion object sl::PositionalTrackingFusionParameters positional_tracking_fusion_parameters; positional_tracking_fusion_parameters.enable_GNSS_fusion = true; - fusion.enablePositionalTracking(positional_tracking_fusion_parameters); + sl::FUSION_ERROR_CODE tracking_error_code = fusion.enablePositionalTracking(positional_tracking_fusion_parameters); + if(tracking_error_code != sl::FUSION_ERROR_CODE::SUCCESS){ + std::cout << "[Fusion][ERROR] Could not start tracking, error: " << tracking_error_code << std::endl; + return EXIT_FAILURE; + } + // Setup viewer: GenericDisplay viewer; viewer.init(argc, argv); - std::cout << "Start grabbing data ... the geo-tracking will be displayed in ZEDHub map section" << std::endl; + std::cout << "Start grabbing data... 
Global localization data will be displayed on the Live Server" << std::endl; while (viewer.isAvailable()) { // Grab camera: @@ -92,6 +97,9 @@ int main(int argc, char **argv) sl::GNSSData input_gnss; if (gnss_reader.grab(input_gnss) == sl::ERROR_CODE::SUCCESS) { + // Display it on the Live Server: + viewer.updateRawGeoPoseData(input_gnss); + // Publish GNSS data to Fusion auto ingest_error = fusion.ingestGNSSData(input_gnss); if(ingest_error != sl::FUSION_ERROR_CODE::SUCCESS){ @@ -104,23 +112,25 @@ int main(int argc, char **argv) sl::Pose fused_position; // Get position into the ZED CAMERA coordinate system: sl::POSITIONAL_TRACKING_STATE current_state = fusion.getPosition(fused_position); + + sl::FusedPositionalTrackingStatus fused_status = fusion.getFusedPositionalTrackingStatus(); // Display it on OpenGL: - viewer.updatePoseData(fused_position.pose_data, current_state); + viewer.updatePoseData(fused_position.pose_data, fused_status); // Get position into the GNSS coordinate system - this needs a initialization between CAMERA // and GNSS. When the initialization is finish the getGeoPose will return sl::POSITIONAL_TRACKING_STATE::OK sl::GeoPose current_geopose; auto current_geopose_satus = fusion.getGeoPose(current_geopose); - if (current_geopose_satus == sl::GNSS_CALIBRATION_STATE::CALIBRATED) + if (current_geopose_satus == sl::GNSS_FUSION_STATUS::OK) { - // Display it on ZED Hub: + // Display it on the Live Server: viewer.updateGeoPoseData(current_geopose, zed.getTimestamp(sl::TIME_REFERENCE::CURRENT)); } else { // GNSS coordinate system to ZED coordinate system is not initialize yet - // The initialisation between the coordinates system is basicaly an optimization problem that + // The initialization between the coordinates system is an optimization problem that // Try to fit the ZED computed path with the GNSS computed path. 
In order to do it just move // your system by the distance you specified in positional_tracking_fusion_parameters.gnss_initialisation_distance } diff --git a/global localization/live/python/display/generic_display.py b/global localization/live/python/display/generic_display.py new file mode 100644 index 00000000..3544957d --- /dev/null +++ b/global localization/live/python/display/generic_display.py @@ -0,0 +1,62 @@ +from display.gl_viewer import GLViewer +from exporter.KMLExporter import * +import time + + +class GenericDisplay: + def __init__(self): + pass + + def __del__(self): + closeAllKMLFiles() + + def init(self, camera_model): + self.glviewer = GLViewer() + self.glviewer.init(camera_model) + # Replace this part with the appropriate connection to your IoT system + + def updatePoseData(self, zed_rt, str_t, str_r, state): + self.glviewer.updateData(zed_rt, str_t, str_r, state) + + def isAvailable(self): + return self.glviewer.is_available() + + def updateRawGeoPoseData(self, geo_data): + try: + # Replace this part with the appropriate sending of data to your IoT system + latitude, longitude, _ = geo_data.get_coordinates(False) + f = open('../../map server/raw_data.txt', 'w') + f.write("{},{},{}".format(latitude, longitude, geo_data.ts.get_milliseconds())) + + except ImportError: + print("An exception was raised: the raw geo-pose data was not sent.") + + def updateGeoPoseData(self, geo_pose, current_timestamp): + try: + # Replace this part with the appropriate sending of data to your IoT system + f = open('../../map server/data.txt', 'w') + f.write("{},{},{}" + .format(geo_pose.latlng_coordinates.get_latitude(False), + geo_pose.latlng_coordinates.get_longitude(False), + current_timestamp.get_milliseconds())) + + gnss_data = {} + gnss_data["longitude"] = geo_pose.latlng_coordinates.get_latitude(False) + gnss_data["latitude"] = geo_pose.latlng_coordinates.get_latitude(False) + gnss_data["altitude"] = geo_pose.latlng_coordinates.get_altitude() + 
saveKMLData("fused_position.kml", gnss_data) + + except ImportError: + print("An exception was raised: the geo-pose data was not sent.") + + +if __name__ == "__main__": + generic_display = GenericDisplay() + generic_display.init(0, []) + + try: + while True: + # Your logic here... + pass + except KeyboardInterrupt: + pass diff --git a/geotracking/live geotracking/python/display/gl_viewer.py b/global localization/live/python/display/gl_viewer.py similarity index 99% rename from geotracking/live geotracking/python/display/gl_viewer.py rename to global localization/live/python/display/gl_viewer.py index e6b2f211..4427e8cd 100644 --- a/geotracking/live geotracking/python/display/gl_viewer.py +++ b/global localization/live/python/display/gl_viewer.py @@ -300,7 +300,7 @@ def close_func(self): self.available = False def keyPressedCallback(self, key, x, y): - if ord(key) == 27: + if key == b'q' or key == b'Q' or ord(key) == 27: self.close_func() def on_mouse(self,*args,**kwargs): diff --git a/geotracking/live geotracking/python/display/zed_model.py b/global localization/live/python/display/zed_model.py similarity index 100% rename from geotracking/live geotracking/python/display/zed_model.py rename to global localization/live/python/display/zed_model.py diff --git a/geotracking/recording/python/exporter/KMLExporter.py b/global localization/live/python/exporter/KMLExporter.py similarity index 95% rename from geotracking/recording/python/exporter/KMLExporter.py rename to global localization/live/python/exporter/KMLExporter.py index 4ddf7487..4ef70078 100644 --- a/geotracking/recording/python/exporter/KMLExporter.py +++ b/global localization/live/python/exporter/KMLExporter.py @@ -41,15 +41,15 @@ def closeAllKMLFiles(): """ Close all KML file writer and place KML files footer """ - for file_name, file_object in all_file: + for file_name in all_file: file_footer = "" file_footer += "\t\n" file_footer += "\t\n" file_footer += "\n" file_footer += "\t\n" file_footer += "\n" - 
file_object.write(file_footer) - file_object.close() + all_file[file_name].write(file_footer) + all_file[file_name].close() def saveKMLData(file_path, gnss_data): diff --git a/geotracking/recording/python/gnss_reader/gpsd_reader.py b/global localization/live/python/gnss_reader/gpsd_reader.py similarity index 84% rename from geotracking/recording/python/gnss_reader/gpsd_reader.py rename to global localization/live/python/gnss_reader/gpsd_reader.py index b0cc794c..c0d5a3f4 100644 --- a/geotracking/recording/python/gnss_reader/gpsd_reader.py +++ b/global localization/live/python/gnss_reader/gpsd_reader.py @@ -2,8 +2,7 @@ import time import pyzed.sl as sl from gpsdclient import GPSDClient -import random -import datetime + class GPSDReader: def __init__(self): @@ -14,6 +13,7 @@ def __init__(self): self.is_initialized_mtx = threading.Lock() self.client = None self.gnss_getter = None + def initialize(self): try : self.client = GPSDClient(host="127.0.0.1") @@ -26,7 +26,7 @@ def initialize(self): print("Waiting for GNSS fix") received_fix = False while not received_fix: - self.gnss_getter = self.client.dict_stream(convert_datetime=True, filter=["TPV"]) + self.gnss_getter = self.client.dict_stream(convert_datetime=True, filter=["TPV", "SKY"]) gpsd_data = next(self.gnss_getter) if "class" in gpsd_data and gpsd_data["class"] == "TPV" and "mode" in gpsd_data and gpsd_data["mode"] >=2: received_fix = True @@ -64,8 +64,16 @@ def getNextGNSSValue(self): ts.set_microseconds(timestamp_microseconds) current_gnss_data.ts = ts return current_gnss_data + elif "class" in gpsd_data and gpsd_data["class"] == "SKY": + nb_low_snr = 0 + for satellite in gpsd_data['satellites']: + if satellite['used'] and satellite['ss'] < 16: + nb_low_snr += 1 + if nb_low_snr > 0: + print("[Warning] Low SNR (<16) on {} satellite(s) (using {} out of {} visible)".format(nb_low_snr, gpsd_data['uSat'], gpsd_data['nSat'])) + return self.getNextGNSSValue() else: - print("Fix lost : GNSS reinitialization") + 
print("Fix lost: GNSS reinitialization") self.initialize() def grab(self): diff --git a/geotracking/live geotracking/python/geotracking.py b/global localization/live/python/live.py similarity index 85% rename from geotracking/live geotracking/python/geotracking.py rename to global localization/live/python/live.py index bab671b8..50b5db56 100644 --- a/geotracking/live geotracking/python/geotracking.py +++ b/global localization/live/python/live.py @@ -1,6 +1,6 @@ ######################################################################## # -# Copyright (c) 2023, STEREOLABS. +# Copyright (c) 2024, STEREOLABS. # # All rights reserved. # @@ -36,11 +36,13 @@ def main(): if status != sl.ERROR_CODE.SUCCESS: print("[ZED][ERROR] Camera Open : "+repr(status)+". Exit program.") exit() + # Enable positional tracking: positional_init = zed.enable_positional_tracking() if positional_init != sl.ERROR_CODE.SUCCESS: print("[ZED][ERROR] Can't start tracking of camera : "+repr(status)+". Exit program.") exit() + # Create Fusion object: fusion = sl.Fusion() init_fusion_param = sl.InitFusionParameters() @@ -49,9 +51,11 @@ def main(): if fusion_init_code != sl.FUSION_ERROR_CODE.SUCCESS: print("[ZED][ERROR] Failed to initialize fusion :"+repr(fusion_init_code)+". 
Exit program") exit() + # Enable odometry publishing: configuration = sl.CommunicationParameters() zed.start_publishing(configuration) + # Enable GNSS data producing: gnss_reader = GPSDReader() status_initialize = gnss_reader.initialize() @@ -59,22 +63,24 @@ def main(): gnss_reader.stop_thread() zed.close() exit() - # Subscribe to Odometry + + # Subscribe to odometry: uuid = sl.CameraIdentifier(zed.get_camera_information().serial_number) fusion.subscribe(uuid,configuration,sl.Transform(0,0,0)) - # Enable positional tracking for Fusion object + + # Enable positional tracking for Fusion object: positional_tracking_fusion_parameters = sl.PositionalTrackingFusionParameters() positional_tracking_fusion_parameters.enable_GNSS_fusion = True - gnss_calibration_parameters = { - "target_yaw_uncertainty" : 0.1, - "enable_translation_uncertainty_target" : False, - "target_translation_uncertainty" : 10e-2, - "enable_reinitialization" : True, - "gnss_vio_reinit_threshold" : 5, - "enable_rolling_calibration" : True - } - fusion.enable_positionnal_tracking({"gnss_calibration_parameters" : gnss_calibration_parameters , "enable_GNSS_fusion" : True}) + gnss_calibration_parameters = sl.GNSSCalibrationParameters() + gnss_calibration_parameters.target_yaw_uncertainty = 0.1 + gnss_calibration_parameters.enable_translation_uncertainty_target = False + gnss_calibration_parameters.enable_reinitialization = True + gnss_calibration_parameters.gnss_vio_reinit_threshold = 5 + positional_tracking_fusion_parameters.gnss_calibration_parameters = gnss_calibration_parameters + fusion.enable_positionnal_tracking(positional_tracking_fusion_parameters) + py_translation = sl.Translation() + # Setup viewer: viewer = GenericDisplay() viewer.init(zed.get_camera_information().camera_model) @@ -87,41 +93,51 @@ def main(): # You can still use the classical getPosition for your application, just not that the position returned by this method # is the position without any GNSS/cameras fusion 
zed.get_position(zed_pose, sl.REFERENCE_FRAME.CAMERA) + # Get GNSS data: status, input_gnss = gnss_reader.grab() if status == sl.ERROR_CODE.SUCCESS: + # Display it on the Live Server + viewer.updateRawGeoPoseData(input_gnss) + # Publish GNSS data to Fusion ingest_error = fusion.ingest_gnss_data(input_gnss) if ingest_error != sl.FUSION_ERROR_CODE.SUCCESS: print("Ingest error occurred when ingesting GNSSData: ",ingest_error) + # Process data and compute positions: if fusion.process() == sl.FUSION_ERROR_CODE.SUCCESS: fused_position = sl.Pose() + # Get position into the ZED CAMERA coordinate system: current_state = fusion.get_position(fused_position) + # Display it on OpenGL: rotation = fused_position.get_rotation_vector() translation = fused_position.get_translation(py_translation) text_rotation = str((round(rotation[0], 2), round(rotation[1], 2), round(rotation[2], 2))) text_translation = str((round(translation.get()[0], 2), round(translation.get()[1], 2), round(translation.get()[2], 2))) viewer.updatePoseData(fused_position.pose_data(),text_translation,text_rotation, current_state) + # Get position into the GNSS coordinate system - this needs a initialization between CAMERA # and GNSS. When the initialization is finish the getGeoPose will return sl.POSITIONAL_TRACKING_STATE.OK current_geopose = sl.GeoPose() current_geopose_satus = fusion.get_geo_pose(current_geopose) if current_geopose_satus == sl.GNSS_CALIBRATION_STATE.CALIBRATED: - viewer.updateGeoPoseData(current_geopose, zed.get_timestamp(sl.TIME_REFERENCE.CURRENT).data_ns/1000) + viewer.updateGeoPoseData(current_geopose, zed.get_timestamp(sl.TIME_REFERENCE.CURRENT)) """ else: GNSS coordinate system to ZED coordinate system is not initialize yet - The initialisation between the coordinates system is basicaly an optimization problem that + The initialization between the coordinates system is basically an optimization problem that Try to fit the ZED computed path with the GNSS computed path. 
In order to do it just move your system by the distance you specified in positional_tracking_fusion_parameters.gnss_initialisation_distance """ + + gnss_reader.stop_thread() fusion.close() zed.close() if __name__ == '__main__' : main() - \ No newline at end of file + diff --git a/global localization/live/python/requirements.txt b/global localization/live/python/requirements.txt new file mode 100644 index 00000000..55499adc --- /dev/null +++ b/global localization/live/python/requirements.txt @@ -0,0 +1,2 @@ +PyOpenGL +gpsdclient \ No newline at end of file diff --git a/global localization/map server/README.md b/global localization/map server/README.md new file mode 100644 index 00000000..13b74c6c --- /dev/null +++ b/global localization/map server/README.md @@ -0,0 +1,19 @@ +# Live Server + +## Overview + +This sample displays in real-time the result of the [Live sample](../README.md) in a web browser with [Leaflet](https://leafletjs.com/). + +## Usage + +In order to use this sample, you need to run a simple HTTP web server. + +With Python, you can run: + +```bash +python -m http.server 8000 +# or +python3 -m http.server 8000 +``` + +> **Note**: The map will be available on http://localhost:8000/. 
diff --git a/global localization/map server/assets/css/index.css b/global localization/map server/assets/css/index.css new file mode 100644 index 00000000..355c74bf --- /dev/null +++ b/global localization/map server/assets/css/index.css @@ -0,0 +1,133 @@ +html, +body { + height: 100%; + margin: 0; +} + +.map-container { + height: 100%; + width: 100%; +} + +/* Radio button */ +input[type="radio"] { + position: relative; + appearance: none; + width: 13px; + height: 13px; + border: 1px solid #6b6b6b; + border-radius: 50%; +} + +input[type="radio"]:after { + content: ""; + position: absolute; + top: 2px; + left: 2px; + width: 7px; + height: 7px; + border-radius: 50%; + background-color: #6b6b6b; +} + +input[type="radio"]:hover { + border-color: #c2c2c2; +} + +input[type="radio"]:hover:after { + background-color: #c2c2c2; +} + +input[type="radio"]:checked { + border-color: #d9ff42; +} + +input[type="radio"]:checked:after { + background-color: #d9ff42; +} + +input[type="radio"]:hover:checked { + border-color: #8a9d3f; +} + +input[type="radio"]:hover:checked:after { + background-color: #8a9d3f; +} + +/* Legends */ +.legends { + display: none; +} + +.legends:has(> label) { + display: block; + position: absolute; + z-index: 1000; + bottom: 0; + left: 0; + margin-bottom: 45px; + margin-left: 5px; + background: #fff; + border-radius: 2px; + border: 2px solid rgba(0, 0, 0, 0.2); + background-clip: padding-box; +} + +.legend { + display: flex; + margin: 10px; +} + +/* Checkbox */ +.legend input { + opacity: 0; + height: 0; + width: 0; +} + +.legend .checkmark { + position: relative; + margin: 3px 0; + height: 12px; + width: 12px; + border: 1px solid #6b6b6b; +} + +.legend .checkmark:after { + content: ""; + position: absolute; + top: 3px; + left: 3px; + width: 6px; + height: 6px; + background-color: #6b6b6b; +} + +.legend:hover input ~ .checkmark { + border-color: #c2c2c2; +} + +.legend:hover input ~ .checkmark:after { + background-color: #c2c2c2; +} + +.legend input:checked ~ 
.checkmark { + border-color: #d9ff42; +} + +.legend input:checked ~ .checkmark:after { + background-color: #d9ff42; +} + +.legend:hover input:checked ~ .checkmark { + border-color: #8a9d3f; +} + +.legend:hover input:checked ~ .checkmark:after { + background-color: #8a9d3f; +} +.legend .color { + height: 5px; + width: 18px; + margin: auto 10px; +} diff --git a/global localization/map server/assets/favicon.ico b/global localization/map server/assets/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..93fe59278da74e9da30004117715e3bb262189e7 GIT binary patch literal 15086 zcmeI3d2Ccg9LHZdB1138pP#1jvUV4J8Y8Vw01 z9z-;V3I1b9Q1Qe?V>}_sVYuW9qKS$tcvJ+&&v(0Td40UMyKmpNrDlK0r}N&-Z{|1m z%$wO-R)JMyRaIHI71pxTEo-D@SrrwD_cF`cj$awB!f)+hS@TdBgbq=x$sqr@3}|v% z@NYNL{t}D_NAQ~j$Ft<~-?pujum61ghpB;l{|QrjO>s}e>6~RUEmi*oX&qi?S+s_B_e;>v{G5AT?{FjsP zqp%lj+irIiVd_&(IhB_+;MR4&|BlXL7zP?sZt03gHv2b~`({`NYIC<;=ezpSOmNB^ zUYt7Vo_C}FFr4B@L}R}Owu4{%rwQu?wx}&Hg?Hf}@Qdg6{|?;>kfWFxAT%P&=1GCZc{h7T$p%{W45Gz2a(|&4qrD zjc8t&2ARwYN&6*{ex~2FiKcS?y{2N$4AZ@KlqoFflnf`mbkFKvFTqeyp9m}EB=89Q z04Z(vylk`?wSI*eb9l3DZ%N%DAMhb>=-tx6YsvShOwpW^%i^j$*(70-r zmjUzd^T>NsIqJ+-bJ6bArpv(o&Eh1|_zJfYT1&3QP_z6OmK2++_g?nRzuKbq;6~Hy zidwJyM@g?sYh@lgxz7LUR?NR||EYW;P5;@Y{!`&q|E5$33Hf;Ve6F;-`Tj%!^ex;Or zqz?ampfa?#^FtM>y`SHz4C+Vi_557<4Mv~UeEo;5 z!Eo_gz5b1fMGU)zuCGu(K z+b)M)F1wsz>&uQmh-_6r4J3c3RmMwhWC6)g>Le=u*xV$YQ7U=+N9$hg@PDX(eJ7_n z2XS_cY}=oi!P2*^m(<{?JCf_*1V|;>*w?qXz9E-EPgml)zgrG!%fI0bxENF?UJ}H< z$~y(V0ZmuTbjRG2Uyq;qkaJA`%)9KI4sOZBzP`CGftBF?e%C#BB}jC>^*K0gJD;}@ zb{E+9>)m-)9XfS%{-bYc)tAmM&Vm=v`?+Q<)vxD#lpbvNnE2(uk z_?0nfOZTR_Pc1&Vw<##{yt_;4lAc{R`aObOW*&V-Z88Hk!ZGDJVekNR_UjAGg}Xj7 z=WkkR`ZwHeI&>{{+Owb4r{97)P=5?6{fY1-B-=!GhP}Or`?T$_wdr2%&?ZwpYpQM2 zb~h1kGGscZq3S6*Q(6o0bi0lmYDUM_aEHp>Y2q$bcfrG>OHEOi&bDp2^F31w?%9_1 zRsT;x^ZI|Q#p~bhf10OSv;Vc#_-o7gC&>JJC+*=kH`#McJvuwUIToDH?)kT^*B`QK zwdMLN`>nnIxJ^b~Qp0;&?Z1>2?Hw|ms~4c_>{FTUzqS9&)CQ`)lC6E;1Oo=-?Pu5i z0jt?pJ!kcA8mo;Lzcp6>_(@~+XVJ#$qK4TQ7evcSigt3MLPw9VAl4gBQA|Sup5>=7 
z=}0#ohqy{ZX?fX@pUx@DpaPQT6dH+{%LG&p!#+4C^Jq!UAo2BED2-vKq>5jRVo5?- zJ@T2LZS|jDBdw2D<2uM_x@$fO|o5*zR)!m#r^9IxRrU}md=ih`a zhtc5Gnrd>@g^A3b9^XgTHf}H{O&lkiYRlQs8T>@?RX0K7EB{wRHu+cmy$0FV-z4IO zX}_@j2lMgY@&)O?`ew5Gui|@&SO4|W^YhCn{*ih*d4k0c^3OPTGcXt>I{S^$JB6O* Lr!eW*`f~pR!<&il literal 0 HcmV?d00001 diff --git a/global localization/map server/assets/leaflet/images/layers.png b/global localization/map server/assets/leaflet/images/layers.png new file mode 100644 index 0000000000000000000000000000000000000000..1a72e5784b2b456eac5d7670738db80697af3377 GIT binary patch literal 696 zcmV;p0!RIcP)*@&l2<6p=!C&s@#ZL+%BQvF&b?w6S%wp=I>1QHj7AP5C)IWy#b znXXB;g;j=$a-tW89K%FbDceHVq&unY*Wx3L#=EGWH=rjqnp|4c_Ulec!ql3#G-5ZF zVlbBA@XP=)C8U&+Lrc)S4O5%1$&{(;7R^K(CSnvSr$v;+B$8q&7Bf|h$#PARo1^%M zf1H^nG-EiXVXr07OH(*8R)xa|FD;lXUlg_-%)~ZGsL2cX0NXaAzN2q%jqLRR6ruVk8`Jb7n#{`T;o@`F= z#3YcynIR^s83UNF3D!f5m#Mg)NJ24&Qfrqb&_z=yF;=B)#9Iq7u-@^O!(mW{D;qvr zPc)gVb%aowtS8m@ElL4A9G>w#ffQ~q{i&_i)*6f^)Sz|C?C>zb4Uo?H<-&Hz@a?J; z$ml@zGygWofb9$ZBj6aLjpLhsT2AzjOu=-*u_gSCU svg, +.leaflet-pane > canvas, +.leaflet-zoom-box, +.leaflet-image-layer, +.leaflet-layer { + position: absolute; + left: 0; + top: 0; + } +.leaflet-container { + overflow: hidden; + } +.leaflet-tile, +.leaflet-marker-icon, +.leaflet-marker-shadow { + -webkit-user-select: none; + -moz-user-select: none; + user-select: none; + -webkit-user-drag: none; + } +/* Prevents IE11 from highlighting tiles in blue */ +.leaflet-tile::selection { + background: transparent; +} +/* Safari renders non-retina tile on retina better with this, but Chrome is worse */ +.leaflet-safari .leaflet-tile { + image-rendering: -webkit-optimize-contrast; + } +/* hack that prevents hw layers "stretching" when loading new tiles */ +.leaflet-safari .leaflet-tile-container { + width: 1600px; + height: 1600px; + -webkit-transform-origin: 0 0; + } +.leaflet-marker-icon, +.leaflet-marker-shadow { + display: block; + } +/* .leaflet-container svg: reset svg max-width decleration shipped in 
Joomla! (joomla.org) 3.x */ +/* .leaflet-container img: map is broken in FF if you have max-width: 100% on tiles */ +.leaflet-container .leaflet-overlay-pane svg { + max-width: none !important; + max-height: none !important; + } +.leaflet-container .leaflet-marker-pane img, +.leaflet-container .leaflet-shadow-pane img, +.leaflet-container .leaflet-tile-pane img, +.leaflet-container img.leaflet-image-layer, +.leaflet-container .leaflet-tile { + max-width: none !important; + max-height: none !important; + width: auto; + padding: 0; + } + +.leaflet-container img.leaflet-tile { + /* See: https://bugs.chromium.org/p/chromium/issues/detail?id=600120 */ + mix-blend-mode: plus-lighter; +} + +.leaflet-container.leaflet-touch-zoom { + -ms-touch-action: pan-x pan-y; + touch-action: pan-x pan-y; + } +.leaflet-container.leaflet-touch-drag { + -ms-touch-action: pinch-zoom; + /* Fallback for FF which doesn't support pinch-zoom */ + touch-action: none; + touch-action: pinch-zoom; +} +.leaflet-container.leaflet-touch-drag.leaflet-touch-zoom { + -ms-touch-action: none; + touch-action: none; +} +.leaflet-container { + -webkit-tap-highlight-color: transparent; +} +.leaflet-container a { + -webkit-tap-highlight-color: rgba(51, 181, 229, 0.4); +} +.leaflet-tile { + filter: inherit; + visibility: hidden; + } +.leaflet-tile-loaded { + visibility: inherit; + } +.leaflet-zoom-box { + width: 0; + height: 0; + -moz-box-sizing: border-box; + box-sizing: border-box; + z-index: 800; + } +/* workaround for https://bugzilla.mozilla.org/show_bug.cgi?id=888319 */ +.leaflet-overlay-pane svg { + -moz-user-select: none; + } + +.leaflet-pane { z-index: 400; } + +.leaflet-tile-pane { z-index: 200; } +.leaflet-overlay-pane { z-index: 400; } +.leaflet-shadow-pane { z-index: 500; } +.leaflet-marker-pane { z-index: 600; } +.leaflet-tooltip-pane { z-index: 650; } +.leaflet-popup-pane { z-index: 700; } + +.leaflet-map-pane canvas { z-index: 100; } +.leaflet-map-pane svg { z-index: 200; } + +.leaflet-vml-shape 
{ + width: 1px; + height: 1px; + } +.lvml { + behavior: url(#default#VML); + display: inline-block; + position: absolute; + } + + +/* control positioning */ + +.leaflet-control { + position: relative; + z-index: 800; + pointer-events: visiblePainted; /* IE 9-10 doesn't have auto */ + pointer-events: auto; + } +.leaflet-top, +.leaflet-bottom { + position: absolute; + z-index: 1000; + pointer-events: none; + } +.leaflet-top { + top: 0; + } +.leaflet-right { + right: 0; + } +.leaflet-bottom { + bottom: 0; + } +.leaflet-left { + left: 0; + } +.leaflet-control { + float: left; + clear: both; + } +.leaflet-right .leaflet-control { + float: right; + } +.leaflet-top .leaflet-control { + margin-top: 10px; + } +.leaflet-bottom .leaflet-control { + margin-bottom: 10px; + } +.leaflet-left .leaflet-control { + margin-left: 10px; + } +.leaflet-right .leaflet-control { + margin-right: 10px; + } + + +/* zoom and fade animations */ + +.leaflet-fade-anim .leaflet-popup { + opacity: 0; + -webkit-transition: opacity 0.2s linear; + -moz-transition: opacity 0.2s linear; + transition: opacity 0.2s linear; + } +.leaflet-fade-anim .leaflet-map-pane .leaflet-popup { + opacity: 1; + } +.leaflet-zoom-animated { + -webkit-transform-origin: 0 0; + -ms-transform-origin: 0 0; + transform-origin: 0 0; + } +svg.leaflet-zoom-animated { + will-change: transform; +} + +.leaflet-zoom-anim .leaflet-zoom-animated { + -webkit-transition: -webkit-transform 0.25s cubic-bezier(0,0,0.25,1); + -moz-transition: -moz-transform 0.25s cubic-bezier(0,0,0.25,1); + transition: transform 0.25s cubic-bezier(0,0,0.25,1); + } +.leaflet-zoom-anim .leaflet-tile, +.leaflet-pan-anim .leaflet-tile { + -webkit-transition: none; + -moz-transition: none; + transition: none; + } + +.leaflet-zoom-anim .leaflet-zoom-hide { + visibility: hidden; + } + + +/* cursors */ + +.leaflet-interactive { + cursor: pointer; + } +.leaflet-grab { + cursor: -webkit-grab; + cursor: -moz-grab; + cursor: grab; + } +.leaflet-crosshair, 
+.leaflet-crosshair .leaflet-interactive { + cursor: crosshair; + } +.leaflet-popup-pane, +.leaflet-control { + cursor: auto; + } +.leaflet-dragging .leaflet-grab, +.leaflet-dragging .leaflet-grab .leaflet-interactive, +.leaflet-dragging .leaflet-marker-draggable { + cursor: move; + cursor: -webkit-grabbing; + cursor: -moz-grabbing; + cursor: grabbing; + } + +/* marker & overlays interactivity */ +.leaflet-marker-icon, +.leaflet-marker-shadow, +.leaflet-image-layer, +.leaflet-pane > svg path, +.leaflet-tile-container { + pointer-events: none; + } + +.leaflet-marker-icon.leaflet-interactive, +.leaflet-image-layer.leaflet-interactive, +.leaflet-pane > svg path.leaflet-interactive, +svg.leaflet-image-layer.leaflet-interactive path { + pointer-events: visiblePainted; /* IE 9-10 doesn't have auto */ + pointer-events: auto; + } + +/* visual tweaks */ + +.leaflet-container { + background: #ddd; + outline-offset: 1px; + } +.leaflet-container a { + color: #0078A8; + } +.leaflet-zoom-box { + border: 2px dotted #38f; + background: rgba(255,255,255,0.5); + } + + +/* general typography */ +.leaflet-container { + font-family: "Helvetica Neue", Arial, Helvetica, sans-serif; + font-size: 12px; + font-size: 0.75rem; + line-height: 1.5; + } + + +/* general toolbar styles */ + +.leaflet-bar { + box-shadow: 0 1px 5px rgba(0,0,0,0.65); + border-radius: 4px; + } +.leaflet-bar a { + background-color: #fff; + border-bottom: 1px solid #ccc; + width: 26px; + height: 26px; + line-height: 26px; + display: block; + text-align: center; + text-decoration: none; + color: black; + } +.leaflet-bar a, +.leaflet-control-layers-toggle { + background-position: 50% 50%; + background-repeat: no-repeat; + display: block; + } +.leaflet-bar a:hover, +.leaflet-bar a:focus { + background-color: #f4f4f4; + } +.leaflet-bar a:first-child { + border-top-left-radius: 4px; + border-top-right-radius: 4px; + } +.leaflet-bar a:last-child { + border-bottom-left-radius: 4px; + border-bottom-right-radius: 4px; + 
border-bottom: none; + } +.leaflet-bar a.leaflet-disabled { + cursor: default; + background-color: #f4f4f4; + color: #bbb; + } + +.leaflet-touch .leaflet-bar a { + width: 30px; + height: 30px; + line-height: 30px; + } +.leaflet-touch .leaflet-bar a:first-child { + border-top-left-radius: 2px; + border-top-right-radius: 2px; + } +.leaflet-touch .leaflet-bar a:last-child { + border-bottom-left-radius: 2px; + border-bottom-right-radius: 2px; + } + +/* zoom control */ + +.leaflet-control-zoom-in, +.leaflet-control-zoom-out { + font: bold 18px 'Lucida Console', Monaco, monospace; + text-indent: 1px; + } + +.leaflet-touch .leaflet-control-zoom-in, .leaflet-touch .leaflet-control-zoom-out { + font-size: 22px; + } + + +/* layers control */ + +.leaflet-control-layers { + box-shadow: 0 1px 5px rgba(0,0,0,0.4); + background: #fff; + border-radius: 5px; + } +.leaflet-control-layers-toggle { + background-image: url(images/layers.png); + width: 36px; + height: 36px; + } +.leaflet-retina .leaflet-control-layers-toggle { + background-image: url(images/layers-2x.png); + background-size: 26px 26px; + } +.leaflet-touch .leaflet-control-layers-toggle { + width: 44px; + height: 44px; + } +.leaflet-control-layers .leaflet-control-layers-list, +.leaflet-control-layers-expanded .leaflet-control-layers-toggle { + display: none; + } +.leaflet-control-layers-expanded .leaflet-control-layers-list { + display: block; + position: relative; + } +.leaflet-control-layers-expanded { + padding: 6px 10px 6px 6px; + color: #333; + background: #fff; + } +.leaflet-control-layers-scrollbar { + overflow-y: scroll; + overflow-x: hidden; + padding-right: 5px; + } +.leaflet-control-layers-selector { + margin-top: 2px; + position: relative; + top: 1px; + } +.leaflet-control-layers label { + display: block; + font-size: 13px; + font-size: 1.08333em; + } +.leaflet-control-layers-separator { + height: 0; + border-top: 1px solid #ddd; + margin: 5px -10px 5px -6px; + } + +/* Default icon URLs */ 
+.leaflet-default-icon-path { /* used only in path-guessing heuristic, see L.Icon.Default */ + background-image: url(images/marker-icon.png); + } + + +/* attribution and scale controls */ + +.leaflet-container .leaflet-control-attribution { + background: #fff; + background: rgba(255, 255, 255, 0.8); + margin: 0; + } +.leaflet-control-attribution, +.leaflet-control-scale-line { + padding: 0 5px; + color: #333; + line-height: 1.4; + } +.leaflet-control-attribution a { + text-decoration: none; + } +.leaflet-control-attribution a:hover, +.leaflet-control-attribution a:focus { + text-decoration: underline; + } +.leaflet-attribution-flag { + display: inline !important; + vertical-align: baseline !important; + width: 1em; + height: 0.6669em; + } +.leaflet-left .leaflet-control-scale { + margin-left: 5px; + } +.leaflet-bottom .leaflet-control-scale { + margin-bottom: 5px; + } +.leaflet-control-scale-line { + border: 2px solid #777; + border-top: none; + line-height: 1.1; + padding: 2px 5px 1px; + white-space: nowrap; + -moz-box-sizing: border-box; + box-sizing: border-box; + background: rgba(255, 255, 255, 0.8); + text-shadow: 1px 1px #fff; + } +.leaflet-control-scale-line:not(:first-child) { + border-top: 2px solid #777; + border-bottom: none; + margin-top: -2px; + } +.leaflet-control-scale-line:not(:first-child):not(:last-child) { + border-bottom: 2px solid #777; + } + +.leaflet-touch .leaflet-control-attribution, +.leaflet-touch .leaflet-control-layers, +.leaflet-touch .leaflet-bar { + box-shadow: none; + } +.leaflet-touch .leaflet-control-layers, +.leaflet-touch .leaflet-bar { + border: 2px solid rgba(0,0,0,0.2); + background-clip: padding-box; + } + + +/* popup */ + +.leaflet-popup { + position: absolute; + text-align: center; + margin-bottom: 20px; + } +.leaflet-popup-content-wrapper { + padding: 1px; + text-align: left; + border-radius: 12px; + } +.leaflet-popup-content { + margin: 13px 24px 13px 20px; + line-height: 1.3; + font-size: 13px; + font-size: 1.08333em; + 
min-height: 1px; + } +.leaflet-popup-content p { + margin: 17px 0; + margin: 1.3em 0; + } +.leaflet-popup-tip-container { + width: 40px; + height: 20px; + position: absolute; + left: 50%; + margin-top: -1px; + margin-left: -20px; + overflow: hidden; + pointer-events: none; + } +.leaflet-popup-tip { + width: 17px; + height: 17px; + padding: 1px; + + margin: -10px auto 0; + pointer-events: auto; + + -webkit-transform: rotate(45deg); + -moz-transform: rotate(45deg); + -ms-transform: rotate(45deg); + transform: rotate(45deg); + } +.leaflet-popup-content-wrapper, +.leaflet-popup-tip { + background: white; + color: #333; + box-shadow: 0 3px 14px rgba(0,0,0,0.4); + } +.leaflet-container a.leaflet-popup-close-button { + position: absolute; + top: 0; + right: 0; + border: none; + text-align: center; + width: 24px; + height: 24px; + font: 16px/24px Tahoma, Verdana, sans-serif; + color: #757575; + text-decoration: none; + background: transparent; + } +.leaflet-container a.leaflet-popup-close-button:hover, +.leaflet-container a.leaflet-popup-close-button:focus { + color: #585858; + } +.leaflet-popup-scrolled { + overflow: auto; + } + +.leaflet-oldie .leaflet-popup-content-wrapper { + -ms-zoom: 1; + } +.leaflet-oldie .leaflet-popup-tip { + width: 24px; + margin: 0 auto; + + -ms-filter: "progid:DXImageTransform.Microsoft.Matrix(M11=0.70710678, M12=0.70710678, M21=-0.70710678, M22=0.70710678)"; + filter: progid:DXImageTransform.Microsoft.Matrix(M11=0.70710678, M12=0.70710678, M21=-0.70710678, M22=0.70710678); + } + +.leaflet-oldie .leaflet-control-zoom, +.leaflet-oldie .leaflet-control-layers, +.leaflet-oldie .leaflet-popup-content-wrapper, +.leaflet-oldie .leaflet-popup-tip { + border: 1px solid #999; + } + + +/* div icon */ + +.leaflet-div-icon { + background: #fff; + border: 1px solid #666; + } + + +/* Tooltip */ +/* Base styles for the element that has a tooltip */ +.leaflet-tooltip { + position: absolute; + padding: 6px; + background-color: #fff; + border: 1px solid #fff; + 
border-radius: 3px; + color: #222; + white-space: nowrap; + -webkit-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; + pointer-events: none; + box-shadow: 0 1px 3px rgba(0,0,0,0.4); + } +.leaflet-tooltip.leaflet-interactive { + cursor: pointer; + pointer-events: auto; + } +.leaflet-tooltip-top:before, +.leaflet-tooltip-bottom:before, +.leaflet-tooltip-left:before, +.leaflet-tooltip-right:before { + position: absolute; + pointer-events: none; + border: 6px solid transparent; + background: transparent; + content: ""; + } + +/* Directions */ + +.leaflet-tooltip-bottom { + margin-top: 6px; +} +.leaflet-tooltip-top { + margin-top: -6px; +} +.leaflet-tooltip-bottom:before, +.leaflet-tooltip-top:before { + left: 50%; + margin-left: -6px; + } +.leaflet-tooltip-top:before { + bottom: 0; + margin-bottom: -12px; + border-top-color: #fff; + } +.leaflet-tooltip-bottom:before { + top: 0; + margin-top: -12px; + margin-left: -6px; + border-bottom-color: #fff; + } +.leaflet-tooltip-left { + margin-left: -6px; +} +.leaflet-tooltip-right { + margin-left: 6px; +} +.leaflet-tooltip-left:before, +.leaflet-tooltip-right:before { + top: 50%; + margin-top: -6px; + } +.leaflet-tooltip-left:before { + right: 0; + margin-right: -12px; + border-left-color: #fff; + } +.leaflet-tooltip-right:before { + left: 0; + margin-left: -12px; + border-right-color: #fff; + } + +/* Printing */ + +@media print { + /* Prevent printers from removing background-images of controls. */ + .leaflet-control { + -webkit-print-color-adjust: exact; + print-color-adjust: exact; + } + } diff --git a/global localization/map server/assets/leaflet/leaflet.js b/global localization/map server/assets/leaflet/leaflet.js new file mode 100644 index 00000000..a3bf693d --- /dev/null +++ b/global localization/map server/assets/leaflet/leaflet.js @@ -0,0 +1,6 @@ +/* @preserve + * Leaflet 1.9.4, a JS library for interactive maps. 
https://leafletjs.com + * (c) 2010-2023 Vladimir Agafonkin, (c) 2010-2011 CloudMade + */ +!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports):"function"==typeof define&&define.amd?define(["exports"],e):e((t="undefined"!=typeof globalThis?globalThis:t||self).leaflet={})}(this,function(t){"use strict";function l(t){for(var e,i,n=1,o=arguments.length;n=this.min.x&&i.x<=this.max.x&&e.y>=this.min.y&&i.y<=this.max.y},intersects:function(t){t=_(t);var e=this.min,i=this.max,n=t.min,t=t.max,o=t.x>=e.x&&n.x<=i.x,t=t.y>=e.y&&n.y<=i.y;return o&&t},overlaps:function(t){t=_(t);var e=this.min,i=this.max,n=t.min,t=t.max,o=t.x>e.x&&n.xe.y&&n.y=n.lat&&i.lat<=o.lat&&e.lng>=n.lng&&i.lng<=o.lng},intersects:function(t){t=g(t);var e=this._southWest,i=this._northEast,n=t.getSouthWest(),t=t.getNorthEast(),o=t.lat>=e.lat&&n.lat<=i.lat,t=t.lng>=e.lng&&n.lng<=i.lng;return o&&t},overlaps:function(t){t=g(t);var e=this._southWest,i=this._northEast,n=t.getSouthWest(),t=t.getNorthEast(),o=t.lat>e.lat&&n.late.lng&&n.lng","http://www.w3.org/2000/svg"===(Wt.firstChild&&Wt.firstChild.namespaceURI));function y(t){return 0<=navigator.userAgent.toLowerCase().indexOf(t)}var b={ie:pt,ielt9:mt,edge:n,webkit:ft,android:gt,android23:vt,androidStock:yt,opera:xt,chrome:wt,gecko:bt,safari:Pt,phantom:Lt,opera12:o,win:Tt,ie3d:Mt,webkit3d:zt,gecko3d:_t,any3d:Ct,mobile:Zt,mobileWebkit:St,mobileWebkit3d:Et,msPointer:kt,pointer:Ot,touch:Bt,touchNative:At,mobileOpera:It,mobileGecko:Rt,retina:Nt,passiveEvents:Dt,canvas:jt,svg:Ht,vml:!Ht&&function(){try{var t=document.createElement("div"),e=(t.innerHTML='',t.firstChild);return e.style.behavior="url(#default#VML)",e&&"object"==typeof 
e.adj}catch(t){return!1}}(),inlineSvg:Wt,mac:0===navigator.platform.indexOf("Mac"),linux:0===navigator.platform.indexOf("Linux")},Ft=b.msPointer?"MSPointerDown":"pointerdown",Ut=b.msPointer?"MSPointerMove":"pointermove",Vt=b.msPointer?"MSPointerUp":"pointerup",qt=b.msPointer?"MSPointerCancel":"pointercancel",Gt={touchstart:Ft,touchmove:Ut,touchend:Vt,touchcancel:qt},Kt={touchstart:function(t,e){e.MSPOINTER_TYPE_TOUCH&&e.pointerType===e.MSPOINTER_TYPE_TOUCH&&O(e);ee(t,e)},touchmove:ee,touchend:ee,touchcancel:ee},Yt={},Xt=!1;function Jt(t,e,i){return"touchstart"!==e||Xt||(document.addEventListener(Ft,$t,!0),document.addEventListener(Ut,Qt,!0),document.addEventListener(Vt,te,!0),document.addEventListener(qt,te,!0),Xt=!0),Kt[e]?(i=Kt[e].bind(this,i),t.addEventListener(Gt[e],i,!1),i):(console.warn("wrong event specified:",e),u)}function $t(t){Yt[t.pointerId]=t}function Qt(t){Yt[t.pointerId]&&(Yt[t.pointerId]=t)}function te(t){delete Yt[t.pointerId]}function ee(t,e){if(e.pointerType!==(e.MSPOINTER_TYPE_MOUSE||"mouse")){for(var i in e.touches=[],Yt)e.touches.push(Yt[i]);e.changedTouches=[e],t(e)}}var ie=200;function ne(t,i){t.addEventListener("dblclick",i);var n,o=0;function e(t){var e;1!==t.detail?n=t.detail:"mouse"===t.pointerType||t.sourceCapabilities&&!t.sourceCapabilities.firesTouchEvents||((e=Ne(t)).some(function(t){return t instanceof HTMLLabelElement&&t.attributes.for})&&!e.some(function(t){return t instanceof HTMLInputElement||t instanceof HTMLSelectElement})||((e=Date.now())-o<=ie?2===++n&&i(function(t){var e,i,n={};for(i in t)e=t[i],n[i]=e&&e.bind?e.bind(t):e;return(t=n).type="dblclick",n.detail=2,n.isTrusted=!1,n._simulated=!0,n}(t)):n=1,o=e))}return t.addEventListener("click",e),{dblclick:i,simDblclick:e}}var 
oe,se,re,ae,he,le,ue=we(["transform","webkitTransform","OTransform","MozTransform","msTransform"]),ce=we(["webkitTransition","transition","OTransition","MozTransition","msTransition"]),de="webkitTransition"===ce||"OTransition"===ce?ce+"End":"transitionend";function _e(t){return"string"==typeof t?document.getElementById(t):t}function pe(t,e){var i=t.style[e]||t.currentStyle&&t.currentStyle[e];return"auto"===(i=i&&"auto"!==i||!document.defaultView?i:(t=document.defaultView.getComputedStyle(t,null))?t[e]:null)?null:i}function P(t,e,i){t=document.createElement(t);return t.className=e||"",i&&i.appendChild(t),t}function T(t){var e=t.parentNode;e&&e.removeChild(t)}function me(t){for(;t.firstChild;)t.removeChild(t.firstChild)}function fe(t){var e=t.parentNode;e&&e.lastChild!==t&&e.appendChild(t)}function ge(t){var e=t.parentNode;e&&e.firstChild!==t&&e.insertBefore(t,e.firstChild)}function ve(t,e){return void 0!==t.classList?t.classList.contains(e):0<(t=xe(t)).length&&new RegExp("(^|\\s)"+e+"(\\s|$)").test(t)}function M(t,e){var i;if(void 0!==t.classList)for(var n=F(e),o=0,s=n.length;othis.options.maxZoom)?this.setZoom(t):this},panInsideBounds:function(t,e){this._enforcingBounds=!0;var i=this.getCenter(),t=this._limitCenter(i,this._zoom,g(t));return i.equals(t)||this.panTo(t,e),this._enforcingBounds=!1,this},panInside:function(t,e){var i=m((e=e||{}).paddingTopLeft||e.padding||[0,0]),n=m(e.paddingBottomRight||e.padding||[0,0]),o=this.project(this.getCenter()),t=this.project(t),s=this.getPixelBounds(),i=_([s.min.add(i),s.max.subtract(n)]),s=i.getSize();return i.contains(t)||(this._enforcingBounds=!0,n=t.subtract(i.getCenter()),i=i.extend(t).getSize().subtract(s),o.x+=n.x<0?-i.x:i.x,o.y+=n.y<0?-i.y:i.y,this.panTo(this.unproject(o),e),this._enforcingBounds=!1),this},invalidateSize:function(t){if(!this._loaded)return this;t=l({animate:!1,pan:!0},!0===t?{animate:!0}:t);var 
e=this.getSize(),i=(this._sizeChanged=!0,this._lastCenter=null,this.getSize()),n=e.divideBy(2).round(),o=i.divideBy(2).round(),n=n.subtract(o);return n.x||n.y?(t.animate&&t.pan?this.panBy(n):(t.pan&&this._rawPanBy(n),this.fire("move"),t.debounceMoveend?(clearTimeout(this._sizeTimer),this._sizeTimer=setTimeout(a(this.fire,this,"moveend"),200)):this.fire("moveend")),this.fire("resize",{oldSize:e,newSize:i})):this},stop:function(){return this.setZoom(this._limitZoom(this._zoom)),this.options.zoomSnap||this.fire("viewreset"),this._stop()},locate:function(t){var e,i;return t=this._locateOptions=l({timeout:1e4,watch:!1},t),"geolocation"in navigator?(e=a(this._handleGeolocationResponse,this),i=a(this._handleGeolocationError,this),t.watch?this._locationWatchId=navigator.geolocation.watchPosition(e,i,t):navigator.geolocation.getCurrentPosition(e,i,t)):this._handleGeolocationError({code:0,message:"Geolocation not supported."}),this},stopLocate:function(){return navigator.geolocation&&navigator.geolocation.clearWatch&&navigator.geolocation.clearWatch(this._locationWatchId),this._locateOptions&&(this._locateOptions.setView=!1),this},_handleGeolocationError:function(t){var e;this._container._leaflet_id&&(e=t.code,t=t.message||(1===e?"permission denied":2===e?"position unavailable":"timeout"),this._locateOptions.setView&&!this._loaded&&this.fitWorld(),this.fire("locationerror",{code:e,message:"Geolocation error: "+t+"."}))},_handleGeolocationResponse:function(t){if(this._container._leaflet_id){var e,i,n=new v(t.coords.latitude,t.coords.longitude),o=n.toBounds(2*t.coords.accuracy),s=this._locateOptions,r=(s.setView&&(e=this.getBoundsZoom(o),this.setView(n,s.maxZoom?Math.min(e,s.maxZoom):e)),{latlng:n,bounds:o,timestamp:t.timestamp});for(i in t.coords)"number"==typeof t.coords[i]&&(r[i]=t.coords[i]);this.fire("locationfound",r)}},addHandler:function(t,e){return e&&(e=this[t]=new 
e(this),this._handlers.push(e),this.options[t]&&e.enable()),this},remove:function(){if(this._initEvents(!0),this.options.maxBounds&&this.off("moveend",this._panInsideMaxBounds),this._containerId!==this._container._leaflet_id)throw new Error("Map container is being reused by another instance");try{delete this._container._leaflet_id,delete this._containerId}catch(t){this._container._leaflet_id=void 0,this._containerId=void 0}for(var t in void 0!==this._locationWatchId&&this.stopLocate(),this._stop(),T(this._mapPane),this._clearControlPos&&this._clearControlPos(),this._resizeRequest&&(r(this._resizeRequest),this._resizeRequest=null),this._clearHandlers(),this._loaded&&this.fire("unload"),this._layers)this._layers[t].remove();for(t in this._panes)T(this._panes[t]);return this._layers=[],this._panes=[],delete this._mapPane,delete this._renderer,this},createPane:function(t,e){e=P("div","leaflet-pane"+(t?" leaflet-"+t.replace("Pane","")+"-pane":""),e||this._mapPane);return t&&(this._panes[t]=e),e},getCenter:function(){return this._checkIfLoaded(),this._lastCenter&&!this._moved()?this._lastCenter.clone():this.layerPointToLatLng(this._getCenterLayerPoint())},getZoom:function(){return this._zoom},getBounds:function(){var t=this.getPixelBounds();return new s(this.unproject(t.getBottomLeft()),this.unproject(t.getTopRight()))},getMinZoom:function(){return void 0===this.options.minZoom?this._layersMinZoom||0:this.options.minZoom},getMaxZoom:function(){return void 0===this.options.maxZoom?void 0===this._layersMaxZoom?1/0:this._layersMaxZoom:this.options.maxZoom},getBoundsZoom:function(t,e,i){t=g(t),i=m(i||[0,0]);var n=this.getZoom()||0,o=this.getMinZoom(),s=this.getMaxZoom(),r=t.getNorthWest(),t=t.getSouthEast(),i=this.getSize().subtract(i),t=_(this.project(t,n),this.project(r,n)).getSize(),r=b.any3d?this.options.zoomSnap:1,a=i.x/t.x,i=i.y/t.y,t=e?Math.max(a,i):Math.min(a,i),n=this.getScaleZoom(t,n);return 
r&&(n=Math.round(n/(r/100))*(r/100),n=e?Math.ceil(n/r)*r:Math.floor(n/r)*r),Math.max(o,Math.min(s,n))},getSize:function(){return this._size&&!this._sizeChanged||(this._size=new p(this._container.clientWidth||0,this._container.clientHeight||0),this._sizeChanged=!1),this._size.clone()},getPixelBounds:function(t,e){t=this._getTopLeftPoint(t,e);return new f(t,t.add(this.getSize()))},getPixelOrigin:function(){return this._checkIfLoaded(),this._pixelOrigin},getPixelWorldBounds:function(t){return this.options.crs.getProjectedBounds(void 0===t?this.getZoom():t)},getPane:function(t){return"string"==typeof t?this._panes[t]:t},getPanes:function(){return this._panes},getContainer:function(){return this._container},getZoomScale:function(t,e){var i=this.options.crs;return e=void 0===e?this._zoom:e,i.scale(t)/i.scale(e)},getScaleZoom:function(t,e){var i=this.options.crs,t=(e=void 0===e?this._zoom:e,i.zoom(t*i.scale(e)));return isNaN(t)?1/0:t},project:function(t,e){return e=void 0===e?this._zoom:e,this.options.crs.latLngToPoint(w(t),e)},unproject:function(t,e){return e=void 0===e?this._zoom:e,this.options.crs.pointToLatLng(m(t),e)},layerPointToLatLng:function(t){t=m(t).add(this.getPixelOrigin());return this.unproject(t)},latLngToLayerPoint:function(t){return this.project(w(t))._round()._subtract(this.getPixelOrigin())},wrapLatLng:function(t){return this.options.crs.wrapLatLng(w(t))},wrapLatLngBounds:function(t){return this.options.crs.wrapLatLngBounds(g(t))},distance:function(t,e){return this.options.crs.distance(w(t),w(e))},containerPointToLayerPoint:function(t){return m(t).subtract(this._getMapPanePos())},layerPointToContainerPoint:function(t){return m(t).add(this._getMapPanePos())},containerPointToLatLng:function(t){t=this.containerPointToLayerPoint(m(t));return this.layerPointToLatLng(t)},latLngToContainerPoint:function(t){return this.layerPointToContainerPoint(this.latLngToLayerPoint(w(t)))},mouseEventToContainerPoint:function(t){return 
De(t,this._container)},mouseEventToLayerPoint:function(t){return this.containerPointToLayerPoint(this.mouseEventToContainerPoint(t))},mouseEventToLatLng:function(t){return this.layerPointToLatLng(this.mouseEventToLayerPoint(t))},_initContainer:function(t){t=this._container=_e(t);if(!t)throw new Error("Map container not found.");if(t._leaflet_id)throw new Error("Map container is already initialized.");S(t,"scroll",this._onScroll,this),this._containerId=h(t)},_initLayout:function(){var t=this._container,e=(this._fadeAnimated=this.options.fadeAnimation&&b.any3d,M(t,"leaflet-container"+(b.touch?" leaflet-touch":"")+(b.retina?" leaflet-retina":"")+(b.ielt9?" leaflet-oldie":"")+(b.safari?" leaflet-safari":"")+(this._fadeAnimated?" leaflet-fade-anim":"")),pe(t,"position"));"absolute"!==e&&"relative"!==e&&"fixed"!==e&&"sticky"!==e&&(t.style.position="relative"),this._initPanes(),this._initControlPos&&this._initControlPos()},_initPanes:function(){var t=this._panes={};this._paneRenderers={},this._mapPane=this.createPane("mapPane",this._container),Z(this._mapPane,new p(0,0)),this.createPane("tilePane"),this.createPane("overlayPane"),this.createPane("shadowPane"),this.createPane("markerPane"),this.createPane("tooltipPane"),this.createPane("popupPane"),this.options.markerZoomAnimation||(M(t.markerPane,"leaflet-zoom-hide"),M(t.shadowPane,"leaflet-zoom-hide"))},_resetView:function(t,e,i){Z(this._mapPane,new p(0,0));var n=!this._loaded,o=(this._loaded=!0,e=this._limitZoom(e),this.fire("viewprereset"),this._zoom!==e);this._moveStart(o,i)._move(t,e)._moveEnd(o),this.fire("viewreset"),n&&this.fire("load")},_moveStart:function(t,e){return t&&this.fire("zoomstart"),e||this.fire("movestart"),this},_move:function(t,e,i,n){void 0===e&&(e=this._zoom);var o=this._zoom!==e;return this._zoom=e,this._lastCenter=t,this._pixelOrigin=this._getNewPixelOrigin(t),n?i&&i.pinch&&this.fire("zoom",i):((o||i&&i.pinch)&&this.fire("zoom",i),this.fire("move",i)),this},_moveEnd:function(t){return 
t&&this.fire("zoomend"),this.fire("moveend")},_stop:function(){return r(this._flyToFrame),this._panAnim&&this._panAnim.stop(),this},_rawPanBy:function(t){Z(this._mapPane,this._getMapPanePos().subtract(t))},_getZoomSpan:function(){return this.getMaxZoom()-this.getMinZoom()},_panInsideMaxBounds:function(){this._enforcingBounds||this.panInsideBounds(this.options.maxBounds)},_checkIfLoaded:function(){if(!this._loaded)throw new Error("Set map center and zoom first.")},_initEvents:function(t){this._targets={};var e=t?k:S;e((this._targets[h(this._container)]=this)._container,"click dblclick mousedown mouseup mouseover mouseout mousemove contextmenu keypress keydown keyup",this._handleDOMEvent,this),this.options.trackResize&&e(window,"resize",this._onResize,this),b.any3d&&this.options.transform3DLimit&&(t?this.off:this.on).call(this,"moveend",this._onMoveEnd)},_onResize:function(){r(this._resizeRequest),this._resizeRequest=x(function(){this.invalidateSize({debounceMoveend:!0})},this)},_onScroll:function(){this._container.scrollTop=0,this._container.scrollLeft=0},_onMoveEnd:function(){var t=this._getMapPanePos();Math.max(Math.abs(t.x),Math.abs(t.y))>=this.options.transform3DLimit&&this._resetView(this.getCenter(),this.getZoom())},_findEventTargets:function(t,e){for(var i,n=[],o="mouseout"===e||"mouseover"===e,s=t.target||t.srcElement,r=!1;s;){if((i=this._targets[h(s)])&&("click"===e||"preclick"===e)&&this._draggableMoved(i)){r=!0;break}if(i&&i.listens(e,!0)){if(o&&!We(s,t))break;if(n.push(i),o)break}if(s===this._container)break;s=s.parentNode}return n=n.length||r||o||!this.listens(e,!0)?n:[this]},_isClickDisabled:function(t){for(;t&&t!==this._container;){if(t._leaflet_disable_click)return!0;t=t.parentNode}},_handleDOMEvent:function(t){var 
e,i=t.target||t.srcElement;!this._loaded||i._leaflet_disable_events||"click"===t.type&&this._isClickDisabled(i)||("mousedown"===(e=t.type)&&Me(i),this._fireDOMEvent(t,e))},_mouseEvents:["click","dblclick","mouseover","mouseout","contextmenu"],_fireDOMEvent:function(t,e,i){"click"===t.type&&((a=l({},t)).type="preclick",this._fireDOMEvent(a,a.type,i));var n=this._findEventTargets(t,e);if(i){for(var o=[],s=0;sthis.options.zoomAnimationThreshold)return!1;var n=this.getZoomScale(e),n=this._getCenterOffset(t)._divideBy(1-1/n);if(!0!==i.animate&&!this.getSize().contains(n))return!1;x(function(){this._moveStart(!0,i.noMoveStart||!1)._animateZoom(t,e,!0)},this)}return!0},_animateZoom:function(t,e,i,n){this._mapPane&&(i&&(this._animatingZoom=!0,this._animateToCenter=t,this._animateToZoom=e,M(this._mapPane,"leaflet-zoom-anim")),this.fire("zoomanim",{center:t,zoom:e,noUpdate:n}),this._tempFireZoomEvent||(this._tempFireZoomEvent=this._zoom!==this._animateToZoom),this._move(this._animateToCenter,this._animateToZoom,void 0,!0),setTimeout(a(this._onZoomTransitionEnd,this),250))},_onZoomTransitionEnd:function(){this._animatingZoom&&(this._mapPane&&z(this._mapPane,"leaflet-zoom-anim"),this._animatingZoom=!1,this._move(this._animateToCenter,this._animateToZoom,void 0,!0),this._tempFireZoomEvent&&this.fire("zoom"),delete this._tempFireZoomEvent,this.fire("move"),this._moveEnd(!0))}});function Ue(t){return new B(t)}var B=et.extend({options:{position:"topright"},initialize:function(t){c(this,t)},getPosition:function(){return this.options.position},setPosition:function(t){var e=this._map;return e&&e.removeControl(this),this.options.position=t,e&&e.addControl(this),this},getContainer:function(){return this._container},addTo:function(t){this.remove(),this._map=t;var e=this._container=this.onAdd(t),i=this.getPosition(),t=t._controlCorners[i];return 
M(e,"leaflet-control"),-1!==i.indexOf("bottom")?t.insertBefore(e,t.firstChild):t.appendChild(e),this._map.on("unload",this.remove,this),this},remove:function(){return this._map&&(T(this._container),this.onRemove&&this.onRemove(this._map),this._map.off("unload",this.remove,this),this._map=null),this},_refocusOnMap:function(t){this._map&&t&&0",e=document.createElement("div");return e.innerHTML=t,e.firstChild},_addItem:function(t){var e,i=document.createElement("label"),n=this._map.hasLayer(t.layer),n=(t.overlay?((e=document.createElement("input")).type="checkbox",e.className="leaflet-control-layers-selector",e.defaultChecked=n):e=this._createRadioElement("leaflet-base-layers_"+h(this),n),this._layerControlInputs.push(e),e.layerId=h(t.layer),S(e,"click",this._onInputClick,this),document.createElement("span")),o=(n.innerHTML=" "+t.name,document.createElement("span"));return i.appendChild(o),o.appendChild(e),o.appendChild(n),(t.overlay?this._overlaysList:this._baseLayersList).appendChild(i),this._checkDisabledLayers(),i},_onInputClick:function(){if(!this._preventClick){var t,e,i=this._layerControlInputs,n=[],o=[];this._handlingClick=!0;for(var s=i.length-1;0<=s;s--)t=i[s],e=this._getLayer(t.layerId).layer,t.checked?n.push(e):t.checked||o.push(e);for(s=0;se.options.maxZoom},_expandIfNotCollapsed:function(){return this._map&&!this.options.collapsed&&this.expand(),this},_expandSafely:function(){var t=this._section,e=(this._preventClick=!0,S(t,"click",O),this.expand(),this);setTimeout(function(){k(t,"click",O),e._preventClick=!1})}})),qe=B.extend({options:{position:"topleft",zoomInText:'',zoomInTitle:"Zoom in",zoomOutText:'',zoomOutTitle:"Zoom out"},onAdd:function(t){var e="leaflet-control-zoom",i=P("div",e+" leaflet-bar"),n=this.options;return this._zoomInButton=this._createButton(n.zoomInText,n.zoomInTitle,e+"-in",i,this._zoomIn),this._zoomOutButton=this._createButton(n.zoomOutText,n.zoomOutTitle,e+"-out",i,this._zoomOut),this._updateDisabled(),t.on("zoomend 
zoomlevelschange",this._updateDisabled,this),i},onRemove:function(t){t.off("zoomend zoomlevelschange",this._updateDisabled,this)},disable:function(){return this._disabled=!0,this._updateDisabled(),this},enable:function(){return this._disabled=!1,this._updateDisabled(),this},_zoomIn:function(t){!this._disabled&&this._map._zoomthis._map.getMinZoom()&&this._map.zoomOut(this._map.options.zoomDelta*(t.shiftKey?3:1))},_createButton:function(t,e,i,n,o){i=P("a",i,n);return i.innerHTML=t,i.href="#",i.title=e,i.setAttribute("role","button"),i.setAttribute("aria-label",e),Ie(i),S(i,"click",Re),S(i,"click",o,this),S(i,"click",this._refocusOnMap,this),i},_updateDisabled:function(){var t=this._map,e="leaflet-disabled";z(this._zoomInButton,e),z(this._zoomOutButton,e),this._zoomInButton.setAttribute("aria-disabled","false"),this._zoomOutButton.setAttribute("aria-disabled","false"),!this._disabled&&t._zoom!==t.getMinZoom()||(M(this._zoomOutButton,e),this._zoomOutButton.setAttribute("aria-disabled","true")),!this._disabled&&t._zoom!==t.getMaxZoom()||(M(this._zoomInButton,e),this._zoomInButton.setAttribute("aria-disabled","true"))}}),Ge=(A.mergeOptions({zoomControl:!0}),A.addInitHook(function(){this.options.zoomControl&&(this.zoomControl=new qe,this.addControl(this.zoomControl))}),B.extend({options:{position:"bottomleft",maxWidth:100,metric:!0,imperial:!0},onAdd:function(t){var e="leaflet-control-scale",i=P("div",e),n=this.options;return this._addScales(n,e+"-line",i),t.on(n.updateWhenIdle?"moveend":"move",this._update,this),t.whenReady(this._update,this),i},onRemove:function(t){t.off(this.options.updateWhenIdle?"moveend":"move",this._update,this)},_addScales:function(t,e,i){t.metric&&(this._mScale=P("div",e,i)),t.imperial&&(this._iScale=P("div",e,i))},_update:function(){var 
t=this._map,e=t.getSize().y/2,t=t.distance(t.containerPointToLatLng([0,e]),t.containerPointToLatLng([this.options.maxWidth,e]));this._updateScales(t)},_updateScales:function(t){this.options.metric&&t&&this._updateMetric(t),this.options.imperial&&t&&this._updateImperial(t)},_updateMetric:function(t){var e=this._getRoundNum(t);this._updateScale(this._mScale,e<1e3?e+" m":e/1e3+" km",e/t)},_updateImperial:function(t){var e,i,t=3.2808399*t;5280'+(b.inlineSvg?' ':"")+"Leaflet"},initialize:function(t){c(this,t),this._attributions={}},onAdd:function(t){for(var e in(t.attributionControl=this)._container=P("div","leaflet-control-attribution"),Ie(this._container),t._layers)t._layers[e].getAttribution&&this.addAttribution(t._layers[e].getAttribution());return this._update(),t.on("layeradd",this._addAttribution,this),this._container},onRemove:function(t){t.off("layeradd",this._addAttribution,this)},_addAttribution:function(t){t.layer.getAttribution&&(this.addAttribution(t.layer.getAttribution()),t.layer.once("remove",function(){this.removeAttribution(t.layer.getAttribution())},this))},setPrefix:function(t){return this.options.prefix=t,this._update(),this},addAttribution:function(t){return t&&(this._attributions[t]||(this._attributions[t]=0),this._attributions[t]++,this._update()),this},removeAttribution:function(t){return t&&this._attributions[t]&&(this._attributions[t]--,this._update()),this},_update:function(){if(this._map){var t,e=[];for(t in this._attributions)this._attributions[t]&&e.push(t);var i=[];this.options.prefix&&i.push(this.options.prefix),e.length&&i.push(e.join(", ")),this._container.innerHTML=i.join(' ')}}}),n=(A.mergeOptions({attributionControl:!0}),A.addInitHook(function(){this.options.attributionControl&&(new Ke).addTo(this)}),B.Layers=Ve,B.Zoom=qe,B.Scale=Ge,B.Attribution=Ke,Ue.layers=function(t,e,i){return new Ve(t,e,i)},Ue.zoom=function(t){return new qe(t)},Ue.scale=function(t){return new Ge(t)},Ue.attribution=function(t){return new 
Ke(t)},et.extend({initialize:function(t){this._map=t},enable:function(){return this._enabled||(this._enabled=!0,this.addHooks()),this},disable:function(){return this._enabled&&(this._enabled=!1,this.removeHooks()),this},enabled:function(){return!!this._enabled}})),ft=(n.addTo=function(t,e){return t.addHandler(e,this),this},{Events:e}),Ye=b.touch?"touchstart mousedown":"mousedown",Xe=it.extend({options:{clickTolerance:3},initialize:function(t,e,i,n){c(this,n),this._element=t,this._dragStartTarget=e||t,this._preventOutline=i},enable:function(){this._enabled||(S(this._dragStartTarget,Ye,this._onDown,this),this._enabled=!0)},disable:function(){this._enabled&&(Xe._dragging===this&&this.finishDrag(!0),k(this._dragStartTarget,Ye,this._onDown,this),this._enabled=!1,this._moved=!1)},_onDown:function(t){var e,i;this._enabled&&(this._moved=!1,ve(this._element,"leaflet-zoom-anim")||(t.touches&&1!==t.touches.length?Xe._dragging===this&&this.finishDrag():Xe._dragging||t.shiftKey||1!==t.which&&1!==t.button&&!t.touches||((Xe._dragging=this)._preventOutline&&Me(this._element),Le(),re(),this._moving||(this.fire("down"),i=t.touches?t.touches[0]:t,e=Ce(this._element),this._startPoint=new p(i.clientX,i.clientY),this._startPos=Pe(this._element),this._parentScale=Ze(e),i="mousedown"===t.type,S(document,i?"mousemove":"touchmove",this._onMove,this),S(document,i?"mouseup":"touchend touchcancel",this._onUp,this)))))},_onMove:function(t){var e;this._enabled&&(t.touches&&1e&&(i.push(t[n]),o=n);oe.max.x&&(i|=2),t.ye.max.y&&(i|=8),i}function ri(t,e,i,n){var o=e.x,e=e.y,s=i.x-o,r=i.y-e,a=s*s+r*r;return 0this._layersMaxZoom&&this.setZoom(this._layersMaxZoom),void 0===this.options.minZoom&&this._layersMinZoom&&this.getZoom()t.y!=n.y>t.y&&t.x<(n.x-i.x)*(t.y-i.y)/(n.y-i.y)+i.x&&(l=!l);return l||yi.prototype._containsPoint.call(this,t,!0)}});var wi=ci.extend({initialize:function(t,e){c(this,e),this._layers={},t&&this.addData(t)},addData:function(t){var 
e,i,n,o=d(t)?t:t.features;if(o){for(e=0,i=o.length;es.x&&(r=i.x+a-s.x+o.x),i.x-r-n.x<(a=0)&&(r=i.x-n.x),i.y+e+o.y>s.y&&(a=i.y+e-s.y+o.y),i.y-a-n.y<0&&(a=i.y-n.y),(r||a)&&(this.options.keepInView&&(this._autopanning=!0),t.fire("autopanstart").panBy([r,a]))))},_getAnchor:function(){return m(this._source&&this._source._getPopupAnchor?this._source._getPopupAnchor():[0,0])}})),Ii=(A.mergeOptions({closePopupOnClick:!0}),A.include({openPopup:function(t,e,i){return this._initOverlay(Bi,t,e,i).openOn(this),this},closePopup:function(t){return(t=arguments.length?t:this._popup)&&t.close(),this}}),o.include({bindPopup:function(t,e){return this._popup=this._initOverlay(Bi,this._popup,t,e),this._popupHandlersAdded||(this.on({click:this._openPopup,keypress:this._onKeyPress,remove:this.closePopup,move:this._movePopup}),this._popupHandlersAdded=!0),this},unbindPopup:function(){return this._popup&&(this.off({click:this._openPopup,keypress:this._onKeyPress,remove:this.closePopup,move:this._movePopup}),this._popupHandlersAdded=!1,this._popup=null),this},openPopup:function(t){return this._popup&&(this instanceof ci||(this._popup._source=this),this._popup._prepareOpen(t||this._latlng)&&this._popup.openOn(this._map)),this},closePopup:function(){return this._popup&&this._popup.close(),this},togglePopup:function(){return this._popup&&this._popup.toggle(this),this},isPopupOpen:function(){return!!this._popup&&this._popup.isOpen()},setPopupContent:function(t){return this._popup&&this._popup.setContent(t),this},getPopup:function(){return this._popup},_openPopup:function(t){var e;this._popup&&this._map&&(Re(t),e=t.layer||t.target,this._popup._source!==e||e instanceof 
fi?(this._popup._source=e,this.openPopup(t.latlng)):this._map.hasLayer(this._popup)?this.closePopup():this.openPopup(t.latlng))},_movePopup:function(t){this._popup.setLatLng(t.latlng)},_onKeyPress:function(t){13===t.originalEvent.keyCode&&this._openPopup(t)}}),Ai.extend({options:{pane:"tooltipPane",offset:[0,0],direction:"auto",permanent:!1,sticky:!1,opacity:.9},onAdd:function(t){Ai.prototype.onAdd.call(this,t),this.setOpacity(this.options.opacity),t.fire("tooltipopen",{tooltip:this}),this._source&&(this.addEventParent(this._source),this._source.fire("tooltipopen",{tooltip:this},!0))},onRemove:function(t){Ai.prototype.onRemove.call(this,t),t.fire("tooltipclose",{tooltip:this}),this._source&&(this.removeEventParent(this._source),this._source.fire("tooltipclose",{tooltip:this},!0))},getEvents:function(){var t=Ai.prototype.getEvents.call(this);return this.options.permanent||(t.preclick=this.close),t},_initLayout:function(){var t="leaflet-tooltip "+(this.options.className||"")+" leaflet-zoom-"+(this._zoomAnimated?"animated":"hide");this._contentNode=this._container=P("div",t),this._container.setAttribute("role","tooltip"),this._container.setAttribute("id","leaflet-tooltip-"+h(this))},_updateLayout:function(){},_adjustPan:function(){},_setPosition:function(t){var e,i=this._map,n=this._container,o=i.latLngToContainerPoint(i.getCenter()),i=i.layerPointToContainerPoint(t),s=this.options.direction,r=n.offsetWidth,a=n.offsetHeight,h=m(this.options.offset),l=this._getAnchor(),i="top"===s?(e=r/2,a):"bottom"===s?(e=r/2,0):(e="center"===s?r/2:"right"===s?0:"left"===s?r:i.xthis.options.maxZoom||nthis.options.maxZoom||void 0!==this.options.minZoom&&oi.max.x)||!e.wrapLat&&(t.yi.max.y))return!1}return!this.options.bounds||(e=this._tileCoordsToBounds(t),g(this.options.bounds).overlaps(e))},_keyToBounds:function(t){return this._tileCoordsToBounds(this._keyToTileCoords(t))},_tileCoordsToNwSe:function(t){var 
e=this._map,i=this.getTileSize(),n=t.scaleBy(i),i=n.add(i);return[e.unproject(n,t.z),e.unproject(i,t.z)]},_tileCoordsToBounds:function(t){t=this._tileCoordsToNwSe(t),t=new s(t[0],t[1]);return t=this.options.noWrap?t:this._map.wrapLatLngBounds(t)},_tileCoordsToKey:function(t){return t.x+":"+t.y+":"+t.z},_keyToTileCoords:function(t){var t=t.split(":"),e=new p(+t[0],+t[1]);return e.z=+t[2],e},_removeTile:function(t){var e=this._tiles[t];e&&(T(e.el),delete this._tiles[t],this.fire("tileunload",{tile:e.el,coords:this._keyToTileCoords(t)}))},_initTile:function(t){M(t,"leaflet-tile");var e=this.getTileSize();t.style.width=e.x+"px",t.style.height=e.y+"px",t.onselectstart=u,t.onmousemove=u,b.ielt9&&this.options.opacity<1&&C(t,this.options.opacity)},_addTile:function(t,e){var i=this._getTilePos(t),n=this._tileCoordsToKey(t),o=this.createTile(this._wrapCoords(t),a(this._tileReady,this,t));this._initTile(o),this.createTile.length<2&&x(a(this._tileReady,this,t,null,o)),Z(o,i),this._tiles[n]={el:o,coords:t,current:!0},e.appendChild(o),this.fire("tileloadstart",{tile:o,coords:t})},_tileReady:function(t,e,i){e&&this.fire("tileerror",{error:e,tile:i,coords:t});var n=this._tileCoordsToKey(t);(i=this._tiles[n])&&(i.loaded=+new Date,this._map._fadeAnimated?(C(i.el,0),r(this._fadeFrame),this._fadeFrame=x(this._updateOpacity,this)):(i.active=!0,this._pruneTiles()),e||(M(i.el,"leaflet-tile-loaded"),this.fire("tileload",{tile:i.el,coords:t})),this._noTilesToLoad()&&(this._loading=!1,this.fire("load"),b.ielt9||!this._map._fadeAnimated?x(this._pruneTiles,this):setTimeout(a(this._pruneTiles,this),250)))},_getTilePos:function(t){return t.scaleBy(this.getTileSize()).subtract(this._level.origin)},_wrapCoords:function(t){var e=new p(this._wrapX?H(t.x,this._wrapX):t.x,this._wrapY?H(t.y,this._wrapY):t.y);return e.z=t.z,e},_pxBoundsToTileRange:function(t){var e=this.getTileSize();return new f(t.min.unscaleBy(e).floor(),t.max.unscaleBy(e).ceil().subtract([1,1]))},_noTilesToLoad:function(){for(var t 
in this._tiles)if(!this._tiles[t].loaded)return!1;return!0}});var Di=Ni.extend({options:{minZoom:0,maxZoom:18,subdomains:"abc",errorTileUrl:"",zoomOffset:0,tms:!1,zoomReverse:!1,detectRetina:!1,crossOrigin:!1,referrerPolicy:!1},initialize:function(t,e){this._url=t,(e=c(this,e)).detectRetina&&b.retina&&0')}}catch(t){}return function(t){return document.createElement("<"+t+' xmlns="urn:schemas-microsoft.com:vml" class="lvml">')}}(),zt={_initContainer:function(){this._container=P("div","leaflet-vml-container")},_update:function(){this._map._animatingZoom||(Wi.prototype._update.call(this),this.fire("update"))},_initPath:function(t){var e=t._container=Vi("shape");M(e,"leaflet-vml-shape "+(this.options.className||"")),e.coordsize="1 1",t._path=Vi("path"),e.appendChild(t._path),this._updateStyle(t),this._layers[h(t)]=t},_addPath:function(t){var e=t._container;this._container.appendChild(e),t.options.interactive&&t.addInteractiveTarget(e)},_removePath:function(t){var e=t._container;T(e),t.removeInteractiveTarget(e),delete this._layers[h(t)]},_updateStyle:function(t){var e=t._stroke,i=t._fill,n=t.options,o=t._container;o.stroked=!!n.stroke,o.filled=!!n.fill,n.stroke?(e=e||(t._stroke=Vi("stroke")),o.appendChild(e),e.weight=n.weight+"px",e.color=n.color,e.opacity=n.opacity,n.dashArray?e.dashStyle=d(n.dashArray)?n.dashArray.join(" "):n.dashArray.replace(/( *, *)/g," "):e.dashStyle="",e.endcap=n.lineCap.replace("butt","flat"),e.joinstyle=n.lineJoin):e&&(o.removeChild(e),t._stroke=null),n.fill?(i=i||(t._fill=Vi("fill")),o.appendChild(i),i.color=n.fillColor||n.color,i.opacity=n.fillOpacity):i&&(o.removeChild(i),t._fill=null)},_updateCircle:function(t){var e=t._point.round(),i=Math.round(t._radius),n=Math.round(t._radiusY||i);this._setPath(t,t._empty()?"M0 0":"AL "+e.x+","+e.y+" "+i+","+n+" 
0,23592600")},_setPath:function(t,e){t._path.v=e},_bringToFront:function(t){fe(t._container)},_bringToBack:function(t){ge(t._container)}},qi=b.vml?Vi:ct,Gi=Wi.extend({_initContainer:function(){this._container=qi("svg"),this._container.setAttribute("pointer-events","none"),this._rootGroup=qi("g"),this._container.appendChild(this._rootGroup)},_destroyContainer:function(){T(this._container),k(this._container),delete this._container,delete this._rootGroup,delete this._svgSize},_update:function(){var t,e,i;this._map._animatingZoom&&this._bounds||(Wi.prototype._update.call(this),e=(t=this._bounds).getSize(),i=this._container,this._svgSize&&this._svgSize.equals(e)||(this._svgSize=e,i.setAttribute("width",e.x),i.setAttribute("height",e.y)),Z(i,t.min),i.setAttribute("viewBox",[t.min.x,t.min.y,e.x,e.y].join(" ")),this.fire("update"))},_initPath:function(t){var e=t._path=qi("path");t.options.className&&M(e,t.options.className),t.options.interactive&&M(e,"leaflet-interactive"),this._updateStyle(t),this._layers[h(t)]=t},_addPath:function(t){this._rootGroup||this._initContainer(),this._rootGroup.appendChild(t._path),t.addInteractiveTarget(t._path)},_removePath:function(t){T(t._path),t.removeInteractiveTarget(t._path),delete this._layers[h(t)]},_updatePath:function(t){t._project(),t._update()},_updateStyle:function(t){var 
e=t._path,t=t.options;e&&(t.stroke?(e.setAttribute("stroke",t.color),e.setAttribute("stroke-opacity",t.opacity),e.setAttribute("stroke-width",t.weight),e.setAttribute("stroke-linecap",t.lineCap),e.setAttribute("stroke-linejoin",t.lineJoin),t.dashArray?e.setAttribute("stroke-dasharray",t.dashArray):e.removeAttribute("stroke-dasharray"),t.dashOffset?e.setAttribute("stroke-dashoffset",t.dashOffset):e.removeAttribute("stroke-dashoffset")):e.setAttribute("stroke","none"),t.fill?(e.setAttribute("fill",t.fillColor||t.color),e.setAttribute("fill-opacity",t.fillOpacity),e.setAttribute("fill-rule",t.fillRule||"evenodd")):e.setAttribute("fill","none"))},_updatePoly:function(t,e){this._setPath(t,dt(t._parts,e))},_updateCircle:function(t){var e=t._point,i=Math.max(Math.round(t._radius),1),n="a"+i+","+(Math.max(Math.round(t._radiusY),1)||i)+" 0 1,0 ",e=t._empty()?"M0 0":"M"+(e.x-i)+","+e.y+n+2*i+",0 "+n+2*-i+",0 ";this._setPath(t,e)},_setPath:function(t,e){t._path.setAttribute("d",e)},_bringToFront:function(t){fe(t._path)},_bringToBack:function(t){ge(t._path)}});function Ki(t){return b.svg||b.vml?new Gi(t):null}b.vml&&Gi.include(zt),A.include({getRenderer:function(t){t=(t=t.options.renderer||this._getPaneRenderer(t.options.pane)||this.options.renderer||this._renderer)||(this._renderer=this._createRenderer());return this.hasLayer(t)||this.addLayer(t),t},_getPaneRenderer:function(t){var e;return"overlayPane"!==t&&void 0!==t&&(void 0===(e=this._paneRenderers[t])&&(e=this._createRenderer({pane:t}),this._paneRenderers[t]=e),e)},_createRenderer:function(t){return this.options.preferCanvas&&Ui(t)||Ki(t)}});var Yi=xi.extend({initialize:function(t,e){xi.prototype.initialize.call(this,this._boundsToLatLngs(t),e)},setBounds:function(t){return 
this.setLatLngs(this._boundsToLatLngs(t))},_boundsToLatLngs:function(t){return[(t=g(t)).getSouthWest(),t.getNorthWest(),t.getNorthEast(),t.getSouthEast()]}});Gi.create=qi,Gi.pointsToPath=dt,wi.geometryToLayer=bi,wi.coordsToLatLng=Li,wi.coordsToLatLngs=Ti,wi.latLngToCoords=Mi,wi.latLngsToCoords=zi,wi.getFeature=Ci,wi.asFeature=Zi,A.mergeOptions({boxZoom:!0});var _t=n.extend({initialize:function(t){this._map=t,this._container=t._container,this._pane=t._panes.overlayPane,this._resetStateTimeout=0,t.on("unload",this._destroy,this)},addHooks:function(){S(this._container,"mousedown",this._onMouseDown,this)},removeHooks:function(){k(this._container,"mousedown",this._onMouseDown,this)},moved:function(){return this._moved},_destroy:function(){T(this._pane),delete this._pane},_resetState:function(){this._resetStateTimeout=0,this._moved=!1},_clearDeferredResetState:function(){0!==this._resetStateTimeout&&(clearTimeout(this._resetStateTimeout),this._resetStateTimeout=0)},_onMouseDown:function(t){if(!t.shiftKey||1!==t.which&&1!==t.button)return!1;this._clearDeferredResetState(),this._resetState(),re(),Le(),this._startPoint=this._map.mouseEventToContainerPoint(t),S(document,{contextmenu:Re,mousemove:this._onMouseMove,mouseup:this._onMouseUp,keydown:this._onKeyDown},this)},_onMouseMove:function(t){this._moved||(this._moved=!0,this._box=P("div","leaflet-zoom-box",this._container),M(this._container,"leaflet-crosshair"),this._map.fire("boxzoomstart")),this._point=this._map.mouseEventToContainerPoint(t);var t=new 
f(this._point,this._startPoint),e=t.getSize();Z(this._box,t.min),this._box.style.width=e.x+"px",this._box.style.height=e.y+"px"},_finish:function(){this._moved&&(T(this._box),z(this._container,"leaflet-crosshair")),ae(),Te(),k(document,{contextmenu:Re,mousemove:this._onMouseMove,mouseup:this._onMouseUp,keydown:this._onKeyDown},this)},_onMouseUp:function(t){1!==t.which&&1!==t.button||(this._finish(),this._moved&&(this._clearDeferredResetState(),this._resetStateTimeout=setTimeout(a(this._resetState,this),0),t=new s(this._map.containerPointToLatLng(this._startPoint),this._map.containerPointToLatLng(this._point)),this._map.fitBounds(t).fire("boxzoomend",{boxZoomBounds:t})))},_onKeyDown:function(t){27===t.keyCode&&(this._finish(),this._clearDeferredResetState(),this._resetState())}}),Ct=(A.addInitHook("addHandler","boxZoom",_t),A.mergeOptions({doubleClickZoom:!0}),n.extend({addHooks:function(){this._map.on("dblclick",this._onDoubleClick,this)},removeHooks:function(){this._map.off("dblclick",this._onDoubleClick,this)},_onDoubleClick:function(t){var e=this._map,i=e.getZoom(),n=e.options.zoomDelta,i=t.originalEvent.shiftKey?i-n:i+n;"center"===e.options.doubleClickZoom?e.setZoom(i):e.setZoomAround(t.containerPoint,i)}})),Zt=(A.addInitHook("addHandler","doubleClickZoom",Ct),A.mergeOptions({dragging:!0,inertia:!0,inertiaDeceleration:3400,inertiaMaxSpeed:1/0,easeLinearity:.2,worldCopyJump:!1,maxBoundsViscosity:0}),n.extend({addHooks:function(){var t;this._draggable||(t=this._map,this._draggable=new Xe(t._mapPane,t._container),this._draggable.on({dragstart:this._onDragStart,drag:this._onDrag,dragend:this._onDragEnd},this),this._draggable.on("predrag",this._onPreDragLimit,this),t.options.worldCopyJump&&(this._draggable.on("predrag",this._onPreDragWrap,this),t.on("zoomend",this._onZoomEnd,this),t.whenReady(this._onZoomEnd,this))),M(this._map._container,"leaflet-grab 
leaflet-touch-drag"),this._draggable.enable(),this._positions=[],this._times=[]},removeHooks:function(){z(this._map._container,"leaflet-grab"),z(this._map._container,"leaflet-touch-drag"),this._draggable.disable()},moved:function(){return this._draggable&&this._draggable._moved},moving:function(){return this._draggable&&this._draggable._moving},_onDragStart:function(){var t,e=this._map;e._stop(),this._map.options.maxBounds&&this._map.options.maxBoundsViscosity?(t=g(this._map.options.maxBounds),this._offsetLimit=_(this._map.latLngToContainerPoint(t.getNorthWest()).multiplyBy(-1),this._map.latLngToContainerPoint(t.getSouthEast()).multiplyBy(-1).add(this._map.getSize())),this._viscosity=Math.min(1,Math.max(0,this._map.options.maxBoundsViscosity))):this._offsetLimit=null,e.fire("movestart").fire("dragstart"),e.options.inertia&&(this._positions=[],this._times=[])},_onDrag:function(t){var e,i;this._map.options.inertia&&(e=this._lastTime=+new Date,i=this._lastPos=this._draggable._absPos||this._draggable._newPos,this._positions.push(i),this._times.push(e),this._prunePositions(e)),this._map.fire("move",t).fire("drag",t)},_prunePositions:function(t){for(;1e.max.x&&(t.x=this._viscousLimit(t.x,e.max.x)),t.y>e.max.y&&(t.y=this._viscousLimit(t.y,e.max.y)),this._draggable._newPos=this._draggable._startPos.add(t))},_onPreDragWrap:function(){var t=this._worldWidth,e=Math.round(t/2),i=this._initialWorldOffset,n=this._draggable._newPos.x,o=(n-e+i)%t+e-i,n=(n+e+i)%t-e-i,t=Math.abs(o+i)e.getMaxZoom()&&1 + + + + + + Global Localization - ZED SDK + + + + + + + + + + +
+
+
+ + + diff --git a/global localization/playback/README.md b/global localization/playback/README.md new file mode 100644 index 00000000..db9fc09d --- /dev/null +++ b/global localization/playback/README.md @@ -0,0 +1,30 @@ +# Global Localization Data Playback + +## Overview + +The ZED SDK Global Localization Playback sample demonstrates how to fuse pre-recorded GNSS data (saved in a JSON file) and pre-recorded camera data (saved into an SVO file) for achieving global scale localization on a real-world map. This sample is useful for applications such as offline analysis of sensor data or simulation / testing. + +## Features + +- Displays the camera's path in an OpenGL window. +- Displays path data, including translation and rotation. +- Displays the fused path on a map in a web browser. +- Exports KML files for the fused trajectory and raw GNSS data. + +## Dependencies + +Before using this sample, ensure that you have the following dependencies installed on your system: + +- ZED SDK: download and install from the official [Stereolabs website](https://www.stereolabs.com/developers/release/). + +## Installation and Usage + +To use the ZED SDK Global Localization Playback sample, follow these steps: + +1. Download and install the ZED SDK on your system from the official [Stereolabs website](https://www.stereolabs.com/developers/release/). +2. Open a terminal and navigate to the playback sample directory. +3. Compile the sample for C++ in a _build_ directory. +4. Run the `ZED_Global_Localization_Playback` executable for C++ and `live.py` for Python, passing the path to the SVO file as the first input argument of the command line and the path to GNSS file as second argument. +5. The sample will display the camera's path and path data in a 3D window. +6. Go to the [map server sample](../map%20server) and run a simple server. +7. The sample will playback the SVO file and display the camera's path and path data in a 3D window. 
The fused path will be displayed on a map on web browser, and KML files will be generated for the fused trajectory and raw GNSS data. diff --git a/geotracking/playback/cpp/CMakeLists.txt b/global localization/playback/cpp/CMakeLists.txt similarity index 79% rename from geotracking/playback/cpp/CMakeLists.txt rename to global localization/playback/cpp/CMakeLists.txt index 837c23f3..67cb9edc 100644 --- a/geotracking/playback/cpp/CMakeLists.txt +++ b/global localization/playback/cpp/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 3.5) -PROJECT(ZED_GNSS_playback) +PROJECT(ZED_Global_Localization_Playback) set(CMAKE_CXX_STANDARD 17) set(CMAKE_CXX_STANDARD_REQUIRED ON) @@ -13,16 +13,6 @@ SET(OpenGL_GL_PREFERENCE GLVND) find_package(OpenGL REQUIRED) find_package(OpenCV REQUIRED) -# ZEDHub - if found -find_package(SL_HUB) - -if(SL_HUB_FOUND) - message("Found ZEDHub library") - add_definitions(-DCOMPILE_WITH_ZEDHUB=TRUE) - link_directories(${SL_HUB_LIB_DIR}) - include_directories(${SL_HUB_INCLUDE_DIR}) -endif() - IF(NOT MSVC) SET(SPECIAL_OS_LIBS "pthread") ENDIF() @@ -67,11 +57,6 @@ FILE(GLOB_RECURSE HDR_FILES include/*.h*) add_executable(${PROJECT_NAME} ${HDR_FILES} ${SRC_FILES}) -if(SL_HUB_FOUND) - set(LIBS sl_hub util ${ZED_LIBRARIES} ${OpenCV_LIBRARIES} ${OPENGL_LIBRARIES} ${GLUT_LIBRARIES} ${GLEW_LIBRARIES}) -else() - set(LIBS ${ZED_LIBRARIES} ${OpenCV_LIBRARIES} ${OPENGL_LIBRARIES} ${GLUT_LIBRARIES} ${GLEW_LIBRARIES}) -endif() - +set(LIBS ${ZED_LIBRARIES} ${OpenCV_LIBRARIES} ${OPENGL_LIBRARIES} ${GLUT_LIBRARIES} ${GLEW_LIBRARIES}) target_link_libraries(${PROJECT_NAME} ${LIBS} ${SPECIAL_OS_LIBS}) diff --git a/geotracking/playback/cpp/cmake/FindGPS.cmake b/global localization/playback/cpp/cmake/FindGPS.cmake similarity index 100% rename from geotracking/playback/cpp/cmake/FindGPS.cmake rename to global localization/playback/cpp/cmake/FindGPS.cmake diff --git a/geotracking/playback/cpp/include/GNSSReplay.hpp b/global 
localization/playback/cpp/include/GNSSReplay.hpp similarity index 80% rename from geotracking/playback/cpp/include/GNSSReplay.hpp rename to global localization/playback/cpp/include/GNSSReplay.hpp index 217dc212..85031339 100644 --- a/geotracking/playback/cpp/include/GNSSReplay.hpp +++ b/global localization/playback/cpp/include/GNSSReplay.hpp @@ -9,16 +9,17 @@ /** * @brief GNSSReplay is a common interface that read GNSS saved data */ -class GNSSReplay -{ +class GNSSReplay { public: - GNSSReplay(std::string file_name); + GNSSReplay(std::string file_name, sl::Camera *zed = 0); ~GNSSReplay(); /** * @brief Initialize the GNSS sensor and is waiting for the first GNSS fix. * */ - void initialize(); + void initialize_from_json(); + + void initialize_from_svov2(sl::Camera *zed); void close(); diff --git a/geotracking/recording/cpp/include/display/GLViewer.hpp b/global localization/playback/cpp/include/display/GLViewer.hpp similarity index 95% rename from geotracking/recording/cpp/include/display/GLViewer.hpp rename to global localization/playback/cpp/include/display/GLViewer.hpp index 19283de9..ba5d26e7 100644 --- a/geotracking/recording/cpp/include/display/GLViewer.hpp +++ b/global localization/playback/cpp/include/display/GLViewer.hpp @@ -12,6 +12,7 @@ #include "ZEDModel.hpp" /* OpenGL Utility Toolkit header */ #include +#include #ifndef M_PI #define M_PI 3.1416f @@ -78,14 +79,14 @@ class Shader { public: Shader() {} - Shader(GLchar* vs, GLchar* fs); + Shader(const GLchar* vs, const GLchar* fs); ~Shader(); GLuint getProgramId(); static const GLint ATTRIB_VERTICES_POS = 0; static const GLint ATTRIB_COLOR_POS = 1; private: - bool compile(GLuint &shaderId, GLenum type, GLchar* src); + bool compile(GLuint &shaderId, GLenum type, const GLchar* src); GLuint verterxId_; GLuint fragmentId_; GLuint programId_; @@ -157,7 +158,7 @@ class GLViewer { void exit(); bool isAvailable(); void init(int argc, char **argv); - void updateData(sl::Transform zed_rt, std::string str_t, 
std::string str_r, sl::POSITIONAL_TRACKING_STATE state); + void updateData(sl::Transform zed_rt, sl::FusedPositionalTrackingStatus state); private: // Rendering loop method called each frame by glutDisplayFunc @@ -213,7 +214,7 @@ class GLViewer { std::string txtR; std::string txtT; - sl::POSITIONAL_TRACKING_STATE trackState; + sl::FusedPositionalTrackingStatus trackState; const std::string str_tracking = "POSITIONAL TRACKING : "; sl::float3 bckgrnd_clr; diff --git a/geotracking/recording/cpp/include/display/GenericDisplay.h b/global localization/playback/cpp/include/display/GenericDisplay.h similarity index 77% rename from geotracking/recording/cpp/include/display/GenericDisplay.h rename to global localization/playback/cpp/include/display/GenericDisplay.h index 4fc787c5..89de6ad5 100644 --- a/geotracking/recording/cpp/include/display/GenericDisplay.h +++ b/global localization/playback/cpp/include/display/GenericDisplay.h @@ -36,13 +36,17 @@ class GenericDisplay * @brief Update the OpenGL view with last pose data * * @param zed_rt last pose data - * @param str_t std::string that represents current translations - * @param str_r std::string that represents current rotations * @param state current tracking state */ - void updatePoseData(sl::Transform zed_rt, std::string str_t, std::string str_r, sl::POSITIONAL_TRACKING_STATE state); + void updatePoseData(sl::Transform zed_rt, sl::FusedPositionalTrackingStatus state); /** - * @brief Display current fused pose either in KML file or in ZEDHub depending compilation options + * @brief Display current pose on the Live Server + * + * @param geo_pose geopose to display + */ + void updateRawGeoPoseData(sl::GNSSData geo_data); + /** + * @brief Display current fused pose on the Live Server & in a KML file * * @param geo_pose geopose to display * @param current_timestamp timestamp of the geopose to display diff --git a/geotracking/playback/cpp/include/display/ZEDModel.hpp b/global 
localization/playback/cpp/include/display/ZEDModel.hpp similarity index 100% rename from geotracking/playback/cpp/include/display/ZEDModel.hpp rename to global localization/playback/cpp/include/display/ZEDModel.hpp diff --git a/geotracking/playback/cpp/include/exporter/KMLExporter.h b/global localization/playback/cpp/include/exporter/KMLExporter.h similarity index 100% rename from geotracking/playback/cpp/include/exporter/KMLExporter.h rename to global localization/playback/cpp/include/exporter/KMLExporter.h diff --git a/geotracking/playback/cpp/include/json.hpp b/global localization/playback/cpp/include/json.hpp similarity index 100% rename from geotracking/playback/cpp/include/json.hpp rename to global localization/playback/cpp/include/json.hpp diff --git a/global localization/playback/cpp/src/GNSSReplay.cpp b/global localization/playback/cpp/src/GNSSReplay.cpp new file mode 100644 index 00000000..613121a8 --- /dev/null +++ b/global localization/playback/cpp/src/GNSSReplay.cpp @@ -0,0 +1,406 @@ +#include "GNSSReplay.hpp" + +using json = nlohmann::json; + +inline bool is_microseconds(uint64_t timestamp) { + // Check if the timestamp is in microseconds + return (1'000'000'000'000'000 <= timestamp && timestamp < 10'000'000'000'000'000ULL); +} + +inline bool is_nanoseconds(uint64_t timestamp) { + // Check if the timestamp is in microseconds + return (1'000'000'000'000'000'000 <= timestamp && timestamp < 10'000'000'000'000'000'000ULL); +} + +GNSSReplay::GNSSReplay(std::string file_name, sl::Camera *zed) { + if (!file_name.empty()) { + _file_name = file_name; + initialize_from_json(); + } else if (zed != 0) { + initialize_from_svov2(zed); + } +} + +GNSSReplay::~GNSSReplay() { +} + +void GNSSReplay::initialize_from_json() { + std::ifstream gnss_file_data; + gnss_file_data.open(_file_name); + if (!gnss_file_data.is_open()) { + std::cerr << "Unable to open " << _file_name << std::endl; + exit(EXIT_FAILURE); + } + try { + gnss_data = json::parse(gnss_file_data); + } catch 
(const std::runtime_error &e) { + std::cerr << "Error while reading GNSS data: " << e.what() << std::endl; + } + current_gnss_idx = 0; + previous_ts = 0; +} + +void GNSSReplay::initialize_from_svov2(sl::Camera *zed) { + + auto svo_custom_data_keys = zed->getSVODataKeys(); + std::string gnss_key = "GNSS_json"; + bool found = false; + for (auto &it : svo_custom_data_keys) { + if (it.find(gnss_key) != std::string::npos) { + found = true; + break; + } + } + + std::map data; + auto status = zed->retrieveSVOData(gnss_key, data); // Get ALL + + /* + We handle 2 formats: + * + * { + "coordinates": { + "latitude": XXX, + "longitude": XXX, + "altitude": XXX + }, + "ts": 1694263390000000, + "latitude_std": 0.51, + "longitude_std": 0.51, + "altitude_std": 0.73, + "position_covariance": [ + 0.2601, + 0, + 0, + 0, + 0.2601, + 0, + 0, + 0, + 0.5328999999999999 + ] + }, + ********* + * Or + * this one will be converted to the format above + { + "Eph": 0.467, + "EpochTimeStamp": 1694266998000000, + "Epv": 0.776, + "Geopoint": { + "Altitude": XXX, + "Latitude": XXX, + "Longitude": XXX + }, + "Position": [ + [ + XXX, + XXX, + XXX + ] + ], + "Velocity": [ + [ + -0.63, + 0.25, + 0.53 + ] + ] + } + */ + + + auto tmp_array = json::array(); + for (auto &it : data) { + try { + auto gnss_data_point = json::parse(it.second.content.begin(), it.second.content.end()); + auto gnss_data_point_formatted = json::object(); + + if (!gnss_data_point["Geopoint"].is_null()) { + gnss_data_point_formatted["coordinates"] = { + {"latitude", gnss_data_point["Geopoint"]["Latitude"]}, + {"longitude", gnss_data_point["Geopoint"]["Longitude"]}, + {"altitude", gnss_data_point["Geopoint"]["Altitude"]}, + }; + gnss_data_point_formatted["ts"] = gnss_data_point["EpochTimeStamp"]; + + float latitude_std = gnss_data_point["Eph"]; + float longitude_std = gnss_data_point["Eph"]; + float altitude_std = gnss_data_point["Epv"]; + + gnss_data_point_formatted["latitude_std"] = latitude_std; + 
gnss_data_point_formatted["longitude_std"] = longitude_std; + gnss_data_point_formatted["altitude_std"] = altitude_std; + + gnss_data_point_formatted["position_covariance"] = json::array({ + longitude_std * longitude_std, 0, 0, 0, latitude_std * latitude_std, 0, 0, 0, altitude_std * altitude_std + }); + + gnss_data_point_formatted["original_gnss_data"] = gnss_data_point; + + } else if (!gnss_data_point["coordinates"].is_null() && !gnss_data_point["latitude_std"].is_null() && !gnss_data_point["longitude_std"].is_null()) { + // no conversion + gnss_data_point_formatted = gnss_data_point; + } + + tmp_array.push_back(gnss_data_point_formatted); + + } catch (const std::runtime_error &e) { + std::cerr << "Error while reading GNSS data: " << e.what() << std::endl; + } + } + gnss_data["GNSS"] = tmp_array; + + current_gnss_idx = 0; + previous_ts = 0; +} + +void GNSSReplay::close() { + gnss_data.clear(); + current_gnss_idx = 0; +} + +inline std::string gps_status2str(int status) { + std::string out; + switch (status) { + case 1: + out = "STATUS_GPS"; + break; + case 2: + out = "STATUS_DGPS"; + break; + case 3: + out = "STATUS_RTK_FIX"; + break; + case 4: + out = "STATUS_RTK_FLT"; + break; + case 5: + out = "STATUS_DR"; + break; + case 6: + out = "STATUS_GNSSDR"; + break; + case 7: + out = "STATUS_TIME"; + break; + case 8: + out = "STATUS_SIM"; + break; + case 9: + out = "STATUS_PPS_FIX"; + break; + default: + case 0: + out = "STATUS_UNK"; + break; + }; + return out; +} + +inline std::string gps_mode2str(int status) { + std::string out; + switch (status) { + case 1: + out = "MODE_NO_FIX"; + break; + case 2: + out = "MODE_2D"; + break; + case 3: + out = "MODE_3D"; + break; + default: + case 0: + out = "MODE_NOT_SEEN"; + break; + }; + return out; +} + +sl::GNSSData getGNSSData(json &gnss_data, int gnss_idx) { + sl::GNSSData current_gnss_data; + current_gnss_data.ts = 0; + + // If we are at the end of GNSS data, exit + if (gnss_idx >= gnss_data["GNSS"].size()) { + std::cout << 
"Reached the end of the GNSS playback data." << std::endl; + return current_gnss_data; + } + + json current_gnss_data_json = gnss_data["GNSS"][gnss_idx]; + // Check inputs: + if ( + current_gnss_data_json["coordinates"].is_null() + || current_gnss_data_json["coordinates"]["latitude"].is_null() + || current_gnss_data_json["coordinates"]["longitude"].is_null() + || current_gnss_data_json["coordinates"]["altitude"].is_null() + || current_gnss_data_json["ts"].is_null() + ) { + std::cout << "Null GNSS playback data." << std::endl; + return current_gnss_data; + } + + if (!current_gnss_data_json["original_gnss_data"].is_null()) { + if (!current_gnss_data_json["original_gnss_data"]["fix"].is_null()) { + if (!current_gnss_data_json["original_gnss_data"]["fix"]["status"].is_null()) + std::cout << std::setprecision(3) << "GNSS info: " << gps_status2str(current_gnss_data_json["original_gnss_data"]["fix"]["status"]) << " " << float(current_gnss_data_json["longitude_std"]) << " " << float(current_gnss_data_json["altitude_std"]) << "\r"; + } + } + + auto gnss_timestamp = current_gnss_data_json["ts"].get(); + // Fill out timestamp: + if (is_microseconds(gnss_timestamp)) + current_gnss_data.ts.setMicroseconds(gnss_timestamp); + else if (is_nanoseconds(gnss_timestamp)) + current_gnss_data.ts.setNanoseconds(gnss_timestamp); + else + std::cerr << "Warning: Invalid timestamp format from GNSS file" << std::endl; + + // Fill out coordinates: + current_gnss_data.setCoordinates(current_gnss_data_json["coordinates"]["latitude"].get(), + current_gnss_data_json["coordinates"]["longitude"].get(), + current_gnss_data_json["coordinates"]["altitude"].get(), + false); + + // Fill out default standard deviation: + current_gnss_data.longitude_std = current_gnss_data_json["longitude_std"]; + current_gnss_data.latitude_std = current_gnss_data_json["latitude_std"]; + current_gnss_data.altitude_std = current_gnss_data_json["altitude_std"]; + // Fill out covariance [must be not null] + std::array 
position_covariance; + for (unsigned i = 0; i < 9; i++) + position_covariance[i] = 0.0; // initialize empty covariance + + // set covariance diagonal + position_covariance[0] = current_gnss_data.longitude_std * current_gnss_data.longitude_std; + position_covariance[1 * 3 + 1] = current_gnss_data.latitude_std * current_gnss_data.latitude_std; + position_covariance[2 * 3 + 2] = current_gnss_data.altitude_std * current_gnss_data.altitude_std; + current_gnss_data.position_covariance = position_covariance; + + if (current_gnss_data_json.contains("status")) + current_gnss_data.gnss_status = sl::GNSS_STATUS(current_gnss_data_json["status"].get()); + + if (current_gnss_data_json.contains("mode")) + current_gnss_data.gnss_mode = sl::GNSS_MODE(current_gnss_data_json["mode"].get()); + + if (!current_gnss_data_json["original_gnss_data"].is_null()) + if (!current_gnss_data_json["original_gnss_data"]["fix"].is_null()) + if (!current_gnss_data_json["original_gnss_data"]["fix"]["status"].is_null()) { + + // Acquisition comes from GPSD https://gitlab.com/gpsd/gpsd/-/blob/master/include/gps.h#L183-211 + int gpsd_mode = current_gnss_data_json["original_gnss_data"]["fix"]["mode"]; + sl::GNSS_MODE sl_mode = sl::GNSS_MODE::UNKNOWN; + + switch (gpsd_mode) { + case 0: // MODE_NOT_SEEN + sl_mode = sl::GNSS_MODE::UNKNOWN; + break; + case 1: // MODE_NO_FIX + sl_mode = sl::GNSS_MODE::NO_FIX; + break; + case 2: // MODE_2D + sl_mode = sl::GNSS_MODE::FIX_2D; + break; + case 3: // MODE_3D + sl_mode = sl::GNSS_MODE::FIX_3D; + break; + default: + sl_mode = sl::GNSS_MODE::UNKNOWN; + break; + } + + int gpsd_status = current_gnss_data_json["original_gnss_data"]["fix"]["status"]; + sl::GNSS_STATUS sl_status = sl::GNSS_STATUS::UNKNOWN; + + switch (gpsd_status) { + case 0: // STATUS_UNK + sl_status = sl::GNSS_STATUS::UNKNOWN; + break; + case 1: // STATUS_GPS + sl_status = sl::GNSS_STATUS::SINGLE; + break; + case 2: // STATUS_DGPS + sl_status = sl::GNSS_STATUS::DGNSS; + break; + case 3: // STATUS_RTK_FIX 
+ sl_status = sl::GNSS_STATUS::RTK_FIX; + break; + case 4: // STATUS_RTK_FLT + sl_status = sl::GNSS_STATUS::RTK_FLOAT; + break; + case 5: // STATUS_DR + sl_status = sl::GNSS_STATUS::SINGLE; + break; + case 6: // STATUS_GNSSDR + sl_status = sl::GNSS_STATUS::DGNSS; + break; + case 7: // STATUS_TIME + sl_status = sl::GNSS_STATUS::UNKNOWN; + break; + case 8: // STATUS_SIM + sl_status = sl::GNSS_STATUS::UNKNOWN; + break; + case 9: // STATUS_PPS_FIX + sl_status = sl::GNSS_STATUS::SINGLE; + break; + default: + sl_status = sl::GNSS_STATUS::UNKNOWN; + break; + } + + current_gnss_data.gnss_status = sl_status; + current_gnss_data.gnss_mode = sl_mode; + } + + return current_gnss_data; +} + +sl::GNSSData GNSSReplay::getNextGNSSValue(uint64_t current_timestamp) { + sl::GNSSData current_gnss_data = getGNSSData(gnss_data, current_gnss_idx); + + if (current_gnss_data.ts.data_ns == 0) + return current_gnss_data; + + if (current_gnss_data.ts.data_ns > current_timestamp) { + current_gnss_data.ts.data_ns = 0; + return current_gnss_data; + } + + sl::GNSSData last_data; + int step = 1; + while (1) { + last_data = current_gnss_data; + int diff_last = current_timestamp - current_gnss_data.ts.data_ns; + current_gnss_data = getGNSSData(gnss_data, current_gnss_idx + step++); + if (current_gnss_data.ts.data_ns == 0) //error / end of file + break; + + if (current_gnss_data.ts.data_ns > current_timestamp) { + if ((current_gnss_data.ts.data_ns - current_timestamp) > diff_last) // keep last + current_gnss_data = last_data; + break; + } + current_gnss_idx++; + } + + return current_gnss_data; +} + +sl::FUSION_ERROR_CODE GNSSReplay::grab(sl::GNSSData ¤t_data, uint64_t current_timestamp) { + current_data.ts.data_ns = 0; + + if (current_timestamp > 0 && (current_timestamp > last_cam_ts)) + current_data = getNextGNSSValue(current_timestamp); + + if (current_data.ts.data_ns == previous_ts) + current_data.ts.data_ns = 0; + + last_cam_ts = current_timestamp; + + if (current_data.ts.data_ns == 0) // Invalid 
data + return sl::FUSION_ERROR_CODE::FAILURE; + + previous_ts = current_data.ts.data_ns; + return sl::FUSION_ERROR_CODE::SUCCESS; +} \ No newline at end of file diff --git a/geotracking/playback/cpp/src/display/GLViewer.cpp b/global localization/playback/cpp/src/display/GLViewer.cpp similarity index 87% rename from geotracking/playback/cpp/src/display/GLViewer.cpp rename to global localization/playback/cpp/src/display/GLViewer.cpp index c98a9ef2..06c3af54 100644 --- a/geotracking/playback/cpp/src/display/GLViewer.cpp +++ b/global localization/playback/cpp/src/display/GLViewer.cpp @@ -19,7 +19,7 @@ void print(std::string msg_prefix, sl::ERROR_CODE err_code, std::string msg_suff -GLchar* VERTEX_SHADER = +const GLchar* VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec3 in_Color;\n" @@ -30,7 +30,7 @@ GLchar* VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" "}"; -GLchar* FRAGMENT_SHADER = +const GLchar* FRAGMENT_SHADER = "#version 330 core\n" "in vec3 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" @@ -256,13 +256,19 @@ void GLViewer::draw() { glUseProgram(0); } -void GLViewer::updateData(sl::Transform zed_rt, std::string str_t, std::string str_r, sl::POSITIONAL_TRACKING_STATE state) { +void GLViewer::updateData(sl::Transform zed_rt, sl::FusedPositionalTrackingStatus state) { mtx.lock(); vecPath.push_back(zed_rt.getTranslation()); zedModel.setRT(zed_rt); updateZEDposition = true; - txtT = str_t; - txtR = str_r; + + std::stringstream ss; + ss << std::setprecision(3) << zed_rt.getTranslation(); + txtT = ss.str(); + std::stringstream ss2; + ss2 << std::setprecision(3) << zed_rt.getEulerAngles(); + txtR = ss2.str(); + trackState = state; mtx.unlock(); } @@ -287,27 +293,75 @@ void GLViewer::printText() { int start_w = 20; int start_h = h_wnd - 40; - (trackState == sl::POSITIONAL_TRACKING_STATE::OK) ? 
glColor3f(0.2f, 0.65f, 0.2f) : glColor3f(0.85f, 0.2f, 0.2f); + float dark_clr = 0.12f; + std::string odom_status = "POSITIONAL TRACKING STATUS: "; + + glColor3f(dark_clr, dark_clr, dark_clr); glRasterPos2i(start_w, start_h); - std::string track_str = (str_tracking + sl::toString(trackState).c_str()); + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, odom_status.c_str()); + + (trackState.tracking_fusion_status != sl::POSITIONAL_TRACKING_FUSION_STATUS::UNAVAILABLE) ? glColor3f(0.2f, 0.65f, 0.2f) : glColor3f(0.85f, 0.2f, 0.2f); + std::string track_str = (sl::toString(trackState.tracking_fusion_status).c_str()); + glRasterPos2i(start_w + 300, start_h); safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, track_str.c_str()); - float dark_clr = 0.12f; + + glColor3f(dark_clr, dark_clr, dark_clr); + glRasterPos2i(start_w, start_h - 40); + std::string imu_status = "GNSS MODE: "; + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, imu_status.c_str()); + + if (trackState.gnss_mode == sl::GNSS_MODE::FIX_3D) + glColor3f(0.2f, 0.65f, 0.2f); + else + glColor3f(0.85f, 0.2f, 0.2f); + glRasterPos2i(start_w + 300, start_h - 40); + track_str = (sl::toString(trackState.gnss_mode).c_str()); + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, track_str.c_str()); + + glColor3f(dark_clr, dark_clr, dark_clr); + glRasterPos2i(start_w, start_h - 60); + std::string gnss_status = "GNSS STATUS: "; + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, gnss_status.c_str()); + + if (trackState.gnss_status == sl::GNSS_STATUS::RTK_FIX || trackState.gnss_status == sl::GNSS_STATUS::RTK_FLOAT) + glColor3f(0.2f, 0.65f, 0.2f); + else + glColor3f(0.85f, 0.2f, 0.2f); + glRasterPos2i(start_w + 300, start_h - 60); + track_str = (sl::toString(trackState.gnss_status).c_str()); + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, track_str.c_str()); + + glColor3f(dark_clr, dark_clr, dark_clr); + glRasterPos2i(start_w, start_h - 80); + std::string gnss_fusion_status = "GNSS FUSION STATUS: "; + 
safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, gnss_fusion_status.c_str()); + + if(trackState.gnss_fusion_status == sl::GNSS_FUSION_STATUS::OK) + glColor3f(0.2f, 0.65f, 0.2f); + else + glColor3f(0.85f, 0.2f, 0.2f); + glRasterPos2i(start_w + 300, start_h - 80); + track_str = (sl::toString(trackState.gnss_fusion_status).c_str()); + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, track_str.c_str()); + + + glColor3f(dark_clr, dark_clr, dark_clr); - glRasterPos2i(start_w, start_h - 25); + glRasterPos2i(start_w, start_h - 105); safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, "Translation (m) :"); glColor3f(0.4980f, 0.5490f, 0.5529f); - glRasterPos2i(155, start_h - 25); + glRasterPos2i(155, start_h - 105); safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, txtT.c_str()); glColor3f(dark_clr, dark_clr, dark_clr); - glRasterPos2i(start_w, start_h - 50); + glRasterPos2i(start_w, start_h - 130); safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, "Rotation (rad) :"); glColor3f(0.4980f, 0.5490f, 0.5529f); - glRasterPos2i(155, start_h - 50); + glRasterPos2i(155, start_h - 130); safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, txtR.c_str()); glMatrixMode(GL_PROJECTION); @@ -514,7 +568,7 @@ Transform Simple3DObject::getModelMatrix() const { return tmp; } -Shader::Shader(GLchar* vs, GLchar* fs) { +Shader::Shader(const GLchar* vs, const GLchar* fs) { if (!compile(verterxId_, GL_VERTEX_SHADER, vs)) { std::cout << "ERROR: while compiling vertex shader" << std::endl; } @@ -562,7 +616,7 @@ GLuint Shader::getProgramId() { return programId_; } -bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { +bool Shader::compile(GLuint &shaderId, GLenum type, const GLchar* src) { shaderId = glCreateShader(type); if (shaderId == 0) { std::cout << "ERROR: shader type (" << type << ") does not exist" << std::endl; diff --git a/global localization/playback/cpp/src/display/GenericDisplay.cpp b/global localization/playback/cpp/src/display/GenericDisplay.cpp new file mode 100644 index 
00000000..7375d124 --- /dev/null +++ b/global localization/playback/cpp/src/display/GenericDisplay.cpp @@ -0,0 +1,62 @@ +#include "display/GenericDisplay.h" +#include "exporter/KMLExporter.h" + + +GenericDisplay::GenericDisplay() +{ +} + +GenericDisplay::~GenericDisplay() +{ + closeAllKMLWriter(); +} + +void GenericDisplay::init(int argc, char **argv) +{ + opengl_viewer.init(argc, argv); +} + +void GenericDisplay::updatePoseData(sl::Transform zed_rt, sl::FusedPositionalTrackingStatus state) +{ + opengl_viewer.updateData(zed_rt, state); +} + +bool GenericDisplay::isAvailable(){ + return opengl_viewer.isAvailable(); +} + +void GenericDisplay::updateRawGeoPoseData(sl::GNSSData geo_data) +{ + double latitude, longitude, altitude; + geo_data.getCoordinates(latitude, longitude, altitude, false); + + // Make the pose available for the Live Server + ofstream data; + data.open ("../../../map server/raw_data.txt"); + data << std::fixed << std::setprecision(17); + data << latitude; + data << ","; + data << longitude; + data << ","; + data << geo_data.ts.getMilliseconds(); + data << "\n"; + data.close(); +} + +void GenericDisplay::updateGeoPoseData(sl::GeoPose geo_pose, sl::Timestamp current_timestamp) +{ + // Make the pose available for the Live Server + ofstream data; + data.open ("../../../map server/data.txt"); + data << std::fixed << std::setprecision(17); + data << geo_pose.latlng_coordinates.getLatitude(false); + data << ","; + data << geo_pose.latlng_coordinates.getLongitude(false); + data << ","; + data << current_timestamp.getMilliseconds(); + data << "\n"; + data.close(); + + // Save the pose in a .kml file + saveKMLData("fused_position.kml", geo_pose); +} diff --git a/geotracking/playback/cpp/src/exporter/KMLExporter.cpp b/global localization/playback/cpp/src/exporter/KMLExporter.cpp similarity index 100% rename from geotracking/playback/cpp/src/exporter/KMLExporter.cpp rename to global localization/playback/cpp/src/exporter/KMLExporter.cpp diff --git 
a/geotracking/playback/cpp/src/main.cpp b/global localization/playback/cpp/src/main.cpp similarity index 56% rename from geotracking/playback/cpp/src/main.cpp rename to global localization/playback/cpp/src/main.cpp index 2a2fb62d..aa04a262 100644 --- a/geotracking/playback/cpp/src/main.cpp +++ b/global localization/playback/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // @@ -19,8 +19,8 @@ /////////////////////////////////////////////////////////////////////////// /*************************************************************************** - ** This sample shows how to use geotracking for global scale ** - ** localization on real-world map API with pre-recorded GNSS data ** + ** This sample shows how to use global localization on real-world map ** + ** API with pre-recorded GNSS data ** **************************************************************************/ #include @@ -34,25 +34,47 @@ #include "exporter/KMLExporter.h" #include "GNSSReplay.hpp" +std::vector split(const std::string &s, const std::string &delimiter); +cv::Mat slMat2cvMat(sl::Mat& input); + int main(int argc, char **argv) { - if (argc != 3) { - std::cerr << "Usage ./ZED_GNSS_playback " << std::endl; + if (argc < 2) { + std::cerr << "Usage ./ZED_GNSS_playback " << std::endl; return EXIT_FAILURE; } - std::string svo_name, gnss_file; - for(int i = 1; i<3; i++){ + // The GNSS data are either extracted from a external json OR preferably from the SVO v2 custom data + + std::string svo_name, gnss_file, mask_file, gnss_antenna_position_str; + for(int i = 1; i split(const std::string &s, const std::string &delimiter) { + std::vector tokens; + size_t start = 0, end = 0; + while ((end = s.find(delimiter, start)) != std::string::npos) { + tokens.push_back(s.substr(start, end - start)); + start = end + delimiter.length(); + } + 
tokens.push_back(s.substr(start)); + return tokens; +} + +cv::Mat slMat2cvMat(sl::Mat& input) { + // Mapping between MAT_TYPE and CV_TYPE + int cvType = -1; + switch (input.getDataType()) { + case sl::MAT_TYPE::F32_C1: cvType = CV_32FC1; break; + case sl::MAT_TYPE::F32_C2: cvType = CV_32FC2; break; + case sl::MAT_TYPE::F32_C3: cvType = CV_32FC3; break; + case sl::MAT_TYPE::F32_C4: cvType = CV_32FC4; break; + case sl::MAT_TYPE::U8_C1: cvType = CV_8UC1; break; + case sl::MAT_TYPE::U8_C2: cvType = CV_8UC2; break; + case sl::MAT_TYPE::U8_C3: cvType = CV_8UC3; break; + case sl::MAT_TYPE::U8_C4: cvType = CV_8UC4; break; + default: break; + } + + // Convert to OpenCV matrix + return cv::Mat(input.getHeight(), input.getWidth(), cvType, input.getPtr(sl::MEM::CPU)); +} \ No newline at end of file diff --git a/global localization/playback/python/display/generic_display.py b/global localization/playback/python/display/generic_display.py new file mode 100644 index 00000000..e41fe7b9 --- /dev/null +++ b/global localization/playback/python/display/generic_display.py @@ -0,0 +1,64 @@ +from display.gl_viewer import GLViewer +from exporter.KMLExporter import * +import time + + +class GenericDisplay: + def __init__(self): + pass + + def __del__(self): + closeAllKMLFiles() + + def init(self,camera_model): + self.glviewer = GLViewer() + self.glviewer.init(camera_model) + # Replace this part with the appropriate connection to your IoT system + + def updatePoseData(self, zed_rt,str_t,str_r, state): + self.glviewer.updateData(zed_rt,str_t,str_r, state) + + def isAvailable(self): + return self.glviewer.is_available() + + def updateRawGeoPoseData(self, geo_data): + try: + # Replace this part with the appropriate sending of data to your IoT system + latitude, longitude, _ = geo_data.get_coordinates(False) + f = open('../../map server/raw_data.txt', 'w') + f.write("{},{},{}".format(latitude, longitude, geo_data.ts.get_milliseconds())) + + except ImportError: + print("An exception was raised: 
the raw geo-pose data was not sent.") + + def updateGeoPoseData(self, geo_pose, current_timestamp): + try: + # Replace this part with the appropriate sending of data to your IoT system + f = open('../../map server/data.txt', 'w') + f.write("{},{},{}" + .format(geo_pose.latlng_coordinates.get_latitude(False), + geo_pose.latlng_coordinates.get_longitude(False), + current_timestamp.get_milliseconds())) + + gnss_data = {} + gnss_data["longitude"] = geo_pose.latlng_coordinates.get_latitude( + False) + gnss_data["latitude"] = geo_pose.latlng_coordinates.get_latitude( + False) + gnss_data["altitude"] = geo_pose.latlng_coordinates.get_altitude() + saveKMLData("fused_position.kml", gnss_data) + + except ImportError: + print("An exception was raised: the geo-pose data was not sent.") + + +if __name__ == "__main__": + generic_display = GenericDisplay() + generic_display.init(0, []) + + try: + while True: + # Votre logique ici... + pass + except KeyboardInterrupt: + pass diff --git a/geotracking/playback/python/display/gl_viewer.py b/global localization/playback/python/display/gl_viewer.py similarity index 99% rename from geotracking/playback/python/display/gl_viewer.py rename to global localization/playback/python/display/gl_viewer.py index e6b2f211..d56caf9b 100644 --- a/geotracking/playback/python/display/gl_viewer.py +++ b/global localization/playback/python/display/gl_viewer.py @@ -300,7 +300,7 @@ def close_func(self): self.available = False def keyPressedCallback(self, key, x, y): - if ord(key) == 27: + if key == b'q' or key == b'Q' or ord(key) == 27: self.close_func() def on_mouse(self,*args,**kwargs): @@ -414,11 +414,7 @@ def print_text(self): start_w = 20 start_h = h_wnd - 40 - if(self.trackState == sl.POSITIONAL_TRACKING_STATE.OK): - glColor3f(0.2, 0.65, 0.2) - else: - glColor3f(0.85, 0.2, 0.2) - + glColor3f(0.2, 0.65, 0.2) glRasterPos2i(start_w, start_h) safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, "POSITIONAL TRACKING : " + str(self.trackState)) diff --git 
a/geotracking/live geotracking/python/ogl_viewer/zed_model.py b/global localization/playback/python/display/zed_model.py similarity index 100% rename from geotracking/live geotracking/python/ogl_viewer/zed_model.py rename to global localization/playback/python/display/zed_model.py diff --git a/geotracking/playback/python/exporter/KMLExporter.py b/global localization/playback/python/exporter/KMLExporter.py similarity index 92% rename from geotracking/playback/python/exporter/KMLExporter.py rename to global localization/playback/python/exporter/KMLExporter.py index fc2a9ce3..e344918d 100644 --- a/geotracking/playback/python/exporter/KMLExporter.py +++ b/global localization/playback/python/exporter/KMLExporter.py @@ -41,14 +41,15 @@ def closeAllKMLFiles(): """ Close all KML file writer and place KML files footer """ - for file_name, file_object in all_file: - file_footer = "\t\n" + for file_name in all_file: + file_footer = "" + file_footer += "\t\n" file_footer += "\t\n" file_footer += "\n" file_footer += "\t\n" file_footer += "\n" - file_object.write(file_footer) - file_object.close() + all_file[file_name].write(file_footer) + all_file[file_name].close() def saveKMLData(file_path, gnss_data): diff --git a/global localization/playback/python/gnss_replay.py b/global localization/playback/python/gnss_replay.py new file mode 100644 index 00000000..8e2c1f1d --- /dev/null +++ b/global localization/playback/python/gnss_replay.py @@ -0,0 +1,235 @@ +import json +import pyzed.sl as sl + +class GNSSReplay: + def __init__(self, file_name, zed=None): + self._file_name = file_name + self._zed = zed + self.current_gnss_idx = 0 + self.previous_ts = 0 + self.last_cam_ts = 0 + self.gnss_data = None + self.initialize() + + def initialize(self): + if self._file_name is not None: + try: + with open(self._file_name, 'r') as gnss_file_data: + self.gnss_data = json.load(gnss_file_data) + except FileNotFoundError: + print(f"Unable to open {self._file_name}") + exit(1) + except 
json.JSONDecodeError as e: + print(f"Error while reading GNSS data: {e}") + elif self._zed is not None: + keys = self._zed.get_svo_data_keys() + gnss_key = "GNSS_json" + if gnss_key not in keys: + print("SVO doesn't contain GNSS data") + exit(1) + else: + ts_begin = sl.Timestamp() + data = {} + # self.gnss_data["GNSS"] = [] + self.gnss_data = {"GNSS": []} + err = self._zed.retrieve_svo_data(gnss_key, data, ts_begin, ts_begin) + for k, d in data.items(): + gnss_data_point_json = json.loads(d.get_content_as_string()) + gnss_data_point_formatted = {} + + latitude = gnss_data_point_json["Geopoint"]["Latitude"] + longitude = gnss_data_point_json["Geopoint"]["Longitude"] + altitude = gnss_data_point_json["Geopoint"]["Altitude"] + + gnss_data_point_formatted["coordinates"] = { + "latitude": latitude, + "longitude": longitude, + "altitude": altitude + } + + gnss_data_point_formatted["ts"] = gnss_data_point_json["EpochTimeStamp"] + + latitude_std = gnss_data_point_json["Eph"] + longitude_std = gnss_data_point_json["Eph"] + altitude_std = gnss_data_point_json["Epv"] + + gnss_data_point_formatted["latitude_std"] = latitude_std + gnss_data_point_formatted["longitude_std"] = longitude_std + gnss_data_point_formatted["altitude_std"] = altitude_std + + gnss_data_point_formatted["position_covariance"] = { + longitude_std + longitude_std, 0, 0, 0, latitude_std + latitude_std, 0, 0, 0, + altitude_std + altitude_std + } + + if "mode" in gnss_data_point_json: + gnss_data_point_formatted["mode"] = gnss_data_point_json["mode"] + if "status" in gnss_data_point_json: + gnss_data_point_formatted["status"] = gnss_data_point_json["status"] + + if "fix" in gnss_data_point_json: + gnss_data_point_formatted["fix"] = gnss_data_point_json["fix"] + + gnss_data_point_formatted["original_gnss_data"] = gnss_data_point_json + + self.gnss_data["GNSS"].append(gnss_data_point_formatted) + # print(json.loads(d.get_content_as_string())) + + def is_microseconds(self, timestamp): + return 
1_000_000_000_000_000 <= timestamp < 10_000_000_000_000_000 + + def is_nanoseconds(self, timestamp): + return 1_000_000_000_000_000_000 <= timestamp < 10_000_000_000_000_000_000 + + def getGNSSData(self, gnss_data, gnss_idx): + current_gnss_data = sl.GNSSData() + + # if we are at the end of GNSS data, exit + if gnss_idx >= len(gnss_data["GNSS"]): + print("Reached the end of the GNSS playback data.") + return current_gnss_data + current_gnss_data_json = gnss_data["GNSS"][gnss_idx] + + if ( + current_gnss_data_json["coordinates"] is None + or current_gnss_data_json["coordinates"]["latitude"] is None + or current_gnss_data_json["coordinates"]["longitude"] is None + or current_gnss_data_json["coordinates"]["altitude"] is None + or current_gnss_data_json["ts"] is None + ): + print("Null GNSS playback data.") + return current_gnss_data_json + + gnss_timestamp = current_gnss_data_json["ts"] + ts = sl.Timestamp() + if self.is_microseconds(gnss_timestamp): + ts.set_microseconds(gnss_timestamp) + elif self.is_nanoseconds(gnss_timestamp): + ts.set_nanoseconds(gnss_timestamp) + else: + print("Warning: Invalid timestamp format from GNSS file") + current_gnss_data.ts = ts + # Fill out coordinates: + current_gnss_data.set_coordinates( + current_gnss_data_json["coordinates"]["latitude"], + current_gnss_data_json["coordinates"]["longitude"], + current_gnss_data_json["coordinates"]["altitude"], + False + ) + + # Fill out default standard deviation: + current_gnss_data.longitude_std = current_gnss_data_json["longitude_std"] + current_gnss_data.latitude_std = current_gnss_data_json["latitude_std"] + current_gnss_data.altitude_std = current_gnss_data_json["altitude_std"] + + # Fill out covariance [must not be null] + position_covariance = [ + current_gnss_data.longitude_std ** 2, + 0.0, + 0.0, + 0.0, + current_gnss_data.latitude_std ** 2, + 0.0, + 0.0, + 0.0, + current_gnss_data.altitude_std ** 2 + ] + + current_gnss_data.position_covariances = position_covariance + + if "mode" in 
current_gnss_data_json: + current_gnss_data.gnss_mode = current_gnss_data_json["mode"] + if "status" in current_gnss_data_json: + current_gnss_data.gnss_status = current_gnss_data_json["status"] + + if "fix" in current_gnss_data_json: + # Acquisition comes from GPSD https:#gitlab.com/gpsd/gpsd/-/blob/master/include/gps.h#L183-211 + gpsd_mode = current_gnss_data_json["fix"]["mode"] + sl_mode = sl.GNSS_MODE.UNKNOWN + + if gpsd_mode == 0: # MODE_NOT_SEEN + sl_mode = sl.GNSS_MODE.UNKNOWN + elif gpsd_mode == 1: # MODE_NO_FIX + sl_mode = sl.GNSS_MODE.NO_FIX + elif gpsd_mode == 2: # MODE_2D + sl_mode = sl.GNSS_MODE.FIX_2D + elif gpsd_mode == 3: # MODE_3D + sl_mode = sl.GNSS_MODE.FIX_3D + + gpsd_status = current_gnss_data_json["fix"]["status"] + sl_status = sl.GNSS_STATUS.UNKNOWN + + if gpsd_status == 0: # STATUS_UNK + sl_status = sl.GNSS_STATUS.UNKNOWN + elif gpsd_status == 1: # STATUS_GPS + sl_status = sl.GNSS_STATUS.SINGLE + elif gpsd_status == 2: # STATUS_DGPS + sl_status = sl.GNSS_STATUS.DGNSS + elif gpsd_status == 3: # STATUS_RTK_FIX + sl_status = sl.GNSS_STATUS.RTK_FIX + elif gpsd_status == 4: # STATUS_RTK_FLT + sl_status = sl.GNSS_STATUS.RTK_FLOAT + elif gpsd_status == 5: # STATUS_DR + sl_status = sl.GNSS_STATUS.SINGLE + elif gpsd_status == 6: # STATUS_GNSSDR + sl_status = sl.GNSS_STATUS.DGNSS + elif gpsd_status == 7: # STATUS_TIME + sl_status = sl.GNSS_STATUS.UNKNOWN + elif gpsd_status == 8: # STATUS_SIM + sl_status = sl.GNSS_STATUS.UNKNOWN + elif gpsd_status == 9: # STATUS_PPS_FIX + sl_status = sl.GNSS_STATUS.SINGLE + + current_gnss_data.gnss_mode = sl_mode.value + current_gnss_data.gnss_status = sl_status.value + + return current_gnss_data + + def getNextGNSSValue(self, current_timestamp): + current_gnss_data = self.getGNSSData(self.gnss_data, self.current_gnss_idx) + + if current_gnss_data is None or current_gnss_data.ts.data_ns == 0: + return current_gnss_data + + if current_gnss_data.ts.data_ns > current_timestamp: + current_gnss_data.ts.data_ns = 0 + return 
current_gnss_data + + last_data = current_gnss_data + step = 1 + while True: + last_data = current_gnss_data + diff_last = current_timestamp - current_gnss_data.ts.data_ns + current_gnss_data = self.getGNSSData(self.gnss_data, + self.current_gnss_idx + step + ) + + if current_gnss_data is None or current_gnss_data.ts.data_ns == 0: + break + + if current_gnss_data.ts.data_ns > current_timestamp: + if ( + current_gnss_data.ts.data_ns - current_timestamp + > diff_last + ): + current_gnss_data = last_data + break + self.current_gnss_idx += 1 + return current_gnss_data + + def grab(self, current_timestamp): + current_data = sl.GNSSData() + current_data.ts.data_ns = 0 + + if current_timestamp > 0 and current_timestamp > self.last_cam_ts: + current_data = self.getNextGNSSValue(current_timestamp) + if current_data.ts.data_ns == self.previous_ts: + current_data.ts.data_ns = 0 + + self.last_cam_ts = current_timestamp + + if current_data.ts.data_ns == 0: + return sl.FUSION_ERROR_CODE.FAILURE, None + + self.previous_ts = current_data.ts.data_ns + return sl.FUSION_ERROR_CODE.SUCCESS, current_data diff --git a/geotracking/playback/python/playback.py b/global localization/playback/python/playback.py similarity index 71% rename from geotracking/playback/python/playback.py rename to global localization/playback/python/playback.py index 0cbaff25..5284787b 100644 --- a/geotracking/playback/python/playback.py +++ b/global localization/playback/python/playback.py @@ -1,6 +1,6 @@ ######################################################################## # -# Copyright (c) 2023, STEREOLABS. +# Copyright (c) 2024, STEREOLABS. # # All rights reserved. 
# @@ -30,19 +30,18 @@ import json import exporter.KMLExporter as export import argparse -import cv2 +import cv2 + def main(): - zed_pose = sl.Pose() + zed_pose = sl.Pose() py_translation = sl.Translation() text_translation = "" - text_rotation = "" - + text_rotation = "" init_params = sl.InitParameters(depth_mode=sl.DEPTH_MODE.ULTRA, - coordinate_units=sl.UNIT.METER, - coordinate_system=sl.COORDINATE_SYSTEM.IMAGE) - + coordinate_units=sl.UNIT.METER, + coordinate_system=sl.COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP) init_params.set_from_svo_file(opt.input_svo_file) @@ -50,71 +49,69 @@ def main(): zed = sl.Camera() status = zed.open(init_params) if status != sl.ERROR_CODE.SUCCESS: - print("[ZED][ERROR] Camera Open : "+repr(status)+". Exit program.") + print("[ZED][ERROR] Camera Open : " + repr(status) + ". Exit program.") exit() # Enable positional tracking: positional_init = zed.enable_positional_tracking() if positional_init != sl.ERROR_CODE.SUCCESS: - print("[ZED][ERROR] Can't start tracking of camera : "+repr(status)+". Exit program.") + print("[ZED][ERROR] Can't start tracking of camera : " + repr(status) + ". Exit program.") exit() # Display - display_resolution = sl.Resolution(1280,720) + display_resolution = sl.Resolution(1280, 720) left_img = sl.Mat() - + # Create Fusion object: - + fusion = sl.Fusion() init_fusion_param = sl.InitFusionParameters() init_fusion_param.coordinate_units = sl.UNIT.METER - init_fusion_param.coordinate_system = sl.COORDINATE_SYSTEM.IMAGE + init_fusion_param.coordinate_system = sl.COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP init_fusion_param.verbose = True - + fusion_init_code = fusion.init(init_fusion_param) if fusion_init_code != sl.FUSION_ERROR_CODE.SUCCESS: - print("[ZED][ERROR] Failed to initialize fusion :"+repr(fusion_init_code)+". Exit program") + print("[ZED][ERROR] Failed to initialize fusion :" + repr(fusion_init_code) + ". 
Exit program") exit() - - + # Enable odometry publishing: configuration = sl.CommunicationParameters() zed.start_publishing(configuration) uuid = sl.CameraIdentifier(zed.get_camera_information().serial_number) - fusion.subscribe(uuid,configuration,sl.Transform(0,0,0)) - + fusion.subscribe(uuid, configuration, sl.Transform(0, 0, 0)) + # Enable positional tracking for Fusion object positional_tracking_fusion_parameters = sl.PositionalTrackingFusionParameters() - positional_tracking_fusion_parameters.enable_GNSS_fusion = True + positional_tracking_fusion_parameters.enable_GNSS_fusion = True gnss_calibration_parameters = sl.GNSSCalibrationParameters() gnss_calibration_parameters.target_yaw_uncertainty = 7e-3 gnss_calibration_parameters.enable_translation_uncertainty_target = False - gnss_calibration_parameters.target_translation_uncertainty = 15e-2 + gnss_calibration_parameters.target_translation_uncertainty = 15e-2 gnss_calibration_parameters.enable_reinitialization = False gnss_calibration_parameters.gnss_vio_reinit_threshold = 5 - gnss_calibration_parameters.enable_rolling_calibration = False positional_tracking_fusion_parameters.gnss_calibration_parameters = gnss_calibration_parameters fusion.enable_positionnal_tracking(positional_tracking_fusion_parameters) - + # Setup viewer: py_translation = sl.Translation() # Setup viewer: viewer = GenericDisplay() viewer.init(zed.get_camera_information().camera_model) print("Start grabbing data ...") - - gnss_replay = GNSSReplay(opt.input_json_gps_file) - + gnss_replay = GNSSReplay(opt.input_json_gps_file, zed) + input_gnss_sync = sl.GNSSData() + while viewer.isAvailable(): # get the odometry information if zed.grab() == sl.ERROR_CODE.SUCCESS: zed.get_position(zed_pose, sl.REFERENCE_FRAME.WORLD) - zed.retrieve_image(left_img,sl.VIEW.LEFT, sl.MEM.CPU, display_resolution) - cv2.imshow("left",left_img.numpy()) + zed.retrieve_image(left_img, sl.VIEW.LEFT, sl.MEM.CPU, display_resolution) + cv2.imshow("left", left_img.numpy()) 
cv2.waitKey(10) - + elif zed.grab() == sl.ERROR_CODE.END_OF_SVOFILE_REACHED: print("End of SVO file.") fusion.close() @@ -125,63 +122,64 @@ def main(): ingest_error = fusion.ingest_gnss_data(input_gnss) latitude, longitude, altitude = input_gnss.get_coordinates(False) coordinates = { - "latitude": latitude, - "longitude": longitude, - "altitude": altitude, - } - export.saveKMLData("raw_gnss.kml", coordinates) + "latitude": latitude, + "longitude": longitude, + "altitude": altitude, + } + export.saveKMLData("raw_gnss.kml", coordinates) # Fusion is asynchronous and needs synchronization. Sometime GNSSData comes before camera data raising "NO_NEW_DATA_AVAILABLE" error # This does not necessary means that fusion doesn't work but that no camera data were presents for the gnss timestamp when you ingested the data. if ingest_error != sl.FUSION_ERROR_CODE.SUCCESS and ingest_error != sl.FUSION_ERROR_CODE.NO_NEW_DATA_AVAILABLE: - print("Ingest error occurred when ingesting GNSSData: ",ingest_error) + print("Ingest error occurred when ingesting GNSSData: ", ingest_error) + # get the fused position if fusion.process() == sl.FUSION_ERROR_CODE.SUCCESS: - fused_position = sl.Pose() + fused_position = sl.Pose() # Get position into the ZED CAMERA coordinate system: current_state = fusion.get_position(fused_position) if current_state == sl.POSITIONAL_TRACKING_STATE.OK: + current_state = fusion.get_fused_positional_tracking_status().tracking_fusion_status rotation = fused_position.get_rotation_vector() translation = fused_position.get_translation(py_translation) text_rotation = str((round(rotation[0], 2), round(rotation[1], 2), round(rotation[2], 2))) - text_translation = str((round(translation.get()[0], 2), round(translation.get()[1], 2), round(translation.get()[2], 2))) - viewer.updatePoseData(fused_position.pose_data(),text_translation,text_rotation, current_state) - + text_translation = str( + (round(translation.get()[0], 2), round(translation.get()[1], 2), 
round(translation.get()[2], 2))) + viewer.updatePoseData(fused_position.pose_data(), text_translation, text_rotation, current_state) + + fusion.get_current_gnss_data(input_gnss_sync) + # Display it on the Live Server + viewer.updateRawGeoPoseData(input_gnss_sync) + # Get position into the GNSS coordinate system - this needs a initialization between CAMERA # and GNSS. When the initialization is finish the getGeoPose will return sl::POSITIONAL_TRACKING_STATE::OK - current_geopose = sl.GeoPose() + current_geopose = sl.GeoPose() current_geopose_satus = fusion.get_geo_pose(current_geopose) - if current_geopose_satus == sl.GNSS_CALIBRATION_STATE.CALIBRATED: - viewer.updateGeoPoseData(current_geopose, zed.get_timestamp(sl.TIME_REFERENCE.CURRENT).data_ns/1000) + if current_geopose_satus == sl.GNSS_FUSION_STATUS.OK: + # Display it on the Live Server + viewer.updateGeoPoseData(current_geopose, zed.get_timestamp(sl.TIME_REFERENCE.CURRENT)) _, yaw_std, position_std = fusion.get_current_gnss_calibration_std() if yaw_std != -1: - print("GNSS State : ",current_geopose_satus," : calibration uncertainty yaw_std ",yaw_std ,"position_std",position_std[0],",",position_std[1],",",position_std[2], end='\r') + print("GNSS State : ", current_geopose_satus, " : calibration uncertainty yaw_std ", yaw_std, + " rd position_std", position_std[0], " m,", position_std[1], " m,", position_std[2], + end=' m\r') + + """ else : - """ - GNSS coordinate system to ZED coordinate system is not initialize yet - The initialisation between the coordinates system is basicaly an optimization problem that + GNSS coordinate system to ZED coordinate system is not initialized yet + The initialization between the coordinates system is basically an optimization problem that Try to fit the ZED computed path with the GNSS computed path. In order to do it just move your system and wait that uncertainty come bellow uncertainty threshold you set up in your initialization parameters. 
- """ + """ fusion.close() zed.close() - - - - - - - - - - + + if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument('--input_svo_file', type=str, help='Path to a .svo file',required=True) - parser.add_argument('--input_json_gps_file', type=str, help='Path to a .json file, containing gps data', required=True) + parser.add_argument('--input_svo_file', type=str, help='Path to a .svo file or .svo2 file containing gps data', + required=True) + parser.add_argument('--input_json_gps_file', type=str, help='Path to a .json file, containing gps data', + required=False) opt = parser.parse_args() - main() - - - - + main() diff --git a/global localization/playback/python/requirements.txt b/global localization/playback/python/requirements.txt new file mode 100644 index 00000000..356bfd56 --- /dev/null +++ b/global localization/playback/python/requirements.txt @@ -0,0 +1,2 @@ +PyOpenGL +opencv-python \ No newline at end of file diff --git a/global localization/recording/README.md b/global localization/recording/README.md new file mode 100644 index 00000000..eeb201c2 --- /dev/null +++ b/global localization/recording/README.md @@ -0,0 +1,42 @@ +# Global Localization Data Recording Sample + +## Overview + +The Global Localization Data Recording sample demonstrates how to record data for global localization on real-world maps using the ZED camera. The sample generates data in the form of an SVO file, which contains camera data, and a JSON file, which contains pre-recorded GNSS data for use in the playback sample. This sample is a useful resource for developers working on autonomous driving, robotics, and drone navigation applications. + +## Features + +- Displays the camera's path in an OpenGL window in 3D. +- Displays path data, including translation and rotation. +- Generates KML files for displaying raw GNSS data and fused position on google maps after capture. +- Generates an SVO file corresponding to camera data. 
+- Generates a JSON file corresponding to recorded GNSS data. + +## Dependencies + +Before using this sample, ensure that you have the following dependencies installed on your system: + +- ZED SDK: download and install from the official [Stereolabs website](https://www.stereolabs.com/developers/release/). +- `gpsd`: required to use an external GNSS sensor. + > **Note**: Since [`gpsd`](https://gpsd.gitlab.io/gpsd/index.html) does not support Windows, this sample is not supported on Windows. + +### C++ + +- `libgps-dev`: used to read data from `gpsd`. + +### Python + +- `gpsdclient`: used to read data from `gpsd`. + +## Usage + +To use the Global Localization Data Recording sample, follow these steps: + +1. Download and install the ZED SDK on your system from the official [Stereolabs website](https://www.stereolabs.com/developers/release/). +2. Install dependencies using your operating system's package manager. +3. Connect your ZED camera and GNSS sensor to your computer. +4. Open a terminal and navigate to the Global Localization Data Recording sample directory. +5. Compile the sample for C++ in a *build* directory. +6. Run the `ZED_Global_Localization_Recording` executable for C++ and `live.py` for Python. +7. The sample will display the camera's path and path data in a 3D window. KML files will be generated for displaying the raw GNSS data and fused position on a real-world map like google maps after capture. Additionally, an SVO file corresponding to camera data and a JSON file corresponding to recorded GNSS data will be generated. +8. Go to the [map server sample](./map%20server) and run a simple server. 
diff --git a/geotracking/recording/cpp/CMakeLists.txt b/global localization/recording/cpp/CMakeLists.txt similarity index 98% rename from geotracking/recording/cpp/CMakeLists.txt rename to global localization/recording/cpp/CMakeLists.txt index f2574d41..615efe8c 100644 --- a/geotracking/recording/cpp/CMakeLists.txt +++ b/global localization/recording/cpp/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 3.5) -PROJECT(ZED_GNSS_recording) +PROJECT(ZED_Global_Localization_Recording) set(CMAKE_CXX_STANDARD 17) set(CMAKE_CXX_STANDARD_REQUIRED ON) diff --git a/geotracking/recording/cpp/cmake/FindGPS.cmake b/global localization/recording/cpp/cmake/FindGPS.cmake similarity index 100% rename from geotracking/recording/cpp/cmake/FindGPS.cmake rename to global localization/recording/cpp/cmake/FindGPS.cmake diff --git a/geotracking/playback/cpp/include/display/GLViewer.hpp b/global localization/recording/cpp/include/display/GLViewer.hpp similarity index 92% rename from geotracking/playback/cpp/include/display/GLViewer.hpp rename to global localization/recording/cpp/include/display/GLViewer.hpp index f1741337..ba5d26e7 100644 --- a/geotracking/playback/cpp/include/display/GLViewer.hpp +++ b/global localization/recording/cpp/include/display/GLViewer.hpp @@ -12,6 +12,7 @@ #include "ZEDModel.hpp" /* OpenGL Utility Toolkit header */ #include +#include #ifndef M_PI #define M_PI 3.1416f @@ -19,17 +20,12 @@ #define SAFE_DELETE( res ) if( res!=NULL ) { delete res; res = NULL; } -#if 0 #define MOUSE_R_SENSITIVITY 0.005f #define MOUSE_WHEEL_SENSITIVITY 0.065f #define MOUSE_T_SENSITIVITY 0.01f #define KEY_T_SENSITIVITY 0.01f -#else -#define MOUSE_R_SENSITIVITY 0.05f -#define MOUSE_WHEEL_SENSITIVITY 0.65f -#define MOUSE_T_SENSITIVITY 0.1f -#define KEY_T_SENSITIVITY 0.1f -#endif + + //// UTILS ////// using namespace std; @@ -83,14 +79,14 @@ class Shader { public: Shader() {} - Shader(GLchar* vs, GLchar* fs); + Shader(const GLchar* vs, const GLchar* fs); ~Shader(); GLuint 
getProgramId(); static const GLint ATTRIB_VERTICES_POS = 0; static const GLint ATTRIB_COLOR_POS = 1; private: - bool compile(GLuint &shaderId, GLenum type, GLchar* src); + bool compile(GLuint &shaderId, GLenum type, const GLchar* src); GLuint verterxId_; GLuint fragmentId_; GLuint programId_; @@ -162,7 +158,7 @@ class GLViewer { void exit(); bool isAvailable(); void init(int argc, char **argv); - void updateData(sl::Transform zed_rt, std::string str_t, std::string str_r, sl::POSITIONAL_TRACKING_STATE state); + void updateData(sl::Transform zed_rt, sl::FusedPositionalTrackingStatus state); private: // Rendering loop method called each frame by glutDisplayFunc @@ -218,7 +214,7 @@ class GLViewer { std::string txtR; std::string txtT; - sl::POSITIONAL_TRACKING_STATE trackState; + sl::FusedPositionalTrackingStatus trackState; const std::string str_tracking = "POSITIONAL TRACKING : "; sl::float3 bckgrnd_clr; diff --git a/global localization/recording/cpp/include/display/GenericDisplay.h b/global localization/recording/cpp/include/display/GenericDisplay.h new file mode 100644 index 00000000..89de6ad5 --- /dev/null +++ b/global localization/recording/cpp/include/display/GenericDisplay.h @@ -0,0 +1,60 @@ +#ifndef GENERIC_DISPLAY_H +#define GENERIC_DISPLAY_H + +#include +#include "GLViewer.hpp" + +class GenericDisplay +{ +public: +/** + * @brief Construct a new Generic Display object + * + */ + GenericDisplay(); + /** + * @brief Destroy the Generic Display object + * + */ + ~GenericDisplay(); + /** + * @brief Init OpenGL display with the requested camera_model (used as moving element in OpenGL view) + * + * @param argc default main argc + * @param argv default main argv + * @param camera_model zed camera model to use + */ + void init(int argc, char **argv); + /** + * @brief Return if the OpenGL viewer is still open + * + * @return true the OpenGL viewer is still open + * @return false the OpenGL viewer was closed + */ + bool isAvailable(); + /** + * @brief Update the OpenGL 
view with last pose data + * + * @param zed_rt last pose data + * @param state current tracking state + */ + void updatePoseData(sl::Transform zed_rt, sl::FusedPositionalTrackingStatus state); + /** + * @brief Display current pose on the Live Server + * + * @param geo_pose geopose to display + */ + void updateRawGeoPoseData(sl::GNSSData geo_data); + /** + * @brief Display current fused pose on the Live Server & in a KML file + * + * @param geo_pose geopose to display + * @param current_timestamp timestamp of the geopose to display + */ + void updateGeoPoseData(sl::GeoPose geo_pose, sl::Timestamp current_timestamp); + +protected: + GLViewer opengl_viewer; +}; + +#endif \ No newline at end of file diff --git a/geotracking/recording/cpp/include/display/ZEDModel.hpp b/global localization/recording/cpp/include/display/ZEDModel.hpp similarity index 100% rename from geotracking/recording/cpp/include/display/ZEDModel.hpp rename to global localization/recording/cpp/include/display/ZEDModel.hpp diff --git a/geotracking/recording/cpp/include/exporter/GNSSSaver.h b/global localization/recording/cpp/include/exporter/GNSSSaver.h similarity index 81% rename from geotracking/recording/cpp/include/exporter/GNSSSaver.h rename to global localization/recording/cpp/include/exporter/GNSSSaver.h index 8a795661..e579f436 100644 --- a/geotracking/recording/cpp/include/exporter/GNSSSaver.h +++ b/global localization/recording/cpp/include/exporter/GNSSSaver.h @@ -6,14 +6,14 @@ #include #include "TimestampUtils.h" -class GNSSSaver -{ +class GNSSSaver { public: /** * @brief Construct a new GNSSSaver object + * Either recorded to a json file or directly in SVO2 custom data * */ - GNSSSaver(); + GNSSSaver(sl::Camera *zed = 0); /** * @brief Destroy the GNSSSaver object * @@ -34,6 +34,7 @@ class GNSSSaver void saveAllData(); std::string file_path; std::vector all_gnss_data; + sl::Camera *p_zed = 0; }; #endif \ No newline at end of file diff --git 
a/geotracking/recording/cpp/include/exporter/KMLExporter.h b/global localization/recording/cpp/include/exporter/KMLExporter.h similarity index 100% rename from geotracking/recording/cpp/include/exporter/KMLExporter.h rename to global localization/recording/cpp/include/exporter/KMLExporter.h diff --git a/geotracking/recording/cpp/include/exporter/TimestampUtils.h b/global localization/recording/cpp/include/exporter/TimestampUtils.h similarity index 100% rename from geotracking/recording/cpp/include/exporter/TimestampUtils.h rename to global localization/recording/cpp/include/exporter/TimestampUtils.h diff --git a/geotracking/recording/cpp/include/gnss_reader/GPSDReader.hpp b/global localization/recording/cpp/include/gnss_reader/GPSDReader.hpp similarity index 100% rename from geotracking/recording/cpp/include/gnss_reader/GPSDReader.hpp rename to global localization/recording/cpp/include/gnss_reader/GPSDReader.hpp diff --git a/geotracking/recording/cpp/include/gnss_reader/IGNSSReader.h b/global localization/recording/cpp/include/gnss_reader/IGNSSReader.h similarity index 100% rename from geotracking/recording/cpp/include/gnss_reader/IGNSSReader.h rename to global localization/recording/cpp/include/gnss_reader/IGNSSReader.h diff --git a/geotracking/recording/cpp/include/json.hpp b/global localization/recording/cpp/include/json.hpp similarity index 100% rename from geotracking/recording/cpp/include/json.hpp rename to global localization/recording/cpp/include/json.hpp diff --git a/geotracking/recording/cpp/src/display/GLViewer.cpp b/global localization/recording/cpp/src/display/GLViewer.cpp similarity index 87% rename from geotracking/recording/cpp/src/display/GLViewer.cpp rename to global localization/recording/cpp/src/display/GLViewer.cpp index c98a9ef2..d2bfa105 100644 --- a/geotracking/recording/cpp/src/display/GLViewer.cpp +++ b/global localization/recording/cpp/src/display/GLViewer.cpp @@ -19,7 +19,7 @@ void print(std::string msg_prefix, sl::ERROR_CODE err_code, 
std::string msg_suff -GLchar* VERTEX_SHADER = +const GLchar* VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec3 in_Color;\n" @@ -30,7 +30,7 @@ GLchar* VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" "}"; -GLchar* FRAGMENT_SHADER = +const GLchar* FRAGMENT_SHADER = "#version 330 core\n" "in vec3 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" @@ -256,13 +256,19 @@ void GLViewer::draw() { glUseProgram(0); } -void GLViewer::updateData(sl::Transform zed_rt, std::string str_t, std::string str_r, sl::POSITIONAL_TRACKING_STATE state) { +void GLViewer::updateData(sl::Transform zed_rt, sl::FusedPositionalTrackingStatus state) { mtx.lock(); vecPath.push_back(zed_rt.getTranslation()); zedModel.setRT(zed_rt); updateZEDposition = true; - txtT = str_t; - txtR = str_r; + + std::stringstream ss; + ss << zed_rt.getTranslation(); + txtT = ss.str(); + ss.clear(); + ss << zed_rt.getEulerAngles(); + txtR = ss.str(); + trackState = state; mtx.unlock(); } @@ -287,27 +293,75 @@ void GLViewer::printText() { int start_w = 20; int start_h = h_wnd - 40; - (trackState == sl::POSITIONAL_TRACKING_STATE::OK) ? glColor3f(0.2f, 0.65f, 0.2f) : glColor3f(0.85f, 0.2f, 0.2f); + float dark_clr = 0.12f; + std::string odom_status = "POSITIONAL TRACKING STATUS: "; + + glColor3f(dark_clr, dark_clr, dark_clr); glRasterPos2i(start_w, start_h); - std::string track_str = (str_tracking + sl::toString(trackState).c_str()); + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, odom_status.c_str()); + + (trackState.tracking_fusion_status != sl::POSITIONAL_TRACKING_FUSION_STATUS::UNAVAILABLE) ? 
glColor3f(0.2f, 0.65f, 0.2f) : glColor3f(0.85f, 0.2f, 0.2f); + std::string track_str = (sl::toString(trackState.tracking_fusion_status).c_str()); + glRasterPos2i(start_w + 300, start_h); safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, track_str.c_str()); - float dark_clr = 0.12f; + + glColor3f(dark_clr, dark_clr, dark_clr); + glRasterPos2i(start_w, start_h - 40); + std::string imu_status = "GNSS MODE: "; + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, imu_status.c_str()); + + if (trackState.gnss_mode == sl::GNSS_MODE::FIX_3D) + glColor3f(0.2f, 0.65f, 0.2f); + else + glColor3f(0.85f, 0.2f, 0.2f); + glRasterPos2i(start_w + 300, start_h - 40); + track_str = (sl::toString(trackState.gnss_mode).c_str()); + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, track_str.c_str()); + + glColor3f(dark_clr, dark_clr, dark_clr); + glRasterPos2i(start_w, start_h - 60); + std::string gnss_status = "GNSS STATUS: "; + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, gnss_status.c_str()); + + if (trackState.gnss_status == sl::GNSS_STATUS::RTK_FIX || trackState.gnss_status == sl::GNSS_STATUS::RTK_FLOAT) + glColor3f(0.2f, 0.65f, 0.2f); + else + glColor3f(0.85f, 0.2f, 0.2f); + glRasterPos2i(start_w + 300, start_h - 60); + track_str = (sl::toString(trackState.gnss_status).c_str()); + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, track_str.c_str()); + + glColor3f(dark_clr, dark_clr, dark_clr); + glRasterPos2i(start_w, start_h - 80); + std::string gnss_fusion_status = "GNSS FUSION STATUS: "; + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, gnss_fusion_status.c_str()); + + if (trackState.gnss_fusion_status == sl::GNSS_FUSION_STATUS::OK) + glColor3f(0.2f, 0.65f, 0.2f); + else + glColor3f(0.85f, 0.2f, 0.2f); + glRasterPos2i(start_w + 300, start_h - 80); + track_str = (sl::toString(trackState.gnss_fusion_status).c_str()); + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, track_str.c_str()); + + + glColor3f(dark_clr, dark_clr, dark_clr); - glRasterPos2i(start_w, start_h - 25); + 
glRasterPos2i(start_w, start_h - 105); safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, "Translation (m) :"); glColor3f(0.4980f, 0.5490f, 0.5529f); - glRasterPos2i(155, start_h - 25); + glRasterPos2i(155, start_h - 105); safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, txtT.c_str()); glColor3f(dark_clr, dark_clr, dark_clr); - glRasterPos2i(start_w, start_h - 50); + glRasterPos2i(start_w, start_h - 130); safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, "Rotation (rad) :"); glColor3f(0.4980f, 0.5490f, 0.5529f); - glRasterPos2i(155, start_h - 50); + glRasterPos2i(155, start_h - 130); safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, txtR.c_str()); glMatrixMode(GL_PROJECTION); @@ -514,7 +568,7 @@ Transform Simple3DObject::getModelMatrix() const { return tmp; } -Shader::Shader(GLchar* vs, GLchar* fs) { +Shader::Shader(const GLchar* vs, const GLchar* fs) { if (!compile(verterxId_, GL_VERTEX_SHADER, vs)) { std::cout << "ERROR: while compiling vertex shader" << std::endl; } @@ -562,7 +616,7 @@ GLuint Shader::getProgramId() { return programId_; } -bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { +bool Shader::compile(GLuint &shaderId, GLenum type, const GLchar* src) { shaderId = glCreateShader(type); if (shaderId == 0) { std::cout << "ERROR: shader type (" << type << ") does not exist" << std::endl; diff --git a/global localization/recording/cpp/src/display/GenericDisplay.cpp b/global localization/recording/cpp/src/display/GenericDisplay.cpp new file mode 100644 index 00000000..7375d124 --- /dev/null +++ b/global localization/recording/cpp/src/display/GenericDisplay.cpp @@ -0,0 +1,62 @@ +#include "display/GenericDisplay.h" +#include "exporter/KMLExporter.h" + + +GenericDisplay::GenericDisplay() +{ +} + +GenericDisplay::~GenericDisplay() +{ + closeAllKMLWriter(); +} + +void GenericDisplay::init(int argc, char **argv) +{ + opengl_viewer.init(argc, argv); +} + +void GenericDisplay::updatePoseData(sl::Transform zed_rt, sl::FusedPositionalTrackingStatus state) +{ + 
opengl_viewer.updateData(zed_rt, state); +} + +bool GenericDisplay::isAvailable(){ + return opengl_viewer.isAvailable(); +} + +void GenericDisplay::updateRawGeoPoseData(sl::GNSSData geo_data) +{ + double latitude, longitude, altitude; + geo_data.getCoordinates(latitude, longitude, altitude, false); + + // Make the pose available for the Live Server + ofstream data; + data.open ("../../../map server/raw_data.txt"); + data << std::fixed << std::setprecision(17); + data << latitude; + data << ","; + data << longitude; + data << ","; + data << geo_data.ts.getMilliseconds(); + data << "\n"; + data.close(); +} + +void GenericDisplay::updateGeoPoseData(sl::GeoPose geo_pose, sl::Timestamp current_timestamp) +{ + // Make the pose available for the Live Server + ofstream data; + data.open ("../../../map server/data.txt"); + data << std::fixed << std::setprecision(17); + data << geo_pose.latlng_coordinates.getLatitude(false); + data << ","; + data << geo_pose.latlng_coordinates.getLongitude(false); + data << ","; + data << current_timestamp.getMilliseconds(); + data << "\n"; + data.close(); + + // Save the pose in a .kml file + saveKMLData("fused_position.kml", geo_pose); +} diff --git a/global localization/recording/cpp/src/exporter/GNSSSaver.cpp b/global localization/recording/cpp/src/exporter/GNSSSaver.cpp new file mode 100644 index 00000000..9bbb81d3 --- /dev/null +++ b/global localization/recording/cpp/src/exporter/GNSSSaver.cpp @@ -0,0 +1,83 @@ +#include "exporter/GNSSSaver.h" +#include "json.hpp" + +/** + * @brief Construct a new GNSSSaver object + * + */ +GNSSSaver::GNSSSaver(sl::Camera *zed) { + std::string current_date = getCurrentDatetime(); + this->file_path = "GNSS_" + current_date + ".json"; + + p_zed = zed; +} + +/** + * @brief Destroy the GNSSSaver object + * + */ +GNSSSaver::~GNSSSaver() { + saveAllData(); +} + +inline nlohmann::json convertGNSSData2JSON(sl::GNSSData &gnss_data) { + double latitude, longitude, altitude; + gnss_data.getCoordinates(latitude, 
longitude, altitude, false); + nlohmann::json gnss_measure; + gnss_measure["ts"] = gnss_data.ts.getNanoseconds(); + gnss_measure["coordinates"] = { + {"latitude", latitude}, + {"longitude", longitude}, + {"altitude", altitude} + }; + std::array position_covariance; + for (unsigned j = 0; j < 9; j++) { + position_covariance[j] = gnss_data.position_covariance[j]; + } + gnss_measure["position_covariance"] = position_covariance; + gnss_measure["longitude_std"] = sqrt(position_covariance[0 * 3 + 0]); + gnss_measure["latitude_std"] = sqrt(position_covariance[1 * 3 + 1]); + gnss_measure["altitude_std"] = sqrt(position_covariance[2 * 3 + 2]); + + gnss_measure["mode"] = gnss_data.gnss_mode; + gnss_measure["status"] = gnss_data.gnss_status; + + return gnss_measure; +} + +/** + * @brief Add the input gnss_data into the exported GNSS json file + * + * @param gnss_data gnss data to add + */ +void GNSSSaver::addGNSSData(sl::GNSSData gnss_data) { + if (p_zed != nullptr) { + sl::SVOData data; + data.key = "GNSS_json"; + data.setContent(convertGNSSData2JSON(gnss_data).dump()); + data.timestamp_ns = gnss_data.ts.getNanoseconds(); + auto err = p_zed->ingestDataIntoSVO(data); + } else + all_gnss_data.push_back(gnss_data); +} + +/** + * @brief Save all added data into the exported json file + * + */ +void GNSSSaver::saveAllData() { + if (p_zed != 0) { + std::vector all_gnss_measurements; + for (unsigned i = 0; i < all_gnss_data.size(); i++) { + nlohmann::json gnss_measure = convertGNSSData2JSON(all_gnss_data[i]); + all_gnss_measurements.push_back(gnss_measure); + } + + nlohmann::json final_json; + final_json["GNSS"] = all_gnss_measurements; + std::ofstream gnss_file(file_path); + gnss_file << final_json.dump(); + gnss_file.close(); + std::cout << "All GNSS data saved" << std::endl; + } +} diff --git a/geotracking/recording/cpp/src/exporter/KMLExporter.cpp b/global localization/recording/cpp/src/exporter/KMLExporter.cpp similarity index 100% rename from 
geotracking/recording/cpp/src/exporter/KMLExporter.cpp rename to global localization/recording/cpp/src/exporter/KMLExporter.cpp diff --git a/geotracking/recording/cpp/src/exporter/TimestampUtils.cpp b/global localization/recording/cpp/src/exporter/TimestampUtils.cpp similarity index 100% rename from geotracking/recording/cpp/src/exporter/TimestampUtils.cpp rename to global localization/recording/cpp/src/exporter/TimestampUtils.cpp diff --git a/geotracking/recording/cpp/src/gnss_reader/GPSDReader.cpp b/global localization/recording/cpp/src/gnss_reader/GPSDReader.cpp similarity index 56% rename from geotracking/recording/cpp/src/gnss_reader/GPSDReader.cpp rename to global localization/recording/cpp/src/gnss_reader/GPSDReader.cpp index de28e647..20e37d45 100644 --- a/geotracking/recording/cpp/src/gnss_reader/GPSDReader.cpp +++ b/global localization/recording/cpp/src/gnss_reader/GPSDReader.cpp @@ -1,28 +1,26 @@ #include "gnss_reader/GPSDReader.hpp" -GPSDReader::GPSDReader(){ - +GPSDReader::GPSDReader() { + } -GPSDReader::~GPSDReader() -{ +GPSDReader::~GPSDReader() { continue_to_grab = false; grab_gnss_data.join(); #ifdef GPSD_FOUND - + #else std::cerr << "[library not found] GPSD library was not found ... please install it before using this sample" << std::endl; #endif } -void GPSDReader::initialize() -{ + +void GPSDReader::initialize() { std::cout << "initialize " << std::endl; grab_gnss_data = std::thread(&GPSDReader::grabGNSSData, this); #ifdef GPSD_FOUND std::cout << "Create new object" << std::endl; gnss_getter.reset(new gpsmm("localhost", DEFAULT_GPSD_PORT)); - if (gnss_getter->stream(WATCH_ENABLE | WATCH_JSON) == nullptr) - { + if (gnss_getter->stream(WATCH_ENABLE | WATCH_JSON) == nullptr) { std::cerr << "No GPSD running .. 
exit" << std::endl; exit(EXIT_FAILURE); } @@ -31,12 +29,10 @@ void GPSDReader::initialize() bool received_fix = false; struct gps_data_t *gpsd_data; - while (!received_fix) - { + while (!received_fix) { if (!gnss_getter->waiting(0)) continue; - if ((gpsd_data = gnss_getter->read()) == NULL) - { + if ((gpsd_data = gnss_getter->read()) == NULL) { std::cerr << "[GNSS] read error ... exit program" << std::endl; exit(EXIT_FAILURE); } @@ -52,16 +48,14 @@ void GPSDReader::initialize() #endif } -sl::GNSSData GPSDReader::getNextGNSSValue() -{ +sl::GNSSData GPSDReader::getNextGNSSValue() { #ifdef GPSD_FOUND // 0. Check if GNSS is initialized: // 1. Get GNSS datas: struct gps_data_t *gpsd_data; while ((gpsd_data = gnss_getter->read()) == NULL) ; - if (gpsd_data->fix.mode >= MODE_2D) - { + if (gpsd_data->fix.mode >= MODE_2D) { sl::GNSSData current_gnss_data; // Fill out coordinates: current_gnss_data.setCoordinates(gpsd_data->fix.latitude, gpsd_data->fix.longitude, gpsd_data->fix.altMSL, false); @@ -81,10 +75,70 @@ sl::GNSSData GPSDReader::getNextGNSSValue() current_gnss_data.ts.setMicroseconds(current_gnss_timestamp); + int gpsd_mode = gpsd_data->fix.mode; + sl::GNSS_MODE sl_mode = sl::GNSS_MODE::UNKNOWN; + + switch (gpsd_mode) { + case 0: // MODE_NOT_SEEN + sl_mode = sl::GNSS_MODE::UNKNOWN; + break; + case 1: // MODE_NO_FIX + sl_mode = sl::GNSS_MODE::NO_FIX; + break; + case 2: // MODE_2D + sl_mode = sl::GNSS_MODE::FIX_2D; + break; + case 3: // MODE_3D + sl_mode = sl::GNSS_MODE::FIX_3D; + break; + default: + sl_mode = sl::GNSS_MODE::UNKNOWN; + break; + } + + int gpsd_status = gpsd_data->fix.status; + sl::GNSS_STATUS sl_status = sl::GNSS_STATUS::UNKNOWN; + + switch (gpsd_status) { + case 0: // STATUS_UNK + sl_status = sl::GNSS_STATUS::UNKNOWN; + break; + case 1: // STATUS_GPS + sl_status = sl::GNSS_STATUS::SINGLE; + break; + case 2: // STATUS_DGPS + sl_status = sl::GNSS_STATUS::DGNSS; + break; + case 3: // STATUS_RTK_FIX + sl_status = sl::GNSS_STATUS::RTK_FIX; + break; + case 
4: // STATUS_RTK_FLT + sl_status = sl::GNSS_STATUS::RTK_FLOAT; + break; + case 5: // STATUS_DR + sl_status = sl::GNSS_STATUS::SINGLE; + break; + case 6: // STATUS_GNSSDR + sl_status = sl::GNSS_STATUS::DGNSS; + break; + case 7: // STATUS_TIME + sl_status = sl::GNSS_STATUS::UNKNOWN; + break; + case 8: // STATUS_SIM + sl_status = sl::GNSS_STATUS::UNKNOWN; + break; + case 9: // STATUS_PPS_FIX + sl_status = sl::GNSS_STATUS::SINGLE; + break; + default: + sl_status = sl::GNSS_STATUS::UNKNOWN; + break; + } + + current_gnss_data.gnss_status = sl_status; + current_gnss_data.gnss_mode = sl_mode; return current_gnss_data; - } - else - { + } else { std::cout << "Fix lost: reinit GNSS" << std::endl; initialize(); return getNextGNSSValue(); @@ -96,31 +150,30 @@ sl::GNSSData GPSDReader::getNextGNSSValue() return sl::GNSSData(); } -sl::ERROR_CODE GPSDReader::grab(sl::GNSSData & current_data){ - if(new_data){ - new_data=false; +sl::ERROR_CODE GPSDReader::grab(sl::GNSSData & current_data) { + if (new_data) { + new_data = false; current_data = current_gnss_data; return sl::ERROR_CODE::SUCCESS; } return sl::ERROR_CODE::FAILURE; } -void GPSDReader::grabGNSSData(){ - while(1){ +void GPSDReader::grabGNSSData() { + while (1) { is_initialized_mtx.lock(); - if(is_initialized){ + if (is_initialized) { is_initialized_mtx.unlock(); break; } is_initialized_mtx.unlock(); std::this_thread::sleep_for(std::chrono::milliseconds(1)); } - while (continue_to_grab) - { - #ifdef GPSD_FOUND + while (continue_to_grab) { +#ifdef GPSD_FOUND current_gnss_data = getNextGNSSValue(); new_data = true; - #endif +#endif } - + } \ No newline at end of file diff --git a/geotracking/recording/cpp/src/main.cpp b/global localization/recording/cpp/src/main.cpp similarity index 79% rename from geotracking/recording/cpp/src/main.cpp rename to global localization/recording/cpp/src/main.cpp index e21cbdf0..a07ac230 100644 --- a/geotracking/recording/cpp/src/main.cpp +++ b/global localization/recording/cpp/src/main.cpp @@ -1,6 
+1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // @@ -19,8 +19,7 @@ /////////////////////////////////////////////////////////////////////////// /*************************************************************************** - ** This sample shows how to record data for geotracking localization ** - ** on real-world map ** + ** This sample shows how to use global localization on real-world map ** **************************************************************************/ #include @@ -33,23 +32,19 @@ #include "exporter/KMLExporter.h" #include "exporter/GNSSSaver.h" - -int main(int argc, char **argv) -{ +int main(int argc, char **argv) { // Open the camera sl::Camera zed; sl::InitParameters init_params; init_params.sdk_verbose = 1; sl::ERROR_CODE camera_open_error = zed.open(init_params); - if (camera_open_error != sl::ERROR_CODE::SUCCESS) - { + if (camera_open_error != sl::ERROR_CODE::SUCCESS) { std::cerr << "[ZED][ERROR] Can't open ZED camera" << std::endl; return EXIT_FAILURE; } // Enable positional tracking: auto positional_init = zed.enablePositionalTracking(); - if (positional_init != sl::ERROR_CODE::SUCCESS) - { + if (positional_init != sl::ERROR_CODE::SUCCESS) { std::cerr << "[ZED][ERROR] Can't start tracking of camera" << std::endl; return EXIT_FAILURE; } @@ -59,7 +54,7 @@ int main(int argc, char **argv) sl::String path_output(svo_path.c_str()); auto returned_state = zed.enableRecording(sl::RecordingParameters(path_output, sl::SVO_COMPRESSION_MODE::H264_LOSSLESS)); if (returned_state != sl::ERROR_CODE::SUCCESS) { - std::cerr << "Recording ZED : " << returned_state << std::endl; + std::cerr << "Recording ZED : " << returned_state << std::endl; zed.close(); return EXIT_FAILURE; } @@ -70,8 +65,7 @@ int main(int argc, char **argv) sl::InitFusionParameters init_fusion_param; init_fusion_param.coordinate_units = sl::UNIT::METER; 
sl::FUSION_ERROR_CODE fusion_init_code = fusion.init(init_fusion_param); - if (fusion_init_code != sl::FUSION_ERROR_CODE::SUCCESS) - { + if (fusion_init_code != sl::FUSION_ERROR_CODE::SUCCESS) { std::cerr << "[Fusion][ERROR] Failed to initialize fusion, error: " << fusion_init_code << std::endl; return EXIT_FAILURE; } @@ -81,8 +75,7 @@ int main(int argc, char **argv) communication_parameters.setForSharedMemory(); zed.startPublishing(communication_parameters); /// Run a first grab for starting sending data: - while (zed.grab() != sl::ERROR_CODE::SUCCESS) - { + while (zed.grab() != sl::ERROR_CODE::SUCCESS) { } // Enable GNSS data producing: GPSDReader gnss_reader; @@ -94,19 +87,22 @@ int main(int argc, char **argv) // Enable positional tracking for Fusion object sl::PositionalTrackingFusionParameters positional_tracking_fusion_parameters; positional_tracking_fusion_parameters.enable_GNSS_fusion = true; - fusion.enablePositionalTracking(positional_tracking_fusion_parameters); + sl::FUSION_ERROR_CODE tracking_error_code = fusion.enablePositionalTracking(positional_tracking_fusion_parameters); + if(tracking_error_code != sl::FUSION_ERROR_CODE::SUCCESS){ + std::cout << "[Fusion][ERROR] Could not start tracking, error: " << tracking_error_code << std::endl; + return EXIT_FAILURE; + } - std::cout << "Start grabbing data ... the geo-tracking will be displayed in ZEDHub map section" << std::endl; + + std::cout << "Start grabbing data... 
Global localization data will be displayed on the Live Server" << std::endl; GenericDisplay viewer; viewer.init(argc, argv); - GNSSSaver gnss_data_saver; - while (viewer.isAvailable()) - { + GNSSSaver gnss_data_saver(&zed); + while (viewer.isAvailable()) { // Grab camera: - if (zed.grab() == sl::ERROR_CODE::SUCCESS) - { + if (zed.grab() == sl::ERROR_CODE::SUCCESS) { sl::Pose zed_pose; // You can still use the classical getPosition for your application, just not that the position returned by this method // is the position without any GNSS/cameras fusion @@ -115,12 +111,15 @@ int main(int argc, char **argv) // Get GNSS data: sl::GNSSData input_gnss; - if (gnss_reader.grab(input_gnss) == sl::ERROR_CODE::SUCCESS) - { + if (gnss_reader.grab(input_gnss) == sl::ERROR_CODE::SUCCESS) { + // Display it on the Live Server: + viewer.updateRawGeoPoseData(input_gnss); + // We force GNSS data to have the current timestamp for synchronizing it with camera data // input_gnss.ts = zed.getTimestamp(sl::TIME_REFERENCE::IMAGE); + input_gnss.gnss_status = sl::GNSS_STATUS::RTK_FIX; auto ingest_error = fusion.ingestGNSSData(input_gnss); - if(ingest_error != sl::FUSION_ERROR_CODE::SUCCESS){ + if (ingest_error != sl::FUSION_ERROR_CODE::SUCCESS) { std::cout << "Ingest error occurred when ingesting GNSSData: " << ingest_error << std::endl; } // Save current GNSS data to KML file: @@ -129,33 +128,23 @@ int main(int argc, char **argv) gnss_data_saver.addGNSSData(input_gnss); } // Process data and compute positions: - if (fusion.process() == sl::FUSION_ERROR_CODE::SUCCESS) - { + if (fusion.process() == sl::FUSION_ERROR_CODE::SUCCESS) { sl::Pose fused_position; // Get position into the ZED CAMERA coordinate system: sl::POSITIONAL_TRACKING_STATE current_state = fusion.getPosition(fused_position); - if (current_state == sl::POSITIONAL_TRACKING_STATE::OK) - { - std::stringstream ss; - ss << fused_position.pose_data.getTranslation(); - std::string translation_message = ss.str(); - ss.clear(); - ss << 
fused_position.pose_data.getEulerAngles(); - std::string rotation_message = ss.str(); + if (current_state == sl::POSITIONAL_TRACKING_STATE::OK) { // Display it on OpenGL: - viewer.updatePoseData(fused_position.pose_data, translation_message, rotation_message, current_state); + sl::FusedPositionalTrackingStatus fused_status = fusion.getFusedPositionalTrackingStatus(); + viewer.updatePoseData(fused_position.pose_data, fused_status); } // Get position into the GNSS coordinate system - this needs a initialization between CAMERA // and GNSS. When the initialization is finish the getGeoPose will return sl::POSITIONAL_TRACKING_STATE::OK sl::GeoPose current_geopose; auto current_geopose_satus = fusion.getGeoPose(current_geopose); - if (current_geopose_satus == sl::GNSS_CALIBRATION_STATE::CALIBRATED) - { - // Display it on ZED Hub: + if (current_geopose_satus == sl::GNSS_FUSION_STATUS::OK) { + // Display it on the Live Server: viewer.updateGeoPoseData(current_geopose, zed.getTimestamp(sl::TIME_REFERENCE::CURRENT)); - } - else - { + } else { // GNSS coordinate system to ZED coordinate system is not initialize yet // The initialisation between the coordinates system is basically an optimization problem that // Try to fit the ZED computed path with the GNSS computed path. 
In order to do it just move diff --git a/global localization/recording/python/display/generic_display.py b/global localization/recording/python/display/generic_display.py new file mode 100644 index 00000000..bb77db05 --- /dev/null +++ b/global localization/recording/python/display/generic_display.py @@ -0,0 +1,56 @@ +from display.gl_viewer import GLViewer +from exporter.KMLExporter import * +import time + + +class GenericDisplay: + def __init__(self): + pass + + def __del__(self): + closeAllKMLFiles() + + def init(self, camera_model): + self.glviewer = GLViewer() + self.glviewer.init(camera_model) + # Replace this part with the appropriate connection to your IoT system + + def updatePoseData(self, zed_rt, str_t, str_r, state): + self.glviewer.updateData(zed_rt, str_t, str_r, state) + + def isAvailable(self): + return self.glviewer.is_available() + + def updateRawGeoPoseData(self, geo_data): + try: + # Replace this part with the appropriate sending of data to your IoT system + latitude, longitude, _ = geo_data.get_coordinates(False) + f = open('../../map server/raw_data.txt', 'w') + f.write("{},{},{}".format(latitude, longitude, geo_data.ts.get_milliseconds())) + + except ImportError: + print("An exception was raised: the raw geo-pose data was not sent.") + + def updateGeoPoseData(self, geo_pose, current_timestamp): + try: + # Replace this part with the appropriate sending of data to your IoT system + f = open('../../map server/data.txt', 'w') + f.write("{},{},{}" + .format(geo_pose.latlng_coordinates.get_latitude(False), + geo_pose.latlng_coordinates.get_longitude(False), + current_timestamp.get_milliseconds())) + + except ImportError: + print("An exception was raised: the geo-pose data was not sent.") + + +if __name__ == "__main__": + generic_display = GenericDisplay() + generic_display.init(0, []) + + try: + while True: + # Your logic here... 
+ pass + except KeyboardInterrupt: + pass diff --git a/geotracking/recording/python/display/gl_viewer.py b/global localization/recording/python/display/gl_viewer.py similarity index 99% rename from geotracking/recording/python/display/gl_viewer.py rename to global localization/recording/python/display/gl_viewer.py index e6b2f211..4427e8cd 100644 --- a/geotracking/recording/python/display/gl_viewer.py +++ b/global localization/recording/python/display/gl_viewer.py @@ -300,7 +300,7 @@ def close_func(self): self.available = False def keyPressedCallback(self, key, x, y): - if ord(key) == 27: + if key == b'q' or key == b'Q' or ord(key) == 27: self.close_func() def on_mouse(self,*args,**kwargs): diff --git a/geotracking/playback/python/display/zed_model.py b/global localization/recording/python/display/zed_model.py similarity index 100% rename from geotracking/playback/python/display/zed_model.py rename to global localization/recording/python/display/zed_model.py diff --git a/geotracking/live geotracking/python/exporter/KMLExporter.py b/global localization/recording/python/exporter/KMLExporter.py similarity index 95% rename from geotracking/live geotracking/python/exporter/KMLExporter.py rename to global localization/recording/python/exporter/KMLExporter.py index 4ddf7487..4ef70078 100644 --- a/geotracking/live geotracking/python/exporter/KMLExporter.py +++ b/global localization/recording/python/exporter/KMLExporter.py @@ -41,15 +41,15 @@ def closeAllKMLFiles(): """ Close all KML file writer and place KML files footer """ - for file_name, file_object in all_file: + for file_name in all_file: file_footer = "" file_footer += "\t\n" file_footer += "\t\n" file_footer += "\n" file_footer += "\t\n" file_footer += "\n" - file_object.write(file_footer) - file_object.close() + all_file[file_name].write(file_footer) + all_file[file_name].close() def saveKMLData(file_path, gnss_data): diff --git a/global localization/recording/python/exporter/gnss_saver.py b/global 
localization/recording/python/exporter/gnss_saver.py new file mode 100644 index 00000000..3fab7e3a --- /dev/null +++ b/global localization/recording/python/exporter/gnss_saver.py @@ -0,0 +1,62 @@ +import numpy as np +import json +from datetime import datetime +import pyzed.sl as sl + + +def get_current_datetime(): + now = datetime.now() + return now.strftime("%d-%m-%Y_%H-%M-%S") + +def convert_gnss_data_2_json(gnss_data: sl.GNSSData) -> json: + latitude, longitude, altitude = gnss_data.get_coordinates(False) + gnss_measure = {} + gnss_measure["ts"] = gnss_data.ts.get_nanoseconds() + coordinates_dict = {} + coordinates_dict["latitude"] = latitude + coordinates_dict["longitude"] = longitude + coordinates_dict["altitude"] = altitude + gnss_measure["coordinates"] = coordinates_dict + position_covariance = [gnss_data.position_covariances[j] for j in range(9)] + gnss_measure["position_covariance"] = position_covariance + gnss_measure["longitude_std"] = np.sqrt(position_covariance[0 * 3 + 0]) + gnss_measure["latitude_std"] = np.sqrt(position_covariance[1 * 3 + 1]) + gnss_measure["altitude_std"] = np.sqrt(position_covariance[2 * 3 + 2]) + + gnss_measure["mode"] = gnss_data.gnss_mode + gnss_measure["status"] = gnss_data.gnss_status + + return gnss_measure + + +class GNSSSaver: + def __init__(self, zed: sl.Camera): + self.current_date = get_current_datetime() + self.file_path = "GNSS_"+self.current_date+".json" + self.all_gnss_data = [] + self._zed = zed + + def addGNSSData(self, gnss_data): + if self._zed is not None: + data = sl.SVOData() + data.key = "GNSS_json" + data.set_content(convert_gnss_data_2_json(gnss_data)) + + self._zed.ingest_data_in_svo(data) + + else: + self.all_gnss_data.append(gnss_data) + + def saveAllData(self): + print("Start saving GNSS data...") + all_gnss_measurements = [] + + for i in range(len(self.all_gnss_data)): + gnss_measure = convert_gnss_data_2_json(self.all_gnss_data[i]) + all_gnss_measurements.append(gnss_measure) + + final_dict = {"GNSS" 
: all_gnss_measurements} + with open(self.file_path, "w") as outfile: + # json_data refers to the above JSON + json.dump(final_dict, outfile) + print("All GNSS data saved") diff --git a/global localization/recording/python/gnss_reader/gpsd_reader.py b/global localization/recording/python/gnss_reader/gpsd_reader.py new file mode 100644 index 00000000..ffff9356 --- /dev/null +++ b/global localization/recording/python/gnss_reader/gpsd_reader.py @@ -0,0 +1,96 @@ +import threading +import time +import pyzed.sl as sl +from gpsdclient import GPSDClient +import random +import datetime + + +class GPSDReader: + def __init__(self): + self.continue_to_grab = True + self.new_data = False + self.is_initialized = False + self.current_gnss_data = None + self.is_initialized_mtx = threading.Lock() + self.client = None + self.gnss_getter = None + self.grab_gnss_data = None + + def initialize(self): + try: + self.client = GPSDClient(host="127.0.0.1") + except: + print("No GPSD running .. exit") + return -1 + + self.grab_gnss_data = threading.Thread(target=self.grabGNSSData) + self.grab_gnss_data.start() + print("Successfully connected to GPSD") + print("Waiting for GNSS fix") + received_fix = False + + self.gnss_getter = self.client.dict_stream(convert_datetime=True, filter=["TPV"]) + while not received_fix: + gpsd_data = next(self.gnss_getter) + if "class" in gpsd_data and gpsd_data["class"] == "TPV" and "mode" in gpsd_data and gpsd_data["mode"] >= 2: + received_fix = True + print("Fix found !!!") + with self.is_initialized_mtx: + self.is_initialized = True + return 0 + + def getNextGNSSValue(self): + gpsd_data = None + while gpsd_data is None: + gpsd_data = next(self.gnss_getter) + + if "class" in gpsd_data and gpsd_data["class"] == "TPV" and "mode" in gpsd_data and gpsd_data["mode"] >= 2: + current_gnss_data = sl.GNSSData() + current_gnss_data.set_coordinates(gpsd_data["lat"], gpsd_data["lon"], gpsd_data["altMSL"], False) + current_gnss_data.longitude_std = 0.001 + 
current_gnss_data.latitude_std = 0.001 + current_gnss_data.altitude_std = 1.0 + + position_covariance = [ + gpsd_data["eph"] * gpsd_data["eph"], + 0.0, + 0.0, + 0.0, + gpsd_data["eph"] * gpsd_data["eph"], + 0.0, + 0.0, + 0.0, + gpsd_data["epv"] * gpsd_data["epv"] + ] + current_gnss_data.position_covariances = position_covariance + timestamp_microseconds = int(gpsd_data["time"].timestamp() * 1000000) + ts = sl.Timestamp() + ts.set_microseconds(timestamp_microseconds) + current_gnss_data.ts = ts + return current_gnss_data + else: + print("Fix lost : GNSS reinitialization") + self.initialize() + return None + + def grab(self): + if self.new_data: + self.new_data = False + return sl.ERROR_CODE.SUCCESS, self.current_gnss_data + return sl.ERROR_CODE.FAILURE, None + + def grabGNSSData(self): + while self.continue_to_grab: + with self.is_initialized_mtx: + if self.is_initialized: + break + time.sleep(0.001) + + while self.continue_to_grab: + self.current_gnss_data = self.getNextGNSSValue() + if self.current_gnss_data is not None: + self.new_data = True + + def stop_thread(self): + self.continue_to_grab = False diff --git a/geotracking/live geotracking/python/ogl_viewer/tracking_viewer.py b/global localization/recording/python/ogl_viewer/tracking_viewer.py similarity index 100% rename from geotracking/live geotracking/python/ogl_viewer/tracking_viewer.py rename to global localization/recording/python/ogl_viewer/tracking_viewer.py diff --git a/geotracking/playback/python/ogl_viewer/zed_model.py b/global localization/recording/python/ogl_viewer/zed_model.py similarity index 100% rename from geotracking/playback/python/ogl_viewer/zed_model.py rename to global localization/recording/python/ogl_viewer/zed_model.py diff --git a/geotracking/recording/python/recording.py b/global localization/recording/python/recording.py similarity index 86% rename from geotracking/recording/python/recording.py rename to global localization/recording/python/recording.py index 085d2352..3c10cf21 
100644 --- a/geotracking/recording/python/recording.py +++ b/global localization/recording/python/recording.py @@ -1,6 +1,6 @@ ######################################################################## # -# Copyright (c) 2023, STEREOLABS. +# Copyright (c) 2024, STEREOLABS. # # All rights reserved. # @@ -92,21 +92,21 @@ def main(): positional_tracking_fusion_parameters = sl.PositionalTrackingFusionParameters() positional_tracking_fusion_parameters.enable_GNSS_fusion = True gnss_calibration_parameters = { - "target_yaw_uncertainty" : 0.1, + "target_yaw_uncertainty" : 0.1, "enable_translation_uncertainty_target" : False, "target_translation_uncertainty" : 10e-2, "enable_reinitialization" : True, "gnss_vio_reinit_threshold" : 5, "enable_rolling_calibration" : True } - fusion.enable_positionnal_tracking({"gnss_calibration_parameters" : gnss_calibration_parameters , "enable_GNSS_fusion" : True}) + fusion.enable_positionnal_tracking(positional_tracking_fusion_parameters) # Setup viewer: viewer = GenericDisplay() viewer.init(zed.get_camera_information().camera_model) print("Start grabbing data ...") print("SVO is Recording, use Ctrl-C to stop.") py_translation = sl.Translation() - gnss_data_saver = GNSSSaver() + gnss_data_saver = GNSSSaver(zed) while is_running and viewer.isAvailable(): # Grab camera: if zed.grab() == sl.ERROR_CODE.SUCCESS: @@ -117,20 +117,25 @@ def main(): # Get GNSS data: status, input_gnss = gnss_reader.grab() if status == sl.ERROR_CODE.SUCCESS: + # Display it on the Live Server + viewer.updateRawGeoPoseData(input_gnss) + # Publish GNSS data to Fusion ingest_error = fusion.ingest_gnss_data(input_gnss) - if ingest_error != sl.FUSION_ERROR_CODE.SUCCESS: - print("Ingest error occurred when ingesting GNSSData: ",ingest_error) + if ingest_error == sl.FUSION_ERROR_CODE.SUCCESS: # Save current GNSS data to KML file: latitude, longitude, altitude = input_gnss.get_coordinates(False) coordinates = { - "latitude": latitude, - "longitude": longitude, - "altitude": 
altitude, - } - export.saveKMLData("raw_gnss.kml", coordinates) + "latitude": latitude, + "longitude": longitude, + "altitude": altitude, + } + export.saveKMLData("raw_gnss.kml", coordinates) # Save GNSS data into JSON: gnss_data_saver.addGNSSData(input_gnss) + else: + print("Ingest error occurred when ingesting GNSSData: ", ingest_error) + # Process data and compute positions: if fusion.process() == sl.FUSION_ERROR_CODE.SUCCESS: fused_position = sl.Pose() @@ -146,15 +151,14 @@ def main(): # and GNSS. When the initialization is finish the getGeoPose will return sl.POSITIONAL_TRACKING_STATE.OK current_geopose = sl.GeoPose() current_geopose_satus = fusion.get_geo_pose(current_geopose) - if current_geopose_satus == sl.GNSS_CALIBRATION_STATE.CALIBRATED: - print("ok") - viewer.updateGeoPoseData(current_geopose, zed.get_timestamp(sl.TIME_REFERENCE.CURRENT).data_ns/1000) + if current_geopose_satus == sl.GNSS_CALIBRATION_STATE.OK: + viewer.updateGeoPoseData(current_geopose, zed.get_timestamp(sl.TIME_REFERENCE.CURRENT)) """ else: - GNSS coordinate system to ZED coordinate system is not initialize yet - The initialisation between the coordinates system is basicaly an optimization problem that - Try to fit the ZED computed path with the GNSS computed path. In order to do it just move - your system by the distance you specified in positional_tracking_fusion_parameters.gnss_initialisation_distance + GNSS coordinate system to ZED coordinate system is not initialized yet + The initialization between the coordinates system is an optimization problem that + tries to fit the ZED computed path with the GNSS computed path. 
In order to do it just move + your system by the distance specified in positional_tracking_fusion_parameters.gnss_initialization_distance """ gnss_reader.stop_thread() zed.disable_recording() diff --git a/global localization/recording/python/requirements.txt b/global localization/recording/python/requirements.txt new file mode 100644 index 00000000..55499adc --- /dev/null +++ b/global localization/recording/python/requirements.txt @@ -0,0 +1,2 @@ +PyOpenGL +gpsdclient \ No newline at end of file diff --git a/object detection/birds eye viewer/cpp/include/GLViewer.hpp b/object detection/birds eye viewer/cpp/include/GLViewer.hpp index a18b178f..12526dec 100644 --- a/object detection/birds eye viewer/cpp/include/GLViewer.hpp +++ b/object detection/birds eye viewer/cpp/include/GLViewer.hpp @@ -106,14 +106,14 @@ class Shader { Shader() { } - Shader(GLchar* vs, GLchar* fs); + Shader(const GLchar* vs, const GLchar* fs); ~Shader(); GLuint getProgramId(); static const GLint ATTRIB_VERTICES_POS = 0; static const GLint ATTRIB_COLOR_POS = 1; private: - bool compile(GLuint &shaderId, GLenum type, GLchar* src); + bool compile(GLuint &shaderId, GLenum type, const GLchar* src); GLuint verterxId_; GLuint fragmentId_; GLuint programId_; diff --git a/object detection/birds eye viewer/cpp/src/GLViewer.cpp b/object detection/birds eye viewer/cpp/src/GLViewer.cpp index 95663279..1d1e9434 100644 --- a/object detection/birds eye viewer/cpp/src/GLViewer.cpp +++ b/object detection/birds eye viewer/cpp/src/GLViewer.cpp @@ -9,7 +9,7 @@ #define FADED_RENDERING const float grid_size = 10.0f; -GLchar* VERTEX_SHADER = +const GLchar* VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec4 in_Color;\n" @@ -20,7 +20,7 @@ GLchar* VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" "}"; -GLchar* FRAGMENT_SHADER = +const GLchar* FRAGMENT_SHADER = "#version 330 core\n" "in vec4 b_color;\n" "layout(location = 0) out vec4 
out_Color;\n" @@ -730,7 +730,7 @@ sl::Transform Simple3DObject::getModelMatrix() const { return tmp; } -Shader::Shader(GLchar* vs, GLchar* fs) { +Shader::Shader(const GLchar* vs, const GLchar* fs) { if (!compile(verterxId_, GL_VERTEX_SHADER, vs)) { std::cout << "ERROR: while compiling vertex shader" << std::endl; } @@ -778,7 +778,7 @@ GLuint Shader::getProgramId() { return programId_; } -bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { +bool Shader::compile(GLuint &shaderId, GLenum type, const GLchar* src) { shaderId = glCreateShader(type); if (shaderId == 0) { std::cout << "ERROR: shader type (" << type << ") does not exist" << std::endl; @@ -806,7 +806,7 @@ bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { return true; } -GLchar* POINTCLOUD_VERTEX_SHADER = +const GLchar* POINTCLOUD_VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec4 in_VertexRGBA;\n" "uniform mat4 u_mvpMatrix;\n" @@ -819,7 +819,7 @@ GLchar* POINTCLOUD_VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_VertexRGBA.xyz, 1);\n" "}"; -GLchar* POINTCLOUD_FRAGMENT_SHADER = +const GLchar* POINTCLOUD_FRAGMENT_SHADER = "#version 330 core\n" "in vec4 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" diff --git a/object detection/birds eye viewer/cpp/src/main.cpp b/object detection/birds eye viewer/cpp/src/main.cpp index 8a037f91..2622d25d 100644 --- a/object detection/birds eye viewer/cpp/src/main.cpp +++ b/object detection/birds eye viewer/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. 
// diff --git a/object detection/birds eye viewer/csharp/MainWindow.cs b/object detection/birds eye viewer/csharp/MainWindow.cs index ed378499..6f1d4242 100644 --- a/object detection/birds eye viewer/csharp/MainWindow.cs +++ b/object detection/birds eye viewer/csharp/MainWindow.cs @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // diff --git a/object detection/birds eye viewer/csharp/Properties/AssemblyInfo.cs b/object detection/birds eye viewer/csharp/Properties/AssemblyInfo.cs index 580aea52..19d0c1a1 100644 --- a/object detection/birds eye viewer/csharp/Properties/AssemblyInfo.cs +++ b/object detection/birds eye viewer/csharp/Properties/AssemblyInfo.cs @@ -10,7 +10,7 @@ [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("Tutorials")] -[assembly: AssemblyCopyright("Copyright © 2021")] +[assembly: AssemblyCopyright("Copyright © 2024")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] diff --git a/object detection/birds eye viewer/csharp/TrackingViewer.cs b/object detection/birds eye viewer/csharp/TrackingViewer.cs index 2ba9f3e0..fa9228e7 100644 --- a/object detection/birds eye viewer/csharp/TrackingViewer.cs +++ b/object detection/birds eye viewer/csharp/TrackingViewer.cs @@ -6,6 +6,7 @@ using sl; using OpenCvSharp; +using System.Windows.Forms; public enum TrackPointState { @@ -134,8 +135,6 @@ public static void render_2D(ref OpenCvSharp.Mat left_display, sl.float2 img_sca OpenCvSharp.Mat overlay = left_display.Clone(); OpenCvSharp.Rect roi_render = new OpenCvSharp.Rect(0, 0, left_display.Size().Width, left_display.Size().Height); - OpenCvSharp.Mat mask = new OpenCvSharp.Mat(left_display.Rows, left_display.Cols, OpenCvSharp.MatType.CV_8UC1); - int line_thickness = 2; for (int i = 0; i < objects.numObject; i++) @@ -153,6 +152,9 @@ public static void 
render_2D(ref OpenCvSharp.Mat left_display, sl.float2 img_sca Point bottom_right_corner = Utils.cvt(obj.boundingBox2D[2], img_scale); Point bottom_left_corner = Utils.cvt(obj.boundingBox2D[3], img_scale); + var width = (int)Math.Abs(top_right_corner.X - top_left_corner.X); + var height = (int)Math.Abs(bottom_left_corner.Y - top_left_corner.Y); + // Create of the 2 horizontal lines Cv2.Line(left_display, top_left_corner, top_right_corner, base_color, line_thickness); Cv2.Line(left_display, bottom_left_corner, bottom_right_corner, base_color, line_thickness); @@ -160,10 +162,18 @@ public static void render_2D(ref OpenCvSharp.Mat left_display, sl.float2 img_sca Utils.drawVerticalLine(ref left_display, bottom_left_corner, top_left_corner, base_color, line_thickness); Utils.drawVerticalLine(ref left_display, bottom_right_corner, top_right_corner, base_color, line_thickness); - // Scaled ROI - OpenCvSharp.Rect roi = new OpenCvSharp.Rect(top_left_corner.X, top_left_corner.Y, (int)top_right_corner.DistanceTo(top_left_corner), (int)bottom_right_corner.DistanceTo(top_right_corner)); - - overlay.SubMat(roi).SetTo(base_color); + if (render_mask) + { + // Scaled ROI + OpenCvSharp.Rect roi = new OpenCvSharp.Rect(top_left_corner.X, top_left_corner.Y, width, height); + sl.Mat mask = new sl.Mat(obj.mask); + OpenCvSharp.Mat tmp_mask = new OpenCvSharp.Mat(mask.GetHeight(), mask.GetWidth(), OpenCvSharp.MatType.CV_8UC1, mask.GetPtr()); + if (!tmp_mask.Empty()) + { + var mask_resized = tmp_mask.Resize(roi.Size); + overlay.SubMat(roi).SetTo(base_color, mask_resized); + } + } sl.float2 position_image = getImagePosition(obj.boundingBox2D, img_scale); Cv2.PutText(left_display, obj.label.ToString(), new Point(position_image.x - 20, position_image.y - 12), HersheyFonts.HersheyComplexSmall, 0.5f, new Scalar(255, 255, 255, 255), 1); diff --git a/object detection/concurrent detections/cpp/include/GLViewer.hpp b/object detection/concurrent detections/cpp/include/GLViewer.hpp index 
b594d3a8..fdaca9ca 100644 --- a/object detection/concurrent detections/cpp/include/GLViewer.hpp +++ b/object detection/concurrent detections/cpp/include/GLViewer.hpp @@ -106,14 +106,14 @@ class Shader { Shader() { } - Shader(GLchar* vs, GLchar* fs); + Shader(const GLchar* vs, const GLchar* fs); ~Shader(); GLuint getProgramId(); static const GLint ATTRIB_VERTICES_POS = 0; static const GLint ATTRIB_COLOR_POS = 1; private: - bool compile(GLuint &shaderId, GLenum type, GLchar* src); + bool compile(GLuint &shaderId, GLenum type, const GLchar* src); GLuint verterxId_; GLuint fragmentId_; GLuint programId_; diff --git a/object detection/concurrent detections/cpp/src/GLViewer.cpp b/object detection/concurrent detections/cpp/src/GLViewer.cpp index 259f93fe..43465be3 100644 --- a/object detection/concurrent detections/cpp/src/GLViewer.cpp +++ b/object detection/concurrent detections/cpp/src/GLViewer.cpp @@ -9,7 +9,7 @@ #define FADED_RENDERING const float grid_size = 10.0f; -GLchar* VERTEX_SHADER = +const GLchar* VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec4 in_Color;\n" @@ -20,7 +20,7 @@ GLchar* VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" "}"; -GLchar* FRAGMENT_SHADER = +const GLchar* FRAGMENT_SHADER = "#version 330 core\n" "in vec4 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" @@ -734,7 +734,7 @@ sl::Transform Simple3DObject::getModelMatrix() const { return tmp; } -Shader::Shader(GLchar* vs, GLchar* fs) { +Shader::Shader(const GLchar* vs, const GLchar* fs) { if (!compile(verterxId_, GL_VERTEX_SHADER, vs)) { std::cout << "ERROR: while compiling vertex shader" << std::endl; } @@ -782,7 +782,7 @@ GLuint Shader::getProgramId() { return programId_; } -bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { +bool Shader::compile(GLuint &shaderId, GLenum type, const GLchar* src) { shaderId = glCreateShader(type); if (shaderId == 0) { std::cout << "ERROR: shader type (" 
<< type << ") does not exist" << std::endl; @@ -810,7 +810,7 @@ bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { return true; } -GLchar* POINTCLOUD_VERTEX_SHADER = +const GLchar* POINTCLOUD_VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec4 in_VertexRGBA;\n" "uniform mat4 u_mvpMatrix;\n" @@ -823,7 +823,7 @@ GLchar* POINTCLOUD_VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_VertexRGBA.xyz, 1);\n" "}"; -GLchar* POINTCLOUD_FRAGMENT_SHADER = +const GLchar* POINTCLOUD_FRAGMENT_SHADER = "#version 330 core\n" "in vec4 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" diff --git a/object detection/concurrent detections/cpp/src/TrackingViewer.cpp b/object detection/concurrent detections/cpp/src/TrackingViewer.cpp index b28f64e8..12894af2 100644 --- a/object detection/concurrent detections/cpp/src/TrackingViewer.cpp +++ b/object detection/concurrent detections/cpp/src/TrackingViewer.cpp @@ -31,7 +31,7 @@ void draw(std::vector& bb, sl::Mat& mask, float z, int id, sl::OBJECT // scaled ROI cv::Rect roi(top_left_corner, bottom_right_corner); // Use isInit() to check if mask is available - if (mask.isInit()) { + if (mask.isInit() && mask.getResolution().area() > 10 && roi.size().area() >0) { // Here, obj.mask is the object segmentation mask inside the object bbox, computed on the native resolution // The resize is needed to get the mask on the display resolution cv::Mat tmp_mask; diff --git a/object detection/concurrent detections/cpp/src/main.cpp b/object detection/concurrent detections/cpp/src/main.cpp index 691a51b2..2067341d 100644 --- a/object detection/concurrent detections/cpp/src/main.cpp +++ b/object detection/concurrent detections/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. 
// @@ -156,8 +156,6 @@ int main(int argc, char **argv) { requested_low_res_w = min(720, (int)camera_config.resolution.width); Resolution pc_resolution(requested_low_res_w, requested_low_res_w / image_aspect_ratio); - std::cout << "Res " << display_resolution.width << " " << display_resolution.height << " " << pc_resolution.width << " " << pc_resolution.height << std::endl; - auto camera_parameters = zed.getCameraInformation(pc_resolution).camera_configuration.calibration_parameters.left_cam; Mat point_cloud(pc_resolution, MAT_TYPE::F32_C4, MEM::GPU); GLViewer viewer; diff --git a/object detection/concurrent detections/python/README.md b/object detection/concurrent detections/python/README.md new file mode 100644 index 00000000..447d1508 --- /dev/null +++ b/object detection/concurrent detections/python/README.md @@ -0,0 +1,26 @@ +# ZED SDK - Multi Instance Object Detection + +This sample shows how to detect and track objects in space as well as human body tracking. + +## Getting Started + - Get the latest [ZED SDK](https://www.stereolabs.com/developers/release/) + - Check the [Documentation](https://www.stereolabs.com/docs/) + +## Build the program + - Build for [Windows](https://www.stereolabs.com/docs/app-development/cpp/windows/) + - Build for [Linux/Jetson](https://www.stereolabs.com/docs/app-development/cpp/linux/) + +## Run the program +*NOTE: The ZED v1 is not compatible with this module* +- Navigate to the build directory and launch the executable +- Or open a terminal in the build directory and run the sample : + + python concurrent_object_detection_body_tracking.py + +### Features + - The camera point cloud is displayed in a 3D OpenGL view + - 3D bounding boxes and human skeletons around detected objects are drawn + - Objects classes and confidences can be changed + +## Support +If you need assistance go to our Community site at https://community.stereolabs.com/ diff --git a/object detection/concurrent 
detections/python/concurrent_object_detection_body_tracking.py b/object detection/concurrent detections/python/concurrent_object_detection_body_tracking.py new file mode 100644 index 00000000..89d4fac8 --- /dev/null +++ b/object detection/concurrent detections/python/concurrent_object_detection_body_tracking.py @@ -0,0 +1,190 @@ +######################################################################## +# +# Copyright (c) 2024, STEREOLABS. +# +# All rights reserved. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +######################################################################## + +""" + This sample demonstrates how to capture 3D point cloud and detected objects + with the ZED SDK and display the result in an OpenGL window. +""" + +import sys +import numpy as np +import cv2 +import pyzed.sl as sl + +import ogl_viewer.viewer as gl +import cv_viewer.tracking_viewer as cv_viewer + + +## +# Variable to enable/disable the batch option in Object Detection module +# Batching system allows to reconstruct trajectories from the object detection module by adding Re-Identification / Appareance matching. 
+# For example, if an object is not seen during some time, it can be re-ID to a previous ID if the matching score is high enough +# Use with caution if image retention is activated (See batch_system_handler.py) : +# --> Images will only appear if an object is detected since the batching system is based on OD detection. +USE_BATCHING = False + +if __name__ == "__main__": + print("Running object detection ... Press 'Esc' to quit") + zed = sl.Camera() + + # Create a InitParameters object and set configuration parameters + init_params = sl.InitParameters() + init_params.coordinate_units = sl.UNIT.METER + init_params.coordinate_system = sl.COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP + init_params.depth_mode = sl.DEPTH_MODE.ULTRA + init_params.depth_maximum_distance = 20 + is_playback = False # Defines if an SVO is used + + # If applicable, use the SVO given as parameter + # Otherwise use ZED live stream + if len(sys.argv) == 2: + filepath = sys.argv[1] + print("Using SVO file: {0}".format(filepath)) + init_params.svo_real_time_mode = True + init_params.set_from_svo_file(filepath) + is_playback = True + + status = zed.open(init_params) + if status != sl.ERROR_CODE.SUCCESS: + print(repr(status)) + exit() + + + # Enable positional tracking module + positional_tracking_parameters = sl.PositionalTrackingParameters() + # If the camera is static in space, enabling this setting below provides better depth quality and faster computation + # positional_tracking_parameters.set_as_static = True + zed.enable_positional_tracking(positional_tracking_parameters) + + # Enable object detection module + obj_param = sl.ObjectDetectionParameters() + obj_param.instance_module_id = 0 + obj_param.detection_model = sl.OBJECT_DETECTION_MODEL.MULTI_CLASS_BOX_FAST + # Defines if the object detection will track objects across images flow. 
+ obj_param.enable_tracking = True + zed.enable_object_detection(obj_param) + + + + body_param = sl.BodyTrackingParameters() + body_param.enable_tracking = True # Track people across images flow + body_param.enable_body_fitting = False # Smooth skeleton move + body_param.detection_model = sl.BODY_TRACKING_MODEL.HUMAN_BODY_FAST + body_param.body_format = sl.BODY_FORMAT.BODY_18 # Choose the BODY_FORMAT you wish to use + body_param.instance_module_id = 1 + zed.enable_body_tracking(body_param) + + + camera_infos = zed.get_camera_information() + # Create OpenGL viewer + viewer = gl.GLViewer() + point_cloud_res = sl.Resolution(min(camera_infos.camera_configuration.resolution.width, 720), min(camera_infos.camera_configuration.resolution.height, 404)) + point_cloud_render = sl.Mat() + viewer.init(camera_infos.camera_model, point_cloud_res, obj_param.enable_tracking) + + # Configure object detection runtime parameters + obj_runtime_param = sl.ObjectDetectionRuntimeParameters() + detection_confidence = 60 + obj_runtime_param.detection_confidence_threshold = detection_confidence + # To select a set of specific object classes + obj_runtime_param.object_class_filter = [sl.OBJECT_CLASS.PERSON] + # To set a specific threshold + obj_runtime_param.object_class_detection_confidence_threshold = {sl.OBJECT_CLASS.PERSON: detection_confidence} + + # Runtime parameters + runtime_params = sl.RuntimeParameters() + runtime_params.confidence_threshold = 50 + + # Create objects that will store SDK outputs + point_cloud = sl.Mat(point_cloud_res.width, point_cloud_res.height, sl.MAT_TYPE.F32_C4, sl.MEM.CPU) + objects = sl.Objects() + + body_runtime_param = sl.BodyTrackingRuntimeParameters() + body_runtime_param.detection_confidence_threshold = 40 + bodies = sl.Bodies() + + image_left = sl.Mat() + + # Utilities for 2D display + display_resolution = sl.Resolution(min(camera_infos.camera_configuration.resolution.width, 1280), min(camera_infos.camera_configuration.resolution.height, 720)) + 
image_scale = [display_resolution.width / camera_infos.camera_configuration.resolution.width + , display_resolution.height / camera_infos.camera_configuration.resolution.height] + image_left_ocv = np.full((display_resolution.height, display_resolution.width, 4), [245, 239, 239,255], np.uint8) + + # Camera pose + cam_w_pose = sl.Pose() + cam_c_pose = sl.Pose() + + quit_app = False + + while(viewer.is_available() and (quit_app == False)): + if zed.grab(runtime_params) == sl.ERROR_CODE.SUCCESS: + # Retrieve objects + returned_state = zed.retrieve_objects(objects, obj_runtime_param, obj_param.instance_module_id) + returned_state2 = zed.retrieve_bodies(bodies, body_runtime_param, body_param.instance_module_id) + + # Retrieve image + zed.retrieve_image(image_left, sl.VIEW.LEFT, sl.MEM.CPU, display_resolution) + image_render_left = image_left.get_data() + np.copyto(image_left_ocv,image_render_left) + + if (returned_state == sl.ERROR_CODE.SUCCESS and objects.is_new): + # Retrieve point cloud + zed.retrieve_measure(point_cloud, sl.MEASURE.XYZRGBA,sl.MEM.CPU, point_cloud_res) + point_cloud.copy_to(point_cloud_render) + # Retrieve image + zed.retrieve_image(image_left, sl.VIEW.LEFT, sl.MEM.CPU, display_resolution) + image_render_left = image_left.get_data() + # Get camera pose + zed.get_position(cam_w_pose, sl.REFERENCE_FRAME.WORLD) + + update_render_view = True + update_3d_view = True + + # 3D rendering + if update_3d_view: + viewer.updateData(point_cloud_render, objects) + + # 2D rendering + if update_render_view: + cv_viewer.render_2D(image_left_ocv, image_scale, objects, obj_param.enable_tracking) + + if (returned_state2 == sl.ERROR_CODE.SUCCESS and bodies.is_new): + cv_viewer.render_2D_SK(image_left_ocv, image_scale, bodies.body_list, obj_param.enable_tracking, sl.BODY_FORMAT.BODY_18) + + cv2.imshow("ZED | Body tracking and Object detection", image_left_ocv) + cv2.waitKey(10) + + if (is_playback and (zed.get_svo_position() == zed.get_svo_number_of_frames()-1)): + 
print("End of SVO") + quit_app = True + + + cv2.destroyAllWindows() + viewer.exit() + image_left.free(sl.MEM.CPU) + point_cloud.free(sl.MEM.CPU) + point_cloud_render.free(sl.MEM.CPU) + + # Disable modules and close camera + zed.disable_object_detection() + zed.disable_positional_tracking() + + zed.close() \ No newline at end of file diff --git a/object detection/concurrent detections/python/cv_viewer/tracking_viewer.py b/object detection/concurrent detections/python/cv_viewer/tracking_viewer.py new file mode 100644 index 00000000..1e438d8e --- /dev/null +++ b/object detection/concurrent detections/python/cv_viewer/tracking_viewer.py @@ -0,0 +1,278 @@ +import cv2 +import numpy as np + +from cv_viewer.utils import * +import pyzed.sl as sl +import math +from collections import deque + +#---------------------------------------------------------------------- +# 2D LEFT VIEW +#---------------------------------------------------------------------- + + +def cvt(pt, scale): + ''' + Function that scales point coordinates + ''' + out = [pt[0]*scale[0], pt[1]*scale[1]] + return out + +def get_image_position(bounding_box_image, img_scale): + out_position = np.zeros(2) + out_position[0] = (bounding_box_image[0][0] + (bounding_box_image[2][0] - bounding_box_image[0][0])*0.5) * img_scale[0] + out_position[1] = (bounding_box_image[0][1] + (bounding_box_image[2][1] - bounding_box_image[0][1])*0.5) * img_scale[1] + return out_position + +def render_2D(left_display, img_scale, objects, is_tracking_on): + overlay = left_display.copy() + + line_thickness = 2 + for obj in objects.object_list: + if(render_object(obj, is_tracking_on)): + base_color = generate_color_id_u(obj.id) + # Display image scaled 2D bounding box + top_left_corner = cvt(obj.bounding_box_2d[0], img_scale) + top_right_corner = cvt(obj.bounding_box_2d[1], img_scale) + bottom_right_corner = cvt(obj.bounding_box_2d[2], img_scale) + bottom_left_corner = cvt(obj.bounding_box_2d[3], img_scale) + + # Creation of the 2 
horizontal lines + cv2.line(left_display, (int(top_left_corner[0]), int(top_left_corner[1])), (int(top_right_corner[0]), int(top_right_corner[1])), base_color, line_thickness) + cv2.line(left_display, (int(bottom_left_corner[0]), int(bottom_left_corner[1])), (int(bottom_right_corner[0]), int(bottom_right_corner[1])), base_color, line_thickness) + # Creation of 2 vertical lines + draw_vertical_line(left_display, bottom_left_corner, top_left_corner, base_color, line_thickness) + draw_vertical_line(left_display, bottom_right_corner, top_right_corner, base_color, line_thickness) + + # Scaled ROI + roi_height = int(top_right_corner[0] - top_left_corner[0]) + roi_width = int(bottom_left_corner[1] - top_left_corner[1]) + overlay_roi = overlay[int(top_left_corner[1]):int(top_left_corner[1] + roi_width) + , int(top_left_corner[0]):int(top_left_corner[0] + roi_height)] + + overlay_roi[:,:,:] = base_color + + + # Display Object label as text + position_image = get_image_position(obj.bounding_box_2d, img_scale) + text_position = (int(position_image[0] - 20), int(position_image[1] - 12)) + text = str(obj.label) + text_color = (255,255,255,255) + cv2.putText(left_display, text, text_position, cv2.FONT_HERSHEY_COMPLEX_SMALL, 0.5, text_color, 1) + + # Diplay Object distance to camera as text + if(np.isfinite(obj.position[2])): + text = str(round(abs(obj.position[2]), 1)) + "M" + text_position = (int(position_image[0] - 20), int(position_image[1])) + cv2.putText(left_display, text, text_position, cv2.FONT_HERSHEY_COMPLEX_SMALL, 0.5, text_color, 1) + + # Here, overlay is as the left image, but with opaque masks on each detected objects + cv2.addWeighted(left_display, 0.7, overlay, 0.3, 0.0, left_display) + + +def render_sk(left_display, img_scale, obj, color, BODY_BONES): + # Draw skeleton bones + for part in BODY_BONES: + kp_a = cvt(obj.keypoint_2d[part[0].value], img_scale) + kp_b = cvt(obj.keypoint_2d[part[1].value], img_scale) + # Check that the keypoints are inside the image + 
if(kp_a[0] < left_display.shape[1] and kp_a[1] < left_display.shape[0] + and kp_b[0] < left_display.shape[1] and kp_b[1] < left_display.shape[0] + and kp_a[0] > 0 and kp_a[1] > 0 and kp_b[0] > 0 and kp_b[1] > 0 ): + cv2.line(left_display, (int(kp_a[0]), int(kp_a[1])), (int(kp_b[0]), int(kp_b[1])), color, 1, cv2.LINE_AA) + + # Skeleton joints + for kp in obj.keypoint_2d: + cv_kp = cvt(kp, img_scale) + if(cv_kp[0] < left_display.shape[1] and cv_kp[1] < left_display.shape[0]): + cv2.circle(left_display, (int(cv_kp[0]), int(cv_kp[1])), 3, color, -1) + + +def render_2D_SK(left_display, img_scale, objects, is_tracking_on, body_format): + ''' + Parameters + left_display (np.array): numpy array containing image data + img_scale (list[float]) + objects (list[sl.ObjectData]) + ''' + overlay = left_display.copy() + + # Render skeleton joints and bones + for obj in objects: + if render_object(obj, is_tracking_on): + if len(obj.keypoint_2d) > 0: + color = generate_color_id_u(obj.id) + if body_format == sl.BODY_FORMAT.BODY_18: + render_sk(left_display, img_scale, obj, color, sl.BODY_18_BONES) + elif body_format == sl.BODY_FORMAT.BODY_34: + render_sk(left_display, img_scale, obj, color, sl.BODY_34_BONES) + elif body_format == sl.BODY_FORMAT.BODY_38: + render_sk(left_display, img_scale, obj, color, sl.BODY_38_BONES) + #elif body_format == sl.BODY_FORMAT.BODY_70: + # render_sk(left_display, img_scale, obj, color, sl.BODY_70_BONES) + + cv2.addWeighted(left_display, 0.9, overlay, 0.1, 0.0, left_display) + +#---------------------------------------------------------------------- +# 2D TRACKING VIEW +#---------------------------------------------------------------------- + +class TrackingViewer: + def __init__(self, res, fps, D_max): + # Window size + self.window_width = res.width + self.window_height = res.height + + # Visualisation settings + self.has_background_ready = False + self.background = np.full((self.window_height, self.window_width, 4), [245, 239, 239,255], np.uint8) + + # 
Invert Z due to Y axis of ocv window + # Show objects between [z_min, 0] (z_min < 0) + self.z_min = -D_max + # Show objects between [x_min, x_max] + self.x_min = self.z_min + self.x_max = -self.x_min + + # Conversion from world position to pixel coordinates + self.x_step = (self.x_max - self.x_min) / self.window_width + self.z_step = abs(self.z_min) / (self.window_height) + + self.camera_calibration = sl.CalibrationParameters() + + # List of alive tracks + self.tracklets = [] + + def set_camera_calibration(self, calib): + self.camera_calibration = calib + self.has_background_ready = False + + def generate_view(self, objects, current_camera_pose, tracking_view, tracking_enabled): + # To get position in WORLD reference + for obj in objects.object_list: + pos = obj.position + tmp_pos = sl.Translation() + tmp_pos.init_vector(pos[0],pos[1],pos[2]) + new_pos = (tmp_pos * current_camera_pose.get_orientation()).get() + current_camera_pose.get_translation().get() + obj.position = np.array([new_pos[0], new_pos[1], new_pos[2]]) + + # Initialize visualisation + if(not self.has_background_ready): + self.generate_background() + + np.copyto(tracking_view, self.background,'no') + + if(tracking_enabled): + # First add new points and remove the ones that are too old + current_timestamp = objects.timestamp.get_seconds() + self.add_to_tracklets(objects,current_timestamp) + self.prune_old_points(current_timestamp) + + # Draw all tracklets + self.draw_tracklets(tracking_view, current_camera_pose) + else: + self.draw_points(objects.object_list, tracking_view, current_camera_pose) + + def add_to_tracklets(self, objects, current_timestamp): + for obj in objects.object_list: + if((obj.tracking_state != sl.OBJECT_TRACKING_STATE.OK) or (not np.isfinite(obj.position[0])) or (obj.id < 0)): + continue + + new_object = True + for i in range(len(self.tracklets)): + if self.tracklets[i].id == obj.id: + new_object = False + self.tracklets[i].add_point(obj, current_timestamp) + + # In case this 
object does not belong to existing tracks + if (new_object): + self.tracklets.append(Tracklet(obj, obj.label, current_timestamp)) + + def prune_old_points(self, ts): + track_to_delete = [] + for it in self.tracklets: + if((ts - it.last_timestamp) > (3)): + track_to_delete.append(it) + + for it in track_to_delete: + self.tracklets.remove(it) + +#---------------------------------------------------------------------- +# Drawing functions +#---------------------------------------------------------------------- + + def draw_points(self, objects, tracking_view, current_camera_pose): + for obj in objects: + if(not np.isfinite(obj.position[0])): + continue + clr = generate_color_id_u(obj.id) + pt = TrackPoint(obj.position) + cv_start_point = self.to_cv_point(pt.get_xyz(), current_camera_pose) + cv2.circle(tracking_view, (int(cv_start_point[0]), int(cv_start_point[1])), 6, clr, 2) + + def draw_tracklets(self, tracking_view, current_camera_pose): + for track in self.tracklets: + clr = generate_color_id_u(track.id) + cv_start_point = self.to_cv_point(track.positions[0].get_xyz(), current_camera_pose) + for point_index in range(1, len(track.positions)): + cv_end_point = self.to_cv_point(track.positions[point_index].get_xyz(), current_camera_pose) + cv2.line(tracking_view, (int(cv_start_point[0]), int(cv_start_point[1])), (int(cv_end_point[0]), int(cv_end_point[1])), clr, 3) + cv_start_point = cv_end_point + cv2.circle(tracking_view, (int(cv_start_point[0]), int(cv_start_point[1])), 6, clr, -1) + + def generate_background(self): + camera_color = [255, 230, 204, 255] + + # Get FOV intersection with window borders + fov = 2.0 * math.atan(self.camera_calibration.left_cam.image_size.width / (2.0 * self.camera_calibration.left_cam.fx)) + + z_at_x_max = self.x_max / math.tan(fov / 2.0) + left_intersection_pt = self.to_cv_point(self.x_min, -z_at_x_max) + right_intersection_pt = self.to_cv_point(self.x_max, -z_at_x_max) + + # Drawing camera + camera_pts = np.array([left_intersection_pt 
+ , right_intersection_pt + , [int(self.window_width / 2), self.window_height]] + , dtype=np.int32) + cv2.fillConvexPoly(self.background, camera_pts, camera_color) + + def to_cv_point(self, x, z): + out = [] + if isinstance(x, float) and isinstance(z, float): + out = [int((x - self.x_min) / self.x_step), int((z - self.z_min) / self.z_step)] + elif isinstance(x, list) and isinstance(z, sl.Pose): + # Go to camera current pose + rotation = z.get_rotation_matrix() + rotation.inverse() + tmp = x - (z.get_translation() * rotation.get_orientation()).get() + new_position = sl.Translation() + new_position.init_vector(tmp[0],tmp[1],tmp[2]) + out = [int(((new_position.get()[0] - self.x_min)/self.x_step) + 0.5), int(((new_position.get()[2] - self.z_min)/self.z_step) + 0.5)] + elif isinstance(x, TrackPoint) and isinstance(z, sl.Pose): + pos = x.get_xyz() + out = self.to_cv_point(pos, z) + else: + print("Unhandled argument type") + return out + + +class TrackPoint: + def __init__(self, pos_): + self.x = pos_[0] + self.y = pos_[1] + self.z = pos_[2] + + def get_xyz(self): + return [self.x, self.y, self.z] + +class Tracklet: + def __init__(self, obj_, type_, timestamp_): + self.id = obj_.id + self.object_type = type_ + self.positions = deque() + self.add_point(obj_, timestamp_) + + def add_point(self, obj_, timestamp_): + self.positions.append(TrackPoint(obj_.position)) + self.last_timestamp = timestamp_ diff --git a/object detection/concurrent detections/python/cv_viewer/utils.py b/object detection/concurrent detections/python/cv_viewer/utils.py new file mode 100644 index 00000000..37f3de66 --- /dev/null +++ b/object detection/concurrent detections/python/cv_viewer/utils.py @@ -0,0 +1,35 @@ +import cv2 +import numpy as np +import pyzed.sl as sl + +id_colors = [(232, 176,59), + (175, 208,25), + (102, 205,105), + (185, 0 ,255), + (99, 107,252)] + + +def render_object(object_data, is_tracking_on): + if is_tracking_on: + return (object_data.tracking_state == 
sl.OBJECT_TRACKING_STATE.OK) + else: + return ((object_data.tracking_state == sl.OBJECT_TRACKING_STATE.OK) or (object_data.tracking_state == sl.OBJECT_TRACKING_STATE.OFF)) + +def generate_color_id_u(idx): + arr = [] + if(idx < 0): + arr = [236,184,36,255] + else: + color_idx = idx % 5 + arr = [id_colors[color_idx][0], id_colors[color_idx][1], id_colors[color_idx][2], 255] + return arr + +def draw_vertical_line(left_display, start_pt, end_pt, clr, thickness): + n_steps = 7 + pt1 = [((n_steps - 1) * start_pt[0] + end_pt[0]) / n_steps + , ((n_steps - 1) * start_pt[1] + end_pt[1]) / n_steps] + pt4 = [(start_pt[0] + (n_steps - 1) * end_pt[0]) / n_steps + , (start_pt[1] + (n_steps - 1) * end_pt[1]) / n_steps] + + cv2.line(left_display, (int(start_pt[0]),int(start_pt[1])), (int(pt1[0]), int(pt1[1])), clr, thickness) + cv2.line(left_display, (int(pt4[0]), int(pt4[1])), (int(end_pt[0]),int(end_pt[1])), clr, thickness) \ No newline at end of file diff --git a/geotracking/playback/python/ogl_viewer/tracking_viewer.py b/object detection/concurrent detections/python/ogl_viewer/viewer.py similarity index 52% rename from geotracking/playback/python/ogl_viewer/tracking_viewer.py rename to object detection/concurrent detections/python/ogl_viewer/viewer.py index 21b695d5..2b374256 100644 --- a/geotracking/playback/python/ogl_viewer/tracking_viewer.py +++ b/object detection/concurrent detections/python/ogl_viewer/viewer.py @@ -8,7 +8,9 @@ from threading import Lock import numpy as np import array +from enum import IntEnum +from cv_viewer.utils import * import ogl_viewer.zed_model as zm import pyzed.sl as sl @@ -33,9 +35,36 @@ } """ -def safe_glutBitmapString(font, str_): - for i in range(len(str_)): - glutBitmapCharacter(GLUT_BITMAP_HELVETICA_18, ord(str_[i])) +POINTCLOUD_VERTEX_SHADER =""" +#version 330 core +layout(location = 0) in vec4 in_VertexRGBA; +uniform mat4 u_mvpMatrix; +out vec4 b_color; +void main() { + uint vertexColor = floatBitsToUint(in_VertexRGBA.w); + vec3 clr_int = 
vec3((vertexColor & uint(0x000000FF)), (vertexColor & uint(0x0000FF00)) >> 8, (vertexColor & uint(0x00FF0000)) >> 16); + b_color = vec4(clr_int.r / 255.0f, clr_int.g / 255.0f, clr_int.b / 255.0f, 1.f); + gl_Position = u_mvpMatrix * vec4(in_VertexRGBA.xyz, 1); +} +""" + +POINTCLOUD_FRAGMENT_SHADER = """ +#version 330 core +in vec4 b_color; +layout(location = 0) out vec4 out_Color; +void main() { + out_Color = b_color; +} +""" + +M_PI = 3.1415926 + +GRID_SIZE = 9.0 + +def generate_color_id(_idx): + clr = np.divide(generate_color_id_u(_idx), 255.0) + clr[0], clr[2] = clr[2], clr[0] + return clr class Shader: def __init__(self, _vs, _fs): @@ -82,15 +111,13 @@ def get_program_id(self): class Simple3DObject: - def __init__(self, _is_static): - self.vaoID = 0 + def __init__(self, _is_static, pts_size = 3, clr_size = 3): + self.is_init = False self.drawing_type = GL_TRIANGLES self.is_static = _is_static - self.elementbufferSize = 0 - - self.vertices = array.array('f') - self.colors = array.array('f') - self.indices = array.array('I') + self.clear() + self.pt_type = pts_size + self.clr_type = clr_size def add_pt(self, _pts): # _pts [x,y,z] for pt in _pts: @@ -104,27 +131,214 @@ def add_point_clr(self, _pt, _clr): self.add_pt(_pt) self.add_clr(_clr) self.indices.append(len(self.indices)) - - def add_line(self, _p1, _p2, _clr): + + def add_line(self, _p1, _p2, _clr) : self.add_point_clr(_p1, _clr) self.add_point_clr(_p2, _clr) + def addFace(self, p1, p2, p3, clr) : + self.add_point_clr(p1, clr) + self.add_point_clr(p2, clr) + self.add_point_clr(p3, clr) + + def add_full_edges(self, _pts, _clr): + start_id = int(len(self.vertices) / 3) + + for i in range(len(_pts)): + self.add_pt(_pts[i]) + self.add_clr(_clr) + + box_links_top = np.array([0, 1, 1, 2, 2, 3, 3, 0]) + i = 0 + while i < box_links_top.size: + self.indices.append(start_id + box_links_top[i]) + self.indices.append(start_id + box_links_top[i+1]) + i = i + 2 + + box_links_bottom = np.array([4, 5, 5, 6, 6, 7, 7, 4]) + i 
= 0 + while i < box_links_bottom.size: + self.indices.append(start_id + box_links_bottom[i]) + self.indices.append(start_id + box_links_bottom[i+1]) + i = i + 2 + + def __add_single_vertical_line(self, _top_pt, _bottom_pt, _clr): + current_pts = np.array( + [_top_pt, + ((GRID_SIZE - 1) * np.array(_top_pt) + np.array(_bottom_pt)) / GRID_SIZE, + ((GRID_SIZE - 2) * np.array(_top_pt) + np.array(_bottom_pt) * 2) / GRID_SIZE, + (2 * np.array(_top_pt) + np.array(_bottom_pt) * (GRID_SIZE - 2)) / GRID_SIZE, + (np.array(_top_pt) + np.array(_bottom_pt) * (GRID_SIZE - 1)) / GRID_SIZE, + _bottom_pt + ], np.float32) + start_id = int(len(self.vertices) / 3) + for i in range(len(current_pts)): + self.add_pt(current_pts[i]) + if (i == 2 or i == 3): + _clr[3] = 0 + else: + _clr[3] = 0.75 + self.add_clr(_clr) + + box_links = np.array([0, 1, 1, 2, 2, 3, 3, 4, 4, 5]) + i = 0 + while i < box_links.size: + self.indices.append(start_id + box_links[i]) + self.indices.append(start_id + box_links[i+1]) + i = i + 2 + + def add_vertical_edges(self, _pts, _clr): + self.__add_single_vertical_line(_pts[0], _pts[4], _clr) + self.__add_single_vertical_line(_pts[1], _pts[5], _clr) + self.__add_single_vertical_line(_pts[2], _pts[6], _clr) + self.__add_single_vertical_line(_pts[3], _pts[7], _clr) + + def add_top_face(self, _pts, _clr): + _clr[3] = 0.5 + for pt in _pts: + self.add_point_clr(pt, _clr) + + def __add_quad(self, _quad_pts, _alpha1, _alpha2, _clr): + for i in range(len(_quad_pts)): + self.add_pt(_quad_pts[i]) + if i < 2: + _clr[3] = _alpha1 + else: + _clr[3] = _alpha2 + self.add_clr(_clr) + + self.indices.append(len(self.indices)) + self.indices.append(len(self.indices)) + self.indices.append(len(self.indices)) + self.indices.append(len(self.indices)) + + def add_vertical_faces(self, _pts, _clr): + # For each face, we need to add 4 quads (the first 2 indexes are always the top points of the quad) + quads = [[0, 3, 7, 4] # Front face + , [3, 2, 6, 7] # Right face + , [2, 1, 5, 6] # Back face 
+ , [1, 0, 4, 5]] # Left face + + alpha = .5 + + # Create gradually fading quads + for quad in quads: + quad_pts_1 = [ + _pts[quad[0]], + _pts[quad[1]], + ((GRID_SIZE - 0.5) * np.array(_pts[quad[1]]) + 0.5 * np.array(_pts[quad[2]])) / GRID_SIZE, + ((GRID_SIZE - 0.5) * np.array(_pts[quad[0]]) + 0.5 * np.array(_pts[quad[3]])) / GRID_SIZE + ] + self.__add_quad(quad_pts_1, alpha, alpha, _clr) + + quad_pts_2 = [ + ((GRID_SIZE - 0.5) * np.array(_pts[quad[0]]) + 0.5 * np.array(_pts[quad[3]])) / GRID_SIZE, + ((GRID_SIZE - 0.5) * np.array(_pts[quad[1]]) + 0.5 * np.array(_pts[quad[2]])) / GRID_SIZE, + ((GRID_SIZE - 1.0) * np.array(_pts[quad[1]]) + np.array(_pts[quad[2]])) / GRID_SIZE, + ((GRID_SIZE - 1.0) * np.array(_pts[quad[0]]) + np.array(_pts[quad[3]])) / GRID_SIZE + ] + self.__add_quad(quad_pts_2, alpha, 2 * alpha / 3, _clr) + + quad_pts_3 = [ + ((GRID_SIZE - 1.0) * np.array(_pts[quad[0]]) + np.array(_pts[quad[3]])) / GRID_SIZE, + ((GRID_SIZE - 1.0) * np.array(_pts[quad[1]]) + np.array(_pts[quad[2]])) / GRID_SIZE, + ((GRID_SIZE - 1.5) * np.array(_pts[quad[1]]) + 1.5 * np.array(_pts[quad[2]])) / GRID_SIZE, + ((GRID_SIZE - 1.5) * np.array(_pts[quad[0]]) + 1.5 * np.array(_pts[quad[3]])) / GRID_SIZE + ] + self.__add_quad(quad_pts_3, 2 * alpha / 3, alpha / 3, _clr) + + quad_pts_4 = [ + ((GRID_SIZE - 1.5) * np.array(_pts[quad[0]]) + 1.5 * np.array(_pts[quad[3]])) / GRID_SIZE, + ((GRID_SIZE - 1.5) * np.array(_pts[quad[1]]) + 1.5 * np.array(_pts[quad[2]])) / GRID_SIZE, + ((GRID_SIZE - 2.0) * np.array(_pts[quad[1]]) + 2.0 * np.array(_pts[quad[2]])) / GRID_SIZE, + ((GRID_SIZE - 2.0) * np.array(_pts[quad[0]]) + 2.0 * np.array(_pts[quad[3]])) / GRID_SIZE + ] + self.__add_quad(quad_pts_4, alpha / 3, 0.0, _clr) + + quad_pts_5 = [ + (np.array(_pts[quad[1]]) * 2.0 + (GRID_SIZE - 2.0) * np.array(_pts[quad[2]])) / GRID_SIZE, + (np.array(_pts[quad[0]]) * 2.0 + (GRID_SIZE - 2.0) * np.array(_pts[quad[3]])) / GRID_SIZE, + (np.array(_pts[quad[0]]) * 1.5 + (GRID_SIZE - 1.5) * 
np.array(_pts[quad[3]])) / GRID_SIZE, + (np.array(_pts[quad[1]]) * 1.5 + (GRID_SIZE - 1.5) * np.array(_pts[quad[2]])) / GRID_SIZE + ] + self.__add_quad(quad_pts_5, 0.0, alpha / 3, _clr) + + quad_pts_6 = [ + (np.array(_pts[quad[1]]) * 1.5 + (GRID_SIZE - 1.5) * np.array(_pts[quad[2]])) / GRID_SIZE, + (np.array(_pts[quad[0]]) * 1.5 + (GRID_SIZE - 1.5) * np.array(_pts[quad[3]])) / GRID_SIZE, + (np.array(_pts[quad[0]]) + (GRID_SIZE - 1.0) * np.array(_pts[quad[3]])) / GRID_SIZE, + (np.array(_pts[quad[1]]) + (GRID_SIZE - 1.0) * np.array(_pts[quad[2]])) / GRID_SIZE + ] + self.__add_quad(quad_pts_6, alpha / 3, 2 * alpha / 3, _clr) + + quad_pts_7 = [ + (np.array(_pts[quad[1]]) + (GRID_SIZE - 1.0) * np.array(_pts[quad[2]])) / GRID_SIZE, + (np.array(_pts[quad[0]]) + (GRID_SIZE - 1.0) * np.array(_pts[quad[3]])) / GRID_SIZE, + (np.array(_pts[quad[0]]) * 0.5 + (GRID_SIZE - 0.5) * np.array(_pts[quad[3]])) / GRID_SIZE, + (np.array(_pts[quad[1]]) * 0.5 + (GRID_SIZE - 0.5) * np.array(_pts[quad[2]])) / GRID_SIZE + ] + self.__add_quad(quad_pts_7, 2 * alpha / 3, alpha, _clr) + + quad_pts_8 = [ + (np.array(_pts[quad[0]]) * 0.5 + (GRID_SIZE - 0.5) * np.array(_pts[quad[3]])) / GRID_SIZE, + (np.array(_pts[quad[1]]) * 0.5 + (GRID_SIZE - 0.5) * np.array(_pts[quad[2]])) / GRID_SIZE, + np.array(_pts[quad[2]]), np.array(_pts[quad[3]]) + ] + self.__add_quad(quad_pts_8, alpha, alpha, _clr) + def push_to_GPU(self): - self.vboID = glGenBuffers(4) + if(self.is_init == False): + self.vboID = glGenBuffers(3) + self.is_init = True + + if(self.is_static): + type_draw = GL_STATIC_DRAW + else: + type_draw = GL_DYNAMIC_DRAW if len(self.vertices): glBindBuffer(GL_ARRAY_BUFFER, self.vboID[0]) - glBufferData(GL_ARRAY_BUFFER, len(self.vertices) * self.vertices.itemsize, (GLfloat * len(self.vertices))(*self.vertices), GL_STATIC_DRAW) - + glBufferData(GL_ARRAY_BUFFER, len(self.vertices) * self.vertices.itemsize, (GLfloat * len(self.vertices))(*self.vertices), type_draw) + if len(self.colors): 
glBindBuffer(GL_ARRAY_BUFFER, self.vboID[1]) - glBufferData(GL_ARRAY_BUFFER, len(self.colors) * self.colors.itemsize, (GLfloat * len(self.colors))(*self.colors), GL_STATIC_DRAW) + glBufferData(GL_ARRAY_BUFFER, len(self.colors) * self.colors.itemsize, (GLfloat * len(self.colors))(*self.colors), type_draw) if len(self.indices): glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, self.vboID[2]) - glBufferData(GL_ELEMENT_ARRAY_BUFFER,len(self.indices) * self.indices.itemsize,(GLuint * len(self.indices))(*self.indices), GL_STATIC_DRAW) + glBufferData(GL_ELEMENT_ARRAY_BUFFER,len(self.indices) * self.indices.itemsize,(GLuint * len(self.indices))(*self.indices), type_draw) self.elementbufferSize = len(self.indices) + + def init(self, res): + if(self.is_init == False): + self.vboID = glGenBuffers(3) + self.is_init = True + + if(self.is_static): + type_draw = GL_STATIC_DRAW + else: + type_draw = GL_DYNAMIC_DRAW + + self.elementbufferSize = res.width * res.height + + glBindBuffer(GL_ARRAY_BUFFER, self.vboID[0]) + glBufferData(GL_ARRAY_BUFFER, self.elementbufferSize * self.pt_type * self.vertices.itemsize, None, type_draw) + + if(self.clr_type): + glBindBuffer(GL_ARRAY_BUFFER, self.vboID[1]) + glBufferData(GL_ARRAY_BUFFER, self.elementbufferSize * self.clr_type * self.colors.itemsize, None, type_draw) + + for i in range (0, self.elementbufferSize): + self.indices.append(i+1) + + glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, self.vboID[2]) + glBufferData(GL_ELEMENT_ARRAY_BUFFER,len(self.indices) * self.indices.itemsize,(GLuint * len(self.indices))(*self.indices), type_draw) + + def setPoints(self, pc): + glBindBuffer(GL_ARRAY_BUFFER, self.vboID[0]) + glBufferSubData(GL_ARRAY_BUFFER, 0, self.elementbufferSize * self.pt_type * self.vertices.itemsize, ctypes.c_void_p(pc.get_pointer())) + glBindBuffer(GL_ARRAY_BUFFER, 0) def clear(self): self.vertices = array.array('f') @@ -139,22 +353,19 @@ def draw(self): if (self.elementbufferSize): glEnableVertexAttribArray(0) glBindBuffer(GL_ARRAY_BUFFER, 
self.vboID[0]) - glVertexAttribPointer(0,3,GL_FLOAT,GL_FALSE,0,None) + glVertexAttribPointer(0,self.pt_type,GL_FLOAT,GL_FALSE,0,None) - glEnableVertexAttribArray(1) - glBindBuffer(GL_ARRAY_BUFFER, self.vboID[1]) - glVertexAttribPointer(1,3,GL_FLOAT,GL_FALSE,0,None) + if(self.clr_type): + glEnableVertexAttribArray(1) + glBindBuffer(GL_ARRAY_BUFFER, self.vboID[1]) + glVertexAttribPointer(1,self.clr_type,GL_FLOAT,GL_FALSE,0,None) glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, self.vboID[2]) glDrawElements(self.drawing_type, self.elementbufferSize, GL_UNSIGNED_INT, None) glDisableVertexAttribArray(0) glDisableVertexAttribArray(1) - -def addVert(obj, i_f, limit, clr) : - obj.add_line([i_f, 0, -limit], [i_f, 0, limit], clr) - obj.add_line([-limit, 0, i_f],[limit, 0, i_f], clr) - + class GLViewer: def __init__(self): self.available = False @@ -165,20 +376,22 @@ def __init__(self): self.mouseCurrentPosition = [0., 0.] self.previousMouseMotion = [0., 0.] self.mouseMotion = [0., 0.] - self.pose = sl.Transform() - self.trackState = sl.POSITIONAL_TRACKING_STATE - self.txtT = "" - self.txtR = "" - - def init(self, camera_model): # _params = sl.CameraParameters - glutInit() + self.zedModel = Simple3DObject(True) + self.BBox_faces = Simple3DObject(False, 3, 4) + self.BBox_edges = Simple3DObject(False, 3, 4) + self.skeletons = Simple3DObject(False, 3, 4) + self.point_cloud = Simple3DObject(False, 4) + self.is_tracking_on = False # Show tracked objects only + + def init(self, camera_model, res, is_tracking_on): + glutInit(sys.argv) wnd_w = int(glutGet(GLUT_SCREEN_WIDTH)*0.9) wnd_h = int(glutGet(GLUT_SCREEN_HEIGHT) *0.9) glutInitWindowSize(wnd_w, wnd_h) glutInitWindowPosition(int(wnd_w*0.05), int(wnd_h*0.05)) - glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH) - glutCreateWindow("ZED Positional Tracking") + glutInitDisplayMode(GLUT_DOUBLE | GLUT_SRGB | GLUT_DEPTH) + glutCreateWindow("ZED Object Detection Birds View") glViewport(0, 0, wnd_w, wnd_h) 
glutSetOption(GLUT_ACTION_ON_WINDOW_CLOSE, @@ -192,32 +405,17 @@ def init(self, camera_model): # _params = sl.CameraParameters glEnable(GL_LINE_SMOOTH) glHint(GL_LINE_SMOOTH_HINT, GL_NICEST) + self.is_tracking_on = is_tracking_on + # Compile and create the shader for 3D objects self.shader_image = Shader(VERTEX_SHADER, FRAGMENT_SHADER) - self.shader_MVP = glGetUniformLocation(self.shader_image.get_program_id(), "u_mvpMatrix") - - self.bckgrnd_clr = np.array([223/255., 230/255., 233/255.]) - - # Create the bounding box object - self.floor_grid = Simple3DObject(False) - self.floor_grid.set_drawing_type(GL_LINES) + self.shader_image_MVP = glGetUniformLocation(self.shader_image.get_program_id(), "u_mvpMatrix") - limit = 20 - clr1 = np.array([218/255., 223/255., 225/255.]) - clr2 = np.array([108/255., 122/255., 137/255.]) + self.shader_pc = Shader(POINTCLOUD_VERTEX_SHADER, POINTCLOUD_FRAGMENT_SHADER) + self.shader_pc_MVP = glGetUniformLocation(self.shader_pc.get_program_id(), "u_mvpMatrix") - for i in range (limit * -5, limit * 5): - i_f = i / 5. 
- if((i % 5) == 0): - addVert(self.floor_grid, i_f, limit, clr2) - else: - addVert(self.floor_grid, i_f, limit, clr1) - self.floor_grid.push_to_GPU() - - self.zedPath = Simple3DObject(False) - self.zedPath.set_drawing_type(GL_LINE_STRIP) + self.bckgrnd_clr = np.array([223/255., 230/255., 233/255.]) - self.zedModel = Simple3DObject(False) if(camera_model == sl.MODEL.ZED): for i in range(0, zm.NB_ALLUMINIUM_TRIANGLES * 3, 3): for j in range(3): @@ -249,7 +447,7 @@ def init(self, camera_model): # _params = sl.CameraParameters index = int(zm.yellow_triangles_m[i + j] - 1) self.zedModel.add_point_clr([zm.vertices_m[index * 3], zm.vertices_m[index * 3 + 1], zm.vertices_m[index * 3 + 2]], [zm.YELLOW_COLOR.r, zm.YELLOW_COLOR.g, zm.YELLOW_COLOR.b] ) - elif((camera_model == sl.MODEL.ZED2) or (camera_model == sl.MODEL.ZED2i)): + elif(camera_model == sl.MODEL.ZED2): for i in range(0, zm.NB_ALLUMINIUM_TRIANGLES * 3, 3): for j in range(3): index = int(zm.alluminium_triangles[i + j] - 1) @@ -262,15 +460,22 @@ def init(self, camera_model): # _params = sl.CameraParameters self.zedModel.set_drawing_type(GL_TRIANGLES) self.zedModel.push_to_GPU() - # Register GLUT callback functions + self.point_cloud.init(res) + self.point_cloud.set_drawing_type(GL_POINTS) + + self.BBox_edges.set_drawing_type(GL_LINES) + self.BBox_faces.set_drawing_type(GL_QUADS) + self.skeletons.set_drawing_type(GL_LINES) + + # Register GLUT callback functions glutDisplayFunc(self.draw_callback) - glutIdleFunc(self.idle) + glutIdleFunc(self.idle) glutKeyboardFunc(self.keyPressedCallback) glutCloseFunc(self.close_func) glutMouseFunc(self.on_mouse) glutMotionFunc(self.on_mousemove) glutReshapeFunc(self.on_resize) - + self.available = True def is_available(self): @@ -278,14 +483,40 @@ def is_available(self): glutMainLoopEvent() return self.available - def updateData(self, zed_rt, str_t, str_r, state): + def render_object(self, _object_data): # _object_data of type sl.ObjectData + if self.is_tracking_on: + return 
(_object_data.tracking_state == sl.OBJECT_TRACKING_STATE.OK) + else: + return (_object_data.tracking_state == sl.OBJECT_TRACKING_STATE.OK or _object_data.tracking_state == sl.OBJECT_TRACKING_STATE.OFF) + + def updateData(self, pc, _objs): self.mutex.acquire() - self.pose = zed_rt - self.zedPath.add_point_clr(zed_rt.get_translation().get(), [0.1,0.36,0.84]) - self.trackState = state - self.txtT = str_t - self.txtR = str_r + self.point_cloud.setPoints(pc) + + # Clear frame objects + self.BBox_edges.clear() + self.skeletons.clear() + self.BBox_faces.clear() + + for i in range(len(_objs.object_list)): + if self.render_object(_objs.object_list[i]): + bounding_box = np.array(_objs.object_list[i].bounding_box) + if bounding_box.any(): + color_id = generate_color_id(_objs.object_list[i].id) + + self.create_bbox_rendering(bounding_box, color_id) + self.mutex.release() + + def create_bbox_rendering(self, _bbox, _bbox_clr): + # First create top and bottom full edges + self.BBox_edges.add_full_edges(_bbox, _bbox_clr) + # Add faded vertical edges + self.BBox_edges.add_vertical_edges(_bbox, _bbox_clr) + # Add faces + self.BBox_faces.add_vertical_faces(_bbox, _bbox_clr) + # Add top face + self.BBox_faces.add_top_face(_bbox, _bbox_clr) def idle(self): if self.available: @@ -300,7 +531,7 @@ def close_func(self): self.available = False def keyPressedCallback(self, key, x, y): - if ord(key) == 27: + if ord(key) == 27: # 'Esc' key self.close_func() def on_mouse(self,*args,**kwargs): @@ -336,20 +567,17 @@ def draw_callback(self): self.mutex.acquire() self.update() self.draw() - self.print_text() self.mutex.release() glutSwapBuffers() glutPostRedisplay() def update(self): - self.zedPath.push_to_GPU() - if(self.mouse_button[0]): r = sl.Rotation() vert=self.camera.vertical_ tmp = vert.get() - vert.init_vector(tmp[0] * -1.,tmp[1] * -1., tmp[2] * -1.) + vert.init_vector(tmp[0] * 1.,tmp[1] * 1., tmp[2] * 1.) 
r.init_angle_translation(self.mouseMotion[0] * 0.002, vert) self.camera.rotate(r) @@ -359,7 +587,7 @@ def update(self): if(self.mouse_button[1]): t = sl.Translation() tmp = self.camera.right_.get() - scale = self.mouseMotion[0] * -0.01 + scale = self.mouseMotion[0] *-0.01 t.init_vector(tmp[0] * scale, tmp[1] * scale, tmp[2] * scale) self.camera.translate(t) @@ -375,77 +603,38 @@ def update(self): t.init_vector(tmp[0] * scale, tmp[1] * scale, tmp[2] * scale) self.camera.translate(t) - + self.BBox_edges.push_to_GPU() + self.BBox_faces.push_to_GPU() + self.skeletons.push_to_GPU() + self.camera.update() self.mouseMotion = [0., 0.] self.wheelPosition = 0 - def draw(self): - glPointSize(1.) - glUseProgram(self.shader_image.get_program_id()) - + def draw(self): vpMatrix = self.camera.getViewProjectionMatrix() - glUniformMatrix4fv(self.shader_MVP, 1, GL_TRUE, (GLfloat * len(vpMatrix))(*vpMatrix)) - glPolygonMode(GL_FRONT_AND_BACK, GL_FILL) - glLineWidth(2) - self.zedPath.draw() - self.floor_grid.draw() - - vpMatrix = self.camera.getViewProjectionMatrixRT(self.pose) - glUniformMatrix4fv(self.shader_MVP, 1, GL_FALSE, (GLfloat * len(vpMatrix))(*vpMatrix)) + glUseProgram(self.shader_pc.get_program_id()) + glUniformMatrix4fv(self.shader_pc_MVP, 1, GL_TRUE, (GLfloat * len(vpMatrix))(*vpMatrix)) + glPointSize(1.2) + self.point_cloud.draw() + glUseProgram(0) - self.zedModel.draw() + glUseProgram(self.shader_image.get_program_id()) + glUniformMatrix4fv(self.shader_image_MVP, 1, GL_TRUE, (GLfloat * len(vpMatrix))(*vpMatrix)) + glPolygonMode(GL_FRONT_AND_BACK, GL_LINE) + glLineWidth(4.) + self.skeletons.draw() + glPolygonMode(GL_FRONT_AND_BACK, GL_FILL) + self.zedModel.draw() + self.BBox_faces.draw() + glPolygonMode(GL_FRONT_AND_BACK, GL_LINE) + glLineWidth(2.) 
+ self.BBox_edges.draw() glUseProgram(0) - def print_text(self): - glMatrixMode(GL_PROJECTION) - glPushMatrix() - glLoadIdentity() - w_wnd = glutGet(GLUT_WINDOW_WIDTH) - h_wnd = glutGet(GLUT_WINDOW_HEIGHT) - glOrtho(0, w_wnd, 0, h_wnd, -1., 1.) - - glMatrixMode(GL_MODELVIEW) - glPushMatrix() - glLoadIdentity() - - start_w = 20 - start_h = h_wnd - 40 - - if(self.trackState == sl.POSITIONAL_TRACKING_STATE.OK): - glColor3f(0.2, 0.65, 0.2) - else: - glColor3f(0.85, 0.2, 0.2) - - glRasterPos2i(start_w, start_h) - - safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, "POSITIONAL TRACKING : " + str(self.trackState)) - - dark_clr = 0.12 - glColor3f(dark_clr, dark_clr, dark_clr) - glRasterPos2i(start_w, start_h - 25) - safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, "Translation (m) :") - - glColor3f(0.4980, 0.5490, 0.5529) - glRasterPos2i(155, start_h - 25) - - safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, self.txtT) - - glColor3f(dark_clr, dark_clr, dark_clr) - glRasterPos2i(start_w, start_h - 50) - safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, "Rotation (rad) :") - - glColor3f(0.4980, 0.5490, 0.5529) - glRasterPos2i(155, start_h - 50) - safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, self.txtR) - - glMatrixMode(GL_PROJECTION) - glPopMatrix() - glMatrixMode(GL_MODELVIEW) - glPopMatrix() - + class CameraGL: def __init__(self): self.ORIGINAL_FORWARD = sl.Translation() @@ -464,19 +653,18 @@ def __init__(self): self.right_ = sl.Translation() self.vertical_ = sl.Translation() self.vpMatrix_ = sl.Matrix4f() + self.offset_ = sl.Translation() + self.offset_.init_vector(0,0,5) self.projection_ = sl.Matrix4f() self.projection_.set_identity() self.setProjection(1.78) - self.position_.init_vector(0., 5., -3.) + self.position_.init_vector(0., 0., 0.) 
tmp = sl.Translation() - tmp.init_vector(0, 0, -4) + tmp.init_vector(0, 0, -.1) tmp2 = sl.Translation() tmp2.init_vector(0, 1, 0) - self.setDirection(tmp, tmp2) - cam_rot = sl.Rotation() - cam_rot.set_euler_angles(-50., 180., 0., False) - self.setRotation(cam_rot) + self.setDirection(tmp, tmp2) def update(self): dot_ = sl.Translation.dot_translation(self.vertical_, self.up_) @@ -484,7 +672,12 @@ def update(self): tmp = self.vertical_.get() self.vertical_.init_vector(tmp[0] * -1.,tmp[1] * -1., tmp[2] * -1.) transformation = sl.Transform() - transformation.init_orientation_translation(self.orientation_, self.position_) + + tmp_position = self.position_.get() + tmp = (self.offset_ * self.orientation_).get() + new_position = sl.Translation() + new_position.init_vector(tmp_position[0] + tmp[0], tmp_position[1] + tmp[1], tmp_position[2] + tmp[2]) + transformation.init_orientation_translation(self.orientation_, new_position) transformation.inverse() self.vpMatrix_ = self.projection_ * transformation diff --git a/geotracking/recording/python/display/zed_model.py b/object detection/concurrent detections/python/ogl_viewer/zed_model.py similarity index 100% rename from geotracking/recording/python/display/zed_model.py rename to object detection/concurrent detections/python/ogl_viewer/zed_model.py diff --git a/object detection/custom detector/cpp/opencv_dnn_yolov4/include/GLViewer.hpp b/object detection/custom detector/cpp/opencv_dnn_yolov4/include/GLViewer.hpp index 0a1394fc..414be22c 100644 --- a/object detection/custom detector/cpp/opencv_dnn_yolov4/include/GLViewer.hpp +++ b/object detection/custom detector/cpp/opencv_dnn_yolov4/include/GLViewer.hpp @@ -106,14 +106,14 @@ class Shader { Shader() { } - Shader(GLchar* vs, GLchar* fs); + Shader(const GLchar* vs, const GLchar* fs); ~Shader(); GLuint getProgramId(); static const GLint ATTRIB_VERTICES_POS = 0; static const GLint ATTRIB_COLOR_POS = 1; private: - bool compile(GLuint &shaderId, GLenum type, GLchar* src); + bool 
compile(GLuint &shaderId, GLenum type, const GLchar* src); GLuint verterxId_; GLuint fragmentId_; GLuint programId_; diff --git a/object detection/custom detector/cpp/opencv_dnn_yolov4/src/GLViewer.cpp b/object detection/custom detector/cpp/opencv_dnn_yolov4/src/GLViewer.cpp index 51265df0..b3f31e00 100644 --- a/object detection/custom detector/cpp/opencv_dnn_yolov4/src/GLViewer.cpp +++ b/object detection/custom detector/cpp/opencv_dnn_yolov4/src/GLViewer.cpp @@ -9,7 +9,7 @@ #define FADED_RENDERING const float grid_size = 10.0f; -GLchar* VERTEX_SHADER = +const GLchar* VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec4 in_Color;\n" @@ -20,7 +20,7 @@ GLchar* VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" "}"; -GLchar* FRAGMENT_SHADER = +const GLchar* FRAGMENT_SHADER = "#version 330 core\n" "in vec4 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" @@ -732,7 +732,7 @@ sl::Transform Simple3DObject::getModelMatrix() const { return tmp; } -Shader::Shader(GLchar* vs, GLchar* fs) { +Shader::Shader(const GLchar* vs, const GLchar* fs) { if (!compile(verterxId_, GL_VERTEX_SHADER, vs)) { std::cout << "ERROR: while compiling vertex shader" << std::endl; } @@ -780,7 +780,7 @@ GLuint Shader::getProgramId() { return programId_; } -bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { +bool Shader::compile(GLuint &shaderId, GLenum type, const GLchar* src) { shaderId = glCreateShader(type); if (shaderId == 0) { std::cout << "ERROR: shader type (" << type << ") does not exist" << std::endl; @@ -808,7 +808,7 @@ bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { return true; } -GLchar* POINTCLOUD_VERTEX_SHADER = +const GLchar* POINTCLOUD_VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec4 in_VertexRGBA;\n" "uniform mat4 u_mvpMatrix;\n" @@ -821,7 +821,7 @@ GLchar* POINTCLOUD_VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_VertexRGBA.xyz, 1);\n" 
"}"; -GLchar* POINTCLOUD_FRAGMENT_SHADER = +const GLchar* POINTCLOUD_FRAGMENT_SHADER = "#version 330 core\n" "in vec4 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" diff --git a/object detection/custom detector/cpp/tensorrt_yolov5-v6-v8_onnx/include/GLViewer.hpp b/object detection/custom detector/cpp/tensorrt_yolov5-v6-v8_onnx/include/GLViewer.hpp index 48a680db..db155f25 100644 --- a/object detection/custom detector/cpp/tensorrt_yolov5-v6-v8_onnx/include/GLViewer.hpp +++ b/object detection/custom detector/cpp/tensorrt_yolov5-v6-v8_onnx/include/GLViewer.hpp @@ -106,14 +106,14 @@ class Shader { Shader() { } - Shader(GLchar* vs, GLchar* fs); + Shader(const GLchar* vs, const GLchar* fs); ~Shader(); GLuint getProgramId(); static const GLint ATTRIB_VERTICES_POS = 0; static const GLint ATTRIB_COLOR_POS = 1; private: - bool compile(GLuint &shaderId, GLenum type, GLchar* src); + bool compile(GLuint &shaderId, GLenum type, const GLchar* src); GLuint verterxId_; GLuint fragmentId_; GLuint programId_; diff --git a/object detection/custom detector/cpp/tensorrt_yolov5-v6-v8_onnx/src/GLViewer.cpp b/object detection/custom detector/cpp/tensorrt_yolov5-v6-v8_onnx/src/GLViewer.cpp index c5c53b92..c7eab2e6 100644 --- a/object detection/custom detector/cpp/tensorrt_yolov5-v6-v8_onnx/src/GLViewer.cpp +++ b/object detection/custom detector/cpp/tensorrt_yolov5-v6-v8_onnx/src/GLViewer.cpp @@ -9,7 +9,7 @@ #define FADED_RENDERING const float grid_size = 10.0f; -GLchar* VERTEX_SHADER = +const GLchar* VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec4 in_Color;\n" @@ -20,7 +20,7 @@ GLchar* VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" "}"; -GLchar* FRAGMENT_SHADER = +const GLchar* FRAGMENT_SHADER = "#version 330 core\n" "in vec4 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" @@ -734,7 +734,7 @@ sl::Transform Simple3DObject::getModelMatrix() const { return tmp; } -Shader::Shader(GLchar* vs, 
GLchar* fs) { +Shader::Shader(const GLchar* vs, const GLchar* fs) { if (!compile(verterxId_, GL_VERTEX_SHADER, vs)) { std::cout << "ERROR: while compiling vertex shader" << std::endl; } @@ -782,7 +782,7 @@ GLuint Shader::getProgramId() { return programId_; } -bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { +bool Shader::compile(GLuint &shaderId, GLenum type, const GLchar* src) { shaderId = glCreateShader(type); if (shaderId == 0) { std::cout << "ERROR: shader type (" << type << ") does not exist" << std::endl; @@ -810,7 +810,7 @@ bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { return true; } -GLchar* POINTCLOUD_VERTEX_SHADER = +const GLchar* POINTCLOUD_VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec4 in_VertexRGBA;\n" "uniform mat4 u_mvpMatrix;\n" @@ -823,7 +823,7 @@ GLchar* POINTCLOUD_VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_VertexRGBA.xyz, 1);\n" "}"; -GLchar* POINTCLOUD_FRAGMENT_SHADER = +const GLchar* POINTCLOUD_FRAGMENT_SHADER = "#version 330 core\n" "in vec4 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" diff --git a/object detection/custom detector/cpp/tensorrt_yolov5_v5.0/include/GLViewer.hpp b/object detection/custom detector/cpp/tensorrt_yolov5_v5.0/include/GLViewer.hpp index 0a1394fc..414be22c 100644 --- a/object detection/custom detector/cpp/tensorrt_yolov5_v5.0/include/GLViewer.hpp +++ b/object detection/custom detector/cpp/tensorrt_yolov5_v5.0/include/GLViewer.hpp @@ -106,14 +106,14 @@ class Shader { Shader() { } - Shader(GLchar* vs, GLchar* fs); + Shader(const GLchar* vs, const GLchar* fs); ~Shader(); GLuint getProgramId(); static const GLint ATTRIB_VERTICES_POS = 0; static const GLint ATTRIB_COLOR_POS = 1; private: - bool compile(GLuint &shaderId, GLenum type, GLchar* src); + bool compile(GLuint &shaderId, GLenum type, const GLchar* src); GLuint verterxId_; GLuint fragmentId_; GLuint programId_; diff --git a/object detection/custom 
detector/cpp/tensorrt_yolov5_v5.0/src/GLViewer.cpp b/object detection/custom detector/cpp/tensorrt_yolov5_v5.0/src/GLViewer.cpp index c5c53b92..c7eab2e6 100644 --- a/object detection/custom detector/cpp/tensorrt_yolov5_v5.0/src/GLViewer.cpp +++ b/object detection/custom detector/cpp/tensorrt_yolov5_v5.0/src/GLViewer.cpp @@ -9,7 +9,7 @@ #define FADED_RENDERING const float grid_size = 10.0f; -GLchar* VERTEX_SHADER = +const GLchar* VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec4 in_Color;\n" @@ -20,7 +20,7 @@ GLchar* VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" "}"; -GLchar* FRAGMENT_SHADER = +const GLchar* FRAGMENT_SHADER = "#version 330 core\n" "in vec4 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" @@ -734,7 +734,7 @@ sl::Transform Simple3DObject::getModelMatrix() const { return tmp; } -Shader::Shader(GLchar* vs, GLchar* fs) { +Shader::Shader(const GLchar* vs, const GLchar* fs) { if (!compile(verterxId_, GL_VERTEX_SHADER, vs)) { std::cout << "ERROR: while compiling vertex shader" << std::endl; } @@ -782,7 +782,7 @@ GLuint Shader::getProgramId() { return programId_; } -bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { +bool Shader::compile(GLuint &shaderId, GLenum type, const GLchar* src) { shaderId = glCreateShader(type); if (shaderId == 0) { std::cout << "ERROR: shader type (" << type << ") does not exist" << std::endl; @@ -810,7 +810,7 @@ bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { return true; } -GLchar* POINTCLOUD_VERTEX_SHADER = +const GLchar* POINTCLOUD_VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec4 in_VertexRGBA;\n" "uniform mat4 u_mvpMatrix;\n" @@ -823,7 +823,7 @@ GLchar* POINTCLOUD_VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_VertexRGBA.xyz, 1);\n" "}"; -GLchar* POINTCLOUD_FRAGMENT_SHADER = +const GLchar* POINTCLOUD_FRAGMENT_SHADER = "#version 330 core\n" "in vec4 b_color;\n" 
"layout(location = 0) out vec4 out_Color;\n" diff --git a/object detection/custom detector/cpp/tensorrt_yolov5_v6.0/include/GLViewer.hpp b/object detection/custom detector/cpp/tensorrt_yolov5_v6.0/include/GLViewer.hpp index 0a1394fc..414be22c 100644 --- a/object detection/custom detector/cpp/tensorrt_yolov5_v6.0/include/GLViewer.hpp +++ b/object detection/custom detector/cpp/tensorrt_yolov5_v6.0/include/GLViewer.hpp @@ -106,14 +106,14 @@ class Shader { Shader() { } - Shader(GLchar* vs, GLchar* fs); + Shader(const GLchar* vs, const GLchar* fs); ~Shader(); GLuint getProgramId(); static const GLint ATTRIB_VERTICES_POS = 0; static const GLint ATTRIB_COLOR_POS = 1; private: - bool compile(GLuint &shaderId, GLenum type, GLchar* src); + bool compile(GLuint &shaderId, GLenum type, const GLchar* src); GLuint verterxId_; GLuint fragmentId_; GLuint programId_; diff --git a/object detection/custom detector/cpp/tensorrt_yolov5_v6.0/src/GLViewer.cpp b/object detection/custom detector/cpp/tensorrt_yolov5_v6.0/src/GLViewer.cpp index c5c53b92..c7eab2e6 100644 --- a/object detection/custom detector/cpp/tensorrt_yolov5_v6.0/src/GLViewer.cpp +++ b/object detection/custom detector/cpp/tensorrt_yolov5_v6.0/src/GLViewer.cpp @@ -9,7 +9,7 @@ #define FADED_RENDERING const float grid_size = 10.0f; -GLchar* VERTEX_SHADER = +const GLchar* VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec4 in_Color;\n" @@ -20,7 +20,7 @@ GLchar* VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" "}"; -GLchar* FRAGMENT_SHADER = +const GLchar* FRAGMENT_SHADER = "#version 330 core\n" "in vec4 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" @@ -734,7 +734,7 @@ sl::Transform Simple3DObject::getModelMatrix() const { return tmp; } -Shader::Shader(GLchar* vs, GLchar* fs) { +Shader::Shader(const GLchar* vs, const GLchar* fs) { if (!compile(verterxId_, GL_VERTEX_SHADER, vs)) { std::cout << "ERROR: while compiling vertex shader" << 
std::endl; } @@ -782,7 +782,7 @@ GLuint Shader::getProgramId() { return programId_; } -bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { +bool Shader::compile(GLuint &shaderId, GLenum type, const GLchar* src) { shaderId = glCreateShader(type); if (shaderId == 0) { std::cout << "ERROR: shader type (" << type << ") does not exist" << std::endl; @@ -810,7 +810,7 @@ bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { return true; } -GLchar* POINTCLOUD_VERTEX_SHADER = +const GLchar* POINTCLOUD_VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec4 in_VertexRGBA;\n" "uniform mat4 u_mvpMatrix;\n" @@ -823,7 +823,7 @@ GLchar* POINTCLOUD_VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_VertexRGBA.xyz, 1);\n" "}"; -GLchar* POINTCLOUD_FRAGMENT_SHADER = +const GLchar* POINTCLOUD_FRAGMENT_SHADER = "#version 330 core\n" "in vec4 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" diff --git a/object detection/image viewer/cpp/include/GLViewer.hpp b/object detection/image viewer/cpp/include/GLViewer.hpp index c87b0af3..29c0af70 100644 --- a/object detection/image viewer/cpp/include/GLViewer.hpp +++ b/object detection/image viewer/cpp/include/GLViewer.hpp @@ -25,7 +25,7 @@ class Shader { public: Shader() {} - Shader(GLchar* vs, GLchar* fs); + Shader(const GLchar* vs, const GLchar* fs); ~Shader(); GLuint getProgramId(); @@ -33,7 +33,7 @@ class Shader { static const GLint ATTRIB_COLOR_POS = 1; static const GLint ATTRIB_NORMAL = 2; private: - bool compile(GLuint &shaderId, GLenum type, GLchar* src); + bool compile(GLuint &shaderId, GLenum type, const GLchar* src); GLuint verterxId_; GLuint fragmentId_; GLuint programId_; diff --git a/object detection/image viewer/cpp/src/GLViewer.cpp b/object detection/image viewer/cpp/src/GLViewer.cpp index 99bc375b..1d544e5e 100644 --- a/object detection/image viewer/cpp/src/GLViewer.cpp +++ b/object detection/image viewer/cpp/src/GLViewer.cpp @@ -5,7 +5,7 @@ #error "This sample should not be 
built in Debug mode, use RelWithDebInfo if you want to do step by step." #endif -GLchar* VERTEX_SHADER = +const GLchar* VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec4 in_Color;\n" @@ -16,7 +16,7 @@ GLchar* VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" "}"; -GLchar* FRAGMENT_SHADER = +const GLchar* FRAGMENT_SHADER = "#version 330 core\n" "in vec4 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" @@ -25,7 +25,7 @@ GLchar* FRAGMENT_SHADER = " out_Color = b_color;//pow(b_color, vec4(1.0/gamma));;\n" "}"; -GLchar* SK_VERTEX_SHADER = +const GLchar* SK_VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec4 in_Color;\n" @@ -42,7 +42,7 @@ GLchar* SK_VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" "}"; -GLchar* SK_FRAGMENT_SHADER = +const GLchar* SK_FRAGMENT_SHADER = "#version 330 core\n" "in vec4 b_color;\n" "in vec3 b_position;\n" @@ -870,7 +870,7 @@ sl::Transform Simple3DObject::getModelMatrix() const { return tmp; } -Shader::Shader(GLchar* vs, GLchar* fs) { +Shader::Shader(const GLchar* vs, const GLchar* fs) { if (!compile(verterxId_, GL_VERTEX_SHADER, vs)) { std::cout << "ERROR: while compiling vertex shader" << std::endl; } @@ -918,7 +918,7 @@ GLuint Shader::getProgramId() { return programId_; } -bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { +bool Shader::compile(GLuint &shaderId, GLenum type, const GLchar* src) { shaderId = glCreateShader(type); if (shaderId == 0) { std::cout << "ERROR: shader type (" << type << ") does not exist" << std::endl; @@ -946,7 +946,7 @@ bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { return true; } -GLchar* IMAGE_FRAGMENT_SHADER = +const GLchar* IMAGE_FRAGMENT_SHADER = "#version 330 core\n" " in vec2 UV;\n" " out vec4 color;\n" @@ -961,7 +961,7 @@ GLchar* IMAGE_FRAGMENT_SHADER = " color = vec4(color_rgb,1);\n" "}"; -GLchar* 
IMAGE_VERTEX_SHADER = +const GLchar* IMAGE_VERTEX_SHADER = "#version 330\n" "layout(location = 0) in vec3 vert;\n" "out vec2 UV;" diff --git a/object detection/image viewer/cpp/src/main.cpp b/object detection/image viewer/cpp/src/main.cpp index da6dae1c..52c8c63e 100644 --- a/object detection/image viewer/cpp/src/main.cpp +++ b/object detection/image viewer/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // diff --git a/object detection/image viewer/csharp/MainWindow.cs b/object detection/image viewer/csharp/MainWindow.cs index f29e4169..103204c0 100644 --- a/object detection/image viewer/csharp/MainWindow.cs +++ b/object detection/image viewer/csharp/MainWindow.cs @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. 
// diff --git a/object detection/image viewer/csharp/Properties/AssemblyInfo.cs b/object detection/image viewer/csharp/Properties/AssemblyInfo.cs index 580aea52..19d0c1a1 100644 --- a/object detection/image viewer/csharp/Properties/AssemblyInfo.cs +++ b/object detection/image viewer/csharp/Properties/AssemblyInfo.cs @@ -10,7 +10,7 @@ [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("Tutorials")] -[assembly: AssemblyCopyright("Copyright © 2021")] +[assembly: AssemblyCopyright("Copyright © 2024")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] diff --git a/object detection/multi-camera/cpp/CMakeLists.txt b/object detection/multi-camera/cpp/CMakeLists.txt new file mode 100644 index 00000000..2a8e1d7a --- /dev/null +++ b/object detection/multi-camera/cpp/CMakeLists.txt @@ -0,0 +1,56 @@ +CMAKE_MINIMUM_REQUIRED(VERSION 3.5) +PROJECT(ZED_ObjectDetectionFusion) + +set(CMAKE_CXX_STANDARD 14) +set(CMAKE_CXX_STANDARD_REQUIRED ON) + +option(LINK_SHARED_ZED "Link with the ZED SDK shared executable" ON) + +if (NOT LINK_SHARED_ZED AND MSVC) + message(FATAL_ERROR "LINK_SHARED_ZED OFF : ZED SDK static libraries not available on Windows") +endif() + +find_package(ZED 4 REQUIRED) +find_package(CUDA REQUIRED) +find_package(GLUT REQUIRED) +find_package(GLEW REQUIRED) +SET(OpenGL_GL_PREFERENCE GLVND) +find_package(OpenGL REQUIRED) + +include_directories(${CUDA_INCLUDE_DIRS}) +include_directories(${ZED_INCLUDE_DIRS}) +include_directories(${GLEW_INCLUDE_DIRS}) +include_directories(${GLUT_INCLUDE_DIR}) +include_directories(${CMAKE_CURRENT_SOURCE_DIR}/include) + +link_directories(${ZED_LIBRARY_DIR}) +link_directories(${CUDA_LIBRARY_DIRS}) +link_directories(${GLEW_LIBRARY_DIRS}) +link_directories(${GLUT_LIBRARY_DIRS}) +link_directories(${OpenGL_LIBRARY_DIRS}) + +IF(NOT WIN32) + SET(SPECIAL_OS_LIBS "pthread") + + IF (CMAKE_SYSTEM_PROCESSOR MATCHES aarch64) + add_definitions(-DJETSON_STYLE) + ENDIF() +ENDIF() + 
+FILE(GLOB_RECURSE SRC_FILES src/*.c*) +FILE(GLOB_RECURSE HDR_FILES include/*.h*) + +add_executable(${PROJECT_NAME} ${HDR_FILES} ${SRC_FILES}) + +if (LINK_SHARED_ZED) + SET(ZED_LIBS ${ZED_LIBRARIES} ${CUDA_CUDA_LIBRARY} ${CUDA_CUDART_LIBRARY}) +else() + SET(ZED_LIBS ${ZED_STATIC_LIBRARIES} ${CUDA_CUDA_LIBRARY} ${CUDA_LIBRARY}) +endif() + +TARGET_LINK_LIBRARIES(${PROJECT_NAME} ${ZED_LIBS} ${UTILS_LIB} ${SPECIAL_OS_LIBS} ${OPENGL_LIBRARIES} ${GLUT_LIBRARIES} ${GLEW_LIBRARIES}) + +if(INSTALL_SAMPLES) + LIST(APPEND SAMPLE_LIST ${PROJECT_NAME}) + SET(SAMPLE_LIST "${SAMPLE_LIST}" PARENT_SCOPE) +endif() diff --git a/object detection/multi-camera/cpp/include/ClientPublisher.hpp b/object detection/multi-camera/cpp/include/ClientPublisher.hpp new file mode 100644 index 00000000..30183576 --- /dev/null +++ b/object detection/multi-camera/cpp/include/ClientPublisher.hpp @@ -0,0 +1,35 @@ +#ifndef __SENDER_RUNNER_HDR__ +#define __SENDER_RUNNER_HDR__ + +#include +#include + +#include + +class ClientPublisher{ + +public: + ClientPublisher(); + ~ClientPublisher(); + + bool open(sl::InputType); + void start(); + void stop(); + void setStartSVOPosition(unsigned pos); + sl::Objects getObjects(); + + bool isRunning() { + return running; + } + +private: + sl::Camera zed; + void work(); + std::thread runner; + bool running; + int serial; + sl::Objects objects; + sl::Bodies bodies; +}; + +#endif // ! 
__SENDER_RUNNER_HDR__ diff --git a/object detection/multi-camera/cpp/include/GLViewer.hpp b/object detection/multi-camera/cpp/include/GLViewer.hpp new file mode 100644 index 00000000..0c89ab00 --- /dev/null +++ b/object detection/multi-camera/cpp/include/GLViewer.hpp @@ -0,0 +1,330 @@ +#ifndef __VIEWER_INCLUDE__ +#define __VIEWER_INCLUDE__ + +#include + +#include +#include + +#include +#include + +#include +#include + +#ifndef M_PI +#define M_PI 3.141592653f +#endif + +#define MOUSE_R_SENSITIVITY 0.03f +#define MOUSE_UZ_SENSITIVITY 0.75f +#define MOUSE_DZ_SENSITIVITY 1.25f +#define MOUSE_T_SENSITIVITY 0.05f +#define KEY_T_SENSITIVITY 0.1f + + +/////////////////////////////////////////////////////////////////////////////////////////////// + +class Shader { +public: + + Shader() { + } + Shader(const GLchar* vs, const GLchar* fs); + ~Shader(); + GLuint getProgramId(); + + static const GLint ATTRIB_VERTICES_POS = 0; + static const GLint ATTRIB_COLOR_POS = 1; + static const GLint ATTRIB_NORMAL = 2; +private: + bool compile(GLuint &shaderId, GLenum type, const GLchar* src); + GLuint verterxId_; + GLuint fragmentId_; + GLuint programId_; +}; + +struct ShaderData { + Shader it; + GLuint MVP_Mat; +}; + +class Simple3DObject { +public: + + Simple3DObject(); + + ~Simple3DObject(); + + void addPoint(sl::float3 pt, sl::float3 clr); + void addLine(sl::float3 pt1, sl::float3 pt2, sl::float3 clr); + void addFace(sl::float3 p1, sl::float3 p2, sl::float3 p3, sl::float3 clr); + void addBBox(std::vector &pts, sl::float3 clr); + void addPt(sl::float3 pt); + void addClr(sl::float4 clr); + + void pushToGPU(); + void clear(); + + void setStatic(bool _static) { + isStatic_ = _static; + } + + void setDrawingType(GLenum type); + + void draw(); + +private: + std::vector vertices_; + std::vector colors_; + std::vector indices_; + + bool isStatic_; + bool need_update; + GLenum drawingType_; + GLuint vaoID_; + GLuint vboID_[3]; +}; + +class CameraGL { +public: + + CameraGL() { + } + + enum 
DIRECTION { + UP, DOWN, LEFT, RIGHT, FORWARD, BACK + }; + CameraGL(sl::Translation position, sl::Translation direction, sl::Translation vertical = sl::Translation(0, 1, 0)); // vertical = Eigen::Vector3f(0, 1, 0) + ~CameraGL(); + + void update(); + void setProjection(float horizontalFOV, float verticalFOV, float znear, float zfar); + const sl::Transform& getViewProjectionMatrix() const; + + float getHorizontalFOV() const; + float getVerticalFOV() const; + + // Set an offset between the eye of the camera and its position + // Note: Useful to use the camera as a trackball camera with z>0 and x = 0, y = 0 + // Note: coordinates are in local space + void setOffsetFromPosition(const sl::Translation& offset); + const sl::Translation& getOffsetFromPosition() const; + + void setDirection(const sl::Translation& direction, const sl::Translation &vertical); + void translate(const sl::Translation& t); + void setPosition(const sl::Translation& p); + void rotate(const sl::Orientation& rot); + void rotate(const sl::Rotation& m); + void setRotation(const sl::Orientation& rot); + void setRotation(const sl::Rotation& m); + + const sl::Translation& getPosition() const; + const sl::Translation& getForward() const; + const sl::Translation& getRight() const; + const sl::Translation& getUp() const; + const sl::Translation& getVertical() const; + float getZNear() const; + float getZFar() const; + + static const sl::Translation ORIGINAL_FORWARD; + static const sl::Translation ORIGINAL_UP; + static const sl::Translation ORIGINAL_RIGHT; + + sl::Transform projection_; + bool usePerspective_; +private: + void updateVectors(); + void updateView(); + void updateVPMatrix(); + + sl::Translation offset_; + sl::Translation position_; + sl::Translation forward_; + sl::Translation up_; + sl::Translation right_; + sl::Translation vertical_; + + sl::Orientation rotation_; + + sl::Transform view_; + sl::Transform vpMatrix_; + float horizontalFieldOfView_; + float verticalFieldOfView_; + float znear_; + 
float zfar_; +}; + + +class PointCloud { +public: + PointCloud(); + ~PointCloud(); + + // Initialize Opengl and Cuda buffers + // Warning: must be called in the Opengl thread + void initialize(sl::Mat&, sl::float3 clr); + // Push a new point cloud + // Warning: can be called from any thread but the mutex "mutexData" must be locked + void pushNewPC(); + // Draw the point cloud + // Warning: must be called in the Opengl thread + void draw(const sl::Transform& vp, bool draw_clr); + // Close (disable update) + void close(); + +private: + sl::Mat refMat; + sl::float3 clr; + + Shader shader_; + GLuint shMVPMatrixLoc_; + GLuint shDrawColor; + GLuint shColor; + + size_t numBytes_; + float* xyzrgbaMappedBuf_; + GLuint bufferGLID_; + cudaGraphicsResource* bufferCudaID_; +}; + +class CameraViewer { +public: + CameraViewer(); + ~CameraViewer(); + + // Initialize Opengl and Cuda buffers + bool initialize(sl::Mat& image, sl::float3 clr); + // Push a new Image + Z buffer and transform into a point cloud + void pushNewImage(); + // Draw the Image + void draw(sl::Transform vpMatrix); + // Close (disable update) + void close(); + + Simple3DObject frustum; +private: + sl::Mat ref; + cudaArray_t ArrIm; + cudaGraphicsResource* cuda_gl_ressource;//cuda GL resource + Shader shader; + GLuint shMVPMatrixLocTex_; + + GLuint texture; + GLuint vaoID_; + GLuint vboID_[3]; + + std::vector faces; + std::vector vert; + std::vector uv; +}; + +struct ObjectClassName { + sl::float3 position; + std::string name_lineA; + std::string name_lineB; + sl::float3 color; +}; + +// This class manages input events, window and Opengl rendering pipeline + +class GLViewer { +public: + GLViewer(); + ~GLViewer(); + bool isAvailable(); + void init(int argc, char **argv); + + void updateCamera(int, sl::Mat &, sl::Mat &); + + void updateObjects(sl::Objects &objs,std::map& singldata, sl::FusionMetrics& metrics); + + void setCameraPose(int, sl::Transform); + + unsigned char getKey() { + auto ret_v = lastPressedKey; + 
lastPressedKey = ' '; + return ret_v; + } + + void exit(); +private: + void render(); + void update(); + void draw(); + void clearInputs(); + void setRenderCameraProjection(sl::CameraParameters params, float znear, float zfar); + + void printText(); + + // Glut functions callbacks + static void drawCallback(); + static void mouseButtonCallback(int button, int state, int x, int y); + static void mouseMotionCallback(int x, int y); + static void reshapeCallback(int width, int height); + static void keyPressedCallback(unsigned char c, int x, int y); + static void keyReleasedCallback(unsigned char c, int x, int y); + static void idle(); + + // void addSKeleton(sl::BodyData &, Simple3DObject &, sl::float3 clr_id, bool raw, sl::BODY_FORMAT format); + // void addSKeleton(sl::BodyData &, Simple3DObject &, sl::float3 clr_id, bool raw); + void addObject(sl::ObjectData &obj, Simple3DObject &simpleObj, sl::float3 clr_id, bool raw); + + + bool available; + bool drawBbox = false; + + enum MOUSE_BUTTON { + LEFT = 0, + MIDDLE = 1, + RIGHT = 2, + WHEEL_UP = 3, + WHEEL_DOWN = 4 + }; + + enum KEY_STATE { + UP = 'u', + DOWN = 'd', + FREE = 'f' + }; + + unsigned char lastPressedKey; + + bool mouseButton_[3]; + int mouseWheelPosition_; + int mouseCurrentPosition_[2]; + int mouseMotion_[2]; + int previousMouseMotion_[2]; + KEY_STATE keyStates_[256]; + + std::mutex mtx; + + ShaderData shader; + + sl::Transform projection_; + sl::float3 bckgrnd_clr; + + std::map point_clouds; + std::map viewers; + std::map poses; + + std::map skeletons_raw; + std::map colors; + std::map colors_sk; + + std::vector fusionStats; + + CameraGL camera_; + Simple3DObject skeletons; + Simple3DObject floor_grid; + + bool show_pc = true; + bool show_raw = false; + bool draw_flat_color = false; + + std::uniform_int_distribution uint_dist360; + std::mt19937 rng; + +}; + +#endif /* __VIEWER_INCLUDE__ */ diff --git a/object detection/multi-camera/cpp/include/utils.hpp b/object detection/multi-camera/cpp/include/utils.hpp 
new file mode 100644 index 00000000..776cf1f6 --- /dev/null +++ b/object detection/multi-camera/cpp/include/utils.hpp @@ -0,0 +1,56 @@ +#pragma once + +#include + +/** +* @brief Compute the start frame of each SVO for playback to be synced +* +* @param svo_files Map camera index to SVO file path +* @return Map camera index to starting SVO frame for synced playback +*/ +std::map syncDATA(std::map svo_files) { + std::map output; // map of camera index and frame index of the starting point for each + + // Open all SVO + std::map> p_zeds; + + for (auto &it : svo_files) { + auto p_zed = std::make_shared(); + + sl::InitParameters init_param; + init_param.depth_mode = sl::DEPTH_MODE::NONE; + init_param.camera_disable_self_calib = true; + init_param.input.setFromSVOFile(it.second.c_str()); + + auto error = p_zed->open(init_param); + if (error == sl::ERROR_CODE::SUCCESS) + p_zeds.insert(std::make_pair(it.first, p_zed)); + else { + std::cerr << "Could not open file " << it.second.c_str() << ": " << sl::toString(error) << ". 
Skipping" << std::endl; + } + } + + // Compute the starting point, we have to take the latest one + sl::Timestamp start_ts = 0; + for (auto &it : p_zeds) { + it.second->grab(); + auto ts = it.second->getTimestamp(sl::TIME_REFERENCE::IMAGE); + + if (ts > start_ts) + start_ts = ts; + } + + std::cout << "Found SVOs common starting time: " << start_ts << std::endl; + + // The starting point is now known, let's find the frame idx for all corresponding + for (auto &it : p_zeds) { + auto frame_position_at_ts = it.second->getSVOPositionAtTimestamp(start_ts); + + if (frame_position_at_ts != -1) + output.insert(std::make_pair(it.first, frame_position_at_ts)); + } + + for (auto &it : p_zeds) it.second->close(); + + return output; +} diff --git a/object detection/multi-camera/cpp/src/ClientPublisher.cpp b/object detection/multi-camera/cpp/src/ClientPublisher.cpp new file mode 100644 index 00000000..7b13436b --- /dev/null +++ b/object detection/multi-camera/cpp/src/ClientPublisher.cpp @@ -0,0 +1,119 @@ +#include "ClientPublisher.hpp" + +ClientPublisher::ClientPublisher() : running(false) +{ +} + +ClientPublisher::~ClientPublisher() +{ + zed.close(); +} + +bool ClientPublisher::open(sl::InputType input) { + // already running + if (runner.joinable()) + return false; + + sl::InitParameters init_parameters; + init_parameters.depth_mode = sl::DEPTH_MODE::ULTRA; + init_parameters.input = input; + if (input.getType() == sl::InputType::INPUT_TYPE::SVO_FILE) + init_parameters.svo_real_time_mode = true; + init_parameters.coordinate_units = sl::UNIT::METER; + init_parameters.coordinate_system = sl::COORDINATE_SYSTEM::RIGHT_HANDED_Y_UP; + auto state = zed.open(init_parameters); + if (state != sl::ERROR_CODE::SUCCESS) + { + std::cout << "Error: " << state << std::endl; + return false; + } + + + // define the body tracking parameters, as the fusion can does the tracking and fitting you don't need to enable them here, unless you need it for your app + sl::BodyTrackingParameters 
body_tracking_parameters; + body_tracking_parameters.detection_model = sl::BODY_TRACKING_MODEL::HUMAN_BODY_MEDIUM; + body_tracking_parameters.body_format = sl::BODY_FORMAT::BODY_18; + body_tracking_parameters.enable_body_fitting = false; + body_tracking_parameters.enable_tracking = false; + state = zed.enableBodyTracking(body_tracking_parameters); + if (state != sl::ERROR_CODE::SUCCESS) + { + std::cout << "Error: " << state << std::endl; + return false; + } + + // define the body tracking parameters, as the fusion can does the tracking and fitting you don't need to enable them here, unless you need it for your app + sl::ObjectDetectionParameters object_detection_parameters; + object_detection_parameters.detection_model = sl::OBJECT_DETECTION_MODEL::MULTI_CLASS_BOX_ACCURATE; + object_detection_parameters.enable_tracking = false; + object_detection_parameters.instance_module_id = 20; + state = zed.enableObjectDetection(object_detection_parameters); + if (state != sl::ERROR_CODE::SUCCESS) + { + std::cout << "Error: " << state << std::endl; + return false; + } + + + // in most cases in body tracking setup, the cameras are static + sl::PositionalTrackingParameters positional_tracking_parameters; + // in most cases for body detection application the camera is static: + positional_tracking_parameters.set_as_static = true; + state = zed.enablePositionalTracking(positional_tracking_parameters); + if (state != sl::ERROR_CODE::SUCCESS) + { + std::cout << "Error: " << state << std::endl; + return false; + } + + + return true; +} + +void ClientPublisher::start() +{ + if (zed.isOpened()) { + running = true; + // the camera should stream its data so the fusion can subscibe to it to gather the detected body and others metadata needed for the process. 
+ zed.startPublishing(); + // the thread can start to process the camera grab in background + runner = std::thread(&ClientPublisher::work, this); + } +} + +void ClientPublisher::stop() +{ + running = false; + if (runner.joinable()) + runner.join(); + zed.close(); +} + +void ClientPublisher::work() +{ + // in this sample we use a dummy thread to process the ZED data. + // you can replace it by your own application and use the ZED like you use to, retrieve its images, depth, sensors data and so on. + // as long as you call the grab function and the retrieveObjects (which runs the detection) the camera will be able to seamlessly transmit the data to the fusion module. + while (running) { + if (zed.grab() == sl::ERROR_CODE::SUCCESS) { + /* + Your App + + */ + zed.retrieveObjects(objects, sl::ObjectDetectionRuntimeParameters(), 20); + zed.retrieveBodies(bodies); + } + std::this_thread::sleep_for(std::chrono::microseconds(100)); + + } +} + +void ClientPublisher::setStartSVOPosition(unsigned pos) { + zed.setSVOPosition(pos); + zed.grab(); +} + +sl::Objects ClientPublisher::getObjects(){ + return objects; +} + diff --git a/object detection/multi-camera/cpp/src/GLViewer.cpp b/object detection/multi-camera/cpp/src/GLViewer.cpp new file mode 100644 index 00000000..e98fd374 --- /dev/null +++ b/object detection/multi-camera/cpp/src/GLViewer.cpp @@ -0,0 +1,1142 @@ +#include "GLViewer.hpp" + +const GLchar* VERTEX_SHADER = + "#version 330 core\n" + "layout(location = 0) in vec3 in_Vertex;\n" + "layout(location = 1) in vec3 in_Color;\n" + "uniform mat4 u_mvpMatrix;\n" + "out vec3 b_color;\n" + "void main() {\n" + " b_color = in_Color.bgr;\n" + " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" + "}"; + +const GLchar* FRAGMENT_SHADER = + "#version 330 core\n" + "in vec3 b_color;\n" + "layout(location = 0) out vec4 color;\n" + "void main() {\n" + " color = vec4(b_color, 0.95);\n" + "}"; + + +const GLchar* POINTCLOUD_VERTEX_SHADER = + "#version 330 core\n" + "layout(location = 0) in 
vec4 in_VertexRGBA;\n" + "out vec4 b_color;\n" + "uniform mat4 u_mvpMatrix;\n" + "uniform vec3 u_color;\n" + "uniform bool u_drawFlat;\n" + "void main() {\n" + // Decompose the 4th channel of the XYZRGBA buffer to retrieve the color of the point (1float to 4uint) + " uint vertexColor = floatBitsToUint(in_VertexRGBA.w); \n" + " if(u_drawFlat)\n" + " b_color = vec4(u_color.bgr, .85f);\n" + "else{" + " vec3 clr_int = vec3((vertexColor & uint(0x000000FF)), (vertexColor & uint(0x0000FF00)) >> 8, (vertexColor & uint(0x00FF0000)) >> 16);\n" + " b_color = vec4(clr_int.b / 255.0f, clr_int.g / 255.0f, clr_int.r / 255.0f, .85f);\n" + " }" + " gl_Position = u_mvpMatrix * vec4(in_VertexRGBA.xyz, 1);\n" + "}"; + +const GLchar* POINTCLOUD_FRAGMENT_SHADER = + "#version 330 core\n" + "in vec4 b_color;\n" + "layout(location = 0) out vec4 out_Color;\n" + "void main() {\n" + " out_Color = b_color;\n" + "}"; + +const GLchar* VERTEX_SHADER_TEXTURE = + "#version 330 core\n" + "layout(location = 0) in vec3 in_Vertex;\n" + "layout(location = 1) in vec2 in_UVs;\n" + "uniform mat4 u_mvpMatrix;\n" + "out vec2 UV;\n" + "void main() {\n" + " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" + " UV = in_UVs;\n" + "}\n"; + +const GLchar* FRAGMENT_SHADER_TEXTURE = + "#version 330 core\n" + "in vec2 UV;\n" + "uniform sampler2D texture_sampler;\n" + "void main() {\n" + " gl_FragColor = vec4(texture(texture_sampler, UV).bgr, 1.0);\n" + "}\n"; + + +GLViewer* currentInstance_ = nullptr; + +GLViewer::GLViewer() : available(false) { + currentInstance_ = this; + mouseButton_[0] = mouseButton_[1] = mouseButton_[2] = false; + clearInputs(); + previousMouseMotion_[0] = previousMouseMotion_[1] = 0; +} + +GLViewer::~GLViewer() { +} + +void GLViewer::exit() { + if (currentInstance_) { + available = false; + } +} + +bool GLViewer::isAvailable() { + if (currentInstance_ && available) { + glutMainLoopEvent(); + } + return available; +} + +void CloseFunc(void) { + if (currentInstance_) currentInstance_->exit(); +} 
+ +void addVert(Simple3DObject &obj, float i_f, float limit, float height, sl::float4 &clr) { + auto p1 = sl::float3(i_f, height, -limit); + auto p2 = sl::float3(i_f, height, limit); + auto p3 = sl::float3(-limit, height, i_f); + auto p4 = sl::float3(limit, height, i_f); + + obj.addLine(p1, p2, clr); + obj.addLine(p3, p4, clr); +} + +void GLViewer::init(int argc, char **argv) { + + glutInit(&argc, argv); + int wnd_w = glutGet(GLUT_SCREEN_WIDTH); + int wnd_h = glutGet(GLUT_SCREEN_HEIGHT); + + glutInitWindowSize(1200, 700); + glutInitWindowPosition(wnd_w * 0.05, wnd_h * 0.05); + glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH); + + glutCreateWindow("ZED| 3D View"); + + GLenum err = glewInit(); + if (GLEW_OK != err) + std::cout << "ERROR: glewInit failed: " << glewGetErrorString(err) << "\n"; + + glutSetOption(GLUT_ACTION_ON_WINDOW_CLOSE, GLUT_ACTION_CONTINUE_EXECUTION); + + glEnable(GL_DEPTH_TEST); + glEnable(GL_TEXTURE_2D); + glEnable(GL_BLEND); + glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); + +#ifndef JETSON_STYLE + glEnable(GL_POINT_SMOOTH); +#endif + + // Compile and create the shader for 3D objects + shader.it = Shader(VERTEX_SHADER, FRAGMENT_SHADER); + shader.MVP_Mat = glGetUniformLocation(shader.it.getProgramId(), "u_mvpMatrix"); + + // Create the camera + camera_ = CameraGL(sl::Translation(0, 2, 10), sl::Translation(0, 0, -1)); + + // Create the skeletons objects + skeletons.setDrawingType(GL_LINES); + floor_grid.setDrawingType(GL_LINES); + + // Set background color (black) + bckgrnd_clr = sl::float4(0.2f, 0.19f, 0.2f, 1.0f); + + + float limit = 20.0f; + sl::float4 clr_grid(80, 80, 80, 255); + clr_grid /= 255.f; + + float grid_height = -0; + for (int i = (int) (-limit); i <= (int) (limit); i++) + addVert(floor_grid, i, limit, grid_height, clr_grid); + + floor_grid.pushToGPU(); + + std::random_device dev; + rng = std::mt19937(dev()); + uint_dist360 = std::uniform_int_distribution(0, 360); + + // Map glut function on this class methods + 
glutDisplayFunc(GLViewer::drawCallback); + glutMouseFunc(GLViewer::mouseButtonCallback); + glutMotionFunc(GLViewer::mouseMotionCallback); + glutReshapeFunc(GLViewer::reshapeCallback); + glutKeyboardFunc(GLViewer::keyPressedCallback); + glutKeyboardUpFunc(GLViewer::keyReleasedCallback); + glutCloseFunc(CloseFunc); + + available = true; +} + +sl::float3 newColor(float hh) { + float s = 1.; + float v = 1.; + + sl::float3 clr; + int i = (int)hh; + float ff = hh - i; + float p = v * (1.0 - s); + float q = v * (1.0 - (s * ff)); + float t = v * (1.0 - (s * (1.0 - ff))); + switch (i) { + case 0: + clr.r = v; + clr.g = t; + clr.b = p; + break; + case 1: + clr.r = q; + clr.g = v; + clr.b = p; + break; + case 2: + clr.r = p; + clr.g = v; + clr.b = t; + break; + + case 3: + clr.r = p; + clr.g = q; + clr.b = v; + break; + case 4: + clr.r = t; + clr.g = p; + clr.b = v; + break; + case 5: + default: + clr.r = v; + clr.g = p; + clr.b = q; + break; + } + return clr; +} + +void GLViewer::updateCamera(int id, sl::Mat &view, sl::Mat &pc){ + mtx.lock(); + if (colors.find(id) == colors.end()) { + float hh = uint_dist360(rng) / 60.f; + colors[id] = newColor(hh); + } + + if(view.isInit() && viewers.find(id) == viewers.end()) + viewers[id].initialize(view, colors[id]); + + if(pc.isInit() && point_clouds.find(id) == point_clouds.end()) + point_clouds[id].initialize(pc, colors[id]); + + mtx.unlock(); +} + +void GLViewer::setRenderCameraProjection(sl::CameraParameters params, float znear, float zfar) { + // Just slightly up the ZED camera FOV to make a small black border + float fov_y = (params.v_fov + 0.5f) * M_PI / 180.f; + float fov_x = (params.h_fov + 0.5f) * M_PI / 180.f; + + projection_(0, 0) = 1.0f / tanf(fov_x * 0.5f); + projection_(1, 1) = 1.0f / tanf(fov_y * 0.5f); + projection_(2, 2) = -(zfar + znear) / (zfar - znear); + projection_(3, 2) = -1; + projection_(2, 3) = -(2.f * zfar * znear) / (zfar - znear); + projection_(3, 3) = 0; + + projection_(0, 0) = 1.0f / tanf(fov_x * 0.5f); 
//Horizontal FoV. + projection_(0, 1) = 0; + projection_(0, 2) = 2.0f * ((params.image_size.width - 1.0f * params.cx) / params.image_size.width) - 1.0f; //Horizontal offset. + projection_(0, 3) = 0; + + projection_(1, 0) = 0; + projection_(1, 1) = 1.0f / tanf(fov_y * 0.5f); //Vertical FoV. + projection_(1, 2) = -(2.0f * ((params.image_size.height - 1.0f * params.cy) / params.image_size.height) - 1.0f); //Vertical offset. + projection_(1, 3) = 0; + + projection_(2, 0) = 0; + projection_(2, 1) = 0; + projection_(2, 2) = -(zfar + znear) / (zfar - znear); //Near and far planes. + projection_(2, 3) = -(2.0f * zfar * znear) / (zfar - znear); //Near and far planes. + + projection_(3, 0) = 0; + projection_(3, 1) = 0; + projection_(3, 2) = -1; + projection_(3, 3) = 0.0f; +} + +void GLViewer::render() { + if (available) { + glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); + glClearColor(bckgrnd_clr.r, bckgrnd_clr.g, bckgrnd_clr.b, 1.f); + update(); + draw(); + printText(); + glutSwapBuffers(); + glutPostRedisplay(); + } +} + +void GLViewer::setCameraPose(int id, sl::Transform pose) { + mtx.lock(); + poses[id] = pose; + if (colors.find(id) == colors.end()) { + float hh = uint_dist360(rng) / 60.f; + colors[id] = newColor(hh); + } + mtx.unlock(); +} + +inline bool renderObject(const sl::ObjectData& i, const bool isTrackingON) { + if (isTrackingON) + return (i.tracking_state == sl::OBJECT_TRACKING_STATE::OK); + else + return (i.tracking_state == sl::OBJECT_TRACKING_STATE::OK || i.tracking_state == sl::OBJECT_TRACKING_STATE::OFF); +} + +template +void createSKPrimitive(sl::BodyData& body, const std::vector>& map, Simple3DObject& skp, sl::float3 clr_id, bool raw) { + const float cylinder_thickness = raw ? 
0.01f : 0.025f; + + for (auto& limb : map) { + sl::float3 kp_1 = body.keypoint[getIdx(limb.first)]; + sl::float3 kp_2 = body.keypoint[getIdx(limb.second)]; + if (std::isfinite(kp_1.norm()) && std::isfinite(kp_2.norm())) + skp.addLine(kp_1, kp_2, clr_id); + } +} + +void GLViewer::addObject(sl::ObjectData &obj, Simple3DObject &simpleObj, sl::float3 clr_id, bool raw) { + simpleObj.addBBox(obj.bounding_box, clr_id); +} + +// void GLViewer::addSKeleton(sl::BodyData& obj, Simple3DObject& simpleObj, sl::float3 clr_id, bool raw) { +// switch (obj.keypoint.size()) { +// case 18: +// addSKeleton(obj, simpleObj, clr_id, raw, sl::BODY_FORMAT::BODY_18); +// break; +// case 34: +// addSKeleton(obj, simpleObj, clr_id, raw, sl::BODY_FORMAT::BODY_34); +// break; +// case 38: +// addSKeleton(obj, simpleObj, clr_id, raw, sl::BODY_FORMAT::BODY_38); +// break; +// } +// } + +void GLViewer::updateObjects(sl::Objects &objects, std::map& singldata, sl::FusionMetrics& metrics) { + mtx.lock(); + + if (objects.is_new) { + skeletons.clear(); + for(auto &it:objects.object_list) { + + if (colors_sk.find(it.id) == colors_sk.end()) { + float hh = uint_dist360(rng) / 60.f; + colors_sk[it.id] = newColor(hh); + } + + if (renderObject(it, objects.is_tracked)) + { + addObject(it, skeletons, colors_sk[it.id], false); + } + } + + } + + fusionStats.clear(); + int id = 0; + + ObjectClassName obj_str; + obj_str.name_lineA = "Publishers :" + std::to_string(metrics.mean_camera_fused); + obj_str.name_lineB = "Sync :" + std::to_string(metrics.mean_stdev_between_camera * 1000.f); + obj_str.color = sl::float4(0.9,0.9,0.9,1); + obj_str.position = sl::float3(10, (id * 30), 0); + fusionStats.push_back(obj_str); + + for (auto &it : singldata) { + auto clr = colors[it.first.sn]; + id++; + if (it.second.is_new) + { + auto& sk_r = skeletons_raw[it.first.sn]; + sk_r.clear(); + sk_r.setDrawingType(GL_LINES); + + for (auto& obj : it.second.object_list) { + if(renderObject(obj, it.second.is_tracked)) + { + addObject(obj, 
sk_r, clr, true); + } + } + } + + ObjectClassName obj_str; + obj_str.name_lineA = "CAM: " + std::to_string(it.first.sn) + " FPS: " + std::to_string(metrics.camera_individual_stats[it.first].received_fps); + obj_str.name_lineB = "Ratio Detection :" + std::to_string(metrics.camera_individual_stats[it.first].ratio_detection) + " Delta " + std::to_string(metrics.camera_individual_stats[it.first].delta_ts * 1000.f); + obj_str.color = clr; + obj_str.position = sl::float3(10, (id * 30), 0); + fusionStats.push_back(obj_str); + } + mtx.unlock(); +} + +void GLViewer::update() { + + if (keyStates_['q'] == KEY_STATE::UP || keyStates_['Q'] == KEY_STATE::UP || keyStates_[27] == KEY_STATE::UP) { + currentInstance_->exit(); + return; + } + + if (keyStates_['r'] == KEY_STATE::UP) + currentInstance_->show_raw = !currentInstance_->show_raw; + + if (keyStates_['c'] == KEY_STATE::UP) + currentInstance_->draw_flat_color = !currentInstance_->draw_flat_color; + + if (keyStates_['p'] == KEY_STATE::UP) + currentInstance_->show_pc = !currentInstance_->show_pc; + + // Rotate camera with mouse + if (mouseButton_[MOUSE_BUTTON::LEFT]) { + camera_.rotate(sl::Rotation((float) mouseMotion_[1] * MOUSE_R_SENSITIVITY, camera_.getRight())); + camera_.rotate(sl::Rotation((float) mouseMotion_[0] * MOUSE_R_SENSITIVITY, camera_.getVertical() * -1.f)); + } + + // Translate camera with mouse + if (mouseButton_[MOUSE_BUTTON::RIGHT]) { + camera_.translate(camera_.getUp() * (float) mouseMotion_[1] * MOUSE_T_SENSITIVITY); + camera_.translate(camera_.getRight() * (float) mouseMotion_[0] * MOUSE_T_SENSITIVITY); + } + + // Zoom in with mouse wheel + if (mouseWheelPosition_ != 0) { + //float distance = sl::Translation(camera_.getOffsetFromPosition()).norm(); + if (mouseWheelPosition_ > 0 /* && distance > camera_.getZNear()*/) { // zoom + camera_.translate(camera_.getForward() * MOUSE_UZ_SENSITIVITY * 0.5f * -1); + } else if (/*distance < camera_.getZFar()*/ mouseWheelPosition_ < 0) {// unzoom + 
//camera_.setOffsetFromPosition(camera_.getOffsetFromPosition() * MOUSE_DZ_SENSITIVITY); + camera_.translate(camera_.getForward() * MOUSE_UZ_SENSITIVITY * 0.5f); + } + } + + camera_.update(); + mtx.lock(); + // Update point cloud buffers + skeletons.pushToGPU(); + for(auto &it: skeletons_raw) + it.second.pushToGPU(); + + for(auto &it: point_clouds) + it.second.pushNewPC(); + + for(auto &it: viewers) + it.second.pushNewImage(); + + mtx.unlock(); + clearInputs(); +} + + +void GLViewer::draw() { + + glPolygonMode(GL_FRONT, GL_LINE); + glPolygonMode(GL_BACK, GL_LINE); + glLineWidth(2.f); + glPointSize(1.f); + + sl::Transform vpMatrix = camera_.getViewProjectionMatrix(); + glUseProgram(shader.it.getProgramId()); + glUniformMatrix4fv(shader.MVP_Mat, 1, GL_TRUE, vpMatrix.m); + + floor_grid.draw(); + skeletons.draw(); + + if (show_raw) + for (auto& it : skeletons_raw) + it.second.draw(); + + for (auto& it : viewers) { + sl::Transform pose_ = vpMatrix * poses[it.first]; + glUniformMatrix4fv(shader.MVP_Mat, 1, GL_FALSE, sl::Transform::transpose(pose_).m); + viewers[it.first].frustum.draw(); + } + + glUseProgram(0); + + for (auto& it : poses) { + sl::Transform vpMatrix_world = vpMatrix * it.second; + + if(show_pc) + if(point_clouds.find(it.first) != point_clouds.end()) + point_clouds[it.first].draw(vpMatrix_world, draw_flat_color); + + if (viewers.find(it.first) != viewers.end()) + viewers[it.first].draw(vpMatrix_world); + } +} + +sl::float2 compute3Dprojection(sl::float3 &pt, const sl::Transform &cam, sl::Resolution wnd_size) { + sl::float4 pt4d(pt.x, pt.y, pt.z, 1.); + auto proj3D_cam = pt4d * cam; + sl::float2 proj2D; + proj2D.x = ((proj3D_cam.x / pt4d.w) * wnd_size.width) / (2.f * proj3D_cam.w) + wnd_size.width / 2.f; + proj2D.y = ((proj3D_cam.y / pt4d.w) * wnd_size.height) / (2.f * proj3D_cam.w) + wnd_size.height / 2.f; + return proj2D; +} + +void GLViewer::printText() { + + sl::Resolution wnd_size(glutGet(GLUT_WINDOW_WIDTH), glutGet(GLUT_WINDOW_HEIGHT)); + for (auto &it 
: fusionStats) { +#if 0 + auto pt2d = compute3Dprojection(it.position, projection_, wnd_size); +#else + sl::float2 pt2d(it.position.x, it.position.y); +#endif + glColor4f(it.color.b, it.color.g, it.color.r, .85f); + const auto *string = it.name_lineA.c_str(); + glWindowPos2f(pt2d.x, pt2d.y + 15); + int len = (int) strlen(string); + for (int i = 0; i < len; i++) + glutBitmapCharacter(GLUT_BITMAP_HELVETICA_12, string[i]); + + string = it.name_lineB.c_str(); + glWindowPos2f(pt2d.x, pt2d.y); + len = (int) strlen(string); + for (int i = 0; i < len; i++) + glutBitmapCharacter(GLUT_BITMAP_HELVETICA_12, string[i]); + } +} + +void GLViewer::clearInputs() { + mouseMotion_[0] = mouseMotion_[1] = 0; + mouseWheelPosition_ = 0; + for (unsigned int i = 0; i < 256; ++i) + if (keyStates_[i] != KEY_STATE::DOWN) + keyStates_[i] = KEY_STATE::FREE; +} + +void GLViewer::drawCallback() { + currentInstance_->render(); +} + +void GLViewer::mouseButtonCallback(int button, int state, int x, int y) { + if (button < 5) { + if (button < 3) { + currentInstance_->mouseButton_[button] = state == GLUT_DOWN; + } else { + currentInstance_->mouseWheelPosition_ += button == MOUSE_BUTTON::WHEEL_UP ? 
1 : -1; + } + currentInstance_->mouseCurrentPosition_[0] = x; + currentInstance_->mouseCurrentPosition_[1] = y; + currentInstance_->previousMouseMotion_[0] = x; + currentInstance_->previousMouseMotion_[1] = y; + } +} + +void GLViewer::mouseMotionCallback(int x, int y) { + currentInstance_->mouseMotion_[0] = x - currentInstance_->previousMouseMotion_[0]; + currentInstance_->mouseMotion_[1] = y - currentInstance_->previousMouseMotion_[1]; + currentInstance_->previousMouseMotion_[0] = x; + currentInstance_->previousMouseMotion_[1] = y; +} + +void GLViewer::reshapeCallback(int width, int height) { + glViewport(0, 0, width, height); + float hfov = (180.0f / M_PI) * (2.0f * atan(width / (2.0f * 500))); + float vfov = (180.0f / M_PI) * (2.0f * atan(height / (2.0f * 500))); + currentInstance_->camera_.setProjection(hfov, vfov, currentInstance_->camera_.getZNear(), currentInstance_->camera_.getZFar()); +} + +void GLViewer::keyPressedCallback(unsigned char c, int x, int y) { + currentInstance_->keyStates_[c] = KEY_STATE::DOWN; + currentInstance_->lastPressedKey = c; + //glutPostRedisplay(); +} + +void GLViewer::keyReleasedCallback(unsigned char c, int x, int y) { + currentInstance_->keyStates_[c] = KEY_STATE::UP; +} + +void GLViewer::idle() { + glutPostRedisplay(); +} + +Simple3DObject::Simple3DObject() { + vaoID_ = 0; + drawingType_ = GL_TRIANGLES; + isStatic_ = need_update = false; +} + +Simple3DObject::~Simple3DObject() { + clear(); + if (vaoID_ != 0) { + glDeleteBuffers(3, vboID_); + glDeleteVertexArrays(1, &vaoID_); + } +} + +void Simple3DObject::addPoint(sl::float3 pt, sl::float3 clr){ + vertices_.push_back(pt); + colors_.push_back(clr); + indices_.push_back((int) indices_.size()); + need_update = true; +} + +void Simple3DObject::addLine(sl::float3 pt1, sl::float3 pt2, sl::float3 clr){ + addPoint(pt1, clr); + addPoint(pt2, clr); +} + +void Simple3DObject::addFace(sl::float3 p1, sl::float3 p2, sl::float3 p3, sl::float3 clr){ + addPoint(p1, clr); + addPoint(p2, clr); + 
addPoint(p3, clr); +} +void Simple3DObject::addBBox(std::vector &pts, sl::float3 clr) { + int start_id = vertices_.size() / 3; + + float transparency_top = 0.05f, transparency_bottom = 0.75f; + for (unsigned int i = 0; i < pts.size(); i++) { + // clr.a = (i < 4 ? transparency_top : transparency_bottom); + addPoint(pts[i], clr); + } + + const std::vector boxLinks = { 4, 5, 5, 6, 6, 7, 7, 4, 0, 4, 1, 5, 2, 6, 3, 7 }; + + for (unsigned int i = 0; i < boxLinks.size(); i += 2) { + indices_.push_back(start_id + boxLinks[i]); + indices_.push_back(start_id + boxLinks[i + 1]); + } +} + +void Simple3DObject::addPt(sl::float3 pt) { + // vertices_.push_back(pt.x); + // vertices_.push_back(pt.y); + // vertices_.push_back(pt.z); +} + +void Simple3DObject::addClr(sl::float4 clr) { + // colors_.push_back(clr.r); + // colors_.push_back(clr.g); + // colors_.push_back(clr.b); + // colors_.push_back(clr.a); +} + + +void Simple3DObject::pushToGPU() { + if(!need_update) return; + + if (!isStatic_ || vaoID_ == 0) { + if (vaoID_ == 0) { + glGenVertexArrays(1, &vaoID_); + glGenBuffers(3, vboID_); + } + glBindVertexArray(vaoID_); + glBindBuffer(GL_ARRAY_BUFFER, vboID_[0]); + glBufferData(GL_ARRAY_BUFFER, vertices_.size() * sizeof(sl::float3), &vertices_[0], isStatic_ ? GL_STATIC_DRAW : GL_DYNAMIC_DRAW); + glVertexAttribPointer(Shader::ATTRIB_VERTICES_POS, 3, GL_FLOAT, GL_FALSE, 0, 0); + glEnableVertexAttribArray(Shader::ATTRIB_VERTICES_POS); + + glBindBuffer(GL_ARRAY_BUFFER, vboID_[1]); + glBufferData(GL_ARRAY_BUFFER, colors_.size() * sizeof(sl::float3), &colors_[0], isStatic_ ? GL_STATIC_DRAW : GL_DYNAMIC_DRAW); + glVertexAttribPointer(Shader::ATTRIB_COLOR_POS, 3, GL_FLOAT, GL_FALSE, 0, 0); + glEnableVertexAttribArray(Shader::ATTRIB_COLOR_POS); + + glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vboID_[2]); + glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices_.size() * sizeof (unsigned int), &indices_[0], isStatic_ ? 
GL_STATIC_DRAW : GL_DYNAMIC_DRAW); + + glBindVertexArray(0); + glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0); + glBindBuffer(GL_ARRAY_BUFFER, 0); + need_update = false; + } +} + +void Simple3DObject::clear() { + vertices_.clear(); + colors_.clear(); + indices_.clear(); +} + +void Simple3DObject::setDrawingType(GLenum type) { + drawingType_ = type; +} + +void Simple3DObject::draw() { + glBindVertexArray(vaoID_); + glDrawElements(drawingType_, (GLsizei) indices_.size(), GL_UNSIGNED_INT, 0); + glBindVertexArray(0); +} + +Shader::Shader(const GLchar* vs, const GLchar* fs) { + if (!compile(verterxId_, GL_VERTEX_SHADER, vs)) { + std::cout << "ERROR: while compiling vertex shader" << std::endl; + } + if (!compile(fragmentId_, GL_FRAGMENT_SHADER, fs)) { + std::cout << "ERROR: while compiling fragment shader" << std::endl; + } + + programId_ = glCreateProgram(); + + glAttachShader(programId_, verterxId_); + glAttachShader(programId_, fragmentId_); + + glBindAttribLocation(programId_, ATTRIB_VERTICES_POS, "in_vertex"); + glBindAttribLocation(programId_, ATTRIB_COLOR_POS, "in_texCoord"); + + glLinkProgram(programId_); + + GLint errorlk(0); + glGetProgramiv(programId_, GL_LINK_STATUS, &errorlk); + if (errorlk != GL_TRUE) { + std::cout << "ERROR: while linking Shader :" << std::endl; + GLint errorSize(0); + glGetProgramiv(programId_, GL_INFO_LOG_LENGTH, &errorSize); + + char *error = new char[errorSize + 1]; + glGetShaderInfoLog(programId_, errorSize, &errorSize, error); + error[errorSize] = '\0'; + std::cout << error << std::endl; + + delete[] error; + glDeleteProgram(programId_); + } +} + +Shader::~Shader() { + if (verterxId_ != 0) + glDeleteShader(verterxId_); + if (fragmentId_ != 0) + glDeleteShader(fragmentId_); + if (programId_ != 0) + glDeleteShader(programId_); +} + +GLuint Shader::getProgramId() { + return programId_; +} + +bool Shader::compile(GLuint &shaderId, GLenum type, const GLchar* src) { + shaderId = glCreateShader(type); + if (shaderId == 0) { + std::cout << 
"ERROR: shader type (" << type << ") does not exist" << std::endl; + return false; + } + glShaderSource(shaderId, 1, (const char**) &src, 0); + glCompileShader(shaderId); + + GLint errorCp(0); + glGetShaderiv(shaderId, GL_COMPILE_STATUS, &errorCp); + if (errorCp != GL_TRUE) { + std::cout << "ERROR: while compiling Shader :" << std::endl; + GLint errorSize(0); + glGetShaderiv(shaderId, GL_INFO_LOG_LENGTH, &errorSize); + + char *error = new char[errorSize + 1]; + glGetShaderInfoLog(shaderId, errorSize, &errorSize, error); + error[errorSize] = '\0'; + std::cout << error << std::endl; + + delete[] error; + glDeleteShader(shaderId); + return false; + } + return true; +} + +const GLchar* IMAGE_FRAGMENT_SHADER = + "#version 330 core\n" + " in vec2 UV;\n" + " out vec4 color;\n" + " uniform sampler2D texImage;\n" + " void main() {\n" + " vec2 scaler =vec2(UV.x,1.f - UV.y);\n" + " vec3 rgbcolor = vec3(texture(texImage, scaler).zyx);\n" + " vec3 color_rgb = pow(rgbcolor, vec3(1.65f));\n" + " color = vec4(color_rgb,1);\n" + "}"; + +const GLchar* IMAGE_VERTEX_SHADER = + "#version 330\n" + "layout(location = 0) in vec3 vert;\n" + "out vec2 UV;" + "void main() {\n" + " UV = (vert.xy+vec2(1,1))* .5f;\n" + " gl_Position = vec4(vert, 1);\n" + "}\n"; + + +PointCloud::PointCloud() { + +} + +PointCloud::~PointCloud() { + close(); +} + +void PointCloud::close() { + if (refMat.isInit()) { + auto err = cudaGraphicsUnmapResources(1, &bufferCudaID_, 0); + if (err != cudaSuccess) + std::cerr << "Error: CUDA UnmapResources (" << err << ")" << std::endl; + glDeleteBuffers(1, &bufferGLID_); + refMat.free(); + } +} + +void PointCloud::initialize(sl::Mat &ref, sl::float3 clr_) { + + refMat = ref; + clr = clr_; + + glGenBuffers(1, &bufferGLID_); + glBindBuffer(GL_ARRAY_BUFFER, bufferGLID_); + glBufferData(GL_ARRAY_BUFFER, refMat.getResolution().area() * 4 * sizeof (float), 0, GL_STATIC_DRAW); + glBindBuffer(GL_ARRAY_BUFFER, 0); + + cudaError_t err = cudaGraphicsGLRegisterBuffer(&bufferCudaID_, 
bufferGLID_, cudaGraphicsRegisterFlagsNone); + if (err != cudaSuccess) + std::cerr << "Error: CUDA - OpenGL Interop failed (" << err << ")" << std::endl; + + err = cudaGraphicsMapResources(1, &bufferCudaID_, 0); + if (err != cudaSuccess) + std::cerr << "Error: CUDA MapResources (" << err << ")" << std::endl; + + err = cudaGraphicsResourceGetMappedPointer((void**) &xyzrgbaMappedBuf_, &numBytes_, bufferCudaID_); + if (err != cudaSuccess) + std::cerr << "Error: CUDA GetMappedPointer (" << err << ")" << std::endl; + + shader_ = Shader(POINTCLOUD_VERTEX_SHADER, POINTCLOUD_FRAGMENT_SHADER); + shMVPMatrixLoc_ = glGetUniformLocation(shader_.getProgramId(), "u_mvpMatrix"); + shColor = glGetUniformLocation(shader_.getProgramId(), "u_color"); + shDrawColor = glGetUniformLocation(shader_.getProgramId(), "u_drawFlat"); +} + +void PointCloud::pushNewPC() { + if (refMat.isInit()) + cudaMemcpy(xyzrgbaMappedBuf_, refMat.getPtr(sl::MEM::CPU), numBytes_, cudaMemcpyHostToDevice); +} + +void PointCloud::draw(const sl::Transform& vp, bool draw_flat) { + if (refMat.isInit()) { +#ifndef JETSON_STYLE + glDisable(GL_BLEND); +#endif + + glUseProgram(shader_.getProgramId()); + glUniformMatrix4fv(shMVPMatrixLoc_, 1, GL_TRUE, vp.m); + + glUniform3fv(shColor, 1, clr.v); + glUniform1i(shDrawColor, draw_flat); + + glBindBuffer(GL_ARRAY_BUFFER, bufferGLID_); + glVertexAttribPointer(Shader::ATTRIB_VERTICES_POS, 4, GL_FLOAT, GL_FALSE, 0, 0); + glEnableVertexAttribArray(Shader::ATTRIB_VERTICES_POS); + + glDrawArrays(GL_POINTS, 0, refMat.getResolution().area()); + glBindBuffer(GL_ARRAY_BUFFER, 0); + glUseProgram(0); + +#ifndef JETSON_STYLE + glEnable(GL_BLEND); +#endif + } +} + + +CameraViewer::CameraViewer() { + +} + +CameraViewer::~CameraViewer() { + close(); +} + +void CameraViewer::close() { + if (ref.isInit()) { + + auto err = cudaGraphicsUnmapResources(1, &cuda_gl_ressource, 0); + if (err) std::cout << "err 3 " << err << " " << cudaGetErrorString(err) << "\n"; + + glDeleteTextures(1, &texture); + 
glDeleteBuffers(3, vboID_); + glDeleteVertexArrays(1, &vaoID_); + ref.free(); + } +} + +bool CameraViewer::initialize(sl::Mat &im, sl::float3 clr) { + + // Create 3D axis + float fx,fy,cx,cy; + fx = fy = 1400; + float width, height; + width = 2208; + height = 1242; + cx = width /2; + cy = height /2; + + float Z_ = .5f; + sl::float3 toOGL(1,-1,-1); + sl::float3 cam_0(0, 0, 0); + sl::float3 cam_1, cam_2, cam_3, cam_4; + + float fx_ = 1.f / fx; + float fy_ = 1.f / fy; + + cam_1.z = Z_; + cam_1.x = (0 - cx) * Z_ *fx_; + cam_1.y = (0 - cy) * Z_ *fy_ ; + cam_1 *= toOGL; + + cam_2.z = Z_; + cam_2.x = (width - cx) * Z_ *fx_; + cam_2.y = (0 - cy) * Z_ *fy_; + cam_2 *= toOGL; + + cam_3.z = Z_; + cam_3.x = (width - cx) * Z_ *fx_; + cam_3.y = (height - cy) * Z_ *fy_; + cam_3 *= toOGL; + + cam_4.z = Z_; + cam_4.x = (0 - cx) * Z_ *fx_; + cam_4.y = (height - cy) * Z_ *fy_; + cam_4 *= toOGL; + + + frustum.addFace(cam_0, cam_1, cam_2, clr); + frustum.addFace(cam_0, cam_2, cam_3, clr); + frustum.addFace(cam_0, cam_3, cam_4, clr); + frustum.addFace(cam_0, cam_4, cam_1, clr); + frustum.setDrawingType(GL_TRIANGLES); + frustum.pushToGPU(); + + vert.push_back(cam_1); + vert.push_back(cam_2); + vert.push_back(cam_3); + vert.push_back(cam_4); + + uv.push_back(sl::float2(0,0)); + uv.push_back(sl::float2(1,0)); + uv.push_back(sl::float2(1,1)); + uv.push_back(sl::float2(0,1)); + + faces.push_back(sl::uint3(0,1,2)); + faces.push_back(sl::uint3(0,2,3)); + + ref = im; + shader = Shader(VERTEX_SHADER_TEXTURE, FRAGMENT_SHADER_TEXTURE); + shMVPMatrixLocTex_ = glGetUniformLocation(shader.getProgramId(), "u_mvpMatrix"); + + glGenVertexArrays(1, &vaoID_); + glGenBuffers(3, vboID_); + + glBindVertexArray(vaoID_); + glBindBuffer(GL_ARRAY_BUFFER, vboID_[0]); + glBufferData(GL_ARRAY_BUFFER, vert.size() * sizeof(sl::float3), &vert[0], GL_STATIC_DRAW); + glVertexAttribPointer(Shader::ATTRIB_VERTICES_POS, 3, GL_FLOAT, GL_FALSE, 0, 0); + glEnableVertexAttribArray(Shader::ATTRIB_VERTICES_POS); + + 
glBindBuffer(GL_ARRAY_BUFFER, vboID_[1]); + glBufferData(GL_ARRAY_BUFFER, uv.size() * sizeof(sl::float2), &uv[0], GL_STATIC_DRAW); + glVertexAttribPointer(Shader::ATTRIB_COLOR_POS, 2, GL_FLOAT, GL_FALSE, 0, 0); + glEnableVertexAttribArray(Shader::ATTRIB_COLOR_POS); + + glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vboID_[2]); + glBufferData(GL_ELEMENT_ARRAY_BUFFER, faces.size() * sizeof(sl::uint3), &faces[0], GL_STATIC_DRAW); + + glBindVertexArray(0); + glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0); + glBindBuffer(GL_ARRAY_BUFFER, 0); + + auto res = ref.getResolution(); + glGenTextures(1, &texture); + glBindTexture(GL_TEXTURE_2D, texture); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, res.width, res.height, 0, GL_BGRA_EXT, GL_UNSIGNED_BYTE, NULL); + glBindTexture(GL_TEXTURE_2D, 0); + cudaError_t err = cudaGraphicsGLRegisterImage(&cuda_gl_ressource, texture, GL_TEXTURE_2D, cudaGraphicsRegisterFlagsWriteDiscard); + if (err) std::cout << "err alloc " << err << " " << cudaGetErrorString(err) << "\n"; + glDisable(GL_TEXTURE_2D); + + err = cudaGraphicsMapResources(1, &cuda_gl_ressource, 0); + if (err) std::cout << "err 0 " << err << " " << cudaGetErrorString(err) << "\n"; + err = cudaGraphicsSubResourceGetMappedArray(&ArrIm, cuda_gl_ressource, 0, 0); + if (err) std::cout << "err 1 " << err << " " << cudaGetErrorString(err) << "\n"; + + return (err == cudaSuccess); +} + +void CameraViewer::pushNewImage() { + if (!ref.isInit()) return; + auto err = cudaMemcpy2DToArray(ArrIm, 0, 0, ref.getPtr(sl::MEM::CPU), ref.getStepBytes(sl::MEM::CPU), ref.getPixelBytes() * ref.getWidth(), ref.getHeight(), cudaMemcpyHostToDevice); + if (err) std::cout << "err 2 " << err << " " << cudaGetErrorString(err) << "\n"; +} + +void CameraViewer::draw(sl::Transform vpMatrix) { + if (!ref.isInit()) return; + + glUseProgram(shader.getProgramId()); + glPolygonMode(GL_FRONT_AND_BACK, 
GL_FILL); + + glUniformMatrix4fv(shMVPMatrixLocTex_, 1, GL_FALSE, sl::Transform::transpose(vpMatrix).m); + glBindTexture(GL_TEXTURE_2D, texture); + + glBindVertexArray(vaoID_); + glDrawElements(GL_TRIANGLES, (GLsizei)faces.size()*3, GL_UNSIGNED_INT, 0); + glBindVertexArray(0); + + glUseProgram(0); +} + + +const sl::Translation CameraGL::ORIGINAL_FORWARD = sl::Translation(0, 0, 1); +const sl::Translation CameraGL::ORIGINAL_UP = sl::Translation(0, 1, 0); +const sl::Translation CameraGL::ORIGINAL_RIGHT = sl::Translation(1, 0, 0); + +CameraGL::CameraGL(sl::Translation position, sl::Translation direction, sl::Translation vertical) { + this->position_ = position; + setDirection(direction, vertical); + + offset_ = sl::Translation(0, 0, 0); + view_.setIdentity(); + updateView(); + setProjection(70, 70, 0.200f, 50.f); + updateVPMatrix(); +} + +CameraGL::~CameraGL() { +} + +void CameraGL::update() { + if (sl::Translation::dot(vertical_, up_) < 0) + vertical_ = vertical_ * -1.f; + updateView(); + updateVPMatrix(); +} + +void CameraGL::setProjection(float horizontalFOV, float verticalFOV, float znear, float zfar) { + horizontalFieldOfView_ = horizontalFOV; + verticalFieldOfView_ = verticalFOV; + znear_ = znear; + zfar_ = zfar; + + float fov_y = verticalFOV * M_PI / 180.f; + float fov_x = horizontalFOV * M_PI / 180.f; + + projection_.setIdentity(); + projection_(0, 0) = 1.0f / tanf(fov_x * 0.5f); + projection_(1, 1) = 1.0f / tanf(fov_y * 0.5f); + projection_(2, 2) = -(zfar + znear) / (zfar - znear); + projection_(3, 2) = -1; + projection_(2, 3) = -(2.f * zfar * znear) / (zfar - znear); + projection_(3, 3) = 0; +} + +const sl::Transform& CameraGL::getViewProjectionMatrix() const { + return vpMatrix_; +} + +float CameraGL::getHorizontalFOV() const { + return horizontalFieldOfView_; +} + +float CameraGL::getVerticalFOV() const { + return verticalFieldOfView_; +} + +void CameraGL::setOffsetFromPosition(const sl::Translation& o) { + offset_ = o; +} + +const sl::Translation& 
CameraGL::getOffsetFromPosition() const { + return offset_; +} + +void CameraGL::setDirection(const sl::Translation& direction, const sl::Translation& vertical) { + sl::Translation dirNormalized = direction; + dirNormalized.normalize(); + this->rotation_ = sl::Orientation(ORIGINAL_FORWARD, dirNormalized * -1.f); + updateVectors(); + this->vertical_ = vertical; + if (sl::Translation::dot(vertical_, up_) < 0) + rotate(sl::Rotation(M_PI, ORIGINAL_FORWARD)); +} + +void CameraGL::translate(const sl::Translation& t) { + position_ = position_ + t; +} + +void CameraGL::setPosition(const sl::Translation& p) { + position_ = p; +} + +void CameraGL::rotate(const sl::Orientation& rot) { + rotation_ = rot * rotation_; + updateVectors(); +} + +void CameraGL::rotate(const sl::Rotation& m) { + this->rotate(sl::Orientation(m)); +} + +void CameraGL::setRotation(const sl::Orientation& rot) { + rotation_ = rot; + updateVectors(); +} + +void CameraGL::setRotation(const sl::Rotation& m) { + this->setRotation(sl::Orientation(m)); +} + +const sl::Translation& CameraGL::getPosition() const { + return position_; +} + +const sl::Translation& CameraGL::getForward() const { + return forward_; +} + +const sl::Translation& CameraGL::getRight() const { + return right_; +} + +const sl::Translation& CameraGL::getUp() const { + return up_; +} + +const sl::Translation& CameraGL::getVertical() const { + return vertical_; +} + +float CameraGL::getZNear() const { + return znear_; +} + +float CameraGL::getZFar() const { + return zfar_; +} + +void CameraGL::updateVectors() { + forward_ = ORIGINAL_FORWARD * rotation_; + up_ = ORIGINAL_UP * rotation_; + right_ = sl::Translation(ORIGINAL_RIGHT * -1.f) * rotation_; +} + +void CameraGL::updateView() { + sl::Transform transformation(rotation_, (offset_ * rotation_) + position_); + view_ = sl::Transform::inverse(transformation); +} + +void CameraGL::updateVPMatrix() { + vpMatrix_ = projection_ * view_; +} diff --git a/object 
detection/multi-camera/cpp/src/main.cpp b/object detection/multi-camera/cpp/src/main.cpp new file mode 100644 index 00000000..ff3f90cb --- /dev/null +++ b/object detection/multi-camera/cpp/src/main.cpp @@ -0,0 +1,195 @@ +/////////////////////////////////////////////////////////////////////////// +// +// Copyright (c) 2024, STEREOLABS. +// +// All rights reserved. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +/////////////////////////////////////////////////////////////////////////// + +// ZED include +#include "ClientPublisher.hpp" +#include "GLViewer.hpp" +#include "utils.hpp" + +int main(int argc, char **argv) +{ + if (argc != 2) + { + // this file should be generated by using the tool ZED360 + std::cout << "Need a Configuration file in input" << std::endl; + return 1; + } + + // Defines the Coordinate system and unit used in this sample + constexpr sl::COORDINATE_SYSTEM COORDINATE_SYSTEM = sl::COORDINATE_SYSTEM::RIGHT_HANDED_Y_UP; + constexpr sl::UNIT UNIT = sl::UNIT::METER; + + // Read json file containing the configuration of your multicamera setup. 
+ auto configurations = sl::readFusionConfigurationFile(argv[1], COORDINATE_SYSTEM, UNIT); + + if (configurations.empty()) + { + std::cout << "Empty configuration File." << std::endl; + return EXIT_FAILURE; + } + + // Check if the ZED camera should run within the same process or if they are running on the edge. + std::vector clients(configurations.size()); + int id_ = 0; + std::map svo_files; + for (auto conf : configurations) + { + // if the ZED camera should run locally, then start a thread to handle it + if (conf.communication_parameters.getType() == sl::CommunicationParameters::COMM_TYPE::INTRA_PROCESS) + { + std::cout << "Try to open ZED " << conf.serial_number << ".." << std::flush; + auto state = clients[id_].open(conf.input_type); + if (!state) + { + std::cerr << "Could not open ZED: " << conf.input_type.getConfiguration() << ". Skipping..." << std::endl; + continue; + } + + if (conf.input_type.getType() == sl::InputType::INPUT_TYPE::SVO_FILE) + svo_files.insert(std::make_pair(id_, conf.input_type.getConfiguration())); + + std::cout << ". ready !" << std::endl; + + id_++; + } + } + + // Synchronize SVO files in SVO mode + bool enable_svo_sync = (svo_files.size() > 1); + if (enable_svo_sync) + { + std::cout << "Starting SVO sync process..." 
<< std::endl; + std::map cam_idx_to_svo_frame_idx = syncDATA(svo_files); + + for (auto &it : cam_idx_to_svo_frame_idx) + { + std::cout << "Setting camera " << it.first << " to frame " << it.second << std::endl; + clients[it.first].setStartSVOPosition(it.second); + } + } + + // start camera threads + for (auto &it : clients) + it.start(); + + // Now that the ZED camera are running, we need to initialize the fusion module + sl::InitFusionParameters init_params; + init_params.coordinate_units = UNIT; + init_params.coordinate_system = COORDINATE_SYSTEM; + init_params.verbose = true; + + // create and initialize it + sl::Fusion fusion; + fusion.init(init_params); + + // subscribe to every cameras of the setup to internally gather their data + std::vector cameras; + for (auto &it : configurations) + { + sl::CameraIdentifier uuid(it.serial_number); + // to subscribe to a camera you must give its serial number, the way to communicate with it (shared memory or local network), and its world pose in the setup. + auto state = fusion.subscribe(uuid, it.communication_parameters, it.pose); + if (state != sl::FUSION_ERROR_CODE::SUCCESS) + std::cout << "Unable to subscribe to " << std::to_string(uuid.sn) << " . 
" << state << std::endl; + else + cameras.push_back(uuid); + } + + // check that at least one camera is connected + if (cameras.empty()) + { + std::cout << "no connections " << std::endl; + return EXIT_FAILURE; + } + + // as this sample shows how to fuse body detection from the multi camera setup + // we enable the Body Tracking module with its options + sl::ObjectDetectionFusionParameters od_fusion_params; + od_fusion_params.enable_tracking = true; + std::cout << "Enabling Fused Object detection" << std::endl; + fusion.enableObjectDetection(od_fusion_params); + std::cout << "OK" << std::endl; + + // creation of a 3D viewer + GLViewer viewer; + viewer.init(argc, argv); + + std::cout << "Viewer Shortcuts\n" + << "\t- 'r': swicth on/off for raw skeleton display\n" + << "\t- 'p': swicth on/off for live point cloud display\n" + << "\t- 'c': swicth on/off point cloud display with flat color\n" + << std::endl; + + // fusion outputs + sl::Objects fused_objects; + std::map camera_raw_data; + sl::FusionMetrics metrics; + std::map views; + std::map pointClouds; + sl::Resolution low_res(512, 360); + + bool new_data = false; + sl::Timestamp ts_new_data = sl::Timestamp(0); + + // run the fusion as long as the viewer is available. 
+ while (viewer.isAvailable()) + { + // run the fusion process (which gather data from all camera, sync them and process them) + if (fusion.process() == sl::FUSION_ERROR_CODE::SUCCESS) + { + // Retrieve fused body + fusion.retrieveObjects(fused_objects); + // for debug, you can retrieve the data send by each camera + for (auto &id : cameras) + { + // fusion.retrieveObjects(camera_raw_data[id], id); + sl::Pose pose; + if (fusion.getPosition(pose, sl::REFERENCE_FRAME::WORLD, id, sl::POSITION_TYPE::RAW) == sl::POSITIONAL_TRACKING_STATE::OK) + viewer.setCameraPose(id.sn, pose.pose_data); + + auto state_view = fusion.retrieveImage(views[id], id, low_res); + auto state_pc = fusion.retrieveMeasure(pointClouds[id], id, sl::MEASURE::XYZBGRA, low_res); + if (state_view == sl::FUSION_ERROR_CODE::SUCCESS && state_pc == sl::FUSION_ERROR_CODE::SUCCESS) + { + // viewer.updateObjectsRaw(camera_raw_data[id]); + } + // viewer.updateObjects(fused_objects, camera_raw_data, metrics); + } + + // get metrics about the fusion process for monitoring purposes + fusion.getProcessMetrics(metrics); + } + + viewer.updateObjects(fused_objects, camera_raw_data, metrics); + if(fused_objects.object_list.size()) + std::cout << "Objects detected from fusion " << " : " << fused_objects.object_list.size() << std::endl; + + std::this_thread::sleep_for(std::chrono::microseconds(10)); + } + + viewer.exit(); + + for (auto &it : clients) + it.stop(); + + fusion.close(); + + return EXIT_SUCCESS; +} diff --git a/plane detection/floor plane/cpp/include/GLViewer.hpp b/plane detection/floor plane/cpp/include/GLViewer.hpp index e98848b4..0864cd8c 100644 --- a/plane detection/floor plane/cpp/include/GLViewer.hpp +++ b/plane detection/floor plane/cpp/include/GLViewer.hpp @@ -99,14 +99,14 @@ class Shader { Shader() { } - Shader(GLchar* vs, GLchar* fs); + Shader(const GLchar* vs, const GLchar* fs); ~Shader(); GLuint getProgramId(); static const GLint ATTRIB_VERTICES_POS = 0; static const GLint ATTRIB_COLOR_POS = 1; 
private: - bool compile(GLuint &shaderId, GLenum type, GLchar* src); + bool compile(GLuint &shaderId, GLenum type, const GLchar* src); GLuint verterxId_; GLuint fragmentId_; GLuint programId_; diff --git a/plane detection/floor plane/cpp/src/GLViewer.cpp b/plane detection/floor plane/cpp/src/GLViewer.cpp index 2e02cb51..5db7857e 100644 --- a/plane detection/floor plane/cpp/src/GLViewer.cpp +++ b/plane detection/floor plane/cpp/src/GLViewer.cpp @@ -9,7 +9,7 @@ #define FADED_RENDERING const float grid_size = 10.0f; -GLchar* VERTEX_SHADER = +const GLchar* VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec4 in_Color;\n" @@ -20,7 +20,7 @@ GLchar* VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" "}"; -GLchar* FRAGMENT_SHADER = +const GLchar* FRAGMENT_SHADER = "#version 330 core\n" "in vec4 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" @@ -526,7 +526,7 @@ sl::Transform Simple3DObject::getModelMatrix() const { return tmp; } -Shader::Shader(GLchar* vs, GLchar* fs) { +Shader::Shader(const GLchar* vs, const GLchar* fs) { if (!compile(verterxId_, GL_VERTEX_SHADER, vs)) { std::cout << "ERROR: while compiling vertex shader" << std::endl; } @@ -574,7 +574,7 @@ GLuint Shader::getProgramId() { return programId_; } -bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { +bool Shader::compile(GLuint &shaderId, GLenum type, const GLchar* src) { shaderId = glCreateShader(type); if (shaderId == 0) { std::cout << "ERROR: shader type (" << type << ") does not exist" << std::endl; @@ -602,7 +602,7 @@ bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { return true; } -GLchar* POINTCLOUD_VERTEX_SHADER = +const GLchar* POINTCLOUD_VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec4 in_VertexRGBA;\n" "uniform mat4 u_mvpMatrix;\n" @@ -615,7 +615,7 @@ GLchar* POINTCLOUD_VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_VertexRGBA.xyz, 1);\n" "}"; -GLchar* 
POINTCLOUD_FRAGMENT_SHADER = +const GLchar* POINTCLOUD_FRAGMENT_SHADER = "#version 330 core\n" "in vec4 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" @@ -838,7 +838,7 @@ void CameraGL::updateVPMatrix() { } -GLchar* IMAGE_VERTEX_SHADER = +const GLchar* IMAGE_VERTEX_SHADER = "#version 330\n" "layout(location = 0) in vec2 vert;\n" "layout(location = 1) in vec3 vert_tex;\n" @@ -848,7 +848,7 @@ GLchar* IMAGE_VERTEX_SHADER = " gl_Position = vec4(vert_tex, 1);\n" "}\n"; -GLchar* IMAGE_FRAGMENT_SHADER = +const GLchar* IMAGE_FRAGMENT_SHADER = "#version 330 core\n" " in vec2 UV;\n" " out vec4 color;\n" diff --git a/plane detection/floor plane/cpp/src/main.cpp b/plane detection/floor plane/cpp/src/main.cpp index 28a1e660..acb0424b 100644 --- a/plane detection/floor plane/cpp/src/main.cpp +++ b/plane detection/floor plane/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. 
// diff --git a/plane detection/plane detection/cpp/include/GLViewer.hpp b/plane detection/plane detection/cpp/include/GLViewer.hpp index ef462ca8..17f0189f 100644 --- a/plane detection/plane detection/cpp/include/GLViewer.hpp +++ b/plane detection/plane detection/cpp/include/GLViewer.hpp @@ -31,7 +31,7 @@ struct UserAction { class Shader { public: Shader() {} - Shader(GLchar* vs, GLchar* fs); + Shader(const GLchar* vs, const GLchar* fs); ~Shader(); GLuint getProgramId(); @@ -40,7 +40,7 @@ class Shader { static const GLint ATTRIB_VERTICES_DIST = 1; private: - bool compile(GLuint &shaderId, GLenum type, GLchar* src); + bool compile(GLuint &shaderId, GLenum type, const GLchar* src); GLuint verterxId_; GLuint fragmentId_; GLuint programId_; diff --git a/plane detection/plane detection/cpp/src/GLViewer.cpp b/plane detection/plane detection/cpp/src/GLViewer.cpp index fe156d81..ad4ec005 100644 --- a/plane detection/plane detection/cpp/src/GLViewer.cpp +++ b/plane detection/plane detection/cpp/src/GLViewer.cpp @@ -20,7 +20,7 @@ void print(std::string msg_prefix, sl::ERROR_CODE err_code, std::string msg_suff cout << endl; } -GLchar* MESH_VERTEX_SHADER = +const GLchar* MESH_VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in float in_dist;\n" @@ -34,7 +34,7 @@ GLchar* MESH_VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" "}"; -GLchar* MESH_FRAGMENT_SHADER = +const GLchar* MESH_FRAGMENT_SHADER = "#version 330 core\n" "in vec3 b_color;\n" "in float distance;\n" @@ -43,7 +43,7 @@ GLchar* MESH_FRAGMENT_SHADER = " color = vec4(b_color,distance);\n" "}"; -GLchar* IMAGE_FRAGMENT_SHADER = +const GLchar* IMAGE_FRAGMENT_SHADER = "#version 330 core\n" " in vec2 UV;\n" " out vec4 color;\n" @@ -56,7 +56,7 @@ GLchar* IMAGE_FRAGMENT_SHADER = " color = vec4(rgbcolor,1);\n" "}"; -GLchar* IMAGE_VERTEX_SHADER = +const GLchar* IMAGE_VERTEX_SHADER = "#version 330\n" "layout(location = 0) in vec3 vert;\n" "out vec2 UV;" 
@@ -453,7 +453,7 @@ void GLViewer::printText() { } } -Shader::Shader(GLchar* vs, GLchar* fs) { +Shader::Shader(const GLchar* vs, const GLchar* fs) { if(!compile(verterxId_, GL_VERTEX_SHADER, vs)) { std::cout << "ERROR: while compiling vertex shader" << std::endl; } @@ -500,7 +500,7 @@ GLuint Shader::getProgramId() { return programId_; } -bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { +bool Shader::compile(GLuint &shaderId, GLenum type, const GLchar* src) { shaderId = glCreateShader(type); if(shaderId == 0) { std::cout << "ERROR: shader type (" << type << ") does not exist" << std::endl; diff --git a/plane detection/plane detection/cpp/src/main.cpp b/plane detection/plane detection/cpp/src/main.cpp index 309ede41..fbcb92df 100644 --- a/plane detection/plane detection/cpp/src/main.cpp +++ b/plane detection/plane detection/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. 
// diff --git a/plane detection/plane detection/csharp/Properties/AssemblyInfo.cs b/plane detection/plane detection/csharp/Properties/AssemblyInfo.cs index 580aea52..19d0c1a1 100644 --- a/plane detection/plane detection/csharp/Properties/AssemblyInfo.cs +++ b/plane detection/plane detection/csharp/Properties/AssemblyInfo.cs @@ -10,7 +10,7 @@ [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("Tutorials")] -[assembly: AssemblyCopyright("Copyright © 2021")] +[assembly: AssemblyCopyright("Copyright © 2024")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] diff --git a/positional tracking/export/fbx/cpp/include/utils.hpp b/positional tracking/export/fbx/cpp/include/utils.hpp index 3cdde47c..306a9258 100644 --- a/positional tracking/export/fbx/cpp/include/utils.hpp +++ b/positional tracking/export/fbx/cpp/include/utils.hpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // diff --git a/positional tracking/export/fbx/cpp/src/main.cpp b/positional tracking/export/fbx/cpp/src/main.cpp index b37c0a23..e95a896d 100644 --- a/positional tracking/export/fbx/cpp/src/main.cpp +++ b/positional tracking/export/fbx/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. 
// diff --git a/positional tracking/positional tracking/README.md b/positional tracking/positional tracking/README.md index fe9079d2..c9d9f4f9 100644 --- a/positional tracking/positional tracking/README.md +++ b/positional tracking/positional tracking/README.md @@ -8,6 +8,6 @@ ![](positional_tracking.jpg) -### Geo Tracking +### Global Localization -If you have GNSS sensor, you can fuse its data with your ZED positional tracking to improve outdoor positionning, to do so check ou [Geo Tracking sample](../../geotracking/geotracking/) \ No newline at end of file +If you have a GNSS sensor, you can fuse its data with your ZED positional tracking to improve outdoor positioning, to do so check out [Global Localization sample](../../global%20localization/live/) \ No newline at end of file diff --git a/positional tracking/positional tracking/cpp/CMakeLists.txt b/positional tracking/positional tracking/cpp/CMakeLists.txt index 288bf2ed..0b2b4752 100644 --- a/positional tracking/positional tracking/cpp/CMakeLists.txt +++ b/positional tracking/positional tracking/cpp/CMakeLists.txt @@ -17,11 +17,13 @@ find_package(GLUT REQUIRED) find_package(GLEW REQUIRED) SET(OpenGL_GL_PREFERENCE GLVND) find_package(OpenGL REQUIRED) +find_package(OpenCV REQUIRED) include_directories(${ZED_INCLUDE_DIRS}) include_directories(${GLEW_INCLUDE_DIRS}) include_directories(${GLUT_INCLUDE_DIR}) include_directories(${CUDA_INCLUDE_DIRS}) +include_directories(${OpenCV_INCLUDE_DIRS}) include_directories(${CMAKE_CURRENT_SOURCE_DIR}/include) link_directories(${ZED_LIBRARY_DIR}) @@ -35,6 +37,20 @@ FILE(GLOB_RECURSE HDR_FILES include/*.hpp) ADD_EXECUTABLE(${PROJECT_NAME} ${HDR_FILES} ${SRC_FILES}) +IF(NOT WIN32) + IF (BUILD_WITH_SANITIZER) + message("!! 
Building with address sanitizer and -g !!") + set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-omit-frame-pointer -fsanitize=address -Wall -Wextra -g") + set (CMAKE_LINKER_FLAGS "${CMAKE_LINKER_FLAGS} -fno-omit-frame-pointer -fsanitize=address -Wall -Wextra -g") + add_definitions(-g -fsanitize=address -fno-omit-frame-pointer -static-libasan -Wall -Wextra) + ENDIF() + + IF (BUILD_WITH_DEBUGINFOS) + message("!! Building with -g !!") + add_definitions(-g) + ENDIF() + ENDIF() + if (LINK_SHARED_ZED) SET(ZED_LIBS ${ZED_LIBRARIES} ${CUDA_CUDA_LIBRARY} ${CUDA_CUDART_LIBRARY}) else() @@ -44,6 +60,7 @@ endif() TARGET_LINK_LIBRARIES(${PROJECT_NAME} ${ZED_LIBS} ${OPENGL_LIBRARIES} + ${OpenCV_LIBRARIES} ${GLUT_LIBRARIES} ${GLEW_LIBRARIES}) diff --git a/positional tracking/positional tracking/cpp/include/GLViewer.hpp b/positional tracking/positional tracking/cpp/include/GLViewer.hpp index 1dc0d61e..11bb9ab4 100644 --- a/positional tracking/positional tracking/cpp/include/GLViewer.hpp +++ b/positional tracking/positional tracking/cpp/include/GLViewer.hpp @@ -78,14 +78,14 @@ class Shader { public: Shader() {} - Shader(GLchar* vs, GLchar* fs); + Shader(const GLchar* vs, const GLchar* fs); ~Shader(); GLuint getProgramId(); static const GLint ATTRIB_VERTICES_POS = 0; static const GLint ATTRIB_COLOR_POS = 1; private: - bool compile(GLuint &shaderId, GLenum type, GLchar* src); + bool compile(GLuint &shaderId, GLenum type, const GLchar* src); GLuint verterxId_; GLuint fragmentId_; GLuint programId_; @@ -157,7 +157,7 @@ class GLViewer { void exit(); bool isAvailable(); void init(int argc, char **argv, sl::MODEL camera_model); - void updateData(sl::Transform zed_rt, std::string str_t, std::string str_r, sl::POSITIONAL_TRACKING_STATE state); + void updateData(sl::Transform zed_rt, std::string str_t, std::string str_r, sl::PositionalTrackingStatus state); private: // Rendering loop method called each frame by glutDisplayFunc @@ -213,8 +213,7 @@ class GLViewer { std::string txtR; std::string 
txtT; - sl::POSITIONAL_TRACKING_STATE trackState; - const std::string str_tracking = "POSITIONAL TRACKING : "; + sl::PositionalTrackingStatus trackState; sl::float3 bckgrnd_clr; diff --git a/positional tracking/positional tracking/cpp/src/GLViewer.cpp b/positional tracking/positional tracking/cpp/src/GLViewer.cpp index 2efd0866..c5dacaab 100644 --- a/positional tracking/positional tracking/cpp/src/GLViewer.cpp +++ b/positional tracking/positional tracking/cpp/src/GLViewer.cpp @@ -19,7 +19,7 @@ void print(std::string msg_prefix, sl::ERROR_CODE err_code, std::string msg_suff -GLchar* VERTEX_SHADER = +const GLchar* VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec3 in_Color;\n" @@ -30,7 +30,7 @@ GLchar* VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" "}"; -GLchar* FRAGMENT_SHADER = +const GLchar* FRAGMENT_SHADER = "#version 330 core\n" "in vec3 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" @@ -273,7 +273,7 @@ void GLViewer::draw() { glUseProgram(0); } -void GLViewer::updateData(sl::Transform zed_rt, std::string str_t, std::string str_r, sl::POSITIONAL_TRACKING_STATE state) { +void GLViewer::updateData(sl::Transform zed_rt, std::string str_t, std::string str_r, sl::PositionalTrackingStatus state) { mtx.lock(); vecPath.push_back(zed_rt.getTranslation()); zedModel.setRT(zed_rt); @@ -304,27 +304,33 @@ void GLViewer::printText() { int start_w = 20; int start_h = h_wnd - 40; - (trackState == sl::POSITIONAL_TRACKING_STATE::OK) ? 
glColor3f(0.2f, 0.65f, 0.2f) : glColor3f(0.85f, 0.2f, 0.2f); + float dark_clr = 0.12f; + std::string odom_status = "POSITIONAL TRACKING STATUS: "; + + glColor3f(dark_clr, dark_clr, dark_clr); glRasterPos2i(start_w, start_h); - std::string track_str = (str_tracking + sl::toString(trackState).c_str()); - safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, track_str.c_str()); + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, odom_status.c_str()); - float dark_clr = 0.12f; + (trackState.tracking_fusion_status != sl::POSITIONAL_TRACKING_FUSION_STATUS::UNAVAILABLE) ? glColor3f(0.2f, 0.65f, 0.2f) : glColor3f(0.85f, 0.2f, 0.2f); + std::string track_str = (sl::toString(trackState.tracking_fusion_status).c_str()); + glRasterPos2i(start_w + 300, start_h); + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, track_str.c_str()); + glColor3f(dark_clr, dark_clr, dark_clr); - glRasterPos2i(start_w, start_h - 25); + glRasterPos2i(start_w, start_h - 20); safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, "Translation (m) :"); glColor3f(0.4980f, 0.5490f, 0.5529f); - glRasterPos2i(155, start_h - 25); + glRasterPos2i(155, start_h - 20); safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, txtT.c_str()); glColor3f(dark_clr, dark_clr, dark_clr); - glRasterPos2i(start_w, start_h - 50); + glRasterPos2i(start_w, start_h - 40); safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, "Rotation (rad) :"); glColor3f(0.4980f, 0.5490f, 0.5529f); - glRasterPos2i(155, start_h - 50); + glRasterPos2i(155, start_h - 40); safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, txtR.c_str()); glMatrixMode(GL_PROJECTION); @@ -531,7 +537,7 @@ Transform Simple3DObject::getModelMatrix() const { return tmp; } -Shader::Shader(GLchar* vs, GLchar* fs) { +Shader::Shader(const GLchar* vs, const GLchar* fs) { if (!compile(verterxId_, GL_VERTEX_SHADER, vs)) { std::cout << "ERROR: while compiling vertex shader" << std::endl; } @@ -579,7 +585,7 @@ GLuint Shader::getProgramId() { return programId_; } -bool Shader::compile(GLuint &shaderId, GLenum 
type, GLchar* src) { +bool Shader::compile(GLuint &shaderId, GLenum type, const GLchar* src) { shaderId = glCreateShader(type); if (shaderId == 0) { std::cout << "ERROR: shader type (" << type << ") does not exist" << std::endl; diff --git a/positional tracking/positional tracking/cpp/src/main.cpp b/positional tracking/positional tracking/cpp/src/main.cpp index 4dc45ec6..ea484fe1 100644 --- a/positional tracking/positional tracking/cpp/src/main.cpp +++ b/positional tracking/positional tracking/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // @@ -34,14 +34,14 @@ using namespace std; using namespace sl; #define IMU_ONLY 0 -const int MAX_CHAR = 128; -inline void setTxt(sl::float3 value, char *ptr_txt) -{ - snprintf(ptr_txt, MAX_CHAR, "%3.2f; %3.2f; %3.2f", value.x, value.y, value.z); +inline std::string setTxt(sl::float3 value) { + std::stringstream stream; + stream << std::fixed << std::setprecision(2) << value; + return stream.str(); } -void parseArgs(int argc, char **argv, sl::InitParameters ¶m); +std::string parseArgs(int argc, char **argv, sl::InitParameters ¶m); int main(int argc, char **argv) { @@ -52,7 +52,7 @@ int main(int argc, char **argv) init_parameters.coordinate_units = UNIT::METER; init_parameters.coordinate_system = COORDINATE_SYSTEM::RIGHT_HANDED_Y_UP; init_parameters.sdk_verbose = true; - parseArgs(argc, argv, init_parameters); + auto mask_path = parseArgs(argc, argv, init_parameters); // Open the camera auto returned_state = zed.open(init_parameters); @@ -62,36 +62,65 @@ int main(int argc, char **argv) return EXIT_FAILURE; } + // Load optional region of interest to exclude irrelevant area of the image + if(!mask_path.empty()) { + sl::Mat mask_roi; + auto err = mask_roi.read(mask_path.c_str()); + if(err == sl::ERROR_CODE::SUCCESS) + zed.setRegionOfInterest(mask_roi, {MODULE::ALL}); + 
else + std::cout << "Error loading Region of Interest file: " << err << std::endl; + } + auto camera_model = zed.getCameraInformation().camera_model; - GLViewer viewer; - // Initialize OpenGL viewer - viewer.init(argc, argv, camera_model); // Create text for GUI - char text_rotation[MAX_CHAR]; - char text_translation[MAX_CHAR]; + std::string text_rotation, text_translation; // Set parameters for Positional Tracking PositionalTrackingParameters positional_tracking_param; positional_tracking_param.enable_imu_fusion = true; + positional_tracking_param.mode = sl::POSITIONAL_TRACKING_MODE::GEN_1; // positional_tracking_param.enable_area_memory = true; // enable Positional Tracking returned_state = zed.enablePositionalTracking(positional_tracking_param); - if (returned_state != ERROR_CODE::SUCCESS) - { - print("Enabling positionnal tracking failed: ", returned_state); + if (returned_state != ERROR_CODE::SUCCESS) { + print("Enabling positional tracking failed: ", returned_state); zed.close(); return EXIT_FAILURE; } + + // If there is a part of the image containing a static zone, the tracking accuracy will be significantly impacted + // The region of interest auto detection is a feature that can be used to remove such zone by masking the irrelevant area of the image. 
+ // The region of interest can be loaded from a file : + + sl::Mat roi; + sl::String roi_name = "roi_mask.jpg"; + // roi.read(roi_name); + // zed.setRegionOfInterest(roi, {sl::MODULE::POSITIONAL_TRACKING}); + + // or alternatively auto detected at runtime : + sl::RegionOfInterestParameters roi_param; + + if(mask_path.empty()) { + roi_param.auto_apply_module = {sl::MODULE::DEPTH, sl::MODULE::POSITIONAL_TRACKING}; + zed.startRegionOfInterestAutoDetection(roi_param); + print("Region Of Interest auto detection is running."); + } + Pose camera_path; POSITIONAL_TRACKING_STATE tracking_state; #if IMU_ONLY SensorsData sensors_data; #endif - std::ofstream output_trajectory; - output_trajectory.open("output_trajectory.csv"); - output_trajectory << "tx, ty, tz" << std::endl; + + REGION_OF_INTEREST_AUTO_DETECTION_STATE roi_state = REGION_OF_INTEREST_AUTO_DETECTION_STATE::NOT_ENABLED; + + GLViewer viewer; + // Initialize OpenGL viewer + viewer.init(argc, argv, camera_model); + while (viewer.isAvailable()) { if (zed.grab() == ERROR_CODE::SUCCESS) @@ -99,28 +128,41 @@ int main(int argc, char **argv) // Get the position of the camera in a fixed reference frame (the World Frame) tracking_state = zed.getPosition(camera_path, REFERENCE_FRAME::WORLD); + sl::PositionalTrackingStatus PositionalTrackingStatus = zed.getPositionalTrackingStatus(); + + #if IMU_ONLY + PositionalTrackingStatus.odometry_status = sl::ODOMETRY_STATUS::OK; + PositionalTrackingStatus.spatial_memory_status = sl::SPATIAL_MEMORY_STATUS::OK; + PositionalTrackingStatus.tracking_fusion_status = sl::POSITIONAL_TRACKING_FUSION_STATUS::INERTIAL; if (zed.getSensorsData(sensors_data, TIME_REFERENCE::IMAGE) == sl::ERROR_CODE::SUCCESS) { - setTxt(sensors_data.imu.pose.getEulerAngles(), text_rotation); // only rotation is computed for IMU - viewer.updateData(sensors_data.imu.pose, string(text_translation), string(text_rotation), sl::POSITIONAL_TRACKING_STATE::OK); + text_rotation = 
setTxt(sensors_data.imu.pose.getEulerAngles()); // only rotation is computed for IMU + viewer.updateData(sensors_data.imu.pose, text_translation, text_rotation, PositionalTrackingStatus); } #else if (tracking_state == POSITIONAL_TRACKING_STATE::OK) { // Get rotation and translation and displays it - setTxt(camera_path.getEulerAngles(), text_rotation); - setTxt(camera_path.getTranslation(), text_translation); - output_trajectory << camera_path.getTranslation().tx << ", " << camera_path.getTranslation().ty << ", " << camera_path.getTranslation().tz << std::endl; + text_rotation = setTxt(camera_path.getEulerAngles()); + text_translation = setTxt(camera_path.getTranslation()); } // Update rotation, translation and tracking state values in the OpenGL window - viewer.updateData(camera_path.pose_data, string(text_translation), string(text_rotation), tracking_state); + viewer.updateData(camera_path.pose_data, text_translation, text_rotation, PositionalTrackingStatus); #endif + + // If the region of interest auto detection is running, the resulting mask can be saved and reloaded for later use + if(mask_path.empty() && roi_state == sl::REGION_OF_INTEREST_AUTO_DETECTION_STATE::RUNNING && + zed.getRegionOfInterestAutoDetectionStatus() == sl::REGION_OF_INTEREST_AUTO_DETECTION_STATE::READY) { + std::cout << "Region Of Interest detection done! 
Saving into " << roi_name << std::endl; + zed.getRegionOfInterest(roi, sl::Resolution(0,0), sl::MODULE::POSITIONAL_TRACKING); + roi.write(roi_name); + } + roi_state = zed.getRegionOfInterestAutoDetectionStatus(); } else sleep_ms(1); - } zed.disablePositionalTracking(); @@ -129,8 +171,23 @@ int main(int argc, char **argv) return EXIT_SUCCESS; } -void parseArgs(int argc, char **argv, sl::InitParameters ¶m) -{ +inline int findImageExtension(int argc, char **argv) { + int arg_idx=-1; + int arg_idx_search = 0; + if (argc > 2) arg_idx_search=2; + else if(argc > 1) arg_idx_search=1; + + if(arg_idx_search > 0 && (string(argv[arg_idx_search]).find(".png") != string::npos || + string(argv[arg_idx_search]).find(".jpg") != string::npos)) + arg_idx = arg_idx_search; + return arg_idx; +} + +std::string parseArgs(int argc, char **argv, sl::InitParameters ¶m) +{ + int mask_arg = findImageExtension(argc, argv); + std::string mask_path; + if (argc > 1 && string(argv[1]).find(".svo") != string::npos) { // SVO input mode @@ -174,8 +231,11 @@ void parseArgs(int argc, char **argv, sl::InitParameters ¶m) cout << "[Sample] Using Camera in resolution VGA" << endl; } } - else - { - // Default + + if (mask_arg > 0) { + mask_path = string(argv[mask_arg]); + cout << "[Sample] Using Region of Interest from file : " << mask_path << endl; } + + return mask_path; } diff --git a/positional tracking/positional tracking/csharp/MainWindow.cs b/positional tracking/positional tracking/csharp/MainWindow.cs index 63f3fc58..1593a06a 100644 --- a/positional tracking/positional tracking/csharp/MainWindow.cs +++ b/positional tracking/positional tracking/csharp/MainWindow.cs @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. 
// @@ -24,229 +24,251 @@ **************************************************************************/ using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Numerics; using System.Net; -using System.Threading.Tasks; using System.Windows.Forms; using OpenGL; using OpenGL.CoreUI; -namespace sl +using sl; + +class MainWindow { - class MainWindow + GLViewer viewer; + Camera zedCamera; + RuntimeParameters runtimeParameters; + Pose cam_pose; + Resolution res; + MODEL cameraModel; + int timer = 0; + + POSITIONAL_TRACKING_STATE trackingState; + REGION_OF_INTEREST_AUTO_DETECTION_STATE roiState = REGION_OF_INTEREST_AUTO_DETECTION_STATE.NOT_ENABLED; + sl.Mat roiMask; + string roiName; + + public MainWindow(string[] args) { - GLViewer viewer; - Camera zedCamera; - RuntimeParameters runtimeParameters; - Pose cam_pose; - Resolution res; - MODEL cameraModel; - int timer = 0; + // Set configuration parameters + InitParameters init_params = new InitParameters(); + init_params.resolution = RESOLUTION.HD720; + init_params.sdkVerbose = 1; + init_params.cameraFPS = 60; + init_params.depthMode = DEPTH_MODE.ULTRA; + init_params.coordinateUnits = UNIT.METER; + init_params.coordinateSystem = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP; - POSITIONAL_TRACKING_STATE trackingState; - public MainWindow(string[] args) - { - // Set configuration parameters - InitParameters init_params = new InitParameters(); - init_params.resolution = RESOLUTION.HD720; - init_params.cameraFPS = 60; - init_params.depthMode = DEPTH_MODE.ULTRA; - init_params.coordinateUnits = UNIT.METER; - init_params.coordinateSystem = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP; + parseArgs(args, ref init_params); + // Open the camera + zedCamera = new Camera(0); + ERROR_CODE err = zedCamera.Open(ref init_params); - parseArgs(args, ref init_params); - // Open the camera - zedCamera = new Camera(0); - ERROR_CODE err = zedCamera.Open(ref init_params); + if (err != ERROR_CODE.SUCCESS) + 
Environment.Exit(-1); - if (err != ERROR_CODE.SUCCESS) - Environment.Exit(-1); + // Enable tracking + PositionalTrackingParameters trackingParams = new PositionalTrackingParameters(); + trackingParams.enableAreaMemory = true; + trackingParams.mode = POSITIONAL_TRACKING_MODE.GEN_1; + zedCamera.EnablePositionalTracking(ref trackingParams); - // Enable tracking - PositionalTrackingParameters trackingParams = new PositionalTrackingParameters(); - trackingParams.enableAreaMemory = true; - zedCamera.EnablePositionalTracking(ref trackingParams); + runtimeParameters = new RuntimeParameters(); - runtimeParameters = new RuntimeParameters(); + cameraModel = zedCamera.GetCameraModel(); - cameraModel = zedCamera.GetCameraModel(); + int Height = zedCamera.ImageHeight; + int Width = zedCamera.ImageWidth; - int Height = zedCamera.ImageHeight; - int Width = zedCamera.ImageWidth; + res = new Resolution((uint)Width, (uint)Height); - res = new Resolution((uint)Width, (uint)Height); + // If there is a part of the image containing a static zone, the tracking accuracy will be significantly impacted + // The region of interest auto detection is a feature that can be used to remove such zone by masking the irrelevant area of the image. 
+ // The region of interest can be loaded from a file : - // Create OpenGL Viewer - viewer = new GLViewer(); + roiName = "roiMask.png"; + roiMask = new sl.Mat(); + roiMask.Create(res, MAT_TYPE.MAT_8U_C1); + //roi.Read(roiName); + // or alternatively auto detected at runtime: + bool[] autoApplyModule = new bool[(int)MODULE.LAST]; + autoApplyModule[(int)sl.MODULE.DEPTH] = true; + autoApplyModule[(int)sl.MODULE.POSITIONAL_TRACKING] = true; - cam_pose = new Pose(); + RegionOfInterestParameters roiParams = new RegionOfInterestParameters(autoApplyModule); - // Create OpenGL window - CreateWindow(); - } + zedCamera.StartRegionOfInterestAutoDetection(roiParams); + + // Create OpenGL Viewer + viewer = new GLViewer(); + cam_pose = new Pose(); - // Create Window - public void CreateWindow() + // Create OpenGL window + CreateWindow(); + } + + // Create Window + public void CreateWindow() + { + using (OpenGL.CoreUI.NativeWindow nativeWindow = OpenGL.CoreUI.NativeWindow.Create()) { - using (OpenGL.CoreUI.NativeWindow nativeWindow = OpenGL.CoreUI.NativeWindow.Create()) + nativeWindow.ContextCreated += NativeWindow_ContextCreated; + nativeWindow.Render += NativeWindow_Render; + nativeWindow.MouseMove += NativeWindow_MouseEvent; + nativeWindow.Resize += NativeWindow_Resize; + nativeWindow.KeyDown += (object obj, NativeWindowKeyEventArgs e) => { - nativeWindow.ContextCreated += NativeWindow_ContextCreated; - nativeWindow.Render += NativeWindow_Render; - nativeWindow.MouseMove += NativeWindow_MouseEvent; - nativeWindow.Resize += NativeWindow_Resize; - nativeWindow.KeyDown += (object obj, NativeWindowKeyEventArgs e) => + switch (e.Key) { - switch (e.Key) - { - case KeyCode.Escape: - close(); - nativeWindow.Stop(); - break; - - case KeyCode.F: - nativeWindow.Fullscreen = !nativeWindow.Fullscreen; - break; - } + case KeyCode.Escape: + close(); + nativeWindow.Stop(); + break; + + case KeyCode.F: + nativeWindow.Fullscreen = !nativeWindow.Fullscreen; + break; + } - 
viewer.keyEventFunction(e); - }; + viewer.keyEventFunction(e); + }; - int wnd_h = Screen.PrimaryScreen.Bounds.Height; - int wnd_w = Screen.PrimaryScreen.Bounds.Width; + int wnd_h = Screen.PrimaryScreen.Bounds.Height; + int wnd_w = Screen.PrimaryScreen.Bounds.Width; - int height = (int)(wnd_h * 0.9f); - int width = (int)(wnd_w * 0.9f); + int height = (int)(wnd_h * 0.9f); + int width = (int)(wnd_w * 0.9f); - if (width > zedCamera.ImageWidth && height > zedCamera.ImageHeight) - { - width = zedCamera.ImageWidth; - height = zedCamera.ImageHeight; - } + if (width > zedCamera.ImageWidth && height > zedCamera.ImageHeight) + { + width = zedCamera.ImageWidth; + height = zedCamera.ImageHeight; + } - nativeWindow.Create((int)(zedCamera.ImageWidth * 0.05f), (int)(zedCamera.ImageHeight * 0.05f), 1200, 700, NativeWindowStyle.Resizeable); - nativeWindow.Show(); - try - { - nativeWindow.Run(); - } - catch (Exception e) - { - Console.WriteLine("Mouse wheel is broken in the current OPENGL .NET VERSION. Please do not use it."); - } + nativeWindow.Create((int)(zedCamera.ImageWidth * 0.05f), (int)(zedCamera.ImageHeight * 0.05f), 1200, 700, NativeWindowStyle.Resizeable); + nativeWindow.Show(); + try + { + nativeWindow.Run(); + } + catch (Exception e) + { + Console.WriteLine("Mouse wheel is broken in the current OPENGL .NET VERSION. 
Please do not use it."); } } + } - private void NativeWindow_Resize(object sender, EventArgs e) - { - OpenGL.CoreUI.NativeWindow nativeWindow = (OpenGL.CoreUI.NativeWindow)sender; + private void NativeWindow_Resize(object sender, EventArgs e) + { + OpenGL.CoreUI.NativeWindow nativeWindow = (OpenGL.CoreUI.NativeWindow)sender; - viewer.resizeCallback((int)nativeWindow.Width, (int)nativeWindow.Height); - } + viewer.resizeCallback((int)nativeWindow.Width, (int)nativeWindow.Height); + } - private void NativeWindow_MouseEvent(object sender, NativeWindowMouseEventArgs e) - { - viewer.mouseEventFunction(e); - viewer.computeMouseMotion(e.Location.X, e.Location.Y); - } + private void NativeWindow_MouseEvent(object sender, NativeWindowMouseEventArgs e) + { + viewer.mouseEventFunction(e); + viewer.computeMouseMotion(e.Location.X, e.Location.Y); + } - // Init Window - private void NativeWindow_ContextCreated(object sender, NativeWindowEventArgs e) - { - OpenGL.CoreUI.NativeWindow nativeWindow = (OpenGL.CoreUI.NativeWindow)sender; + // Init Window + private void NativeWindow_ContextCreated(object sender, NativeWindowEventArgs e) + { + OpenGL.CoreUI.NativeWindow nativeWindow = (OpenGL.CoreUI.NativeWindow)sender; - Gl.ReadBuffer(ReadBufferMode.Back); - Gl.ClearColor(223 / 255.0f, 230 / 255.0f, 233 / 255.0f, 1.0f); + Gl.ReadBuffer(ReadBufferMode.Back); + Gl.ClearColor(223 / 255.0f, 230 / 255.0f, 233 / 255.0f, 1.0f); - Gl.Enable(EnableCap.DepthTest); + Gl.Enable(EnableCap.DepthTest); - Gl.Enable(EnableCap.Blend); - Gl.BlendFunc(BlendingFactor.SrcAlpha, BlendingFactor.OneMinusSrcAlpha); + Gl.Enable(EnableCap.Blend); + Gl.BlendFunc(BlendingFactor.SrcAlpha, BlendingFactor.OneMinusSrcAlpha); - Gl.Enable(EnableCap.LineSmooth); - Gl.Hint(HintTarget.LineSmoothHint, HintMode.Nicest); + Gl.Enable(EnableCap.LineSmooth); + Gl.Hint(HintTarget.LineSmoothHint, HintMode.Nicest); - viewer.init(zedCamera.GetCalibrationParameters().leftCam, cameraModel); - } + 
viewer.init(zedCamera.GetCalibrationParameters().leftCam, cameraModel); + } + + // Render loop + private void NativeWindow_Render(object sender, NativeWindowEventArgs e) + { + OpenGL.CoreUI.NativeWindow nativeWindow = (OpenGL.CoreUI.NativeWindow)sender; + Gl.Viewport(0, 0, (int)nativeWindow.Width, (int)nativeWindow.Height); + Gl.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit); - // Render loop - private void NativeWindow_Render(object sender, NativeWindowEventArgs e) + if (viewer.isAvailable() && zedCamera.Grab(ref runtimeParameters) == ERROR_CODE.SUCCESS) { - OpenGL.CoreUI.NativeWindow nativeWindow = (OpenGL.CoreUI.NativeWindow)sender; - Gl.Viewport(0, 0, (int)nativeWindow.Width, (int)nativeWindow.Height); - Gl.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit); + trackingState = zedCamera.GetPosition(ref cam_pose, REFERENCE_FRAME.WORLD); - if (viewer.isAvailable() && zedCamera.Grab(ref runtimeParameters) == ERROR_CODE.SUCCESS) - { - trackingState = zedCamera.GetPosition(ref cam_pose, REFERENCE_FRAME.WORLD); + // if (trackingState == POSITIONAL_TRACKING_STATE.OK && timer% 30 == 0) + // { + // Console.WriteLine("Translation : " + cam_pose.translation + ", Rotation : " + cam_pose.rotation); + // } - if (trackingState == POSITIONAL_TRACKING_STATE.OK && timer% 30 == 0) - { - Console.WriteLine("Translation : " + cam_pose.translation + ", Rotation : " + cam_pose.rotation); + //Update GL View + viewer.updateData(cam_pose); + viewer.render(); + timer++; - } - //Update GL View - viewer.updateData(cam_pose); - viewer.render(); - timer++; + // If the region of interest auto detection is running, the resulting mask can be saved and reloaded for later use + if (roiState == REGION_OF_INTEREST_AUTO_DETECTION_STATE.RUNNING && + zedCamera.GetRegionOfInterestAutoDetectionStatus() == REGION_OF_INTEREST_AUTO_DETECTION_STATE.READY) + { + Console.WriteLine("Region of Interest detection done! 
Saving into " + roiName); + zedCamera.GetRegionOfInterest(roiMask, res, MODULE.POSITIONAL_TRACKING); + roiMask.Write(roiName); } + roiState = zedCamera.GetRegionOfInterestAutoDetectionStatus(); } + } - private void close() + private void close() + { + zedCamera.SaveAreaMap("map.area"); + zedCamera.DisablePositionalTracking(); + zedCamera.Close(); + viewer.exit(); + } + + private void parseArgs(string[] args , ref sl.InitParameters param) + { + if (args.Length > 0 && args[0].IndexOf(".svo") != -1) { - zedCamera.SaveAreaMap("toto.area"); - zedCamera.DisablePositionalTracking(); - zedCamera.Close(); - viewer.exit(); + // SVO input mode + param.inputType = INPUT_TYPE.SVO; + param.pathSVO = args[0]; + Console.WriteLine("[Sample] Using SVO File input: " + args[0]); } - - private void parseArgs(string[] args , ref sl.InitParameters param) + else if (args.Length > 0 && args[0].IndexOf(".svo") == -1) { - if (args.Length > 0 && args[0].IndexOf(".svo") != -1) + IPAddress ip; + string arg = args[0]; + if (IPAddress.TryParse(arg, out ip)) { - // SVO input mode - param.inputType = INPUT_TYPE.SVO; - param.pathSVO = args[0]; - Console.WriteLine("[Sample] Using SVO File input: " + args[0]); + // Stream input mode - IP + port + param.inputType = INPUT_TYPE.STREAM; + param.ipStream = ip.ToString(); + Console.WriteLine("[Sample] Using Stream input, IP : " + ip); } - else if (args.Length > 0 && args[0].IndexOf(".svo") == -1) + else if (args[0].IndexOf("HD2K") != -1) { - IPAddress ip; - string arg = args[0]; - if (IPAddress.TryParse(arg, out ip)) - { - // Stream input mode - IP + port - param.inputType = INPUT_TYPE.STREAM; - param.ipStream = ip.ToString(); - Console.WriteLine("[Sample] Using Stream input, IP : " + ip); - } - else if (args[0].IndexOf("HD2K") != -1) - { - param.resolution = sl.RESOLUTION.HD2K; - Console.WriteLine("[Sample] Using Camera in resolution HD2K"); - } - else if (args[0].IndexOf("HD1080") != -1) - { - param.resolution = sl.RESOLUTION.HD1080; - 
Console.WriteLine("[Sample] Using Camera in resolution HD1080"); - } - else if (args[0].IndexOf("HD720") != -1) - { - param.resolution = sl.RESOLUTION.HD720; - Console.WriteLine("[Sample] Using Camera in resolution HD720"); - } - else if (args[0].IndexOf("VGA") != -1) - { - param.resolution = sl.RESOLUTION.VGA; - Console.WriteLine("[Sample] Using Camera in resolution VGA"); - } + param.resolution = sl.RESOLUTION.HD2K; + Console.WriteLine("[Sample] Using Camera in resolution HD2K"); + } + else if (args[0].IndexOf("HD1080") != -1) + { + param.resolution = sl.RESOLUTION.HD1080; + Console.WriteLine("[Sample] Using Camera in resolution HD1080"); + } + else if (args[0].IndexOf("HD720") != -1) + { + param.resolution = sl.RESOLUTION.HD720; + Console.WriteLine("[Sample] Using Camera in resolution HD720"); } - else + else if (args[0].IndexOf("VGA") != -1) { - // + param.resolution = sl.RESOLUTION.VGA; + Console.WriteLine("[Sample] Using Camera in resolution VGA"); } } } diff --git a/positional tracking/positional tracking/csharp/Properties/AssemblyInfo.cs b/positional tracking/positional tracking/csharp/Properties/AssemblyInfo.cs index 580aea52..19d0c1a1 100644 --- a/positional tracking/positional tracking/csharp/Properties/AssemblyInfo.cs +++ b/positional tracking/positional tracking/csharp/Properties/AssemblyInfo.cs @@ -10,7 +10,7 @@ [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("Tutorials")] -[assembly: AssemblyCopyright("Copyright © 2021")] +[assembly: AssemblyCopyright("Copyright © 2024")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] diff --git a/positional tracking/positional tracking/python/ogl_viewer/tracking_viewer.py b/positional tracking/positional tracking/python/ogl_viewer/tracking_viewer.py index 21b695d5..d9605b22 100644 --- a/positional tracking/positional tracking/python/ogl_viewer/tracking_viewer.py +++ b/positional tracking/positional tracking/python/ogl_viewer/tracking_viewer.py @@ 
-166,7 +166,7 @@ def __init__(self): self.previousMouseMotion = [0., 0.] self.mouseMotion = [0., 0.] self.pose = sl.Transform() - self.trackState = sl.POSITIONAL_TRACKING_STATE + self.trackState = None self.txtT = "" self.txtR = "" @@ -400,51 +400,47 @@ def draw(self): glUseProgram(0) def print_text(self): - glMatrixMode(GL_PROJECTION) - glPushMatrix() - glLoadIdentity() - w_wnd = glutGet(GLUT_WINDOW_WIDTH) - h_wnd = glutGet(GLUT_WINDOW_HEIGHT) - glOrtho(0, w_wnd, 0, h_wnd, -1., 1.) + if self.trackState is not None: + glMatrixMode(GL_PROJECTION) + glPushMatrix() + glLoadIdentity() + w_wnd = glutGet(GLUT_WINDOW_WIDTH) + h_wnd = glutGet(GLUT_WINDOW_HEIGHT) + glOrtho(0, w_wnd, 0, h_wnd, -1., 1.) - glMatrixMode(GL_MODELVIEW) - glPushMatrix() - glLoadIdentity() + glMatrixMode(GL_MODELVIEW) + glPushMatrix() + glLoadIdentity() - start_w = 20 - start_h = h_wnd - 40 + start_w = 20 + start_h = h_wnd - 40 - if(self.trackState == sl.POSITIONAL_TRACKING_STATE.OK): glColor3f(0.2, 0.65, 0.2) - else: - glColor3f(0.85, 0.2, 0.2) + glRasterPos2i(start_w, start_h) + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, "POSITIONAL TRACKING STATUS: " + str(self.trackState.tracking_fusion_status)) - glRasterPos2i(start_w, start_h) + dark_clr = 0.12 + glColor3f(dark_clr, dark_clr, dark_clr) + glRasterPos2i(start_w, start_h - 40) + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, "Translation (m) :") - safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, "POSITIONAL TRACKING : " + str(self.trackState)) + glColor3f(0.4980, 0.5490, 0.5529) + glRasterPos2i(155, start_h - 40) - dark_clr = 0.12 - glColor3f(dark_clr, dark_clr, dark_clr) - glRasterPos2i(start_w, start_h - 25) - safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, "Translation (m) :") + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, self.txtT) - glColor3f(0.4980, 0.5490, 0.5529) - glRasterPos2i(155, start_h - 25) + glColor3f(dark_clr, dark_clr, dark_clr) + glRasterPos2i(start_w, start_h - 60) + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, 
"Rotation (rad) :") - safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, self.txtT) + glColor3f(0.4980, 0.5490, 0.5529) + glRasterPos2i(155, start_h - 60) + safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, self.txtR) - glColor3f(dark_clr, dark_clr, dark_clr) - glRasterPos2i(start_w, start_h - 50) - safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, "Rotation (rad) :") - - glColor3f(0.4980, 0.5490, 0.5529) - glRasterPos2i(155, start_h - 50) - safe_glutBitmapString(GLUT_BITMAP_HELVETICA_18, self.txtR) - - glMatrixMode(GL_PROJECTION) - glPopMatrix() - glMatrixMode(GL_MODELVIEW) - glPopMatrix() + glMatrixMode(GL_PROJECTION) + glPopMatrix() + glMatrixMode(GL_MODELVIEW) + glPopMatrix() class CameraGL: def __init__(self): diff --git a/positional tracking/positional tracking/python/positional_tracking.py b/positional tracking/positional tracking/python/positional_tracking.py index 6c5d3606..ed976364 100644 --- a/positional tracking/positional tracking/python/positional_tracking.py +++ b/positional tracking/positional tracking/python/positional_tracking.py @@ -23,7 +23,6 @@ and displays it in a OpenGL window. 
""" -import sys import ogl_viewer.tracking_viewer as gl import pyzed.sl as sl import argparse @@ -31,10 +30,10 @@ def parse_args(init): - if len(opt.input_svo_file)>0 and opt.input_svo_file.endswith(".svo"): + if len(opt.input_svo_file) > 0 and opt.input_svo_file.endswith(".svo"): init.set_from_svo_file(opt.input_svo_file) print("[Sample] Using SVO File input: {0}".format(opt.input_svo_file)) - elif len(opt.ip_address)>0 : + elif len(opt.ip_address) > 0 : ip_str = opt.ip_address if ip_str.replace(':','').replace('.','').isdigit() and len(ip_str.split('.'))==4 and len(ip_str.split(':'))==2: init.set_from_stream(ip_str.split(':')[0],int(ip_str.split(':')[1])) @@ -79,49 +78,76 @@ def main(): print("Camera Open", status, "Exit program.") exit(1) + if len(opt.roi_mask_file) > 0: + mask_roi = sl.Mat() + err = mask_roi.read(opt.roi_mask_file) + if err == sl.ERROR_CODE.SUCCESS: + zed.set_region_of_interest(mask_roi, [sl.MODULE.ALL]) + else: + print(f"Error loading Region of Interest file {opt.roi_mask_file}. Please check the path.") + tracking_params = sl.PositionalTrackingParameters() #set parameters for Positional Tracking - tracking_params.enable_imu_fusion = True + tracking_params.enable_imu_fusion = True + tracking_params.mode = sl.POSITIONAL_TRACKING_MODE.GEN_1 status = zed.enable_positional_tracking(tracking_params) #enable Positional Tracking if status != sl.ERROR_CODE.SUCCESS: - print("Enable Positional Tracking : "+repr(status)+". Exit program.") + print("[Sample] Enable Positional Tracking : "+repr(status)+". Exit program.") zed.close() exit() runtime = sl.RuntimeParameters() camera_pose = sl.Pose() + # If there is a part of the image containing a static zone, the tracking accuracy will be significantly impacted + # The region of interest auto detection is a feature that can be used to remove such zone by masking the irrelevant area of the image. 
+ # The region of interest can be loaded from a file : + roi = sl.Mat() + roi_name = "roi_mask.png" + #roi.read(roi_name) + #zed.set_region_of_interest(roi, [sl.MODULE.POSITIONAL_TRACKING]) + # or alternatively auto detected at runtime: + roi_param = sl.RegionOfInterestParameters() + + if opt.roi_mask_file == "": + roi_param.auto_apply_module = {sl.MODULE.DEPTH, sl.MODULE.POSITIONAL_TRACKING} + zed.start_region_of_interest_auto_detection(roi_param) + print("[Sample] Region Of Interest auto detection is running.") + camera_info = zed.get_camera_information() # Create OpenGL viewer viewer = gl.GLViewer() viewer.init(camera_info.camera_model) - if opt.imu_only: - sensors_data = sl.SensorsData() py_translation = sl.Translation() pose_data = sl.Transform() text_translation = "" text_rotation = "" - file = open('output_trajectory.csv', 'w') - file.write('tx, ty, tz \n') + + roi_state = sl.REGION_OF_INTEREST_AUTO_DETECTION_STATE.NOT_ENABLED + while viewer.is_available(): if zed.grab(runtime) == sl.ERROR_CODE.SUCCESS: tracking_state = zed.get_position(camera_pose,sl.REFERENCE_FRAME.WORLD) #Get the position of the camera in a fixed reference frame (the World Frame) - if opt.imu_only : - if zed.get_sensors_data(sensors_data, sl.TIME_REFERENCE.IMAGE) == sl.ERROR_CODE.SUCCESS: - rotation = sensors_data.get_imu_data().get_pose().get_euler_angles() - text_rotation = str((round(rotation[0], 2), round(rotation[1], 2), round(rotation[2], 2))) - viewer.updateData(sensors_data.get_imu_data().get_pose(), text_translation, text_rotation, tracking_state) - else : - if tracking_state == sl.POSITIONAL_TRACKING_STATE.OK: - #Get rotation and translation and displays it - rotation = camera_pose.get_rotation_vector() - translation = camera_pose.get_translation(py_translation) - text_rotation = str((round(rotation[0], 2), round(rotation[1], 2), round(rotation[2], 2))) - text_translation = str((round(translation.get()[0], 2), round(translation.get()[1], 2), round(translation.get()[2], 2))) - 
pose_data = camera_pose.pose_data(sl.Transform()) - file.write(str(translation.get()[0])+", "+str(translation.get()[1])+", "+str(translation.get()[2])+"\n") - # Update rotation, translation and tracking state values in the OpenGL window - viewer.updateData(pose_data, text_translation, text_rotation, tracking_state) + tracking_status = zed.get_positional_tracking_status() + + #Get rotation and translation and displays it + if tracking_state == sl.POSITIONAL_TRACKING_STATE.OK: + rotation = camera_pose.get_rotation_vector() + translation = camera_pose.get_translation(py_translation) + text_rotation = str((round(rotation[0], 2), round(rotation[1], 2), round(rotation[2], 2))) + text_translation = str((round(translation.get()[0], 2), round(translation.get()[1], 2), round(translation.get()[2], 2))) + + pose_data = camera_pose.pose_data(sl.Transform()) + # Update rotation, translation and tracking state values in the OpenGL window + viewer.updateData(pose_data, text_translation, text_rotation, tracking_status) + + # If the region of interest auto detection is running, the resulting mask can be saved and reloaded for later use + if opt.roi_mask_file == "" and roi_state == sl.REGION_OF_INTEREST_AUTO_DETECTION_STATE.RUNNING and zed.get_region_of_interest_auto_detection_status() == sl.REGION_OF_INTEREST_AUTO_DETECTION_STATE.READY: + print("Region Of Interest detection done! 
Saving into {}".format(roi_name)) + zed.get_region_of_interest(roi, sl.Resolution(0,0), sl.MODULE.POSITIONAL_TRACKING) + roi.write(roi_name) + + roi_state = zed.get_region_of_interest_auto_detection_status() else : time.sleep(0.001) viewer.exit() @@ -132,7 +158,7 @@ def main(): parser.add_argument('--input_svo_file', type=str, help='Path to an .svo file, if you want to replay it',default = '') parser.add_argument('--ip_address', type=str, help='IP Adress, in format a.b.c.d:port or a.b.c.d, if you have a streaming setup', default = '') parser.add_argument('--resolution', type=str, help='Resolution, can be either HD2K, HD1200, HD1080, HD720, SVGA or VGA', default = '') - parser.add_argument('--imu_only', action = 'store_true', help = 'Either the tracking should be done with imu data only (that will remove translation estimation)' ) + parser.add_argument('--roi_mask_file', type=str, help='Path to a Region of Interest mask file', default = '') opt = parser.parse_args() if (len(opt.input_svo_file)>0 and len(opt.ip_address)>0): print("Specify only input_svo_file or ip_address, or none to use wired camera, not both. Exit program") diff --git a/recording/export/README.md b/recording/export/README.md index f81ac0b6..cf50f985 100644 --- a/recording/export/README.md +++ b/recording/export/README.md @@ -26,4 +26,4 @@ You can find it in [the camera controls folder](export/sensors/) ## KML GNSS export This sample allows you to export the GNSS data retrieved from the SDK, and also the positional tracking data fused by the SDK from GNSS and odometry. 
-You can find it in [the geotracking folder](../geotracking/export/) \ No newline at end of file +You can find it in [the global localization folder](../global%20localization/export/) \ No newline at end of file diff --git a/recording/export/svo/cpp/include/utils.hpp b/recording/export/svo/cpp/include/utils.hpp index ff0e0ed1..b1c88b34 100644 --- a/recording/export/svo/cpp/include/utils.hpp +++ b/recording/export/svo/cpp/include/utils.hpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // diff --git a/recording/export/svo/cpp/src/main.cpp b/recording/export/svo/cpp/src/main.cpp index b157be08..4124cf35 100644 --- a/recording/export/svo/cpp/src/main.cpp +++ b/recording/export/svo/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // diff --git a/recording/export/svo/csharp/Program.cs b/recording/export/svo/csharp/Program.cs index 2fa062b0..70bc2a3a 100644 --- a/recording/export/svo/csharp/Program.cs +++ b/recording/export/svo/csharp/Program.cs @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. 
// diff --git a/recording/export/svo/csharp/Properties/AssemblyInfo.cs b/recording/export/svo/csharp/Properties/AssemblyInfo.cs index 580aea52..19d0c1a1 100644 --- a/recording/export/svo/csharp/Properties/AssemblyInfo.cs +++ b/recording/export/svo/csharp/Properties/AssemblyInfo.cs @@ -10,7 +10,7 @@ [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("Tutorials")] -[assembly: AssemblyCopyright("Copyright © 2021")] +[assembly: AssemblyCopyright("Copyright © 2024")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] diff --git a/recording/export/svo/python/svo_export.py b/recording/export/svo/python/svo_export.py index 805cfc7f..f8d856df 100644 --- a/recording/export/svo/python/svo_export.py +++ b/recording/export/svo/python/svo_export.py @@ -178,7 +178,7 @@ def main(): if opt.mode > 4 or opt.mode < 0 : print("Mode shoud be between 0 and 4 included. \n Mode 0 is to export LEFT+RIGHT AVI. \n Mode 1 is to export LEFT+DEPTH_VIEW Avi. \n Mode 2 is to export LEFT+RIGHT image sequence. \n Mode 3 is to export LEFT+DEPTH_View image sequence. 
\n Mode 4 is to export LEFT+DEPTH_16BIT image sequence.") exit() - if not opt.input_svo_file.endswith(".svo"): + if not opt.input_svo_file.endswith(".svo") and not opt.input_svo_file.endswith(".svo2"): print("--input_svo_file parameter should be a .svo file but is not : ",opt.input_svo_file,"Exit program.") exit() if not os.path.isfile(opt.input_svo_file): diff --git a/recording/playback/external_data/cpp/CMakeLists.txt b/recording/playback/external_data/cpp/CMakeLists.txt new file mode 100644 index 00000000..f86d72de --- /dev/null +++ b/recording/playback/external_data/cpp/CMakeLists.txt @@ -0,0 +1,40 @@ +CMAKE_MINIMUM_REQUIRED(VERSION 3.5) +PROJECT(ZED_SVO_Playback) + +set(CMAKE_CXX_STANDARD 14) +set(CMAKE_CXX_STANDARD_REQUIRED ON) +SET(CMAKE_BUILD_TYPE "RelWithDebInfo") + +option(LINK_SHARED_ZED "Link with the ZED SDK shared executable" ON) + +if (NOT LINK_SHARED_ZED AND MSVC) + message(FATAL_ERROR "LINK_SHARED_ZED OFF : ZED SDK static libraries not available on Windows") +endif() + +find_package(ZED 4 REQUIRED) +find_package(OpenCV REQUIRED) +find_package(CUDA ${ZED_CUDA_VERSION} REQUIRED) + +include_directories(${CUDA_INCLUDE_DIRS}) +include_directories(${ZED_INCLUDE_DIRS}) +include_directories(${OpenCV_INCLUDE_DIRS}) +include_directories(${CMAKE_CURRENT_SOURCE_DIR}/include) + +link_directories(${ZED_LIBRARY_DIR}) +link_directories(${CUDA_LIBRARY_DIRS}) +link_directories(${OpenCV_LIBRARY_DIRS}) + +ADD_EXECUTABLE(${PROJECT_NAME} include/utils.hpp src/main.cpp) + +if (LINK_SHARED_ZED) + SET(ZED_LIBS ${ZED_LIBRARIES} ${CUDA_CUDA_LIBRARY} ${CUDA_CUDART_LIBRARY}) +else() + SET(ZED_LIBS ${ZED_STATIC_LIBRARIES} ${CUDA_CUDA_LIBRARY} ${CUDA_LIBRARY}) +endif() + +TARGET_LINK_LIBRARIES(${PROJECT_NAME} ${ZED_LIBS} ${OpenCV_LIBRARIES}) + +if(INSTALL_SAMPLES) + LIST(APPEND SAMPLE_LIST ${PROJECT_NAME}) + SET(SAMPLE_LIST "${SAMPLE_LIST}" PARENT_SCOPE) +endif() diff --git a/recording/playback/external_data/cpp/include/utils.hpp 
b/recording/playback/external_data/cpp/include/utils.hpp new file mode 100644 index 00000000..03e742e9 --- /dev/null +++ b/recording/playback/external_data/cpp/include/utils.hpp @@ -0,0 +1,81 @@ +/////////////////////////////////////////////////////////////////////////// +// +// Copyright (c) 2024, STEREOLABS. +// +// All rights reserved. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// +/////////////////////////////////////////////////////////////////////////// + +#pragma once + +static bool exit_app = false; + +// Handle the CTRL-C keyboard signal +#ifdef _WIN32 +#include + +void CtrlHandler(DWORD fdwCtrlType) { + exit_app = (fdwCtrlType == CTRL_C_EVENT); +} +#else +#include +void nix_exit_handler(int s) { + exit_app = true; +} +#endif + +// Set the function to handle the CTRL-C +void SetCtrlHandler() { +#ifdef _WIN32 + SetConsoleCtrlHandler((PHANDLER_ROUTINE) CtrlHandler, TRUE); +#else // unix + struct sigaction sigIntHandler; + sigIntHandler.sa_handler = nix_exit_handler; + sigemptyset(&sigIntHandler.sa_mask); + sigIntHandler.sa_flags = 0; + sigaction(SIGINT, &sigIntHandler, NULL); +#endif +} + +// Display progress bar +void ProgressBar(float ratio, unsigned int w) { + unsigned int c = ratio * w; + for (unsigned int x = 0; x < c; x++) std::cout << "="; + for (unsigned int x = c; x < w; x++) std::cout << " "; + std::cout << (unsigned int) (ratio * 100) << "% "; + std::cout << "\r" << std::flush; +} + +// If the current project uses openCV +#if defined (__OPENCV_ALL_HPP__) || defined(OPENCV_ALL_HPP) +// Conversion function between sl::Mat and cv::Mat +cv::Mat slMat2cvMat(sl::Mat &input) { + int cv_type = -1; + switch (input.getDataType()) { + case sl::MAT_TYPE::F32_C1: cv_type = CV_32FC1; break; + case sl::MAT_TYPE::F32_C2: cv_type = CV_32FC2; break; + case sl::MAT_TYPE::F32_C3: cv_type = CV_32FC3; break; + case sl::MAT_TYPE::F32_C4: cv_type = CV_32FC4; break; + case sl::MAT_TYPE::U8_C1: cv_type = CV_8UC1; break; + case sl::MAT_TYPE::U8_C2: cv_type = CV_8UC2; break; + case sl::MAT_TYPE::U8_C3: cv_type = CV_8UC3; break; + case sl::MAT_TYPE::U8_C4: cv_type = CV_8UC4; break; + default: break; + } + // Since cv::Mat data requires a uchar* pointer, we get the uchar1 pointer from sl::Mat (getPtr()) + // cv::Mat and sl::Mat will share a single memory structure + return cv::Mat(input.getHeight(), input.getWidth(), cv_type, 
input.getPtr(sl::MEM::CPU)); +} +#endif diff --git a/recording/playback/external_data/cpp/src/main.cpp b/recording/playback/external_data/cpp/src/main.cpp new file mode 100644 index 00000000..0a4e6bf7 --- /dev/null +++ b/recording/playback/external_data/cpp/src/main.cpp @@ -0,0 +1,127 @@ +/////////////////////////////////////////////////////////////////////////// +// +// Copyright (c) 2024, STEREOLABS. +// +// All rights reserved. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +/////////////////////////////////////////////////////////////////////////// + +/************************************************************ +** This sample demonstrates how to read a SVO video file. ** +** We use OpenCV to display the video. 
** +*************************************************************/ + +// ZED include +#include + +// Sample includes +#include +#include "utils.hpp" + +// Using namespace +using namespace sl; +using namespace std; + +void print(string msg_prefix, ERROR_CODE err_code = ERROR_CODE::SUCCESS, string msg_suffix = ""); + +int main(int argc, char **argv) { + + if (argc<=1) { + cout << "Usage: \n"; + cout << "$ ZED_SVO_Playback \n"; + cout << " ** SVO file is mandatory in the application ** \n\n"; + return EXIT_FAILURE; + } + + // Create ZED objects + Camera zed; + InitParameters init_parameters; + init_parameters.input.setFromSVOFile(argv[1]); + init_parameters.depth_mode = sl::DEPTH_MODE::PERFORMANCE; + + // Open the camera + auto returned_state = zed.open(init_parameters); + if (returned_state != ERROR_CODE::SUCCESS) { + print("Camera Open", returned_state, "Exit program."); + return EXIT_FAILURE; + } + + std::string s; + for (const auto &piece : zed.getSVODataKeys()) s += piece + "; "; + std::cout << "Channels that are in the SVO: " << s << std::endl; + + unsigned long long last_timestamp_ns; + + std::map data_map; + std::cout << "Reading everything all at once." 
<< std::endl; + auto ing = zed.retrieveSVOData("TEST", data_map); + + for(const auto& d : data_map) { + std::string s; + d.second.getContent(s); + std::cout << d.first << " (//) " << s << std::endl; + } + + std::cout << "#########\n"; + + // Setup key, images, times + char key = ' '; + while (key != 'q') { + returned_state = zed.grab(); + if (returned_state <= ERROR_CODE::SUCCESS) { + std::map data_map; + std::cout << "Reading between "<< last_timestamp_ns << " and " << zed.getTimestamp(sl::TIME_REFERENCE::IMAGE) << std::endl; + auto ing = zed.retrieveSVOData("TEST", data_map, last_timestamp_ns, zed.getTimestamp(sl::TIME_REFERENCE::IMAGE)); + for(const auto& d : data_map) { + std::string s; + d.second.getContent(s); + std::cout << d.first << " // " << s << std::endl; + } + + + // Display the frame + key = cv::waitKey(10); + } + else if (returned_state == sl::ERROR_CODE::END_OF_SVOFILE_REACHED) + { + print("SVO end has been reached. Looping back to 0\n"); + zed.setSVOPosition(0); + break; + } + else { + print("Grab ZED : ", returned_state); + break; + } + last_timestamp_ns = zed.getTimestamp(sl::TIME_REFERENCE::IMAGE); + } + zed.close(); + return EXIT_SUCCESS; +} + +void print(string msg_prefix, ERROR_CODE err_code, string msg_suffix) { + cout <<"[Sample]"; + if (err_code != ERROR_CODE::SUCCESS) + cout << "[Error] "; + else + cout<<" "; + cout << msg_prefix << " "; + if (err_code != ERROR_CODE::SUCCESS) { + cout << " | " << toString(err_code) << " : "; + cout << toVerbose(err_code); + } + if (!msg_suffix.empty()) + cout << " " << msg_suffix; + cout << endl; +} diff --git a/recording/playback/external_data/csharp/App.config b/recording/playback/external_data/csharp/App.config new file mode 100644 index 00000000..bae5d6d8 --- /dev/null +++ b/recording/playback/external_data/csharp/App.config @@ -0,0 +1,6 @@ + + + + + + diff --git a/recording/playback/external_data/csharp/CMakeLists.txt b/recording/playback/external_data/csharp/CMakeLists.txt new file mode 100644 index 
00000000..22f26e4c --- /dev/null +++ b/recording/playback/external_data/csharp/CMakeLists.txt @@ -0,0 +1,33 @@ +cmake_minimum_required( VERSION 3.8.0 ) + +project(ZED_SVO_Playback CSharp) + +add_executable(${PROJECT_NAME} + Program.cs + App.config + packages.config + Properties/AssemblyInfo.cs +) + +# Set the target platform to x64, since ZED SDK does not support 32-bits arch +target_compile_options(${PROJECT_NAME} PRIVATE "/platform:x64" ) + +# Set the .NET Framework version for the target. +set_property(TARGET ${PROJECT_NAME} PROPERTY VS_DOTNET_TARGET_FRAMEWORK_VERSION "v4.6.1") + +# Set the C# language version, otherwise default 3.0 is taken +set(CMAKE_CSharp_FLAGS "/langversion:7") + +set_property(TARGET ${PROJECT_NAME} PROPERTY VS_DOTNET_REFERENCES + "Microsoft.CSharp" + "PresentationCore" + "PresentationFramework" + "System" + "System.Numerics" +) + +set(CMAKE_SUPPRESS_REGENERATION true) + +set_property(TARGET ${PROJECT_NAME} PROPERTY VS_PACKAGE_REFERENCES + "Stereolabs.zed_4.*" +) diff --git a/recording/playback/external_data/csharp/Program.cs b/recording/playback/external_data/csharp/Program.cs new file mode 100644 index 00000000..8e941211 --- /dev/null +++ b/recording/playback/external_data/csharp/Program.cs @@ -0,0 +1,170 @@ +/////////////////////////////////////////////////////////////////////////// +// +// Copyright (c) 2024, STEREOLABS. +// +// All rights reserved. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +/////////////////////////////////////////////////////////////////////////// + +/************************************************************ +** This sample demonstrates how to read a SVO video file. ** +** We use OpenCV to display the video. ** +*************************************************************/ + +using System; +using sl; +using System.Collections.Generic; +using System.Net; + +class Program +{ + + static void Main(string[] args) + { + if (args.Length != 1) + { + Console.WriteLine("Usage: "); + Console.WriteLine(" ZED_SVO_Playback "); + Console.WriteLine("* *SVO file is mandatory in the application * *"); + + Environment.Exit(-1); + } + + // Create ZED Camera + Camera zed = new Camera(0); + + //Specify SVO path parameters + InitParameters initParameters = new InitParameters() + { + inputType = INPUT_TYPE.SVO, + pathSVO = args[0], + svoRealTimeMode = false, + depthMode = DEPTH_MODE.PERFORMANCE, + sdkVerbose = 1, + }; + + parseArgs(args, ref initParameters); + + ERROR_CODE state = zed.Open(ref initParameters); + if (state != ERROR_CODE.SUCCESS) + { + Environment.Exit(-1); + } + + char key = ' '; + RuntimeParameters rtParams = new RuntimeParameters(); + + string s = ""; + + List keys = zed.GetSVODataKeys(); + + foreach (var piece in keys) + { + s += piece + " ;"; + } + Console.WriteLine("Channels that are in the SVO: " + s); + + ulong last_timestamp_ns = 0; + + List data = new List(); + zed.RetrieveSVOData("TEST", ref 
data, 0, 0); + + foreach(var d in data) + { + Console.WriteLine(d.GetContent()); + } + + Console.WriteLine("############\n"); + + while (key != 'q') + { + state = zed.Grab(ref rtParams); + if (state == ERROR_CODE.SUCCESS) + { + List svoData = new List(); + Console.WriteLine("Reading between " + last_timestamp_ns + " and " + zed.GetCameraTimeStamp()); + state = zed.RetrieveSVOData("TEST", ref svoData, last_timestamp_ns, zed.GetCameraTimeStamp()); + + if (state == ERROR_CODE.SUCCESS) + { + foreach (var d in svoData) + { + Console.WriteLine(zed.GetCameraTimeStamp() + " // " + d.GetContent()); + } + } + + last_timestamp_ns = zed.GetCameraTimeStamp(); + } + else if (state == ERROR_CODE.END_OF_SVO_FILE_REACHED) + { + Console.WriteLine("SVO end has been reached. Looping back to 0"); + zed.SetSVOPosition(0); + } + else + { + Console.WriteLine("Grab Error : " + state); + break; + } + } + zed.Close(); + } + + static void parseArgs(string[] args, ref sl.InitParameters param) + { + if (args.Length > 0 && args[0].IndexOf(".svo") != -1) + { + // SVO input mode + param.inputType = INPUT_TYPE.SVO; + param.pathSVO = args[0]; + Console.WriteLine("[Sample] Using SVO File input: " + args[0]); + } + else if (args.Length > 0 && args[0].IndexOf(".svo") == -1) + { + IPAddress ip; + string arg = args[0]; + if (IPAddress.TryParse(arg, out ip)) + { + // Stream input mode - IP + port + param.inputType = INPUT_TYPE.STREAM; + param.ipStream = ip.ToString(); + Console.WriteLine("[Sample] Using Stream input, IP : " + ip); + } + else if (args[0].IndexOf("HD2K") != -1) + { + param.resolution = sl.RESOLUTION.HD2K; + Console.WriteLine("[Sample] Using Camera in resolution HD2K"); + } + else if (args[0].IndexOf("HD1080") != -1) + { + param.resolution = sl.RESOLUTION.HD1080; + Console.WriteLine("[Sample] Using Camera in resolution HD1080"); + } + else if (args[0].IndexOf("HD720") != -1) + { + param.resolution = sl.RESOLUTION.HD720; + Console.WriteLine("[Sample] Using Camera in resolution HD720"); + } + 
else if (args[0].IndexOf("VGA") != -1) + { + param.resolution = sl.RESOLUTION.VGA; + Console.WriteLine("[Sample] Using Camera in resolution VGA"); + } + } + else + { + // + } + } +} \ No newline at end of file diff --git a/recording/playback/external_data/csharp/Properties/AssemblyInfo.cs b/recording/playback/external_data/csharp/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..19d0c1a1 --- /dev/null +++ b/recording/playback/external_data/csharp/Properties/AssemblyInfo.cs @@ -0,0 +1,36 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("Tutorials")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("Tutorials")] +[assembly: AssemblyCopyright("Copyright © 2024")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. 
+[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("db8455c8-b2a9-4e62-9597-7b26c432a999")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/recording/playback/external_data/csharp/packages.config b/recording/playback/external_data/csharp/packages.config new file mode 100644 index 00000000..8038b422 --- /dev/null +++ b/recording/playback/external_data/csharp/packages.config @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/recording/playback/external_data/python/svo_playback.py b/recording/playback/external_data/python/svo_playback.py new file mode 100644 index 00000000..6493fdd2 --- /dev/null +++ b/recording/playback/external_data/python/svo_playback.py @@ -0,0 +1,94 @@ +######################################################################## +# +# Copyright (c) 2024, STEREOLABS. +# +# All rights reserved. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +######################################################################## + +""" + Read SVO sample to read the video and the information of the camera. It can pick a frame of the svo and save it as + a JPEG or PNG file. Depth map and Point Cloud can also be saved into files. +""" +import sys +import pyzed.sl as sl +import cv2 +import argparse +import os + +def main(): + filepath = opt.input_svo_file # Path to the .svo file to be played back + input_type = sl.InputType() + input_type.set_from_svo_file(filepath) #Set init parameter to run from the .svo + init = sl.InitParameters(input_t=input_type, svo_real_time_mode=False) + init.depth_mode = sl.DEPTH_MODE.PERFORMANCE + cam = sl.Camera() + status = cam.open(init) + if status != sl.ERROR_CODE.SUCCESS: #Ensure the camera opened successfully + print("Camera Open", status, "Exit program.") + exit(1) + + + print('External SVOData channels:', cam.get_svo_data_keys()) + + # Set a maximum resolution, for visualisation comfort + resolution = cam.get_camera_information().camera_configuration.resolution + low_resolution = sl.Resolution(min(720,resolution.width) * 2, min(404,resolution.height)) + svo_image = sl.Mat(min(720,resolution.width) * 2,min(404,resolution.height), sl.MAT_TYPE.U8_C4, sl.MEM.CPU) + + runtime = sl.RuntimeParameters() + + mat = sl.Mat() + + key = ' ' + print(" Press 'q' to exit...") + + svo_frame_rate = cam.get_init_parameters().camera_fps + nb_frames = 
cam.get_svo_number_of_frames() + print("[Info] SVO contains " ,nb_frames," frames") + + + key = '' + last_timestamp_ns = sl.Timestamp() + while key != 113: # for 'q' key + err = cam.grab(runtime) + + data_map = {} + # print("Reading between ", str(last_timestamp_ns.data_ns), " and ", str(cam.get_timestamp(sl.TIME_REFERENCE.IMAGE).data_ns)) + ing = cam.retrieve_svo_data("TEST", data_map, last_timestamp_ns, cam.get_timestamp(sl.TIME_REFERENCE.IMAGE)) + for d in data_map: + s = data_map[d].get_content_as_string() + print("Retrieved:", s); + + last_timestamp_ns = cam.get_timestamp(sl.TIME_REFERENCE.IMAGE) + + if err == sl.ERROR_CODE.END_OF_SVOFILE_REACHED: + break + + cv2.destroyAllWindows() + cam.close() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument('--input_svo_file', type=str, help='Path to the SVO file', required= True) + opt = parser.parse_args() + if not opt.input_svo_file.endswith(".svo") and not opt.input_svo_file.endswith(".svo2"): + print("--input_svo_file parameter should be a .svo file but is not : ",opt.input_svo_file,"Exit program.") + exit() + if not os.path.isfile(opt.input_svo_file): + print("--input_svo_file parameter should be an existing file but is not : ",opt.input_svo_file,"Exit program.") + exit() + main() diff --git a/recording/playback/mono/cpp/include/utils.hpp b/recording/playback/mono/cpp/include/utils.hpp index dbbe04d6..03e742e9 100644 --- a/recording/playback/mono/cpp/include/utils.hpp +++ b/recording/playback/mono/cpp/include/utils.hpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. 
// diff --git a/recording/playback/mono/cpp/src/main.cpp b/recording/playback/mono/cpp/src/main.cpp index 36c7bcf3..3bc33300 100644 --- a/recording/playback/mono/cpp/src/main.cpp +++ b/recording/playback/mono/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // @@ -49,7 +49,8 @@ int main(int argc, char **argv) { Camera zed; InitParameters init_parameters; init_parameters.input.setFromSVOFile(argv[1]); - init_parameters.depth_mode = sl::DEPTH_MODE::PERFORMANCE; + init_parameters.depth_mode = sl::DEPTH_MODE::NONE; + init_parameters.sdk_verbose = 1; // Open the camera auto returned_state = zed.open(init_parameters); @@ -77,6 +78,8 @@ int main(int argc, char **argv) { // Start SVO playback + // zed.setSVOPosition(zed.getSVONumberOfFrames() - 120); + while (key != 'q') { returned_state = zed.grab(); if (returned_state <= ERROR_CODE::SUCCESS) { @@ -106,6 +109,7 @@ int main(int argc, char **argv) { { print("SVO end has been reached. Looping back to 0\n"); zed.setSVOPosition(0); + break; } else { print("Grab ZED : ", returned_state); diff --git a/recording/playback/mono/csharp/Program.cs b/recording/playback/mono/csharp/Program.cs index 67891c1b..0386ff59 100644 --- a/recording/playback/mono/csharp/Program.cs +++ b/recording/playback/mono/csharp/Program.cs @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. 
// diff --git a/recording/playback/mono/csharp/Properties/AssemblyInfo.cs b/recording/playback/mono/csharp/Properties/AssemblyInfo.cs index 580aea52..19d0c1a1 100644 --- a/recording/playback/mono/csharp/Properties/AssemblyInfo.cs +++ b/recording/playback/mono/csharp/Properties/AssemblyInfo.cs @@ -10,7 +10,7 @@ [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("Tutorials")] -[assembly: AssemblyCopyright("Copyright © 2021")] +[assembly: AssemblyCopyright("Copyright © 2024")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] diff --git a/recording/playback/mono/python/svo_playback.py b/recording/playback/mono/python/svo_playback.py index a8952fe7..9bf0af16 100644 --- a/recording/playback/mono/python/svo_playback.py +++ b/recording/playback/mono/python/svo_playback.py @@ -107,7 +107,7 @@ def main(): parser = argparse.ArgumentParser() parser.add_argument('--input_svo_file', type=str, help='Path to the SVO file', required= True) opt = parser.parse_args() - if not opt.input_svo_file.endswith(".svo"): + if not opt.input_svo_file.endswith(".svo") and not opt.input_svo_file.endswith(".svo2"): print("--input_svo_file parameter should be a .svo file but is not : ",opt.input_svo_file,"Exit program.") exit() if not os.path.isfile(opt.input_svo_file): diff --git a/recording/playback/multi camera/cpp/include/utils.hpp b/recording/playback/multi camera/cpp/include/utils.hpp index 55c43029..f8ad9c5b 100644 --- a/recording/playback/multi camera/cpp/include/utils.hpp +++ b/recording/playback/multi camera/cpp/include/utils.hpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. 
// diff --git a/recording/playback/multi camera/cpp/src/main.cpp b/recording/playback/multi camera/cpp/src/main.cpp index 39c5570d..713ed622 100644 --- a/recording/playback/multi camera/cpp/src/main.cpp +++ b/recording/playback/multi camera/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // diff --git a/recording/recording/external_data/cpp/CMakeLists.txt b/recording/recording/external_data/cpp/CMakeLists.txt new file mode 100644 index 00000000..d3bb2a90 --- /dev/null +++ b/recording/recording/external_data/cpp/CMakeLists.txt @@ -0,0 +1,51 @@ +CMAKE_MINIMUM_REQUIRED(VERSION 3.5) +PROJECT(ZED_SVO_Recording) + +set(CMAKE_CXX_STANDARD 14) +set(CMAKE_CXX_STANDARD_REQUIRED ON) +SET(CMAKE_BUILD_TYPE "RelWithDebInfo") + +option(LINK_SHARED_ZED "Link with the ZED SDK shared executable" ON) + +if (NOT LINK_SHARED_ZED AND MSVC) + message(FATAL_ERROR "LINK_SHARED_ZED OFF : ZED SDK static libraries not available on Windows") +endif() + +find_package(ZED 3 REQUIRED) +find_package(OpenCV REQUIRED) +find_package(CUDA ${ZED_CUDA_VERSION} REQUIRED) + +include_directories(${CUDA_INCLUDE_DIRS}) +include_directories(${ZED_INCLUDE_DIRS}) +include_directories(${CMAKE_CURRENT_SOURCE_DIR}/include) + +link_directories(${ZED_LIBRARY_DIR}) +link_directories(${CUDA_LIBRARY_DIRS}) +IF(NOT WIN32) + IF (BUILD_WITH_SANITIZER) + message("!! Building with address sanitizer and -g !!") + set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-omit-frame-pointer -fsanitize=address -Wall -Wextra -g") + set (CMAKE_LINKER_FLAGS "${CMAKE_LINKER_FLAGS} -fno-omit-frame-pointer -fsanitize=address -Wall -Wextra -g") + add_definitions(-g -fsanitize=address -fno-omit-frame-pointer -static-libasan -Wall -Wextra) + ENDIF() + + IF (BUILD_WITH_DEBUGINFOS) + message("!! 
Building with -g !!") + add_definitions(-g) + ENDIF() + ENDIF() + +ADD_EXECUTABLE(${PROJECT_NAME} include/utils.hpp src/main.cpp) + +if (LINK_SHARED_ZED) + SET(ZED_LIBS ${ZED_LIBRARIES} ${CUDA_CUDA_LIBRARY} ${CUDA_CUDART_LIBRARY} ${OpenCV_LIBRARIES}) +else() + SET(ZED_LIBS ${ZED_STATIC_LIBRARIES} ${CUDA_CUDA_LIBRARY} ${CUDA_LIBRARY}) +endif() + +TARGET_LINK_LIBRARIES(${PROJECT_NAME} ${ZED_LIBS} ${OpenCV_LIBRARIES}) + +if(INSTALL_SAMPLES) + LIST(APPEND SAMPLE_LIST ${PROJECT_NAME}) + SET(SAMPLE_LIST "${SAMPLE_LIST}" PARENT_SCOPE) +endif() diff --git a/recording/recording/external_data/cpp/include/utils.hpp b/recording/recording/external_data/cpp/include/utils.hpp new file mode 100644 index 00000000..03e742e9 --- /dev/null +++ b/recording/recording/external_data/cpp/include/utils.hpp @@ -0,0 +1,81 @@ +/////////////////////////////////////////////////////////////////////////// +// +// Copyright (c) 2024, STEREOLABS. +// +// All rights reserved. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// +/////////////////////////////////////////////////////////////////////////// + +#pragma once + +static bool exit_app = false; + +// Handle the CTRL-C keyboard signal +#ifdef _WIN32 +#include + +void CtrlHandler(DWORD fdwCtrlType) { + exit_app = (fdwCtrlType == CTRL_C_EVENT); +} +#else +#include +void nix_exit_handler(int s) { + exit_app = true; +} +#endif + +// Set the function to handle the CTRL-C +void SetCtrlHandler() { +#ifdef _WIN32 + SetConsoleCtrlHandler((PHANDLER_ROUTINE) CtrlHandler, TRUE); +#else // unix + struct sigaction sigIntHandler; + sigIntHandler.sa_handler = nix_exit_handler; + sigemptyset(&sigIntHandler.sa_mask); + sigIntHandler.sa_flags = 0; + sigaction(SIGINT, &sigIntHandler, NULL); +#endif +} + +// Display progress bar +void ProgressBar(float ratio, unsigned int w) { + unsigned int c = ratio * w; + for (unsigned int x = 0; x < c; x++) std::cout << "="; + for (unsigned int x = c; x < w; x++) std::cout << " "; + std::cout << (unsigned int) (ratio * 100) << "% "; + std::cout << "\r" << std::flush; +} + +// If the current project uses openCV +#if defined (__OPENCV_ALL_HPP__) || defined(OPENCV_ALL_HPP) +// Conversion function between sl::Mat and cv::Mat +cv::Mat slMat2cvMat(sl::Mat &input) { + int cv_type = -1; + switch (input.getDataType()) { + case sl::MAT_TYPE::F32_C1: cv_type = CV_32FC1; break; + case sl::MAT_TYPE::F32_C2: cv_type = CV_32FC2; break; + case sl::MAT_TYPE::F32_C3: cv_type = CV_32FC3; break; + case sl::MAT_TYPE::F32_C4: cv_type = CV_32FC4; break; + case sl::MAT_TYPE::U8_C1: cv_type = CV_8UC1; break; + case sl::MAT_TYPE::U8_C2: cv_type = CV_8UC2; break; + case sl::MAT_TYPE::U8_C3: cv_type = CV_8UC3; break; + case sl::MAT_TYPE::U8_C4: cv_type = CV_8UC4; break; + default: break; + } + // Since cv::Mat data requires a uchar* pointer, we get the uchar1 pointer from sl::Mat (getPtr()) + // cv::Mat and sl::Mat will share a single memory structure + return cv::Mat(input.getHeight(), input.getWidth(), cv_type, 
input.getPtr(sl::MEM::CPU)); +} +#endif diff --git a/recording/recording/external_data/cpp/src/main.cpp b/recording/recording/external_data/cpp/src/main.cpp new file mode 100644 index 00000000..5095e18a --- /dev/null +++ b/recording/recording/external_data/cpp/src/main.cpp @@ -0,0 +1,166 @@ +/////////////////////////////////////////////////////////////////////////// +// +// Copyright (c) 2024, STEREOLABS. +// +// All rights reserved. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +/////////////////////////////////////////////////////////////////////////// + +/**************************************************************************************** +** This sample shows how to record video in Stereolabs SVO format. 
** +** SVO video files can be played with the ZED API and used with its different modules ** +*****************************************************************************************/ + +// ZED includes +#include + +// Sample includes +#include "utils.hpp" + +// Using namespace +using namespace sl; +using namespace std; + +void print(string msg_prefix, ERROR_CODE err_code = ERROR_CODE::SUCCESS, string msg_suffix = ""); +void parseArgs(int argc, char **argv,sl::InitParameters& param); + +int main(int argc, char **argv) { + + if (argc < 2) { + cout << "Usage : Only the path of the output SVO file should be passed as argument.\n"; + return EXIT_FAILURE; + } + + // Create a ZED camera + Camera zed; + + // Set configuration parameters for the ZED + InitParameters init_parameters; + init_parameters.sdk_verbose = 1; + init_parameters.depth_mode = DEPTH_MODE::NONE; + parseArgs(argc,argv,init_parameters); + + // Open the camera + auto returned_state = zed.open(init_parameters); + if (returned_state != ERROR_CODE::SUCCESS) { + print("Camera Open", returned_state, "Exit program."); + return EXIT_FAILURE; + } + + // Enable recording with the filename specified in argument + RecordingParameters recording_parameters; + recording_parameters.video_filename.set(argv[1]); + recording_parameters.compression_mode = SVO_COMPRESSION_MODE::H264; + returned_state = zed.enableRecording(recording_parameters); + if (returned_state != ERROR_CODE::SUCCESS) { + print("Recording ZED : ", returned_state); + zed.close(); + return EXIT_FAILURE; + } + + // Start recording SVO, stop with Ctrl-C command + print("SVO is Recording, use Ctrl-C to stop." 
); + SetCtrlHandler(); + int frames_recorded = 0; + sl::RecordingStatus rec_status; + while (frames_recorded < 100) { + if (zed.grab() == ERROR_CODE::SUCCESS) { + + // Each new frame is added to the SVO file + rec_status = zed.getRecordingStatus(); + if (rec_status.status) { + + unsigned long long timestamp_ns = zed.getTimestamp(sl::TIME_REFERENCE::IMAGE); + sl::SVOData data; + data.key = "TEST"; + data.setContent("Hello, SVO World >> " + std::to_string(timestamp_ns)); + data.timestamp_ns = timestamp_ns; + auto err = zed.ingestDataIntoSVO(data); + std::cout << "Ingest " << err << std::endl; + + frames_recorded++; + std::cout << "Frame count: " << frames_recorded << std::endl; + + } + } + else + break; + } + + // Stop recording + zed.disableRecording(); + zed.close(); + return EXIT_SUCCESS; +} + +void print(string msg_prefix, ERROR_CODE err_code, string msg_suffix) { + cout <<"[Sample]"; + if (err_code != ERROR_CODE::SUCCESS) + cout << "[Error] "; + else + cout<<" "; + cout << msg_prefix << " "; + if (err_code != ERROR_CODE::SUCCESS) { + cout << " | " << toString(err_code) << " : "; + cout << toVerbose(err_code); + } + if (!msg_suffix.empty()) + cout << " " << msg_suffix; + cout << endl; +} + +void parseArgs(int argc, char **argv,sl::InitParameters& param) +{ + if (argc > 2 && string(argv[2]).find(".svo")!=string::npos) { + // SVO input mode + param.input.setFromSVOFile(argv[2]); + cout << "[Sample] Using SVO File input: " << argv[2] << endl; + } else if (argc > 2 && string(argv[2]).find(".svo")==string::npos) { + string arg = string(argv[2]); + unsigned int a,b,c,d,port; + if (sscanf(arg.c_str(),"%u.%u.%u.%u:%d", &a, &b, &c, &d,&port) == 5) { + // Stream input mode - IP + port + string ip_adress = to_string(a)+"."+to_string(b)+"."+to_string(c)+"."+to_string(d); + param.input.setFromStream(sl::String(ip_adress.c_str()),port); + cout<<"[Sample] Using Stream input, IP : "< + + + + + diff --git a/recording/recording/external_data/csharp/CMakeLists.txt 
b/recording/recording/external_data/csharp/CMakeLists.txt new file mode 100644 index 00000000..06594b4f --- /dev/null +++ b/recording/recording/external_data/csharp/CMakeLists.txt @@ -0,0 +1,34 @@ +cmake_minimum_required( VERSION 3.8.0 ) + +project(ZED_SVO_Recording CSharp) + +add_executable(${PROJECT_NAME} + Program.cs + App.config + packages.config + Properties/AssemblyInfo.cs +) + +# Set the target platform to x64, since ZED SDK does not support 32-bits arch +target_compile_options(${PROJECT_NAME} PRIVATE "/platform:x64" ) + +# Set the .NET Framework version for the target. +set_property(TARGET ${PROJECT_NAME} PROPERTY VS_DOTNET_TARGET_FRAMEWORK_VERSION "v4.6.1") + +# Set the C# language version, otherwise default 3.0 is taken +set(CMAKE_CSharp_FLAGS "/langversion:7") + +set_property(TARGET ${PROJECT_NAME} PROPERTY VS_DOTNET_REFERENCES + "Microsoft.CSharp" + "PresentationCore" + "PresentationFramework" + "System" + "System.Numerics" + "WindowsBase" +) + +set(CMAKE_SUPPRESS_REGENERATION true) + +set_property(TARGET ${PROJECT_NAME} PROPERTY VS_PACKAGE_REFERENCES + "Stereolabs.zed_4.*" +) \ No newline at end of file diff --git a/recording/recording/external_data/csharp/Program.cs b/recording/recording/external_data/csharp/Program.cs new file mode 100644 index 00000000..e82c008a --- /dev/null +++ b/recording/recording/external_data/csharp/Program.cs @@ -0,0 +1,114 @@ +/////////////////////////////////////////////////////////////////////////// +// +// Copyright (c) 2024, STEREOLABS. +// +// All rights reserved. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +/////////////////////////////////////////////////////////////////////////// + +/**************************************************************************************** +** This sample shows how to record video in Stereolabs SVO format. ** +** SVO video files can be played with the ZED API and used with its different modules ** +*****************************************************************************************/ + +using System; +using System.Runtime.InteropServices; +using System.IO; +using System.Numerics; +using sl; + +class Program +{ + [STAThread] + static void Main(string[] args) + { + + if (args.Length < 1) + { + Console.WriteLine("Usage : Only the path of the output SVO file should be passed as argument."); + Environment.Exit(-1); + } + // Create ZED Camera + Camera zed = new Camera(0); + + Console.CancelKeyPress += delegate { + Console.WriteLine("close"); + zed.DisableRecording(); + zed.Close(); + }; + + //Specify SVO path parameters + InitParameters initParameters = new InitParameters() + { + resolution = RESOLUTION.HD2K, + depthMode = DEPTH_MODE.NONE, + }; + + ERROR_CODE state = zed.Open(ref initParameters); + if (state != ERROR_CODE.SUCCESS) + { + Environment.Exit(-1); + } + + string pathOutput = args[0]; + + RecordingParameters recordingParams = new RecordingParameters(pathOutput, SVO_COMPRESSION_MODE.H264_BASED, 8000, 15, false); + state = zed.EnableRecording(recordingParams); + if (state != 
ERROR_CODE.SUCCESS) + { + zed.Close(); + Environment.Exit(-1); + } + + // Start recording SVO, stop with Q + Console.WriteLine("SVO is recording, press Q to stop"); + int framesRecorded = 0; + + RuntimeParameters rtParams = new RuntimeParameters(); + + sl.RecordingStatus recordingStatus = new sl.RecordingStatus(); + while (framesRecorded < 100) + { + if (zed.Grab(ref rtParams) == ERROR_CODE.SUCCESS){ + + ulong timestamp = zed.GetCameraTimeStamp(); + + sl.SVOData svoData = new sl.SVOData(); + svoData.timestamp = timestamp; + svoData.key = "TEST"; + string content = "Hello, SVO World >> " + timestamp.ToString(); + svoData.SetContent(content); + + var err = zed.IngestDataIntoSVO(ref svoData); + Console.WriteLine("Ingest " + err); + recordingStatus = zed.GetRecordingStatus(); + // Each new frame is added to the SVO file + if (recordingStatus.status) + { + framesRecorded++; + Console.WriteLine("Frame count: " + framesRecorded); + } + + } + + bool State = (System.Windows.Input.Keyboard.IsKeyDown(System.Windows.Input.Key.Q) == true); + if (State) break; + } + + // Stop recording + zed.DisableRecording(); + zed.Close(); + } +} diff --git a/recording/recording/external_data/csharp/Properties/AssemblyInfo.cs b/recording/recording/external_data/csharp/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..19d0c1a1 --- /dev/null +++ b/recording/recording/external_data/csharp/Properties/AssemblyInfo.cs @@ -0,0 +1,36 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. 
+[assembly: AssemblyTitle("Tutorials")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("Tutorials")] +[assembly: AssemblyCopyright("Copyright © 2024")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. +[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("db8455c8-b2a9-4e62-9597-7b26c432a999")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/recording/recording/external_data/csharp/packages.config b/recording/recording/external_data/csharp/packages.config new file mode 100644 index 00000000..8038b422 --- /dev/null +++ b/recording/recording/external_data/csharp/packages.config @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/recording/recording/external_data/python/svo_recording.py b/recording/recording/external_data/python/svo_recording.py new file mode 100644 index 00000000..1d6c7c00 --- /dev/null +++ b/recording/recording/external_data/python/svo_recording.py @@ -0,0 +1,74 @@ +######################################################################## +# +# Copyright (c) 2022, STEREOLABS. +# +# All rights reserved. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +######################################################################## + +import sys +import pyzed.sl as sl +from signal import signal, SIGINT +import argparse +import os + +cam = sl.Camera() + +#Handler to deal with CTRL+C properly +def handler(signal_received, frame): + cam.disable_recording() + cam.close() + sys.exit(0) + +signal(SIGINT, handler) + +def main(): + + init = sl.InitParameters() + init.depth_mode = sl.DEPTH_MODE.NONE # Set configuration parameters for the ZED + + status = cam.open(init) + if status != sl.ERROR_CODE.SUCCESS: + print("Camera Open", status, "Exit program.") + exit(1) + + recording_param = sl.RecordingParameters(opt.output_svo_file, sl.SVO_COMPRESSION_MODE.H264) # Enable recording with the filename specified in argument + err = cam.enable_recording(recording_param) + if err != sl.ERROR_CODE.SUCCESS: + print("Recording ZED : ", err) + exit(1) + + runtime = sl.RuntimeParameters() + print("SVO is Recording, use Ctrl-C to stop.") # Start recording SVO, stop with Ctrl-C command + frames_recorded = 0 + + while frames_recorded < 100: + if cam.grab(runtime) == sl.ERROR_CODE.SUCCESS : # Check that a new image is successfully acquired + frames_recorded += 1 + 
print("Frame count: " + str(frames_recorded), end="\r") + data = sl.SVOData() + data.key = "TEST" + data.set_string_content("Hello, SVO World >> " + str(cam.get_timestamp(sl.TIME_REFERENCE.IMAGE).data_ns)) + data.timestamp_ns = cam.get_timestamp(sl.TIME_REFERENCE.IMAGE) + print('INGEST', cam.ingest_data_into_svo(data)) + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument('--output_svo_file', type=str, help='Path to the SVO file that will be written', required= True) + opt = parser.parse_args() + if not opt.output_svo_file.endswith(".svo") and not opt.output_svo_file.endswith(".svo2"): + print("--output_svo_file parameter should be a .svo file but is not : ",opt.output_svo_file,"Exit program.") + exit() + main() \ No newline at end of file diff --git a/recording/recording/mono/cpp/CMakeLists.txt b/recording/recording/mono/cpp/CMakeLists.txt index b6c412a6..9c61fd87 100644 --- a/recording/recording/mono/cpp/CMakeLists.txt +++ b/recording/recording/mono/cpp/CMakeLists.txt @@ -33,7 +33,7 @@ IF(NOT WIN32) message("!! Building with -g !!") add_definitions(-g) ENDIF() -ENDIF() + ENDIF() ADD_EXECUTABLE(${PROJECT_NAME} include/utils.hpp src/main.cpp) diff --git a/recording/recording/mono/cpp/include/utils.hpp b/recording/recording/mono/cpp/include/utils.hpp index c6b68fa3..78504ebe 100644 --- a/recording/recording/mono/cpp/include/utils.hpp +++ b/recording/recording/mono/cpp/include/utils.hpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // diff --git a/recording/recording/mono/cpp/src/main.cpp b/recording/recording/mono/cpp/src/main.cpp index f78b6a6f..2cbd3eb4 100644 --- a/recording/recording/mono/cpp/src/main.cpp +++ b/recording/recording/mono/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. 
+// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // diff --git a/recording/recording/mono/csharp/Program.cs b/recording/recording/mono/csharp/Program.cs index 7bda8697..3bdfd2b8 100644 --- a/recording/recording/mono/csharp/Program.cs +++ b/recording/recording/mono/csharp/Program.cs @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // diff --git a/recording/recording/mono/csharp/Properties/AssemblyInfo.cs b/recording/recording/mono/csharp/Properties/AssemblyInfo.cs index 580aea52..19d0c1a1 100644 --- a/recording/recording/mono/csharp/Properties/AssemblyInfo.cs +++ b/recording/recording/mono/csharp/Properties/AssemblyInfo.cs @@ -10,7 +10,7 @@ [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("Tutorials")] -[assembly: AssemblyCopyright("Copyright © 2021")] +[assembly: AssemblyCopyright("Copyright © 2024")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] diff --git a/recording/recording/mono/python/svo_recording.py b/recording/recording/mono/python/svo_recording.py index d21cb6cf..aa3c8933 100644 --- a/recording/recording/mono/python/svo_recording.py +++ b/recording/recording/mono/python/svo_recording.py @@ -63,7 +63,7 @@ def main(): parser = argparse.ArgumentParser() parser.add_argument('--output_svo_file', type=str, help='Path to the SVO file that will be written', required= True) opt = parser.parse_args() - if not opt.output_svo_file.endswith(".svo"): + if not opt.output_svo_file.endswith(".svo") and not opt.output_svo_file.endswith(".svo2"): print("--output_svo_file parameter should be a .svo file but is not : ",opt.output_svo_file,"Exit program.") exit() main() \ No newline at end of file diff --git a/recording/recording/multi camera/cpp/include/utils.hpp b/recording/recording/multi camera/cpp/include/utils.hpp index c6b68fa3..78504ebe 100644 
--- a/recording/recording/multi camera/cpp/include/utils.hpp +++ b/recording/recording/multi camera/cpp/include/utils.hpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. // diff --git a/spatial mapping/multi camera/cpp/include/GLViewer.hpp b/spatial mapping/multi camera/cpp/include/GLViewer.hpp index a0804386..2347dba3 100644 --- a/spatial mapping/multi camera/cpp/include/GLViewer.hpp +++ b/spatial mapping/multi camera/cpp/include/GLViewer.hpp @@ -101,14 +101,14 @@ class Shader { Shader() { } - Shader(GLchar* vs, GLchar* fs); + Shader(const GLchar* vs, const GLchar* fs); ~Shader(); GLuint getProgramId(); static const GLint ATTRIB_VERTICES_POS = 0; static const GLint ATTRIB_COLOR_POS = 1; private: - bool compile(GLuint &shaderId, GLenum type, GLchar* src); + bool compile(GLuint &shaderId, GLenum type, const GLchar* src); GLuint verterxId_; GLuint fragmentId_; GLuint programId_; diff --git a/spatial mapping/multi camera/cpp/src/GLViewer.cpp b/spatial mapping/multi camera/cpp/src/GLViewer.cpp index b853fb24..d6ee3369 100644 --- a/spatial mapping/multi camera/cpp/src/GLViewer.cpp +++ b/spatial mapping/multi camera/cpp/src/GLViewer.cpp @@ -1,6 +1,6 @@ #include "GLViewer.hpp" -GLchar* MESH_VERTEX_SHADER = +const GLchar* MESH_VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec3 in_Color;\n" @@ -11,7 +11,7 @@ GLchar* MESH_VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" "}"; -GLchar* MESH_FRAGMENT_SHADER = +const GLchar* MESH_FRAGMENT_SHADER = "#version 330 core\n" "in vec3 b_color;\n" "layout(location = 0) out vec4 color;\n" @@ -19,7 +19,7 @@ GLchar* MESH_FRAGMENT_SHADER = " color = vec4(b_color, 0.95);\n" "}"; -GLchar* POINTCLOUD_VERTEX_SHADER = +const GLchar* POINTCLOUD_VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec4 
in_VertexRGBA;\n" "uniform mat4 u_mvpMatrix;\n" @@ -38,7 +38,7 @@ GLchar* POINTCLOUD_VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_VertexRGBA.xyz, 1);\n" "}"; -GLchar* POINTCLOUD_FRAGMENT_SHADER = +const GLchar* POINTCLOUD_FRAGMENT_SHADER = "#version 330 core\n" "in vec3 b_color;\n" "layout(location = 0) out vec4 out_Color;\n" @@ -46,7 +46,7 @@ GLchar* POINTCLOUD_FRAGMENT_SHADER = " out_Color = vec4(b_color, 0.9);\n" "}"; -GLchar* VERTEX_SHADER_TEXTURE = +const GLchar* VERTEX_SHADER_TEXTURE = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec2 in_UVs;\n" @@ -57,7 +57,7 @@ GLchar* VERTEX_SHADER_TEXTURE = " UV = in_UVs;\n" "}\n"; -GLchar* FRAGMENT_SHADER_TEXTURE = +const GLchar* FRAGMENT_SHADER_TEXTURE = "#version 330 core\n" "in vec2 UV;\n" "uniform sampler2D texture_sampler;\n" @@ -503,7 +503,7 @@ void MeshObject::draw(bool draw_wire) { } } -Shader::Shader(GLchar* vs, GLchar* fs) { +Shader::Shader(const GLchar* vs, const GLchar* fs) { if (!compile(verterxId_, GL_VERTEX_SHADER, vs)) { std::cout << "ERROR: while compiling vertex shader" << std::endl; } @@ -551,7 +551,7 @@ GLuint Shader::getProgramId() { return programId_; } -bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { +bool Shader::compile(GLuint &shaderId, GLenum type, const GLchar* src) { shaderId = glCreateShader(type); if (shaderId == 0) { std::cout << "ERROR: shader type (" << type << ") does not exist" << std::endl; diff --git a/spatial mapping/multi camera/cpp/src/main.cpp b/spatial mapping/multi camera/cpp/src/main.cpp index 549d8562..738d3004 100644 --- a/spatial mapping/multi camera/cpp/src/main.cpp +++ b/spatial mapping/multi camera/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. 
// diff --git a/spatial mapping/spatial mapping/cpp/include/GLViewer.hpp b/spatial mapping/spatial mapping/cpp/include/GLViewer.hpp index 2e92ff00..7008d087 100644 --- a/spatial mapping/spatial mapping/cpp/include/GLViewer.hpp +++ b/spatial mapping/spatial mapping/cpp/include/GLViewer.hpp @@ -101,14 +101,14 @@ class Shader { Shader() { } - Shader(GLchar* vs, GLchar* fs); + Shader(const GLchar* vs, const GLchar* fs); ~Shader(); GLuint getProgramId(); static const GLint ATTRIB_VERTICES_POS = 0; static const GLint ATTRIB_COLOR_POS = 1; private: - bool compile(GLuint &shaderId, GLenum type, GLchar* src); + bool compile(GLuint &shaderId, GLenum type, const GLchar* src); GLuint verterxId_; GLuint fragmentId_; GLuint programId_; diff --git a/spatial mapping/spatial mapping/cpp/src/GLViewer.cpp b/spatial mapping/spatial mapping/cpp/src/GLViewer.cpp index 198b5dc5..f56a5cc2 100644 --- a/spatial mapping/spatial mapping/cpp/src/GLViewer.cpp +++ b/spatial mapping/spatial mapping/cpp/src/GLViewer.cpp @@ -1,6 +1,6 @@ #include "GLViewer.hpp" -GLchar* MESH_VERTEX_SHADER = +const GLchar* MESH_VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec3 in_Color;\n" @@ -11,7 +11,7 @@ GLchar* MESH_VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_Vertex, 1);\n" "}"; -GLchar* MESH_FRAGMENT_SHADER = +const GLchar* MESH_FRAGMENT_SHADER = "#version 330 core\n" "in vec3 b_color;\n" "layout(location = 0) out vec4 color;\n" @@ -19,7 +19,7 @@ GLchar* MESH_FRAGMENT_SHADER = " color = vec4(b_color, 0.95);\n" "}"; -GLchar* POINTCLOUD_VERTEX_SHADER = +const GLchar* POINTCLOUD_VERTEX_SHADER = "#version 330 core\n" "layout(location = 0) in vec4 in_VertexRGBA;\n" "uniform mat4 u_mvpMatrix;\n" @@ -38,7 +38,7 @@ GLchar* POINTCLOUD_VERTEX_SHADER = " gl_Position = u_mvpMatrix * vec4(in_VertexRGBA.xyz, 1);\n" "}"; -GLchar* POINTCLOUD_FRAGMENT_SHADER = +const GLchar* POINTCLOUD_FRAGMENT_SHADER = "#version 330 core\n" "in vec3 b_color;\n" 
"layout(location = 0) out vec4 out_Color;\n" @@ -46,7 +46,7 @@ GLchar* POINTCLOUD_FRAGMENT_SHADER = " out_Color = vec4(b_color, 0.9);\n" "}"; -GLchar* VERTEX_SHADER_TEXTURE = +const GLchar* VERTEX_SHADER_TEXTURE = "#version 330 core\n" "layout(location = 0) in vec3 in_Vertex;\n" "layout(location = 1) in vec2 in_UVs;\n" @@ -57,7 +57,7 @@ GLchar* VERTEX_SHADER_TEXTURE = " UV = in_UVs;\n" "}\n"; -GLchar* FRAGMENT_SHADER_TEXTURE = +const GLchar* FRAGMENT_SHADER_TEXTURE = "#version 330 core\n" "in vec2 UV;\n" "uniform sampler2D texture_sampler;\n" @@ -529,7 +529,7 @@ void MeshObject::draw(bool draw_wire) { } } -Shader::Shader(GLchar* vs, GLchar* fs) { +Shader::Shader(const GLchar* vs, const GLchar* fs) { if (!compile(verterxId_, GL_VERTEX_SHADER, vs)) { std::cout << "ERROR: while compiling vertex shader" << std::endl; } @@ -577,7 +577,7 @@ GLuint Shader::getProgramId() { return programId_; } -bool Shader::compile(GLuint &shaderId, GLenum type, GLchar* src) { +bool Shader::compile(GLuint &shaderId, GLenum type, const GLchar* src) { shaderId = glCreateShader(type); if (shaderId == 0) { std::cout << "ERROR: shader type (" << type << ") does not exist" << std::endl; diff --git a/spatial mapping/spatial mapping/cpp/src/main.cpp b/spatial mapping/spatial mapping/cpp/src/main.cpp index 7abe856b..b0828186 100644 --- a/spatial mapping/spatial mapping/cpp/src/main.cpp +++ b/spatial mapping/spatial mapping/cpp/src/main.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2023, STEREOLABS. +// Copyright (c) 2024, STEREOLABS. // // All rights reserved. 
// @@ -37,7 +37,7 @@ using namespace sl; #define BUILD_MESH 1 -void parse_args(int argc, char **argv,InitParameters& param); +void parse_args(int argc, char **argv,InitParameters& param, sl::Mat &roi); void print(std::string msg_prefix, sl::ERROR_CODE err_code = sl::ERROR_CODE::SUCCESS, std::string msg_suffix = ""); @@ -50,7 +50,9 @@ int main(int argc, char **argv) { init_parameters.coordinate_units = UNIT::METER; init_parameters.coordinate_system = COORDINATE_SYSTEM::RIGHT_HANDED_Y_UP; // OpenGL's coordinate system is right_handed init_parameters.depth_maximum_distance = 8.; - parse_args(argc, argv, init_parameters); + + sl::Mat roi; + parse_args(argc, argv, init_parameters, roi); // Open the camera auto returned_state = zed.open(init_parameters); @@ -61,6 +63,11 @@ int main(int argc, char **argv) { return EXIT_FAILURE; } + if(roi.isInit()){ + auto state = zed.setRegionOfInterest(roi, {sl::MODULE::POSITIONAL_TRACKING, sl::MODULE::SPATIAL_MAPPING}); + std::cout<<"Applied ROI "< 1 && string(argv[1]).find(".svo")!=string::npos) { - // SVO input mode - param.input.setFromSVOFile(argv[1]); - param.svo_real_time_mode=true; - - cout<<"[Sample] Using SVO File input: "< 1 && string(argv[1]).find(".svo")==string::npos) { - string arg = string(argv[1]); + if(argc == 1) return; + for(int id = 1; id < argc; id ++) { + std::string arg(argv[id]); + if(arg.find(".svo")!=string::npos) { + // SVO input mode + param.input.setFromSVOFile(arg.c_str()); + param.svo_real_time_mode=true; + cout<<"[Sample] Using SVO File input: "<